| file_name (large_string, lengths 4–140) | prefix (large_string, lengths 0–39k) | suffix (large_string, lengths 0–36.1k) | middle (large_string, lengths 0–29.4k) | fim_type (large_string, 4 classes) |
---|---|---|---|---|
event.rs | //! Event handling (mouse, keyboard, controller, touch screen, etc.)
//!
//! See [`Event`](enum.Event.html) for more information.
//!
//! # Unstable
//!
//! There are still many unanswered questions about the design of the events API in the turtle
//! crate. This module may change or be completely removed in the future. There will definitely
//! be *some* events API in the future, but it may end up looking different than it does today.
use serde::{Serialize, Deserialize};
use glutin::{
dpi::{LogicalSize, PhysicalPosition},
event::{self as glutin_event, WindowEvent, KeyboardInput},
};
use crate::Point;
/// Possible events returned from [`Drawing::poll_event()`](../struct.Drawing.html#method.poll_event).
///
/// Events are used to make programs more interactive. See that method's documentation for more
/// information about how to use events.
#[non_exhaustive]
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum Event {
/// Sent when a keyboard key is pressed or released
Key(Key, PressedState),
/// Sent when a mouse button is pressed or released
MouseButton(MouseButton, PressedState),
/// Sent when the mouse is moving. Only sent while the mouse is over the window.
/// `x` and `y` give the mouse's current coordinates.
///
/// Coordinates are relative to the center of the window.
MouseMove(Point),
/// Sent when the mouse is scrolled. Only sent when the mouse is over the window.
/// `x` and `y` are in scroll ticks.
MouseScroll { x: f64, y: f64 },
/// Sent when the window gets resized
WindowResized { width: u32, height: u32 },
/// Sent when the window focus changes
///
/// The boolean value is true if the window is in focus.
WindowFocused(bool),
/// Sent when the cursor enters or leaves the window
///
/// The boolean value is true if the cursor entered the window, and false if it left.
WindowCursor(bool),
/// Sent when the window is closed
WindowClosed,
}
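// A minimal polling-loop sketch showing how these variants are typically
// consumed (illustrative only: `Drawing::new()` and the exact `poll_event()`
// signature are assumptions based on the doc link above, not code from this file):
//
//     let mut drawing = Drawing::new();
//     loop {
//         while let Some(event) = drawing.poll_event() {
//             match event {
//                 Event::Key(key, PressedState::Pressed) => println!("key down: {:?}", key),
//                 Event::MouseMove(point) => println!("mouse at {:?}", point),
//                 Event::WindowClosed => return,
//                 _ => {}
//             }
//         }
//     }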
impl Event {
/// Returns `None` if the input event is not a supported variant of `Event`
#[cfg_attr(any(feature = "test", test), allow(dead_code))]
pub(crate) fn from_window_event(
event: WindowEvent,
scale_factor: f64,
to_logical: impl FnOnce(PhysicalPosition<f64>) -> Point,
) -> Option<Self> {
match event {
WindowEvent::Resized(size) => {
let LogicalSize {width, height} = size.to_logical(scale_factor);
Some(Event::WindowResized {width, height})
},
WindowEvent::KeyboardInput {input: KeyboardInput {state, virtual_keycode, ..}, ..} => {
Some(Event::Key(
Key::from_keycode(virtual_keycode?)?,
PressedState::from_state(state),
))
},
WindowEvent::CursorEntered {..} => Some(Event::WindowCursor(true)),
WindowEvent::CursorLeft {..} => Some(Event::WindowCursor(false)),
WindowEvent::CursorMoved {position, ..} => {
Some(Event::MouseMove(to_logical(position)))
},
WindowEvent::MouseInput {state, button, ..} => Some(Event::MouseButton(
MouseButton::from_button(button)?,
PressedState::from_state(state),
)),
WindowEvent::Focused(focused) => Some(Event::WindowFocused(focused)),
WindowEvent::Destroyed => Some(Event::WindowClosed),
WindowEvent::Moved(_) |
WindowEvent::CloseRequested |
WindowEvent::DroppedFile(_) |
WindowEvent::HoveredFile(_) |
WindowEvent::HoveredFileCancelled |
WindowEvent::ReceivedCharacter(_) |
WindowEvent::ModifiersChanged(_) |
WindowEvent::MouseWheel {..} |
WindowEvent::TouchpadPressure {..} |
WindowEvent::AxisMotion {..} |
WindowEvent::Touch(_) |
WindowEvent::ScaleFactorChanged {..} |
WindowEvent::ThemeChanged(_) => None, // Not supported
}
}
}
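// Sketch of how `from_window_event` might be driven from a glutin event loop
// (illustrative; `window`, `to_center_origin`, and `send_to_app` are hypothetical
// names, not part of this file):
//
//     glutin_event::Event::WindowEvent { event, .. } => {
//         let scale_factor = window.scale_factor();
//         let converted = Event::from_window_event(event, scale_factor, |pos| {
//             // map physical pixels to the center-origin logical coordinates
//             to_center_origin(pos, scale_factor)
//         });
//         if let Some(event) = converted {
//             send_to_app(event);
//         }
//     }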
//TODO: Documentation
#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]
pub enum PressedState {
Pressed,
Released,
}
impl PressedState {
#[cfg_attr(any(feature = "test", test), allow(dead_code))]
fn from_state(state: glutin_event::ElementState) -> PressedState {
match state {
glutin_event::ElementState::Pressed => PressedState::Pressed,
glutin_event::ElementState::Released => PressedState::Released,
}
}
}
//TODO: Documentation
#[non_exhaustive]
#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]
pub enum Key {
/// The '1' key above the letters.
Num1,
/// The '2' key above the letters.
Num2,
/// The '3' key above the letters.
Num3,
/// The '4' key above the letters.
Num4,
/// The '5' key above the letters.
Num5,
/// The '6' key above the letters.
Num6,
/// The '7' key above the letters.
Num7,
/// The '8' key above the letters.
Num8,
/// The '9' key above the letters.
Num9,
/// The '0' key above the letters.
Num0,
A,
B,
C,
D,
E,
F,
G,
H,
I,
J,
K,
L,
M,
N,
O,
P,
Q,
R,
S,
T,
U,
V,
W,
X,
Y,
Z,
/// The Escape key, next to F1
Esc,
F1,
F2, | F6,
F7,
F8,
F9,
F10,
F11,
F12,
F13,
F14,
F15,
F16,
F17,
F18,
F19,
F20,
F21,
F22,
F23,
F24,
Home,
Delete,
End,
/// The PageDown (PgDn) key
PageDown,
/// The PageUp (PgUp) key
PageUp,
/// The backspace key, right above Enter/Return
Backspace,
/// The Enter/Return key, below Backspace
Return,
/// The spacebar key
Space,
/// The up arrow key
UpArrow,
/// The left arrow key
LeftArrow,
/// The right arrow key
RightArrow,
/// The down arrow key
DownArrow,
Numpad0,
Numpad1,
Numpad2,
Numpad3,
Numpad4,
Numpad5,
Numpad6,
Numpad7,
Numpad8,
Numpad9,
NumpadComma,
NumpadEnter,
NumpadEquals,
Apostrophe,
At,
Backslash,
Backtick,
Colon,
Comma,
Decimal,
Divide,
Equals,
Minus,
Multiply,
Period,
Plus,
/// The left bracket `[` key
LeftBracket,
/// The right bracket `]` key
RightBracket,
Semicolon,
Slash,
Tab,
}
impl Key {
#[cfg_attr(any(feature = "test", test), allow(dead_code))]
fn from_keycode(key: glutin_event::VirtualKeyCode) -> Option<Self> {
use glutin_event::VirtualKeyCode::*;
#[deny(unreachable_patterns, unused_variables)]
Some(match key {
Key1 => Key::Num1,
Key2 => Key::Num2,
Key3 => Key::Num3,
Key4 => Key::Num4,
Key5 => Key::Num5,
Key6 => Key::Num6,
Key7 => Key::Num7,
Key8 => Key::Num8,
Key9 => Key::Num9,
Key0 => Key::Num0,
A => Key::A,
B => Key::B,
C => Key::C,
D => Key::D,
E => Key::E,
F => Key::F,
G => Key::G,
H => Key::H,
I => Key::I,
J => Key::J,
K => Key::K,
L => Key::L,
M => Key::M,
N => Key::N,
O => Key::O,
P => Key::P,
Q => Key::Q,
R => Key::R,
S => Key::S,
T => Key::T,
U => Key::U,
V => Key::V,
W => Key::W,
X => Key::X,
Y => Key::Y,
Z => Key::Z,
Escape => Key::Esc,
F1 => Key::F1,
F2 => Key::F2,
F3 => Key::F3,
F4 => Key::F4,
F5 => Key::F5,
F6 => Key::F6,
F7 => Key::F7,
F8 => Key::F8,
F9 => Key::F9,
F10 => Key::F10,
F11 => Key::F11,
F12 => Key::F12,
F13 => Key::F13,
F14 => Key::F14,
F15 => Key::F15,
F16 => Key::F16,
F17 => Key::F17,
F18 => Key::F18,
F19 => Key::F19,
F20 => Key::F20,
F21 => Key::F21,
F22 => Key::F22,
F23 => Key::F23,
F24 => Key::F24,
Home => Key::Home,
Delete => Key::Delete,
End => Key::End,
PageDown => Key::PageDown,
PageUp => Key::PageUp,
Back => Key::Backspace,
Return => Key::Return,
Space => Key::Space,
Left => Key::LeftArrow,
Up => Key::UpArrow,
Right => Key::RightArrow,
Down => Key::DownArrow,
Numpad0 => Key::Numpad0,
Numpad1 => Key::Numpad1,
Numpad2 => Key::Numpad2,
Numpad3 => Key::Numpad3,
Numpad4 => Key::Numpad4,
Numpad5 => Key::Numpad5,
Numpad6 => Key::Numpad6,
Numpad7 => Key::Numpad7,
Numpad8 => Key::Numpad8,
Numpad9 => Key::Numpad9,
Apostrophe => Key::Apostrophe,
At => Key::At,
Backslash => Key::Backslash,
Colon => Key::Colon,
Comma => Key::Comma,
Equals => Key::Equals,
Grave => Key::Backtick,
LBracket => Key::LeftBracket,
NumpadAdd | Plus => Key::Plus,
NumpadComma => Key::NumpadComma,
NumpadDecimal => Key::Decimal,
NumpadDivide => Key::Divide,
NumpadEnter => Key::NumpadEnter,
NumpadEquals => Key::NumpadEquals,
NumpadMultiply | Asterisk => Key::Multiply,
NumpadSubtract | Minus => Key::Minus,
Period => Key::Period,
RBracket => Key::RightBracket,
Semicolon => Key::Semicolon,
Slash => Key::Slash,
Tab => Key::Tab,
// Unsupported keys (could be changed in the future)
Snapshot |
Scroll |
Pause |
Insert |
Compose |
Caret |
Numlock |
AbntC1 |
AbntC2 |
Apps |
Ax |
Calculator |
Capital |
Convert |
Kana |
Kanji |
LAlt |
LControl |
LShift |
LWin |
Mail |
MediaSelect |
MediaStop |
Mute |
MyComputer |
NavigateForward |
NavigateBackward |
NextTrack |
NoConvert |
OEM102 |
PlayPause |
Power |
PrevTrack |
RAlt |
RControl |
RShift |
RWin |
Sleep |
Stop |
Sysrq |
Underline |
Unlabeled |
VolumeDown |
VolumeUp |
Wake |
WebBack |
WebFavorites |
WebForward |
WebHome |
WebRefresh |
WebSearch |
WebStop |
Yen |
Copy |
Paste |
Cut => return None,
})
}
}
//TODO: Documentation
#[non_exhaustive]
#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]
pub enum MouseButton {
/// The left mouse button
LeftButton,
/// The middle mouse button
MiddleButton,
/// The right mouse button
RightButton,
}
impl MouseButton {
#[cfg_attr(any(feature = "test", test), allow(dead_code))]
fn from_button(button: glutin_event::MouseButton) -> Option<Self> {
use glutin_event::MouseButton::*;
#[deny(unreachable_patterns, unused_variables)]
match button {
Left => Some(MouseButton::LeftButton),
Middle => Some(MouseButton::MiddleButton),
Right => Some(MouseButton::RightButton),
Other(_) => None,
}
}
} | F3,
F4,
F5, | random_line_split |
event.rs | //! Event handling (mouse, keyboard, controller, touch screen, etc.)
//!
//! See [`Event`](enum.Event.html) for more information.
//!
//! # Unstable
//!
//! There are still many unanswered questions about the design of the events API in the turtle
//! crate. This module may change or be completely removed in the future. There will definitely
//! be *some* events API in the future, but it may end up looking different than it does today.
use serde::{Serialize, Deserialize};
use glutin::{
dpi::{LogicalSize, PhysicalPosition},
event::{self as glutin_event, WindowEvent, KeyboardInput},
};
use crate::Point;
/// Possible events returned from [`Drawing::poll_event()`](../struct.Drawing.html#method.poll_event).
///
/// Events are used to make programs more interactive. See that method's documentation for more
/// information about how to use events.
#[non_exhaustive]
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum Event {
/// Sent when a keyboard key is pressed or released
Key(Key, PressedState),
/// Sent when a mouse button is pressed or released
MouseButton(MouseButton, PressedState),
/// Sent when the mouse is moving. Only sent while the mouse is over the window.
/// `x` and `y` give the mouse's current coordinates.
///
/// Coordinates are relative to the center of the window.
MouseMove(Point),
/// Sent when the mouse is scrolled. Only sent when the mouse is over the window.
/// `x` and `y` are in scroll ticks.
MouseScroll { x: f64, y: f64 },
/// Sent when the window gets resized
WindowResized { width: u32, height: u32 },
/// Sent when the window focus changes
///
/// The boolean value is true if the window is in focus.
WindowFocused(bool),
/// Sent when the cursor enters or leaves the window
///
/// The boolean value is true if the cursor entered the window, and false if it left.
WindowCursor(bool),
/// Sent when the window is closed
WindowClosed,
}
impl Event {
/// Returns `None` if the input event is not a supported variant of `Event`
#[cfg_attr(any(feature = "test", test), allow(dead_code))]
pub(crate) fn from_window_event(
event: WindowEvent,
scale_factor: f64,
to_logical: impl FnOnce(PhysicalPosition<f64>) -> Point,
) -> Option<Self> {
match event {
WindowEvent::Resized(size) => {
let LogicalSize {width, height} = size.to_logical(scale_factor);
Some(Event::WindowResized {width, height})
},
WindowEvent::KeyboardInput {input: KeyboardInput {state, virtual_keycode, ..}, ..} => {
Some(Event::Key(
Key::from_keycode(virtual_keycode?)?,
PressedState::from_state(state),
))
},
WindowEvent::CursorEntered {..} => Some(Event::WindowCursor(true)),
WindowEvent::CursorLeft {..} => Some(Event::WindowCursor(false)),
WindowEvent::CursorMoved {position, ..} => {
Some(Event::MouseMove(to_logical(position)))
},
WindowEvent::MouseInput {state, button, ..} => Some(Event::MouseButton(
MouseButton::from_button(button)?,
PressedState::from_state(state),
)),
WindowEvent::Focused(focused) => Some(Event::WindowFocused(focused)),
WindowEvent::Destroyed => Some(Event::WindowClosed),
WindowEvent::Moved(_) |
WindowEvent::CloseRequested |
WindowEvent::DroppedFile(_) |
WindowEvent::HoveredFile(_) |
WindowEvent::HoveredFileCancelled |
WindowEvent::ReceivedCharacter(_) |
WindowEvent::ModifiersChanged(_) |
WindowEvent::MouseWheel {..} |
WindowEvent::TouchpadPressure {..} |
WindowEvent::AxisMotion {..} |
WindowEvent::Touch(_) |
WindowEvent::ScaleFactorChanged {..} |
WindowEvent::ThemeChanged(_) => None, // Not supported
}
}
}
//TODO: Documentation
#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]
pub enum PressedState {
Pressed,
Released,
}
impl PressedState {
#[cfg_attr(any(feature = "test", test), allow(dead_code))]
fn | (state: glutin_event::ElementState) -> PressedState {
match state {
glutin_event::ElementState::Pressed => PressedState::Pressed,
glutin_event::ElementState::Released => PressedState::Released,
}
}
}
//TODO: Documentation
#[non_exhaustive]
#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]
pub enum Key {
/// The '1' key above the letters.
Num1,
/// The '2' key above the letters.
Num2,
/// The '3' key above the letters.
Num3,
/// The '4' key above the letters.
Num4,
/// The '5' key above the letters.
Num5,
/// The '6' key above the letters.
Num6,
/// The '7' key above the letters.
Num7,
/// The '8' key above the letters.
Num8,
/// The '9' key above the letters.
Num9,
/// The '0' key above the letters.
Num0,
A,
B,
C,
D,
E,
F,
G,
H,
I,
J,
K,
L,
M,
N,
O,
P,
Q,
R,
S,
T,
U,
V,
W,
X,
Y,
Z,
/// The Escape key, next to F1
Esc,
F1,
F2,
F3,
F4,
F5,
F6,
F7,
F8,
F9,
F10,
F11,
F12,
F13,
F14,
F15,
F16,
F17,
F18,
F19,
F20,
F21,
F22,
F23,
F24,
Home,
Delete,
End,
/// The PageDown (PgDn) key
PageDown,
/// The PageUp (PgUp) key
PageUp,
/// The backspace key, right above Enter/Return
Backspace,
/// The Enter/Return key, below Backspace
Return,
/// The spacebar key
Space,
/// The up arrow key
UpArrow,
/// The left arrow key
LeftArrow,
/// The right arrow key
RightArrow,
/// The down arrow key
DownArrow,
Numpad0,
Numpad1,
Numpad2,
Numpad3,
Numpad4,
Numpad5,
Numpad6,
Numpad7,
Numpad8,
Numpad9,
NumpadComma,
NumpadEnter,
NumpadEquals,
Apostrophe,
At,
Backslash,
Backtick,
Colon,
Comma,
Decimal,
Divide,
Equals,
Minus,
Multiply,
Period,
Plus,
/// The left bracket `[` key
LeftBracket,
/// The right bracket `]` key
RightBracket,
Semicolon,
Slash,
Tab,
}
impl Key {
#[cfg_attr(any(feature = "test", test), allow(dead_code))]
fn from_keycode(key: glutin_event::VirtualKeyCode) -> Option<Self> {
use glutin_event::VirtualKeyCode::*;
#[deny(unreachable_patterns, unused_variables)]
Some(match key {
Key1 => Key::Num1,
Key2 => Key::Num2,
Key3 => Key::Num3,
Key4 => Key::Num4,
Key5 => Key::Num5,
Key6 => Key::Num6,
Key7 => Key::Num7,
Key8 => Key::Num8,
Key9 => Key::Num9,
Key0 => Key::Num0,
A => Key::A,
B => Key::B,
C => Key::C,
D => Key::D,
E => Key::E,
F => Key::F,
G => Key::G,
H => Key::H,
I => Key::I,
J => Key::J,
K => Key::K,
L => Key::L,
M => Key::M,
N => Key::N,
O => Key::O,
P => Key::P,
Q => Key::Q,
R => Key::R,
S => Key::S,
T => Key::T,
U => Key::U,
V => Key::V,
W => Key::W,
X => Key::X,
Y => Key::Y,
Z => Key::Z,
Escape => Key::Esc,
F1 => Key::F1,
F2 => Key::F2,
F3 => Key::F3,
F4 => Key::F4,
F5 => Key::F5,
F6 => Key::F6,
F7 => Key::F7,
F8 => Key::F8,
F9 => Key::F9,
F10 => Key::F10,
F11 => Key::F11,
F12 => Key::F12,
F13 => Key::F13,
F14 => Key::F14,
F15 => Key::F15,
F16 => Key::F16,
F17 => Key::F17,
F18 => Key::F18,
F19 => Key::F19,
F20 => Key::F20,
F21 => Key::F21,
F22 => Key::F22,
F23 => Key::F23,
F24 => Key::F24,
Home => Key::Home,
Delete => Key::Delete,
End => Key::End,
PageDown => Key::PageDown,
PageUp => Key::PageUp,
Back => Key::Backspace,
Return => Key::Return,
Space => Key::Space,
Left => Key::LeftArrow,
Up => Key::UpArrow,
Right => Key::RightArrow,
Down => Key::DownArrow,
Numpad0 => Key::Numpad0,
Numpad1 => Key::Numpad1,
Numpad2 => Key::Numpad2,
Numpad3 => Key::Numpad3,
Numpad4 => Key::Numpad4,
Numpad5 => Key::Numpad5,
Numpad6 => Key::Numpad6,
Numpad7 => Key::Numpad7,
Numpad8 => Key::Numpad8,
Numpad9 => Key::Numpad9,
Apostrophe => Key::Apostrophe,
At => Key::At,
Backslash => Key::Backslash,
Colon => Key::Colon,
Comma => Key::Comma,
Equals => Key::Equals,
Grave => Key::Backtick,
LBracket => Key::LeftBracket,
NumpadAdd | Plus => Key::Plus,
NumpadComma => Key::NumpadComma,
NumpadDecimal => Key::Decimal,
NumpadDivide => Key::Divide,
NumpadEnter => Key::NumpadEnter,
NumpadEquals => Key::NumpadEquals,
NumpadMultiply | Asterisk => Key::Multiply,
NumpadSubtract | Minus => Key::Minus,
Period => Key::Period,
RBracket => Key::RightBracket,
Semicolon => Key::Semicolon,
Slash => Key::Slash,
Tab => Key::Tab,
// Unsupported keys (could be changed in the future)
Snapshot |
Scroll |
Pause |
Insert |
Compose |
Caret |
Numlock |
AbntC1 |
AbntC2 |
Apps |
Ax |
Calculator |
Capital |
Convert |
Kana |
Kanji |
LAlt |
LControl |
LShift |
LWin |
Mail |
MediaSelect |
MediaStop |
Mute |
MyComputer |
NavigateForward |
NavigateBackward |
NextTrack |
NoConvert |
OEM102 |
PlayPause |
Power |
PrevTrack |
RAlt |
RControl |
RShift |
RWin |
Sleep |
Stop |
Sysrq |
Underline |
Unlabeled |
VolumeDown |
VolumeUp |
Wake |
WebBack |
WebFavorites |
WebForward |
WebHome |
WebRefresh |
WebSearch |
WebStop |
Yen |
Copy |
Paste |
Cut => return None,
})
}
}
//TODO: Documentation
#[non_exhaustive]
#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]
pub enum MouseButton {
/// The left mouse button
LeftButton,
/// The middle mouse button
MiddleButton,
/// The right mouse button
RightButton,
}
impl MouseButton {
#[cfg_attr(any(feature = "test", test), allow(dead_code))]
fn from_button(button: glutin_event::MouseButton) -> Option<Self> {
use glutin_event::MouseButton::*;
#[deny(unreachable_patterns, unused_variables)]
match button {
Left => Some(MouseButton::LeftButton),
Middle => Some(MouseButton::MiddleButton),
Right => Some(MouseButton::RightButton),
Other(_) => None,
}
}
}
| from_state | identifier_name |
event.rs | //! Event handling (mouse, keyboard, controller, touch screen, etc.)
//!
//! See [`Event`](enum.Event.html) for more information.
//!
//! # Unstable
//!
//! There are still many unanswered questions about the design of the events API in the turtle
//! crate. This module may change or be completely removed in the future. There will definitely
//! be *some* events API in the future, but it may end up looking different than it does today.
use serde::{Serialize, Deserialize};
use glutin::{
dpi::{LogicalSize, PhysicalPosition},
event::{self as glutin_event, WindowEvent, KeyboardInput},
};
use crate::Point;
/// Possible events returned from [`Drawing::poll_event()`](../struct.Drawing.html#method.poll_event).
///
/// Events are used to make programs more interactive. See that method's documentation for more
/// information about how to use events.
#[non_exhaustive]
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum Event {
/// Sent when a keyboard key is pressed or released
Key(Key, PressedState),
/// Sent when a mouse button is pressed or released
MouseButton(MouseButton, PressedState),
/// Sent when the mouse is moving. Only sent while the mouse is over the window.
/// `x` and `y` give the mouse's current coordinates.
///
/// Coordinates are relative to the center of the window.
MouseMove(Point),
/// Sent when the mouse is scrolled. Only sent when the mouse is over the window.
/// `x` and `y` are in scroll ticks.
MouseScroll { x: f64, y: f64 },
/// Sent when the window gets resized
WindowResized { width: u32, height: u32 },
/// Sent when the window focus changes
///
/// The boolean value is true if the window is in focus.
WindowFocused(bool),
/// Sent when the cursor enters or leaves the window
///
/// The boolean value is true if the cursor entered the window, and false if it left.
WindowCursor(bool),
/// Sent when the window is closed
WindowClosed,
}
impl Event {
/// Returns `None` if the input event is not a supported variant of `Event`
#[cfg_attr(any(feature = "test", test), allow(dead_code))]
pub(crate) fn from_window_event(
event: WindowEvent,
scale_factor: f64,
to_logical: impl FnOnce(PhysicalPosition<f64>) -> Point,
) -> Option<Self> {
match event {
WindowEvent::Resized(size) => {
let LogicalSize {width, height} = size.to_logical(scale_factor);
Some(Event::WindowResized {width, height})
},
WindowEvent::KeyboardInput {input: KeyboardInput {state, virtual_keycode, ..}, ..} => {
Some(Event::Key(
Key::from_keycode(virtual_keycode?)?,
PressedState::from_state(state),
))
},
WindowEvent::CursorEntered {..} => Some(Event::WindowCursor(true)),
WindowEvent::CursorLeft {..} => Some(Event::WindowCursor(false)),
WindowEvent::CursorMoved {position, ..} => {
Some(Event::MouseMove(to_logical(position)))
},
WindowEvent::MouseInput {state, button, ..} => Some(Event::MouseButton(
MouseButton::from_button(button)?,
PressedState::from_state(state),
)),
WindowEvent::Focused(focused) => Some(Event::WindowFocused(focused)),
WindowEvent::Destroyed => Some(Event::WindowClosed),
WindowEvent::Moved(_) |
WindowEvent::CloseRequested |
WindowEvent::DroppedFile(_) |
WindowEvent::HoveredFile(_) |
WindowEvent::HoveredFileCancelled |
WindowEvent::ReceivedCharacter(_) |
WindowEvent::ModifiersChanged(_) |
WindowEvent::MouseWheel {..} |
WindowEvent::TouchpadPressure {..} |
WindowEvent::AxisMotion {..} |
WindowEvent::Touch(_) |
WindowEvent::ScaleFactorChanged {..} |
WindowEvent::ThemeChanged(_) => None, // Not supported
}
}
}
//TODO: Documentation
#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]
pub enum PressedState {
Pressed,
Released,
}
impl PressedState {
#[cfg_attr(any(feature = "test", test), allow(dead_code))]
fn from_state(state: glutin_event::ElementState) -> PressedState |
}
//TODO: Documentation
#[non_exhaustive]
#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]
pub enum Key {
/// The '1' key above the letters.
Num1,
/// The '2' key above the letters.
Num2,
/// The '3' key above the letters.
Num3,
/// The '4' key above the letters.
Num4,
/// The '5' key above the letters.
Num5,
/// The '6' key above the letters.
Num6,
/// The '7' key above the letters.
Num7,
/// The '8' key above the letters.
Num8,
/// The '9' key above the letters.
Num9,
/// The '0' key above the letters.
Num0,
A,
B,
C,
D,
E,
F,
G,
H,
I,
J,
K,
L,
M,
N,
O,
P,
Q,
R,
S,
T,
U,
V,
W,
X,
Y,
Z,
/// The Escape key, next to F1
Esc,
F1,
F2,
F3,
F4,
F5,
F6,
F7,
F8,
F9,
F10,
F11,
F12,
F13,
F14,
F15,
F16,
F17,
F18,
F19,
F20,
F21,
F22,
F23,
F24,
Home,
Delete,
End,
/// The PageDown (PgDn) key
PageDown,
/// The PageUp (PgUp) key
PageUp,
/// The backspace key, right above Enter/Return
Backspace,
/// The Enter/Return key, below Backspace
Return,
/// The spacebar key
Space,
/// The up arrow key
UpArrow,
/// The left arrow key
LeftArrow,
/// The right arrow key
RightArrow,
/// The down arrow key
DownArrow,
Numpad0,
Numpad1,
Numpad2,
Numpad3,
Numpad4,
Numpad5,
Numpad6,
Numpad7,
Numpad8,
Numpad9,
NumpadComma,
NumpadEnter,
NumpadEquals,
Apostrophe,
At,
Backslash,
Backtick,
Colon,
Comma,
Decimal,
Divide,
Equals,
Minus,
Multiply,
Period,
Plus,
/// The left bracket `[` key
LeftBracket,
/// The right bracket `]` key
RightBracket,
Semicolon,
Slash,
Tab,
}
impl Key {
#[cfg_attr(any(feature = "test", test), allow(dead_code))]
fn from_keycode(key: glutin_event::VirtualKeyCode) -> Option<Self> {
use glutin_event::VirtualKeyCode::*;
#[deny(unreachable_patterns, unused_variables)]
Some(match key {
Key1 => Key::Num1,
Key2 => Key::Num2,
Key3 => Key::Num3,
Key4 => Key::Num4,
Key5 => Key::Num5,
Key6 => Key::Num6,
Key7 => Key::Num7,
Key8 => Key::Num8,
Key9 => Key::Num9,
Key0 => Key::Num0,
A => Key::A,
B => Key::B,
C => Key::C,
D => Key::D,
E => Key::E,
F => Key::F,
G => Key::G,
H => Key::H,
I => Key::I,
J => Key::J,
K => Key::K,
L => Key::L,
M => Key::M,
N => Key::N,
O => Key::O,
P => Key::P,
Q => Key::Q,
R => Key::R,
S => Key::S,
T => Key::T,
U => Key::U,
V => Key::V,
W => Key::W,
X => Key::X,
Y => Key::Y,
Z => Key::Z,
Escape => Key::Esc,
F1 => Key::F1,
F2 => Key::F2,
F3 => Key::F3,
F4 => Key::F4,
F5 => Key::F5,
F6 => Key::F6,
F7 => Key::F7,
F8 => Key::F8,
F9 => Key::F9,
F10 => Key::F10,
F11 => Key::F11,
F12 => Key::F12,
F13 => Key::F13,
F14 => Key::F14,
F15 => Key::F15,
F16 => Key::F16,
F17 => Key::F17,
F18 => Key::F18,
F19 => Key::F19,
F20 => Key::F20,
F21 => Key::F21,
F22 => Key::F22,
F23 => Key::F23,
F24 => Key::F24,
Home => Key::Home,
Delete => Key::Delete,
End => Key::End,
PageDown => Key::PageDown,
PageUp => Key::PageUp,
Back => Key::Backspace,
Return => Key::Return,
Space => Key::Space,
Left => Key::LeftArrow,
Up => Key::UpArrow,
Right => Key::RightArrow,
Down => Key::DownArrow,
Numpad0 => Key::Numpad0,
Numpad1 => Key::Numpad1,
Numpad2 => Key::Numpad2,
Numpad3 => Key::Numpad3,
Numpad4 => Key::Numpad4,
Numpad5 => Key::Numpad5,
Numpad6 => Key::Numpad6,
Numpad7 => Key::Numpad7,
Numpad8 => Key::Numpad8,
Numpad9 => Key::Numpad9,
Apostrophe => Key::Apostrophe,
At => Key::At,
Backslash => Key::Backslash,
Colon => Key::Colon,
Comma => Key::Comma,
Equals => Key::Equals,
Grave => Key::Backtick,
LBracket => Key::LeftBracket,
NumpadAdd | Plus => Key::Plus,
NumpadComma => Key::NumpadComma,
NumpadDecimal => Key::Decimal,
NumpadDivide => Key::Divide,
NumpadEnter => Key::NumpadEnter,
NumpadEquals => Key::NumpadEquals,
NumpadMultiply | Asterisk => Key::Multiply,
NumpadSubtract | Minus => Key::Minus,
Period => Key::Period,
RBracket => Key::RightBracket,
Semicolon => Key::Semicolon,
Slash => Key::Slash,
Tab => Key::Tab,
// Unsupported keys (could be changed in the future)
Snapshot |
Scroll |
Pause |
Insert |
Compose |
Caret |
Numlock |
AbntC1 |
AbntC2 |
Apps |
Ax |
Calculator |
Capital |
Convert |
Kana |
Kanji |
LAlt |
LControl |
LShift |
LWin |
Mail |
MediaSelect |
MediaStop |
Mute |
MyComputer |
NavigateForward |
NavigateBackward |
NextTrack |
NoConvert |
OEM102 |
PlayPause |
Power |
PrevTrack |
RAlt |
RControl |
RShift |
RWin |
Sleep |
Stop |
Sysrq |
Underline |
Unlabeled |
VolumeDown |
VolumeUp |
Wake |
WebBack |
WebFavorites |
WebForward |
WebHome |
WebRefresh |
WebSearch |
WebStop |
Yen |
Copy |
Paste |
Cut => return None,
})
}
}
//TODO: Documentation
#[non_exhaustive]
#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]
pub enum MouseButton {
/// The left mouse button
LeftButton,
/// The middle mouse button
MiddleButton,
/// The right mouse button
RightButton,
}
impl MouseButton {
#[cfg_attr(any(feature = "test", test), allow(dead_code))]
fn from_button(button: glutin_event::MouseButton) -> Option<Self> {
use glutin_event::MouseButton::*;
#[deny(unreachable_patterns, unused_variables)]
match button {
Left => Some(MouseButton::LeftButton),
Middle => Some(MouseButton::MiddleButton),
Right => Some(MouseButton::RightButton),
Other(_) => None,
}
}
}
| {
match state {
glutin_event::ElementState::Pressed => PressedState::Pressed,
glutin_event::ElementState::Released => PressedState::Released,
}
} | identifier_body |
index.py | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
# Copyright (c) 2017 Mozilla Corporation
import os
from bottle import route, run, response, request, default_app
from bottle import _stdout as bottlelog
from kombu import Connection, Queue, Exchange
import json
from configlib import getConfig, OptionParser
@route('/status')
@route('/status/')
def status():
'''endpoint for a status/health check'''
if request.body:
request.body.read()
request.body.close()
response.status = 200
response.content_type = "application/json"
response.body = json.dumps(dict(status='ok', service='loginput'))
return response
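# Quick check (illustrative; host and port come from the run() call at the bottom):
#   curl http://127.0.0.1:8080/status
#   => {"status": "ok", "service": "loginput"}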
@route('/test')
@route('/test/')
def testindex():
# ip = request.environ.get('REMOTE_ADDR')
# response.headers['X-IP'] = '{0}'.format(ip)
response.status=200
# act like elastic search bulk index
@route('/_bulk',method='POST')
@route('/_bulk/',method='POST')
def bulkindex():
if request.body:
|
return
@route('/_status')
@route('/_status/')
@route('/nxlog/', method=['POST','PUT'])
@route('/nxlog', method=['POST','PUT'])
@route('/events/',method=['POST','PUT'])
@route('/events', method=['POST','PUT'])
def eventsindex():
if request.body:
anevent=request.body.read()
# bottlelog('request:{0}\n'.format(anevent))
request.body.close()
# valid json?
try:
eventDict=json.loads(anevent)
except ValueError:
response.status=500
return
# let the message queue worker who gets this know where it was posted
eventDict['endpoint']='events'
# post to event message queue
ensurePublish=mqConn.ensure(mqproducer,mqproducer.publish,max_retries=10)
ensurePublish(eventDict,exchange=eventTaskExchange,routing_key=options.taskexchange)
return
@route('/cef', method=['POST','PUT'])
@route('/cef/',method=['POST','PUT'])
# debug(True)
def cefindex():
if request.body:
anevent=request.body.read()
request.body.close()
# valid json?
try:
cefDict=json.loads(anevent)
except ValueError:
response.status=500
return
# let the message queue worker who gets this know where it was posted
cefDict['endpoint']='cef'
# post to eventtask exchange
ensurePublish=mqConn.ensure(mqproducer,mqproducer.publish,max_retries=10)
ensurePublish(cefDict,exchange=eventTaskExchange,routing_key=options.taskexchange)
return
@route('/custom/<application>',method=['POST','PUT'])
def customindex(application):
'''
an endpoint designed for custom applications that want to post data
to Elasticsearch through the MozDef event interface.
Post to /custom/vulnerabilities, for example, to post a vulnerability
in a custom format.
Posts must be in JSON and are best formatted using a plugin
to the esworker.py process.
'''
if request.body:
anevent=request.body.read()
request.body.close()
# valid json?
try:
customDict=json.loads(anevent)
except ValueError:
response.status=500
return
# let the message queue worker who gets this know where it was posted
customDict['endpoint']= application
customDict['customendpoint'] = True
# post to eventtask exchange
ensurePublish=mqConn.ensure(mqproducer,mqproducer.publish,max_retries=10)
ensurePublish(customDict,exchange=eventTaskExchange,routing_key=options.taskexchange)
return
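# Example client call (illustrative: the application name and payload below are
# made up, not taken from this file):
#
#   import requests
#   requests.post('http://127.0.0.1:8080/custom/vulnerabilities',
#                 json={'asset': 'host1.example.com', 'severity': 'high'})
#
# The consuming worker then sees endpoint='vulnerabilities' and
# customendpoint=True, and can route the event through an esworker.py plugin.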
def initConfig():
options.mqserver=getConfig('mqserver','localhost',options.configfile)
options.taskexchange=getConfig('taskexchange','eventtask',options.configfile)
options.mquser=getConfig('mquser','guest',options.configfile)
options.mqpassword=getConfig('mqpassword','guest',options.configfile)
options.mqport=getConfig('mqport',5672,options.configfile)
options.listen_host=getConfig('listen_host', '127.0.0.1', options.configfile)
# get config info:
parser=OptionParser()
parser.add_option("-c", dest='configfile', default=os.path.join(os.path.dirname(__file__), __file__).replace('.py', '.conf'), help="configuration file to use")
(options,args) = parser.parse_args()
initConfig()
# connect and declare the message queue/kombu objects.
connString='amqp://{0}:{1}@{2}:{3}//'.format(options.mquser,options.mqpassword,options.mqserver,options.mqport)
mqConn=Connection(connString)
eventTaskExchange=Exchange(name=options.taskexchange,type='direct',durable=True)
eventTaskExchange(mqConn).declare()
eventTaskQueue=Queue(options.taskexchange,exchange=eventTaskExchange)
eventTaskQueue(mqConn).declare()
mqproducer = mqConn.Producer(serializer='json')
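# Sketch of the consuming side of this queue (hypothetical: the real consumer
# is esworker.py, which is not part of this file):
#
#   from kombu.mixins import ConsumerMixin
#
#   class EventWorker(ConsumerMixin):
#       def __init__(self, connection):
#           self.connection = connection
#       def get_consumers(self, Consumer, channel):
#           return [Consumer(queues=[eventTaskQueue], callbacks=[self.on_event])]
#       def on_event(self, body, message):
#           print(body.get('endpoint'))  # 'events', 'cef', or a custom endpoint
#           message.ack()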
if __name__ == "__main__":
run(host=options.listen_host, port=8080)
else:
application = default_app()
| bulkpost=request.body.read()
# bottlelog('request:{0}\n'.format(bulkpost))
request.body.close()
try: # Handles json array bulk format [{},{},...]
messages = json.loads(bulkpost)
for event in messages:
# don't post the items telling us where to post things..
if 'index' not in event:
ensurePublish=mqConn.ensure(mqproducer,mqproducer.publish,max_retries=10)
ensurePublish(event,exchange=eventTaskExchange,routing_key=options.taskexchange)
return
except ValueError as e:
bottlelog('Decoded raw input failed with {0}'.format(e))
pass
if len(bulkpost)>10: # Handles single element format {}
# TODO Check for other bulk formats.
# iterate on messages and post to event message queue
eventlist=[]
for i in bulkpost.splitlines():
eventlist.append(i)
for i in eventlist:
try:
# valid json?
try:
eventDict=json.loads(i)
except ValueError:
response.status=500
return
# don't post the items telling us where to post things..
if 'index' not in json.loads(i):
ensurePublish=mqConn.ensure(mqproducer,mqproducer.publish,max_retries=10)
ensurePublish(eventDict,exchange=eventTaskExchange,routing_key=options.taskexchange)
except ValueError:
bottlelog('value error {0}'.format(i)) | conditional_block |
index.py | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
# Copyright (c) 2017 Mozilla Corporation
import os
from bottle import route, run, response, request, default_app
from bottle import _stdout as bottlelog
from kombu import Connection, Queue, Exchange
import json
from configlib import getConfig, OptionParser
@route('/status')
@route('/status/')
def status():
'''endpoint for a status/health check'''
if request.body:
request.body.read()
request.body.close()
response.status = 200
response.content_type = "application/json"
response.body = json.dumps(dict(status='ok', service='loginput'))
return response
@route('/test')
@route('/test/')
def testindex():
# ip = request.environ.get('REMOTE_ADDR')
# response.headers['X-IP'] = '{0}'.format(ip)
response.status=200
# act like elastic search bulk index
@route('/_bulk',method='POST')
@route('/_bulk/',method='POST')
def bulkindex():
if request.body:
bulkpost=request.body.read()
# bottlelog('request:{0}\n'.format(bulkpost))
request.body.close()
try: # Handles json array bulk format [{},{},...]
messages = json.loads(bulkpost)
for event in messages:
# don't post the items telling us where to post things..
if 'index' not in event:
ensurePublish=mqConn.ensure(mqproducer,mqproducer.publish,max_retries=10)
ensurePublish(event,exchange=eventTaskExchange,routing_key=options.taskexchange)
return
except ValueError as e:
bottlelog('Decoded raw input failed with {0}'.format(e))
pass
if len(bulkpost)>10: # Handles single element format {}
# TODO Check for other bulk formats.
# iterate on messages and post to event message queue
eventlist=[]
for i in bulkpost.splitlines():
eventlist.append(i)
for i in eventlist:
try:
# valid json?
try:
eventDict=json.loads(i)
except ValueError:
response.status=500
return
# don't post the items telling us where to post things..
if 'index' not in json.loads(i):
ensurePublish=mqConn.ensure(mqproducer,mqproducer.publish,max_retries=10)
ensurePublish(eventDict,exchange=eventTaskExchange,routing_key=options.taskexchange)
except ValueError:
bottlelog('value error {0}'.format(i))
return
@route('/_status')
@route('/_status/')
@route('/nxlog/', method=['POST','PUT'])
@route('/nxlog', method=['POST','PUT'])
@route('/events/',method=['POST','PUT'])
@route('/events', method=['POST','PUT'])
def eventsindex():
if request.body:
anevent=request.body.read()
# bottlelog('request:{0}\n'.format(anevent))
request.body.close()
# valid json?
try:
eventDict=json.loads(anevent)
except ValueError:
response.status=500
return
# let the message queue worker who gets this know where it was posted
eventDict['endpoint']='events'
# post to event message queue
ensurePublish=mqConn.ensure(mqproducer,mqproducer.publish,max_retries=10)
ensurePublish(eventDict,exchange=eventTaskExchange,routing_key=options.taskexchange)
return
@route('/cef', method=['POST','PUT'])
@route('/cef/',method=['POST','PUT'])
# debug(True)
def cefindex():
if request.body:
anevent=request.body.read()
request.body.close()
# valid json?
try:
cefDict=json.loads(anevent)
except ValueError:
response.status=500
return
# let the message queue worker who gets this know where it was posted
cefDict['endpoint']='cef'
# post to eventtask exchange
ensurePublish=mqConn.ensure(mqproducer,mqproducer.publish,max_retries=10)
ensurePublish(cefDict,exchange=eventTaskExchange,routing_key=options.taskexchange)
return
@route('/custom/<application>',method=['POST','PUT'])
def customindex(application):
|
def initConfig():
options.mqserver=getConfig('mqserver','localhost',options.configfile)
options.taskexchange=getConfig('taskexchange','eventtask',options.configfile)
options.mquser=getConfig('mquser','guest',options.configfile)
options.mqpassword=getConfig('mqpassword','guest',options.configfile)
options.mqport=getConfig('mqport',5672,options.configfile)
options.listen_host=getConfig('listen_host', '127.0.0.1', options.configfile)
# get config info:
parser=OptionParser()
parser.add_option("-c", dest='configfile', default=os.path.join(os.path.dirname(__file__), __file__).replace('.py', '.conf'), help="configuration file to use")
(options,args) = parser.parse_args()
initConfig()
# connect and declare the message queue/kombu objects.
connString='amqp://{0}:{1}@{2}:{3}//'.format(options.mquser,options.mqpassword,options.mqserver,options.mqport)
mqConn=Connection(connString)
eventTaskExchange=Exchange(name=options.taskexchange,type='direct',durable=True)
eventTaskExchange(mqConn).declare()
eventTaskQueue=Queue(options.taskexchange,exchange=eventTaskExchange)
eventTaskQueue(mqConn).declare()
mqproducer = mqConn.Producer(serializer='json')
if __name__ == "__main__":
run(host=options.listen_host, port=8080)
else:
application = default_app()
| '''
an endpoint designed for custom applications that want to post data
to Elasticsearch through the MozDef event interface.
Post to /custom/vulnerabilities, for example, to post a vulnerability
in a custom format.
Posts must be in JSON and are best formatted using a plugin
to the esworker.py process.
'''
if request.body:
anevent=request.body.read()
request.body.close()
# valid json?
try:
customDict=json.loads(anevent)
except ValueError:
response.status=500
return
# let the message queue worker who gets this know where it was posted
customDict['endpoint']= application
customDict['customendpoint'] = True
# post to eventtask exchange
ensurePublish=mqConn.ensure(mqproducer,mqproducer.publish,max_retries=10)
ensurePublish(customDict,exchange=eventTaskExchange,routing_key=options.taskexchange)
return | identifier_body |
index.py | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
# Copyright (c) 2017 Mozilla Corporation
import os
from bottle import route, run, response, request, default_app
from bottle import _stdout as bottlelog
from kombu import Connection, Queue, Exchange
import json
from configlib import getConfig, OptionParser
@route('/status')
@route('/status/')
def status():
'''endpoint for a status/health check'''
if request.body:
request.body.read()
request.body.close()
response.status = 200
response.content_type = "application/json"
response.body = json.dumps(dict(status='ok', service='loginput'))
return response
@route('/test')
@route('/test/')
def testindex():
# ip = request.environ.get('REMOTE_ADDR')
# response.headers['X-IP'] = '{0}'.format(ip)
response.status=200
# act like elastic search bulk index
@route('/_bulk',method='POST')
@route('/_bulk/',method='POST')
def bulkindex():
if request.body:
bulkpost=request.body.read()
# bottlelog('request:{0}\n'.format(bulkpost))
request.body.close()
try: # Handles json array bulk format [{},{},...]
messages = json.loads(bulkpost)
for event in messages:
# don't post the items telling us where to post things..
if 'index' not in event:
ensurePublish=mqConn.ensure(mqproducer,mqproducer.publish,max_retries=10)
ensurePublish(event,exchange=eventTaskExchange,routing_key=options.taskexchange)
return
except ValueError as e:
bottlelog('Decoded raw input failed with {0}'.format(e))
pass
if len(bulkpost)>10: # Handles single element format {}
# TODO Check for other bulk formats.
# iterate on messages and post to event message queue
eventlist=[]
for i in bulkpost.splitlines():
eventlist.append(i)
for i in eventlist:
try:
# valid json?
try:
eventDict=json.loads(i)
except ValueError:
response.status=500
return
# don't post the items telling us where to post things..
if 'index' not in json.loads(i):
ensurePublish=mqConn.ensure(mqproducer,mqproducer.publish,max_retries=10)
ensurePublish(eventDict,exchange=eventTaskExchange,routing_key=options.taskexchange)
except ValueError:
bottlelog('value error {0}'.format(i))
return
@route('/_status')
@route('/_status/')
@route('/nxlog/', method=['POST','PUT'])
@route('/nxlog', method=['POST','PUT'])
@route('/events/',method=['POST','PUT'])
@route('/events', method=['POST','PUT'])
def eventsindex():
if request.body:
anevent=request.body.read()
# bottlelog('request:{0}\n'.format(anevent))
request.body.close()
# valid json?
try:
eventDict=json.loads(anevent)
except ValueError:
response.status=500
return
# let the message queue worker who gets this know where it was posted
eventDict['endpoint']='events'
# post to event message queue
ensurePublish=mqConn.ensure(mqproducer,mqproducer.publish,max_retries=10)
ensurePublish(eventDict,exchange=eventTaskExchange,routing_key=options.taskexchange)
return
@route('/cef', method=['POST','PUT'])
@route('/cef/',method=['POST','PUT'])
# debug(True)
def cefindex():
if request.body:
anevent=request.body.read()
request.body.close()
# valid json?
try:
cefDict=json.loads(anevent)
except ValueError:
response.status=500
return
# let the message queue worker who gets this know where it was posted
cefDict['endpoint']='cef'
# post to eventtask exchange
ensurePublish=mqConn.ensure(mqproducer,mqproducer.publish,max_retries=10)
ensurePublish(cefDict,exchange=eventTaskExchange,routing_key=options.taskexchange)
return
@route('/custom/<application>',method=['POST','PUT'])
def customindex(application):
'''
an endpoint designed for custom applications that want to post data
to Elasticsearch through the MozDef event interface.
Post to /custom/vulnerabilities, for example, to post a vulnerability
in a custom format.
Posts must be in JSON and are best formatted using a plugin
to the esworker.py process.
'''
if request.body:
anevent=request.body.read()
request.body.close()
# valid json?
try:
customDict=json.loads(anevent)
except ValueError:
response.status=500
return
# let the message queue worker who gets this know where it was posted
customDict['endpoint']= application
customDict['customendpoint'] = True
# post to eventtask exchange
ensurePublish=mqConn.ensure(mqproducer,mqproducer.publish,max_retries=10)
ensurePublish(customDict,exchange=eventTaskExchange,routing_key=options.taskexchange)
return
def | ():
options.mqserver=getConfig('mqserver','localhost',options.configfile)
options.taskexchange=getConfig('taskexchange','eventtask',options.configfile)
options.mquser=getConfig('mquser','guest',options.configfile)
options.mqpassword=getConfig('mqpassword','guest',options.configfile)
options.mqport=getConfig('mqport',5672,options.configfile)
options.listen_host=getConfig('listen_host', '127.0.0.1', options.configfile)
# get config info:
parser=OptionParser()
parser.add_option("-c", dest='configfile', default=os.path.join(os.path.dirname(__file__), __file__).replace('.py', '.conf'), help="configuration file to use")
(options,args) = parser.parse_args()
initConfig()
# connect and declare the message queue/kombu objects.
connString='amqp://{0}:{1}@{2}:{3}//'.format(options.mquser,options.mqpassword,options.mqserver,options.mqport)
mqConn=Connection(connString)
eventTaskExchange=Exchange(name=options.taskexchange,type='direct',durable=True)
eventTaskExchange(mqConn).declare()
eventTaskQueue=Queue(options.taskexchange,exchange=eventTaskExchange)
eventTaskQueue(mqConn).declare()
mqproducer = mqConn.Producer(serializer='json')
if __name__ == "__main__":
run(host=options.listen_host, port=8080)
else:
application = default_app()
| initConfig | identifier_name |
index.py | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
# Copyright (c) 2017 Mozilla Corporation
import os
from bottle import route, run, response, request, default_app
from bottle import _stdout as bottlelog
from kombu import Connection, Queue, Exchange
import json
from configlib import getConfig, OptionParser
@route('/status')
@route('/status/')
def status():
'''endpoint for a status/health check'''
if request.body:
request.body.read()
request.body.close()
response.status = 200
response.content_type = "application/json"
response.body = json.dumps(dict(status='ok', service='loginput'))
return response
@route('/test')
@route('/test/')
def testindex():
# ip = request.environ.get('REMOTE_ADDR')
# response.headers['X-IP'] = '{0}'.format(ip)
response.status=200
# act like elastic search bulk index
@route('/_bulk',method='POST')
@route('/_bulk/',method='POST')
def bulkindex():
if request.body:
bulkpost=request.body.read()
# bottlelog('request:{0}\n'.format(bulkpost))
request.body.close()
try: # Handles json array bulk format [{},{},...]
messages = json.loads(bulkpost)
for event in messages:
# don't post the items telling us where to post things..
if 'index' not in event:
ensurePublish=mqConn.ensure(mqproducer,mqproducer.publish,max_retries=10)
ensurePublish(event,exchange=eventTaskExchange,routing_key=options.taskexchange)
return
except ValueError as e:
bottlelog('Decoded raw input failed with {0}'.format(e))
pass
if len(bulkpost)>10: # Handles single element format {}
# TODO Check for other bulk formats.
# iterate on messages and post to event message queue
eventlist=[]
for i in bulkpost.splitlines():
eventlist.append(i)
for i in eventlist:
try:
# valid json?
try:
eventDict=json.loads(i)
except ValueError:
response.status=500
return
# don't post the items telling us where to post things..
if 'index' not in json.loads(i):
ensurePublish=mqConn.ensure(mqproducer,mqproducer.publish,max_retries=10)
ensurePublish(eventDict,exchange=eventTaskExchange,routing_key=options.taskexchange)
except ValueError:
bottlelog('value error {0}'.format(i))
return
@route('/_status')
@route('/_status/')
@route('/nxlog/', method=['POST','PUT'])
@route('/nxlog', method=['POST','PUT'])
@route('/events/',method=['POST','PUT'])
@route('/events', method=['POST','PUT'])
def eventsindex():
if request.body:
anevent=request.body.read()
# bottlelog('request:{0}\n'.format(anevent))
request.body.close()
# valid json?
try:
eventDict=json.loads(anevent)
except ValueError:
response.status=500
return
# let the message queue worker who gets this know where it was posted
eventDict['endpoint']='events'
# post to event message queue
ensurePublish=mqConn.ensure(mqproducer,mqproducer.publish,max_retries=10)
ensurePublish(eventDict,exchange=eventTaskExchange,routing_key=options.taskexchange)
return
@route('/cef', method=['POST','PUT'])
@route('/cef/',method=['POST','PUT'])
# debug(True)
def cefindex():
if request.body:
anevent=request.body.read()
request.body.close()
# valid json?
try:
cefDict=json.loads(anevent)
except ValueError:
response.status=500
return
# let the message queue worker who gets this know where it was posted
cefDict['endpoint']='cef'
# post to eventtask exchange
ensurePublish=mqConn.ensure(mqproducer,mqproducer.publish,max_retries=10)
ensurePublish(cefDict,exchange=eventTaskExchange,routing_key=options.taskexchange)
return
@route('/custom/<application>',method=['POST','PUT'])
def customindex(application):
'''
an endpoint designed for custom applications that want to post data
to Elasticsearch through the MozDef event interface.
Post to /custom/vulnerabilities, for example, to post a vulnerability
in a custom format.
Posts must be in JSON and are best formatted using a plugin
to the esworker.py process.
'''
if request.body:
anevent=request.body.read()
request.body.close()
# valid json?
try:
customDict=json.loads(anevent)
except ValueError:
response.status=500
return
# let the message queue worker who gets this know where it was posted
customDict['endpoint']= application
customDict['customendpoint'] = True
# post to eventtask exchange
ensurePublish=mqConn.ensure(mqproducer,mqproducer.publish,max_retries=10)
ensurePublish(customDict,exchange=eventTaskExchange,routing_key=options.taskexchange)
return
def initConfig():
options.mqserver=getConfig('mqserver','localhost',options.configfile)
options.taskexchange=getConfig('taskexchange','eventtask',options.configfile)
options.mquser=getConfig('mquser','guest',options.configfile)
options.mqpassword=getConfig('mqpassword','guest',options.configfile)
options.mqport=getConfig('mqport',5672,options.configfile)
options.listen_host=getConfig('listen_host', '127.0.0.1', options.configfile)
# get config info:
parser=OptionParser()
parser.add_option("-c", dest='configfile', default=os.path.join(os.path.dirname(__file__), __file__).replace('.py', '.conf'), help="configuration file to use")
(options,args) = parser.parse_args()
initConfig()
# connect and declare the message queue/kombu objects.
connString='amqp://{0}:{1}@{2}:{3}//'.format(options.mquser,options.mqpassword,options.mqserver,options.mqport)
mqConn=Connection(connString)
eventTaskExchange=Exchange(name=options.taskexchange,type='direct',durable=True)
eventTaskExchange(mqConn).declare()
eventTaskQueue=Queue(options.taskexchange,exchange=eventTaskExchange)
eventTaskQueue(mqConn).declare()
mqproducer = mqConn.Producer(serializer='json')
if __name__ == "__main__":
run(host=options.listen_host, port=8080) | application = default_app() | else: | random_line_split |
jquery.markdownify.js | (function ($) {
$.fn.markdownify = function (options) {
if (options && options['cloudinary']) {
var cloudName = options['cloudinary']['cloudName'];
var unsignedUploadingKey = options['cloudinary']['unsignedUploadingKey']; | var current_element = this;
var editor = CodeMirror.fromTextArea(current_element, {
mode: 'markdown',
lineNumbers: true,
lineWrapping: true,
theme: "default",
extraKeys: {"Enter": "newlineAndIndentContinueMarkdownList"}
});
editor.on('change', function () {
$(current_element).html(editor.getValue())
});
// change the default preview button text to the data attribute if it exists
var $previewButton = $('.btn--preview[data-target="' + current_element.id + '"]');
// use sensible defaults if the user doesn't give us any
var defaultPreviewToggleText = $previewButton.text() || 'Preview';
var defaultEditToggleText = $previewButton.data('toggle-text') || 'Edit';
// set the default text to what the user has given us (or the defaults)
$previewButton.text(defaultPreviewToggleText);
var insertions = {
el : function(e) {
editor.replaceSelection(e.data('prefix') + editor.getSelection());
if (e.data('suffix'))
editor.replaceSelection(e.data('suffix'), 'start');
},
link : function () {
var link = window.prompt('Enter a URL');
if (link !== null && (link = link.trim()).length > 0) {
var selection = editor.getSelection();
selection = selection.length === 0 ? link : selection;
editor.replaceSelection('[' + selection + '](' + link + ')')
}
},
video : function () {
var videoLink = window.prompt('Enter a video URL, e.g. https://www.youtube.com/watch?v=bGutVrdL3M8');
if (videoLink && videoLink.length > 0){
editor.replaceSelection("\n" + $.trim(videoLink)+ "\n\n")
}
},
img : function (e) {
$("#" + e.parent('.markdownify-menu').data('target') + "-upload_field").trigger('click');
}
};
$('.markdownify-menu[data-target="' + current_element.id + '"] .btn--insert').click(function (ev) {
insertions[$(this).data('type') || 'el']($(this))
editor.focus();
ev.preventDefault();
});
$('.btn--preview[data-target="' + current_element.id + '"]').click(function (ev) {
// Cache the selector
var $this = $(this);
$(editor.getWrapperElement()).toggleClass('markdownify--hidden');
$('.' + $this.data('target') + '-preview').toggleClass('markdownify--hidden').html(marked(($('#' + $this.data('target'))).html()))
// When the input text is the same as the default, we use the opposite text (clicked v default)
var textToSwitchTo = defaultPreviewToggleText === $this.text() ? defaultEditToggleText : defaultPreviewToggleText;
$this.text(textToSwitchTo);
});
$('.' + current_element.id + '-preview').toggleClass('markdownify--hidden')
if (cloudName) {
$('body').append("\
<form enctype='multipart/form-data'>\
<input class='upload_field' data-target='" + current_element.id + "' id='" + current_element.id + "-upload_field' name='file' type='file'/>\
</form>\
");
$('.upload_field[data-target=' + current_element.id + ']').unsigned_cloudinary_upload(unsignedUploadingKey,
{ cloud_name: cloudName, tags: 'browser_uploads' },
{ multiple: true }
).bind('cloudinarydone', function(e, data) {
editor.replaceSelection(" + ")\n"); // insert the uploaded image as a Markdown image link (secure_url per Cloudinary's upload response)
editor.focus();
});
}
});
return this;
}
})(jQuery); | }
this.each(function () { | random_line_split |
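// Example wiring (illustrative: the element IDs, class names, and Cloudinary
// values below are assumptions, not taken from this file):
//
//   $('#post-body').markdownify({
//     cloudinary: { cloudName: 'demo', unsignedUploadingKey: 'unsigned_preset' }
//   });
//
//   <!-- companion controls referenced by the plugin -->
//   <button class="btn--preview" data-target="post-body" data-toggle-text="Edit">Preview</button>
//   <div class="post-body-preview"></div>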
borrowck-lend-flow-loop.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Note: the borrowck analysis is currently flow-insensitive.
// Therefore, some of these errors are marked as spurious and could be
// corrected by a simple change to the analysis. The others are
// either genuine or would require more advanced changes. The latter
// cases are noted.
fn borrow(_v: &int) {}
fn borrow_mut(_v: &mut int) {}
fn cond() -> bool { fail!() }
fn for_func(_f: &fn() -> bool) -> bool { fail!() }
fn produce<T>() -> T { fail!(); }
fn inc(v: &mut ~int) {
*v = ~(**v + 1);
}
fn loop_overarching_alias_mut() {
// In this instance, the borrow encompasses the entire loop.
let mut v = ~3;
let mut x = &mut v;
**x += 1;
loop {
borrow(v); //~ ERROR cannot borrow
}
}
fn block_overarching_alias_mut() {
// In this instance, the borrow encompasses the entire closure call.
let mut v = ~3;
let mut x = &mut v;
for 3.times {
borrow(v); //~ ERROR cannot borrow
}
*x = ~5;
}
fn loop_aliased_mut() {
// In this instance, the borrow is carried through the loop.
let mut v = ~3;
let mut w = ~4;
let mut _x = &w;
loop {
borrow_mut(v); //~ ERROR cannot borrow
_x = &v;
}
}
fn while_aliased_mut() {
// In this instance, the borrow is carried through the loop.
let mut v = ~3;
let mut w = ~4;
let mut _x = &w;
while cond() {
borrow_mut(v); //~ ERROR cannot borrow
_x = &v;
}
}
fn | () {
// In this instance, the borrow is carried through the loop.
let mut v = ~3;
let mut w = ~4;
let mut _x = &w;
for for_func {
borrow_mut(v); //~ ERROR cannot borrow
_x = &v;
}
}
fn loop_aliased_mut_break() {
// In this instance, the borrow is carried through the loop.
let mut v = ~3;
let mut w = ~4;
let mut _x = &w;
loop {
borrow_mut(v);
_x = &v;
break;
}
borrow_mut(v); //~ ERROR cannot borrow
}
fn while_aliased_mut_break() {
// In this instance, the borrow is carried through the loop.
let mut v = ~3;
let mut w = ~4;
let mut _x = &w;
while cond() {
borrow_mut(v);
_x = &v;
break;
}
borrow_mut(v); //~ ERROR cannot borrow
}
fn for_aliased_mut_break() {
// In this instance, the borrow is carried through the loop.
let mut v = ~3;
let mut w = ~4;
let mut _x = &w;
for for_func {
// here we cannot be sure that `for_func` respects the break below
borrow_mut(v); //~ ERROR cannot borrow
_x = &v;
break;
}
borrow_mut(v); //~ ERROR cannot borrow
}
fn while_aliased_mut_cond(cond: bool, cond2: bool) {
let mut v = ~3;
let mut w = ~4;
let mut x = &mut w;
while cond {
**x += 1;
borrow(v); //~ ERROR cannot borrow
if cond2 {
x = &mut v; //~ ERROR cannot borrow
}
}
}
fn loop_break_pops_scopes<'r>(_v: &'r mut [uint], f: &fn(&'r mut uint) -> bool) {
// Here we check that when you break out of an inner loop, the
// borrows that go out of scope as you exit the inner loop are
// removed from the bitset.
while cond() {
while cond() {
// this borrow is limited to the scope of `r`...
let r: &'r mut uint = produce();
if !f(&mut *r) {
break; // ...so it is not live as exit the `while` loop here
}
}
}
}
fn loop_loop_pops_scopes<'r>(_v: &'r mut [uint], f: &fn(&'r mut uint) -> bool) {
// Similar to `loop_break_pops_scopes` but for the `loop` keyword
while cond() {
while cond() {
// this borrow is limited to the scope of `r`...
let r: &'r mut uint = produce();
if !f(&mut *r) {
loop; // ...so it is not live as exit (and re-enter) the `while` loop here
}
}
}
}
fn main() {}
| for_loop_aliased_mut | identifier_name |
borrowck-lend-flow-loop.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Note: the borrowck analysis is currently flow-insensitive.
// Therefore, some of these errors are marked as spurious and could be
// corrected by a simple change to the analysis. The others are
// either genuine or would require more advanced changes. The latter
// cases are noted.
fn borrow(_v: &int) {}
fn borrow_mut(_v: &mut int) {}
fn cond() -> bool { fail!() }
fn for_func(_f: &fn() -> bool) -> bool { fail!() }
fn produce<T>() -> T { fail!(); }
fn inc(v: &mut ~int) {
*v = ~(**v + 1);
}
fn loop_overarching_alias_mut() {
// In this instance, the borrow encompasses the entire loop.
let mut v = ~3;
let mut x = &mut v;
**x += 1;
loop {
borrow(v); //~ ERROR cannot borrow
}
}
fn block_overarching_alias_mut() {
// In this instance, the borrow encompasses the entire closure call.
let mut v = ~3;
let mut x = &mut v;
for 3.times {
borrow(v); //~ ERROR cannot borrow
}
*x = ~5;
}
fn loop_aliased_mut() {
// In this instance, the borrow is carried through the loop.
let mut v = ~3;
let mut w = ~4;
let mut _x = &w;
loop {
borrow_mut(v); //~ ERROR cannot borrow
_x = &v;
}
}
fn while_aliased_mut() {
// In this instance, the borrow is carried through the loop.
let mut v = ~3;
let mut w = ~4;
let mut _x = &w;
while cond() {
borrow_mut(v); //~ ERROR cannot borrow
_x = &v;
}
}
fn for_loop_aliased_mut() {
// In this instance, the borrow is carried through the loop.
let mut v = ~3;
let mut w = ~4;
let mut _x = &w;
for for_func {
borrow_mut(v); //~ ERROR cannot borrow
_x = &v;
}
}
fn loop_aliased_mut_break() {
// In this instance, the borrow is carried through the loop.
let mut v = ~3;
let mut w = ~4;
let mut _x = &w;
loop {
borrow_mut(v);
_x = &v;
break;
}
borrow_mut(v); //~ ERROR cannot borrow
}
fn while_aliased_mut_break() {
// In this instance, the borrow is carried through the loop.
let mut v = ~3;
let mut w = ~4;
let mut _x = &w;
while cond() {
borrow_mut(v);
_x = &v;
break;
}
borrow_mut(v); //~ ERROR cannot borrow
}
fn for_aliased_mut_break() {
// In this instance, the borrow is carried through the loop.
let mut v = ~3;
let mut w = ~4;
let mut _x = &w;
for for_func {
// here we cannot be sure that `for_func` respects the break below
borrow_mut(v); //~ ERROR cannot borrow
_x = &v;
break;
}
borrow_mut(v); //~ ERROR cannot borrow
}
fn while_aliased_mut_cond(cond: bool, cond2: bool) {
let mut v = ~3;
let mut w = ~4;
let mut x = &mut w;
while cond {
**x += 1;
borrow(v); //~ ERROR cannot borrow
if cond2 {
x = &mut v; //~ ERROR cannot borrow
}
}
}
fn loop_break_pops_scopes<'r>(_v: &'r mut [uint], f: &fn(&'r mut uint) -> bool) {
// Here we check that when you break out of an inner loop, the
// borrows that go out of scope as you exit the inner loop are
// removed from the bitset.
while cond() {
while cond() {
// this borrow is limited to the scope of `r`...
let r: &'r mut uint = produce();
if !f(&mut *r) {
                break; // ...so it is not live as we exit the `while` loop here
}
}
}
}
fn loop_loop_pops_scopes<'r>(_v: &'r mut [uint], f: &fn(&'r mut uint) -> bool) {
// Similar to `loop_break_pops_scopes` but for the `loop` keyword
while cond() {
while cond() {
// this borrow is limited to the scope of `r`...
let r: &'r mut uint = produce();
if !f(&mut *r) |
}
}
}
fn main() {}
| {
                loop; // ...so it is not live as we exit (and re-enter) the `while` loop here
} | conditional_block |
borrowck-lend-flow-loop.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Note: the borrowck analysis is currently flow-insensitive.
// Therefore, some of these errors are marked as spurious and could be
// corrected by a simple change to the analysis. The others are
// either genuine or would require more advanced changes. The latter
// cases are noted.
fn borrow(_v: &int) {}
fn borrow_mut(_v: &mut int) |
fn cond() -> bool { fail!() }
fn for_func(_f: &fn() -> bool) -> bool { fail!() }
fn produce<T>() -> T { fail!(); }
fn inc(v: &mut ~int) {
*v = ~(**v + 1);
}
fn loop_overarching_alias_mut() {
// In this instance, the borrow encompasses the entire loop.
let mut v = ~3;
let mut x = &mut v;
**x += 1;
loop {
borrow(v); //~ ERROR cannot borrow
}
}
fn block_overarching_alias_mut() {
// In this instance, the borrow encompasses the entire closure call.
let mut v = ~3;
let mut x = &mut v;
for 3.times {
borrow(v); //~ ERROR cannot borrow
}
*x = ~5;
}
fn loop_aliased_mut() {
// In this instance, the borrow is carried through the loop.
let mut v = ~3;
let mut w = ~4;
let mut _x = &w;
loop {
borrow_mut(v); //~ ERROR cannot borrow
_x = &v;
}
}
fn while_aliased_mut() {
// In this instance, the borrow is carried through the loop.
let mut v = ~3;
let mut w = ~4;
let mut _x = &w;
while cond() {
borrow_mut(v); //~ ERROR cannot borrow
_x = &v;
}
}
fn for_loop_aliased_mut() {
// In this instance, the borrow is carried through the loop.
let mut v = ~3;
let mut w = ~4;
let mut _x = &w;
for for_func {
borrow_mut(v); //~ ERROR cannot borrow
_x = &v;
}
}
fn loop_aliased_mut_break() {
// In this instance, the borrow is carried through the loop.
let mut v = ~3;
let mut w = ~4;
let mut _x = &w;
loop {
borrow_mut(v);
_x = &v;
break;
}
borrow_mut(v); //~ ERROR cannot borrow
}
fn while_aliased_mut_break() {
// In this instance, the borrow is carried through the loop.
let mut v = ~3;
let mut w = ~4;
let mut _x = &w;
while cond() {
borrow_mut(v);
_x = &v;
break;
}
borrow_mut(v); //~ ERROR cannot borrow
}
fn for_aliased_mut_break() {
// In this instance, the borrow is carried through the loop.
let mut v = ~3;
let mut w = ~4;
let mut _x = &w;
for for_func {
// here we cannot be sure that `for_func` respects the break below
borrow_mut(v); //~ ERROR cannot borrow
_x = &v;
break;
}
borrow_mut(v); //~ ERROR cannot borrow
}
fn while_aliased_mut_cond(cond: bool, cond2: bool) {
let mut v = ~3;
let mut w = ~4;
let mut x = &mut w;
while cond {
**x += 1;
borrow(v); //~ ERROR cannot borrow
if cond2 {
x = &mut v; //~ ERROR cannot borrow
}
}
}
fn loop_break_pops_scopes<'r>(_v: &'r mut [uint], f: &fn(&'r mut uint) -> bool) {
// Here we check that when you break out of an inner loop, the
// borrows that go out of scope as you exit the inner loop are
// removed from the bitset.
while cond() {
while cond() {
// this borrow is limited to the scope of `r`...
let r: &'r mut uint = produce();
if !f(&mut *r) {
                break; // ...so it is not live as we exit the `while` loop here
}
}
}
}
fn loop_loop_pops_scopes<'r>(_v: &'r mut [uint], f: &fn(&'r mut uint) -> bool) {
// Similar to `loop_break_pops_scopes` but for the `loop` keyword
while cond() {
while cond() {
// this borrow is limited to the scope of `r`...
let r: &'r mut uint = produce();
if !f(&mut *r) {
                loop; // ...so it is not live as we exit (and re-enter) the `while` loop here
}
}
}
}
fn main() {}
| {} | identifier_body |
borrowck-lend-flow-loop.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Note: the borrowck analysis is currently flow-insensitive.
// Therefore, some of these errors are marked as spurious and could be
// corrected by a simple change to the analysis. The others are
// either genuine or would require more advanced changes. The latter
// cases are noted.
fn borrow(_v: &int) {}
fn borrow_mut(_v: &mut int) {}
fn cond() -> bool { fail!() }
fn for_func(_f: &fn() -> bool) -> bool { fail!() }
fn produce<T>() -> T { fail!(); }
fn inc(v: &mut ~int) {
*v = ~(**v + 1);
}
fn loop_overarching_alias_mut() {
// In this instance, the borrow encompasses the entire loop.
let mut v = ~3;
let mut x = &mut v;
**x += 1;
loop {
borrow(v); //~ ERROR cannot borrow
}
}
fn block_overarching_alias_mut() {
// In this instance, the borrow encompasses the entire closure call.
let mut v = ~3;
let mut x = &mut v;
for 3.times {
borrow(v); //~ ERROR cannot borrow
}
*x = ~5;
}
fn loop_aliased_mut() {
// In this instance, the borrow is carried through the loop.
let mut v = ~3;
let mut w = ~4;
let mut _x = &w;
loop {
borrow_mut(v); //~ ERROR cannot borrow
_x = &v;
}
}
fn while_aliased_mut() {
// In this instance, the borrow is carried through the loop.
let mut v = ~3;
let mut w = ~4;
let mut _x = &w;
while cond() {
borrow_mut(v); //~ ERROR cannot borrow
_x = &v;
}
}
fn for_loop_aliased_mut() {
// In this instance, the borrow is carried through the loop.
let mut v = ~3;
let mut w = ~4;
let mut _x = &w;
for for_func {
borrow_mut(v); //~ ERROR cannot borrow
_x = &v;
}
}
fn loop_aliased_mut_break() {
// In this instance, the borrow is carried through the loop.
let mut v = ~3;
let mut w = ~4;
let mut _x = &w;
loop {
borrow_mut(v);
_x = &v;
break;
}
borrow_mut(v); //~ ERROR cannot borrow
}
fn while_aliased_mut_break() {
// In this instance, the borrow is carried through the loop.
let mut v = ~3;
let mut w = ~4;
let mut _x = &w;
while cond() {
borrow_mut(v);
_x = &v;
break;
}
borrow_mut(v); //~ ERROR cannot borrow
}
fn for_aliased_mut_break() {
// In this instance, the borrow is carried through the loop.
let mut v = ~3;
let mut w = ~4;
let mut _x = &w;
for for_func {
// here we cannot be sure that `for_func` respects the break below
borrow_mut(v); //~ ERROR cannot borrow
_x = &v;
break;
}
borrow_mut(v); //~ ERROR cannot borrow
}
fn while_aliased_mut_cond(cond: bool, cond2: bool) {
let mut v = ~3;
let mut w = ~4;
let mut x = &mut w;
while cond {
**x += 1;
borrow(v); //~ ERROR cannot borrow
if cond2 {
x = &mut v; //~ ERROR cannot borrow
}
}
}
fn loop_break_pops_scopes<'r>(_v: &'r mut [uint], f: &fn(&'r mut uint) -> bool) {
// Here we check that when you break out of an inner loop, the
// borrows that go out of scope as you exit the inner loop are
// removed from the bitset.
while cond() {
while cond() {
            // this borrow is limited to the scope of `r`... | break; // ...so it is not live as we exit the `while` loop here
}
}
}
}
fn loop_loop_pops_scopes<'r>(_v: &'r mut [uint], f: &fn(&'r mut uint) -> bool) {
// Similar to `loop_break_pops_scopes` but for the `loop` keyword
while cond() {
while cond() {
// this borrow is limited to the scope of `r`...
let r: &'r mut uint = produce();
if !f(&mut *r) {
                loop; // ...so it is not live as we exit (and re-enter) the `while` loop here
}
}
}
}
fn main() {} | let r: &'r mut uint = produce();
if !f(&mut *r) { | random_line_split |
wiki.py | # coding: utf-8
"""
MoinMoin wiki stats about updated pages
Config example::
[wiki]
type = wiki
wiki test = http://moinmo.in/
The optional key 'api' can be used to change the default
xmlrpc api endpoint::
[wiki]
type = wiki
api = ?action=xmlrpc2
wiki test = http://moinmo.in/
"""
import xmlrpc.client
from did.base import Config, ConfigError
from did.stats import Stats, StatsGroup
from did.utils import item
DEFAULT_API = '?action=xmlrpc2'
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Wiki Stats
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class WikiChanges(Stats):
""" Wiki changes """
def __init__(self, option, name=None, parent=None, url=None, api=None):
self.url = url
self.api = api or DEFAULT_API
self.changes = 0
self.proxy = xmlrpc.client.ServerProxy("{0}{1}".format(url, self.api))
Stats.__init__(self, option, name, parent)
def fetch(self):
for change in self.proxy.getRecentChanges(
self.options.since.datetime):
if (change["author"] == self.user.login
and change["lastModified"] < self.options.until.date):
self.changes += 1
url = self.url + change["name"]
if url not in self.stats:
|
self.stats.sort()
def header(self):
""" Show summary header. """
# Different header for wiki: Updates on xxx: x changes of y pages
item(
"{0}: {1} change{2} of {3} page{4}".format(
self.name, self.changes, "" if self.changes == 1 else "s",
len(self.stats), "" if len(self.stats) == 1 else "s"),
level=0, options=self.options)
def merge(self, other):
""" Merge another stats. """
Stats.merge(self, other)
self.changes += other.changes
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Stats Group
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class WikiStats(StatsGroup):
""" Wiki stats """
# Default order
order = 700
def __init__(self, option, name=None, parent=None, user=None):
StatsGroup.__init__(self, option, name, parent, user)
try:
api = Config().item(option, 'api')
except ConfigError:
api = None
for wiki, url in Config().section(option, skip=['type', 'api']):
self.stats.append(WikiChanges(
option=wiki, parent=self, url=url, api=api,
name="Updates on {0}".format(wiki)))
| self.stats.append(url) | conditional_block |
wiki.py | # coding: utf-8
"""
MoinMoin wiki stats about updated pages
Config example::
[wiki]
type = wiki
wiki test = http://moinmo.in/
The optional key 'api' can be used to change the default
xmlrpc api endpoint::
[wiki]
type = wiki
api = ?action=xmlrpc2
wiki test = http://moinmo.in/
"""
import xmlrpc.client
from did.base import Config, ConfigError
from did.stats import Stats, StatsGroup
from did.utils import item
DEFAULT_API = '?action=xmlrpc2'
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Wiki Stats
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class WikiChanges(Stats):
""" Wiki changes """
def __init__(self, option, name=None, parent=None, url=None, api=None):
self.url = url
self.api = api or DEFAULT_API
self.changes = 0
self.proxy = xmlrpc.client.ServerProxy("{0}{1}".format(url, self.api))
Stats.__init__(self, option, name, parent)
def fetch(self):
|
def header(self):
""" Show summary header. """
# Different header for wiki: Updates on xxx: x changes of y pages
item(
"{0}: {1} change{2} of {3} page{4}".format(
self.name, self.changes, "" if self.changes == 1 else "s",
len(self.stats), "" if len(self.stats) == 1 else "s"),
level=0, options=self.options)
def merge(self, other):
""" Merge another stats. """
Stats.merge(self, other)
self.changes += other.changes
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Stats Group
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class WikiStats(StatsGroup):
""" Wiki stats """
# Default order
order = 700
def __init__(self, option, name=None, parent=None, user=None):
StatsGroup.__init__(self, option, name, parent, user)
try:
api = Config().item(option, 'api')
except ConfigError:
api = None
for wiki, url in Config().section(option, skip=['type', 'api']):
self.stats.append(WikiChanges(
option=wiki, parent=self, url=url, api=api,
name="Updates on {0}".format(wiki)))
| for change in self.proxy.getRecentChanges(
self.options.since.datetime):
if (change["author"] == self.user.login
and change["lastModified"] < self.options.until.date):
self.changes += 1
url = self.url + change["name"]
if url not in self.stats:
self.stats.append(url)
self.stats.sort() | identifier_body |
wiki.py | # coding: utf-8
"""
MoinMoin wiki stats about updated pages
Config example::
[wiki]
type = wiki
wiki test = http://moinmo.in/
The optional key 'api' can be used to change the default
xmlrpc api endpoint::
[wiki]
type = wiki
api = ?action=xmlrpc2
wiki test = http://moinmo.in/ | from did.stats import Stats, StatsGroup
from did.utils import item
DEFAULT_API = '?action=xmlrpc2'
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Wiki Stats
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class WikiChanges(Stats):
""" Wiki changes """
def __init__(self, option, name=None, parent=None, url=None, api=None):
self.url = url
self.api = api or DEFAULT_API
self.changes = 0
self.proxy = xmlrpc.client.ServerProxy("{0}{1}".format(url, self.api))
Stats.__init__(self, option, name, parent)
def fetch(self):
for change in self.proxy.getRecentChanges(
self.options.since.datetime):
if (change["author"] == self.user.login
and change["lastModified"] < self.options.until.date):
self.changes += 1
url = self.url + change["name"]
if url not in self.stats:
self.stats.append(url)
self.stats.sort()
def header(self):
""" Show summary header. """
# Different header for wiki: Updates on xxx: x changes of y pages
item(
"{0}: {1} change{2} of {3} page{4}".format(
self.name, self.changes, "" if self.changes == 1 else "s",
len(self.stats), "" if len(self.stats) == 1 else "s"),
level=0, options=self.options)
def merge(self, other):
""" Merge another stats. """
Stats.merge(self, other)
self.changes += other.changes
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Stats Group
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class WikiStats(StatsGroup):
""" Wiki stats """
# Default order
order = 700
def __init__(self, option, name=None, parent=None, user=None):
StatsGroup.__init__(self, option, name, parent, user)
try:
api = Config().item(option, 'api')
except ConfigError:
api = None
for wiki, url in Config().section(option, skip=['type', 'api']):
self.stats.append(WikiChanges(
option=wiki, parent=self, url=url, api=api,
name="Updates on {0}".format(wiki))) | """
import xmlrpc.client
from did.base import Config, ConfigError | random_line_split |
wiki.py | # coding: utf-8
"""
MoinMoin wiki stats about updated pages
Config example::
[wiki]
type = wiki
wiki test = http://moinmo.in/
The optional key 'api' can be used to change the default
xmlrpc api endpoint::
[wiki]
type = wiki
api = ?action=xmlrpc2
wiki test = http://moinmo.in/
"""
import xmlrpc.client
from did.base import Config, ConfigError
from did.stats import Stats, StatsGroup
from did.utils import item
DEFAULT_API = '?action=xmlrpc2'
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Wiki Stats
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class WikiChanges(Stats):
""" Wiki changes """
def __init__(self, option, name=None, parent=None, url=None, api=None):
self.url = url
self.api = api or DEFAULT_API
self.changes = 0
self.proxy = xmlrpc.client.ServerProxy("{0}{1}".format(url, self.api))
Stats.__init__(self, option, name, parent)
def fetch(self):
for change in self.proxy.getRecentChanges(
self.options.since.datetime):
if (change["author"] == self.user.login
and change["lastModified"] < self.options.until.date):
self.changes += 1
url = self.url + change["name"]
if url not in self.stats:
self.stats.append(url)
self.stats.sort()
def header(self):
""" Show summary header. """
# Different header for wiki: Updates on xxx: x changes of y pages
item(
"{0}: {1} change{2} of {3} page{4}".format(
self.name, self.changes, "" if self.changes == 1 else "s",
len(self.stats), "" if len(self.stats) == 1 else "s"),
level=0, options=self.options)
def merge(self, other):
""" Merge another stats. """
Stats.merge(self, other)
self.changes += other.changes
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Stats Group
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class WikiStats(StatsGroup):
""" Wiki stats """
# Default order
order = 700
def | (self, option, name=None, parent=None, user=None):
StatsGroup.__init__(self, option, name, parent, user)
try:
api = Config().item(option, 'api')
except ConfigError:
api = None
for wiki, url in Config().section(option, skip=['type', 'api']):
self.stats.append(WikiChanges(
option=wiki, parent=self, url=url, api=api,
name="Updates on {0}".format(wiki)))
| __init__ | identifier_name |
template.py | #!/usr/bin/env python -OO
# encoding: utf-8
###########
# ORP - Open Robotics Platform
#
# Copyright (c) 2010 John Harrison, William Woodall
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
##########
"""
${TM_NEW_FILE_BASENAME}.py - <PURPOSE>
Created by ${TM_FULLNAME} on ${TM_DATE}.
"""
__author__ = "William Woodall"
__copyright__ = "Copyright (c) 2010 John Harrison, William Woodall"
### Imports ###
# Standard Python Libraries
import sys
import os
try: # try to catch any missing dependencies
# <PKG> for <PURPOSE>
PKGNAME = '<EASY_INSTALL NAME>'
import <LIBRARY NAME>
del PKGNAME
except ImportError as PKG_ERROR: # We are missing something, let them know...
sys.stderr.write(str(PKG_ERROR)+"\nYou might not have the "+PKGNAME+" \
module, try 'easy_install "+PKGNAME+"', else consult Google.")
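# Editor's note (hypothetical fill-in, not part of the template): with the
# <PKG> and <LIBRARY NAME> placeholders substituted, the guarded import above
# would read, for example (package and module names are assumptions):
#
#     try:  # try to catch any missing dependencies
#         PKGNAME = 'pyserial'
#         import serial
#         del PKGNAME
#     except ImportError as PKG_ERROR:
#         sys.stderr.write(str(PKG_ERROR) + "\nYou might not have the "
#                          "pyserial module, try 'easy_install pyserial'.")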
### Class ###
### Functions ###
def | ():
pass
### IfMain ###
if __name__ == '__main__':
main()
| main | identifier_name |
template.py | #!/usr/bin/env python -OO
# encoding: utf-8
###########
# ORP - Open Robotics Platform
#
# Copyright (c) 2010 John Harrison, William Woodall
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
##########
"""
${TM_NEW_FILE_BASENAME}.py - <PURPOSE>
Created by ${TM_FULLNAME} on ${TM_DATE}.
"""
__author__ = "William Woodall"
__copyright__ = "Copyright (c) 2010 John Harrison, William Woodall"
### Imports ###
# Standard Python Libraries
import sys
import os
try: # try to catch any missing dependencies
# <PKG> for <PURPOSE>
PKGNAME = '<EASY_INSTALL NAME>'
import <LIBRARY NAME>
del PKGNAME
except ImportError as PKG_ERROR: # We are missing something, let them know...
sys.stderr.write(str(PKG_ERROR)+"\nYou might not have the "+PKGNAME+" \
module, try 'easy_install "+PKGNAME+"', else consult Google.")
### Class ###
### Functions ###
def main():
pass
### IfMain ###
if __name__ == '__main__':
| main() | conditional_block |
|
template.py | #!/usr/bin/env python -OO
# encoding: utf-8 |
###########
# ORP - Open Robotics Platform
#
# Copyright (c) 2010 John Harrison, William Woodall
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
##########
"""
${TM_NEW_FILE_BASENAME}.py - <PURPOSE>
Created by ${TM_FULLNAME} on ${TM_DATE}.
"""
__author__ = "William Woodall"
__copyright__ = "Copyright (c) 2010 John Harrison, William Woodall"
### Imports ###
# Standard Python Libraries
import sys
import os
try: # try to catch any missing dependencies
# <PKG> for <PURPOSE>
PKGNAME = '<EASY_INSTALL NAME>'
import <LIBRARY NAME>
del PKGNAME
except ImportError as PKG_ERROR: # We are missing something, let them know...
sys.stderr.write(str(PKG_ERROR)+"\nYou might not have the "+PKGNAME+" \
module, try 'easy_install "+PKGNAME+"', else consult Google.")
### Class ###
### Functions ###
def main():
pass
### IfMain ###
if __name__ == '__main__':
main() | random_line_split |
|
template.py | #!/usr/bin/env python -OO
# encoding: utf-8
###########
# ORP - Open Robotics Platform
#
# Copyright (c) 2010 John Harrison, William Woodall
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
##########
"""
${TM_NEW_FILE_BASENAME}.py - <PURPOSE>
Created by ${TM_FULLNAME} on ${TM_DATE}.
"""
__author__ = "William Woodall"
__copyright__ = "Copyright (c) 2010 John Harrison, William Woodall"
### Imports ###
# Standard Python Libraries
import sys
import os
try: # try to catch any missing dependencies
# <PKG> for <PURPOSE>
PKGNAME = '<EASY_INSTALL NAME>'
import <LIBRARY NAME>
del PKGNAME
except ImportError as PKG_ERROR: # We are missing something, let them know...
sys.stderr.write(str(PKG_ERROR)+"\nYou might not have the "+PKGNAME+" \
module, try 'easy_install "+PKGNAME+"', else consult Google.")
### Class ###
### Functions ###
def main():
|
### IfMain ###
if __name__ == '__main__':
main()
| pass | identifier_body |
calibrate.py | from cavicapture import CaviCapture
from process import CaviProcess
import sys, os, getopt
import time, datetime
import numpy as np
import matplotlib.pyplot as plt
def main():
config_path = './config.ini' # default
try:
opts, args = getopt.getopt(sys.argv[1:], "c", ["config="])
except getopt.GetoptError:
print("Argument error")
sys.exit(2)
for opt, arg in opts:
if opt in ("--config"):
config_path = arg
calibrator = CaviCalibrate(config_path)
calibrator.init_calibration()
class CaviCalibrate:
def __init__(self, config_path):
self.output_dir = "./calibration"
if not os.path.exists(self.output_dir):
os.makedirs(self.output_dir)
self.output_dir = self.output_dir + "/" + datetime.datetime.now().strftime('%Y%m%d-%H%M%S')
if not os.path.exists(self.output_dir):
os.makedirs(self.output_dir)
self.cavi_capture = CaviCapture(config_path)
self.cavi_capture.log_file = self.output_dir + "/capture.log.txt"
self.cavi_capture.get_ini_config()
self.cavi_capture.setup_gpio()
self.cavi_capture.setup_camera()
self.cavi_process = CaviProcess(self.output_dir)
self.cavi_process.log_file = self.output_dir + "/process.log.txt"
def init_calibration(self):
files = []
self.cavi_capture.lights(True)
time.sleep(3) # Let lights settle
files.append(self.capture_image(self.output_dir + "/" + "image_1.png"))
files.append(self.capture_image(self.output_dir + "/" + "image_2.png"))
files.append(self.capture_image(self.output_dir + "/" + "image_3.png"))
files.append(self.capture_image(self.output_dir + "/" + "image_4.png"))
self.cavi_capture.lights(False)
self.process_files(files)
def process_files(self, files):
file_1 = files[0]
file_2 = files[1]
file_3 = files[2]
file_4 = files[3]
# Get the image difference and summary using 2 images
# diff = self.cavi_process.subtract_images(file_1, file_2)
# self.cavi_process.write_image(self.output_dir + "/diff.png", diff)
# self.summarise(diff, self.output_dir + "/noise_hist.png")
        # Difference the first two images, then the last two
img_group_1_diff = self.cavi_process.subtract_images(file_1, file_2)
self.cavi_process.write_image(self.output_dir + "/image_group_1_diff.png", img_group_1_diff)
self.summarise(img_group_1_diff, self.output_dir + "/image_group_1_diff_hist.png")
img_group_2_diff = self.cavi_process.subtract_images(file_3, file_4)
self.cavi_process.write_image(self.output_dir + "/image_group_2_diff.png", img_group_2_diff)
self.summarise(img_group_2_diff, self.output_dir + "/image_group_2_diff_hist.png")
groups_min = np.minimum(img_group_1_diff, img_group_2_diff)
self.cavi_process.write_image(self.output_dir + "/groups_min.png", groups_min)
self.summarise(groups_min, self.output_dir + "/groups_min_hist.png")
# diff = self.cavi_process.subtract_images(self.output_dir + "/image_1_average.png", self.output_dir + "/image_2_average.png")
# self.cavi_process.write_image(self.output_dir + "/image_average_diff.png", diff)
# self.summarise(diff, self.output_dir + "/image_average_noise_hist.png")
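    # Editor's note (illustrative, not part of the original script): taking
    # np.minimum of the two pair-wise differences keeps a pixel only if it
    # changed in *both* independent image pairs, which suppresses one-off
    # sensor noise. The core idea, self-contained:
    #
    #     import numpy as np
    #     d1 = np.abs(img1.astype(int) - img2.astype(int))
    #     d2 = np.abs(img3.astype(int) - img4.astype(int))
    #     persistent = np.minimum(d1, d2)  # transient noise drops out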
def summarise(self, img, hist_path):
|
def gen_file_path(self):
return self.output_dir + "/" + datetime.datetime.now().strftime('%Y%m%d-%H%M%S') + ".png"
def capture_image(self, file_path):
self.cavi_capture.camera.capture(file_path, 'png')
return file_path
if __name__ == '__main__':
main()
| average_pixel = np.average(img[img>0])
max_pixel = np.max(img[img>0])
min_pixel = np.min(img[img>0])
total_area = len(img[img>0])
self.cavi_process.log("Noise max: " + str(max_pixel))
self.cavi_process.log("Noise min: " + str(min_pixel))
self.cavi_process.log("Noise average: " + str(average_pixel))
self.cavi_process.log("Noise area: " + str(total_area))
plt.hist(img.ravel(),max_pixel,[min_pixel,max_pixel])
plt.savefig(hist_path) | identifier_body |
calibrate.py | from cavicapture import CaviCapture
from process import CaviProcess
import sys, os, getopt
import time, datetime
import numpy as np
import matplotlib.pyplot as plt
def main():
config_path = './config.ini' # default
try:
opts, args = getopt.getopt(sys.argv[1:], "c", ["config="])
except getopt.GetoptError:
print("Argument error")
sys.exit(2)
for opt, arg in opts:
if opt in ("--config"):
config_path = arg
calibrator = CaviCalibrate(config_path)
calibrator.init_calibration()
class CaviCalibrate:
def __init__(self, config_path):
self.output_dir = "./calibration"
if not os.path.exists(self.output_dir):
os.makedirs(self.output_dir)
self.output_dir = self.output_dir + "/" + datetime.datetime.now().strftime('%Y%m%d-%H%M%S')
if not os.path.exists(self.output_dir):
|
self.cavi_capture = CaviCapture(config_path)
self.cavi_capture.log_file = self.output_dir + "/capture.log.txt"
self.cavi_capture.get_ini_config()
self.cavi_capture.setup_gpio()
self.cavi_capture.setup_camera()
self.cavi_process = CaviProcess(self.output_dir)
self.cavi_process.log_file = self.output_dir + "/process.log.txt"
def init_calibration(self):
files = []
self.cavi_capture.lights(True)
time.sleep(3) # Let lights settle
files.append(self.capture_image(self.output_dir + "/" + "image_1.png"))
files.append(self.capture_image(self.output_dir + "/" + "image_2.png"))
files.append(self.capture_image(self.output_dir + "/" + "image_3.png"))
files.append(self.capture_image(self.output_dir + "/" + "image_4.png"))
self.cavi_capture.lights(False)
self.process_files(files)
def process_files(self, files):
file_1 = files[0]
file_2 = files[1]
file_3 = files[2]
file_4 = files[3]
# Get the image difference and summary using 2 images
# diff = self.cavi_process.subtract_images(file_1, file_2)
# self.cavi_process.write_image(self.output_dir + "/diff.png", diff)
# self.summarise(diff, self.output_dir + "/noise_hist.png")
        # Difference the first two images, then the last two
img_group_1_diff = self.cavi_process.subtract_images(file_1, file_2)
self.cavi_process.write_image(self.output_dir + "/image_group_1_diff.png", img_group_1_diff)
self.summarise(img_group_1_diff, self.output_dir + "/image_group_1_diff_hist.png")
img_group_2_diff = self.cavi_process.subtract_images(file_3, file_4)
self.cavi_process.write_image(self.output_dir + "/image_group_2_diff.png", img_group_2_diff)
self.summarise(img_group_2_diff, self.output_dir + "/image_group_2_diff_hist.png")
groups_min = np.minimum(img_group_1_diff, img_group_2_diff)
self.cavi_process.write_image(self.output_dir + "/groups_min.png", groups_min)
self.summarise(groups_min, self.output_dir + "/groups_min_hist.png")
# diff = self.cavi_process.subtract_images(self.output_dir + "/image_1_average.png", self.output_dir + "/image_2_average.png")
# self.cavi_process.write_image(self.output_dir + "/image_average_diff.png", diff)
# self.summarise(diff, self.output_dir + "/image_average_noise_hist.png")
def summarise(self, img, hist_path):
average_pixel = np.average(img[img>0])
max_pixel = np.max(img[img>0])
min_pixel = np.min(img[img>0])
total_area = len(img[img>0])
self.cavi_process.log("Noise max: " + str(max_pixel))
self.cavi_process.log("Noise min: " + str(min_pixel))
self.cavi_process.log("Noise average: " + str(average_pixel))
self.cavi_process.log("Noise area: " + str(total_area))
plt.hist(img.ravel(),max_pixel,[min_pixel,max_pixel])
plt.savefig(hist_path)
def gen_file_path(self):
return self.output_dir + "/" + datetime.datetime.now().strftime('%Y%m%d-%H%M%S') + ".png"
def capture_image(self, file_path):
self.cavi_capture.camera.capture(file_path, 'png')
return file_path
if __name__ == '__main__':
main()
| os.makedirs(self.output_dir) | conditional_block |
calibrate.py | from cavicapture import CaviCapture
from process import CaviProcess
import sys, os, getopt
import time, datetime
import numpy as np
import matplotlib.pyplot as plt
def main():
config_path = './config.ini' # default
try:
opts, args = getopt.getopt(sys.argv[1:], "c", ["config="])
except getopt.GetoptError:
print("Argument error")
sys.exit(2)
for opt, arg in opts:
if opt in ("--config"):
config_path = arg
calibrator = CaviCalibrate(config_path)
calibrator.init_calibration()
class CaviCalibrate:
def __init__(self, config_path):
self.output_dir = "./calibration"
if not os.path.exists(self.output_dir):
os.makedirs(self.output_dir)
self.output_dir = self.output_dir + "/" + datetime.datetime.now().strftime('%Y%m%d-%H%M%S')
if not os.path.exists(self.output_dir):
os.makedirs(self.output_dir)
self.cavi_capture = CaviCapture(config_path)
self.cavi_capture.log_file = self.output_dir + "/capture.log.txt"
self.cavi_capture.get_ini_config()
self.cavi_capture.setup_gpio()
self.cavi_capture.setup_camera()
self.cavi_process = CaviProcess(self.output_dir)
self.cavi_process.log_file = self.output_dir + "/process.log.txt"
def init_calibration(self):
files = []
self.cavi_capture.lights(True)
time.sleep(3) # Let lights settle
files.append(self.capture_image(self.output_dir + "/" + "image_1.png"))
files.append(self.capture_image(self.output_dir + "/" + "image_2.png"))
files.append(self.capture_image(self.output_dir + "/" + "image_3.png"))
files.append(self.capture_image(self.output_dir + "/" + "image_4.png"))
self.cavi_capture.lights(False)
self.process_files(files)
def process_files(self, files):
file_1 = files[0]
file_2 = files[1]
file_3 = files[2]
file_4 = files[3]
# Get the image difference and summary using 2 images
# diff = self.cavi_process.subtract_images(file_1, file_2)
# self.cavi_process.write_image(self.output_dir + "/diff.png", diff)
# self.summarise(diff, self.output_dir + "/noise_hist.png")
        # Difference the first two images, then the last two
img_group_1_diff = self.cavi_process.subtract_images(file_1, file_2)
self.cavi_process.write_image(self.output_dir + "/image_group_1_diff.png", img_group_1_diff)
self.summarise(img_group_1_diff, self.output_dir + "/image_group_1_diff_hist.png")
img_group_2_diff = self.cavi_process.subtract_images(file_3, file_4)
self.cavi_process.write_image(self.output_dir + "/image_group_2_diff.png", img_group_2_diff)
self.summarise(img_group_2_diff, self.output_dir + "/image_group_2_diff_hist.png")
groups_min = np.minimum(img_group_1_diff, img_group_2_diff)
self.cavi_process.write_image(self.output_dir + "/groups_min.png", groups_min)
self.summarise(groups_min, self.output_dir + "/groups_min_hist.png")
# diff = self.cavi_process.subtract_images(self.output_dir + "/image_1_average.png", self.output_dir + "/image_2_average.png")
# self.cavi_process.write_image(self.output_dir + "/image_average_diff.png", diff)
# self.summarise(diff, self.output_dir + "/image_average_noise_hist.png")
def summarise(self, img, hist_path):
average_pixel = np.average(img[img>0])
max_pixel = np.max(img[img>0])
min_pixel = np.min(img[img>0])
total_area = len(img[img>0])
self.cavi_process.log("Noise max: " + str(max_pixel))
self.cavi_process.log("Noise min: " + str(min_pixel))
self.cavi_process.log("Noise average: " + str(average_pixel))
self.cavi_process.log("Noise area: " + str(total_area))
plt.hist(img.ravel(),max_pixel,[min_pixel,max_pixel])
plt.savefig(hist_path)
def | (self):
return self.output_dir + "/" + datetime.datetime.now().strftime('%Y%m%d-%H%M%S') + ".png"
def capture_image(self, file_path):
self.cavi_capture.camera.capture(file_path, 'png')
return file_path
if __name__ == '__main__':
main()
| gen_file_path | identifier_name |
calibrate.py | from cavicapture import CaviCapture
from process import CaviProcess
import sys, os, getopt
import time, datetime
import numpy as np
import matplotlib.pyplot as plt
def main():
config_path = './config.ini' # default
try:
opts, args = getopt.getopt(sys.argv[1:], "c", ["config="])
except getopt.GetoptError:
print("Argument error")
sys.exit(2)
for opt, arg in opts:
if opt in ("--config"):
config_path = arg
calibrator = CaviCalibrate(config_path)
calibrator.init_calibration()
class CaviCalibrate:
def __init__(self, config_path):
self.output_dir = "./calibration"
if not os.path.exists(self.output_dir):
os.makedirs(self.output_dir)
self.output_dir = self.output_dir + "/" + datetime.datetime.now().strftime('%Y%m%d-%H%M%S')
if not os.path.exists(self.output_dir):
os.makedirs(self.output_dir)
self.cavi_capture = CaviCapture(config_path)
self.cavi_capture.log_file = self.output_dir + "/capture.log.txt"
self.cavi_capture.get_ini_config()
self.cavi_capture.setup_gpio()
self.cavi_capture.setup_camera()
self.cavi_process = CaviProcess(self.output_dir)
self.cavi_process.log_file = self.output_dir + "/process.log.txt"
def init_calibration(self):
files = []
self.cavi_capture.lights(True)
time.sleep(3) # Let lights settle
files.append(self.capture_image(self.output_dir + "/" + "image_1.png"))
files.append(self.capture_image(self.output_dir + "/" + "image_2.png"))
files.append(self.capture_image(self.output_dir + "/" + "image_3.png"))
files.append(self.capture_image(self.output_dir + "/" + "image_4.png"))
self.cavi_capture.lights(False)
self.process_files(files)
def process_files(self, files):
file_1 = files[0]
file_2 = files[1]
file_3 = files[2]
file_4 = files[3]
# Get the image difference and summary using 2 images
# diff = self.cavi_process.subtract_images(file_1, file_2)
# self.cavi_process.write_image(self.output_dir + "/diff.png", diff)
# self.summarise(diff, self.output_dir + "/noise_hist.png")
        # Difference the first two images, then the last two
img_group_1_diff = self.cavi_process.subtract_images(file_1, file_2)
self.cavi_process.write_image(self.output_dir + "/image_group_1_diff.png", img_group_1_diff)
self.summarise(img_group_1_diff, self.output_dir + "/image_group_1_diff_hist.png")
img_group_2_diff = self.cavi_process.subtract_images(file_3, file_4)
self.cavi_process.write_image(self.output_dir + "/image_group_2_diff.png", img_group_2_diff)
self.summarise(img_group_2_diff, self.output_dir + "/image_group_2_diff_hist.png")
groups_min = np.minimum(img_group_1_diff, img_group_2_diff)
self.cavi_process.write_image(self.output_dir + "/groups_min.png", groups_min)
self.summarise(groups_min, self.output_dir + "/groups_min_hist.png")
# diff = self.cavi_process.subtract_images(self.output_dir + "/image_1_average.png", self.output_dir + "/image_2_average.png")
# self.cavi_process.write_image(self.output_dir + "/image_average_diff.png", diff)
# self.summarise(diff, self.output_dir + "/image_average_noise_hist.png")
def summarise(self, img, hist_path):
average_pixel = np.average(img[img>0])
max_pixel = np.max(img[img>0])
min_pixel = np.min(img[img>0])
total_area = len(img[img>0])
self.cavi_process.log("Noise max: " + str(max_pixel))
self.cavi_process.log("Noise min: " + str(min_pixel))
self.cavi_process.log("Noise average: " + str(average_pixel))
self.cavi_process.log("Noise area: " + str(total_area))
plt.hist(img.ravel(),max_pixel,[min_pixel,max_pixel])
plt.savefig(hist_path)
def gen_file_path(self):
return self.output_dir + "/" + datetime.datetime.now().strftime('%Y%m%d-%H%M%S') + ".png"
def capture_image(self, file_path):
self.cavi_capture.camera.capture(file_path, 'png')
return file_path
| if __name__ == '__main__':
main() | random_line_split |
|
source.rs | use crate::{Interest, Registry, Token};
use std::io;
/// An event source that may be registered with [`Registry`].
///
/// Types that implement `event::Source` can be registered with
/// `Registry`. Users of Mio **should not** use the `event::Source` trait
/// functions directly. Instead, the equivalent functions on `Registry` should
/// be used.
///
/// See [`Registry`] for more details.
///
/// [`Registry`]: ../struct.Registry.html
///
/// # Implementing `event::Source`
///
/// Event sources are always backed by system handles, such as sockets or other
/// system handles. These `event::Source`s will be monitored by the system
/// selector. An implementation of `Source` will almost always delegate to a
/// lower level handle. Examples of this are [`TcpStream`]s, or the *unix only*
/// [`SourceFd`].
///
/// [`TcpStream`]: ../net/struct.TcpStream.html
/// [`SourceFd`]: ../unix/struct.SourceFd.html
///
/// # Dropping `event::Source`s
///
/// All `event::Source`s, unless otherwise specified, need to be [deregistered]
/// before being dropped for them to not leak resources. This goes against the
/// normal drop behaviour of types in Rust which clean up after themselves, e.g.
/// a `File` will close itself. However since deregistering needs access to
/// [`Registry`] this cannot be done while being dropped.
///
/// [deregistered]: ../struct.Registry.html#method.deregister
///
/// # Examples
///
/// Implementing `Source` on a struct containing a socket:
///
#[cfg_attr(all(feature = "os-poll", feature = "net"), doc = "```")]
#[cfg_attr(not(all(feature = "os-poll", feature = "net")), doc = "```ignore")]
/// use mio::{Interest, Registry, Token};
/// use mio::event::Source;
/// use mio::net::TcpStream;
///
/// use std::io;
///
/// # #[allow(dead_code)]
/// pub struct MySource {
/// socket: TcpStream,
/// }
///
/// impl Source for MySource {
/// fn register(&mut self, registry: &Registry, token: Token, interests: Interest)
/// -> io::Result<()>
/// {
/// // Delegate the `register` call to `socket`
/// self.socket.register(registry, token, interests)
/// }
///
/// fn reregister(&mut self, registry: &Registry, token: Token, interests: Interest)
/// -> io::Result<()>
/// {
/// // Delegate the `reregister` call to `socket`
/// self.socket.reregister(registry, token, interests)
/// }
///
/// fn deregister(&mut self, registry: &Registry) -> io::Result<()> {
/// // Delegate the `deregister` call to `socket`
/// self.socket.deregister(registry)
/// }
/// }
/// ```
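///
/// A usage sketch (editor's addition, not from the original docs; the event
/// loop is elided and `MySource` is the wrapper defined above):
///
/// ```ignore
/// let mut poll = mio::Poll::new()?;
/// let mut source = MySource { socket };
/// poll.registry().register(&mut source, Token(0), Interest::READABLE)?;
/// // ... drive the event loop, then release the registration:
/// poll.registry().deregister(&mut source)?;
/// ```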
pub trait Source {
/// Register `self` with the given `Registry` instance.
///
/// This function should not be called directly. Use [`Registry::register`]
/// instead. Implementors should handle registration by delegating the call
/// to another `Source` type.
///
/// [`Registry::register`]: ../struct.Registry.html#method.register
fn register(
&mut self,
registry: &Registry,
token: Token,
interests: Interest,
) -> io::Result<()>;
/// Re-register `self` with the given `Registry` instance.
///
/// This function should not be called directly. Use
/// [`Registry::reregister`] instead. Implementors should handle | ///
/// [`Registry::reregister`]: ../struct.Registry.html#method.reregister
fn reregister(
&mut self,
registry: &Registry,
token: Token,
interests: Interest,
) -> io::Result<()>;
/// Deregister `self` from the given `Registry` instance.
///
/// This function should not be called directly. Use
/// [`Registry::deregister`] instead. Implementors should handle
/// deregistration by delegating the call to another `Source` type.
///
/// [`Registry::deregister`]: ../struct.Registry.html#method.deregister
fn deregister(&mut self, registry: &Registry) -> io::Result<()>;
}
impl<T> Source for Box<T>
where
T: Source + ?Sized,
{
fn register(
&mut self,
registry: &Registry,
token: Token,
interests: Interest,
) -> io::Result<()> {
(&mut **self).register(registry, token, interests)
}
fn reregister(
&mut self,
registry: &Registry,
token: Token,
interests: Interest,
) -> io::Result<()> {
(&mut **self).reregister(registry, token, interests)
}
fn deregister(&mut self, registry: &Registry) -> io::Result<()> {
(&mut **self).deregister(registry)
}
} | /// re-registration by delegating the call to another `Source` type. | random_line_split
source.rs | use crate::{Interest, Registry, Token};
use std::io;
/// An event source that may be registered with [`Registry`].
///
/// Types that implement `event::Source` can be registered with
/// `Registry`. Users of Mio **should not** use the `event::Source` trait
/// functions directly. Instead, the equivalent functions on `Registry` should
/// be used.
///
/// See [`Registry`] for more details.
///
/// [`Registry`]: ../struct.Registry.html
///
/// # Implementing `event::Source`
///
/// Event sources are always backed by system handles, such as sockets or other
/// system handles. These `event::Source`s will be monitored by the system
/// selector. An implementation of `Source` will almost always delegate to a
/// lower level handle. Examples of this are [`TcpStream`]s, or the *unix only*
/// [`SourceFd`].
///
/// [`TcpStream`]: ../net/struct.TcpStream.html
/// [`SourceFd`]: ../unix/struct.SourceFd.html
///
/// # Dropping `event::Source`s
///
/// All `event::Source`s, unless otherwise specified, need to be [deregistered]
/// before being dropped for them to not leak resources. This goes against the
/// normal drop behaviour of types in Rust which clean up after themselves, e.g.
/// a `File` will close itself. However since deregistering needs access to
/// [`Registry`] this cannot be done while being dropped.
///
/// [deregistered]: ../struct.Registry.html#method.deregister
///
/// # Examples
///
/// Implementing `Source` on a struct containing a socket:
///
#[cfg_attr(all(feature = "os-poll", feature = "net"), doc = "```")]
#[cfg_attr(not(all(feature = "os-poll", feature = "net")), doc = "```ignore")]
/// use mio::{Interest, Registry, Token};
/// use mio::event::Source;
/// use mio::net::TcpStream;
///
/// use std::io;
///
/// # #[allow(dead_code)]
/// pub struct MySource {
/// socket: TcpStream,
/// }
///
/// impl Source for MySource {
/// fn register(&mut self, registry: &Registry, token: Token, interests: Interest)
/// -> io::Result<()>
/// {
/// // Delegate the `register` call to `socket`
/// self.socket.register(registry, token, interests)
/// }
///
/// fn reregister(&mut self, registry: &Registry, token: Token, interests: Interest)
/// -> io::Result<()>
/// {
/// // Delegate the `reregister` call to `socket`
/// self.socket.reregister(registry, token, interests)
/// }
///
/// fn deregister(&mut self, registry: &Registry) -> io::Result<()> {
/// // Delegate the `deregister` call to `socket`
/// self.socket.deregister(registry)
/// }
/// }
/// ```
pub trait Source {
/// Register `self` with the given `Registry` instance.
///
/// This function should not be called directly. Use [`Registry::register`]
/// instead. Implementors should handle registration by delegating the call
/// to another `Source` type.
///
/// [`Registry::register`]: ../struct.Registry.html#method.register
fn register(
&mut self,
registry: &Registry,
token: Token,
interests: Interest,
) -> io::Result<()>;
/// Re-register `self` with the given `Registry` instance.
///
/// This function should not be called directly. Use
/// [`Registry::reregister`] instead. Implementors should handle
    /// re-registration by delegating the call to another `Source` type.
///
/// [`Registry::reregister`]: ../struct.Registry.html#method.reregister
fn reregister(
&mut self,
registry: &Registry,
token: Token,
interests: Interest,
) -> io::Result<()>;
/// Deregister `self` from the given `Registry` instance.
///
/// This function should not be called directly. Use
/// [`Registry::deregister`] instead. Implementors should handle
/// deregistration by delegating the call to another `Source` type.
///
/// [`Registry::deregister`]: ../struct.Registry.html#method.deregister
fn deregister(&mut self, registry: &Registry) -> io::Result<()>;
}
impl<T> Source for Box<T>
where
T: Source + ?Sized,
{
fn | (
&mut self,
registry: &Registry,
token: Token,
interests: Interest,
) -> io::Result<()> {
(&mut **self).register(registry, token, interests)
}
fn reregister(
&mut self,
registry: &Registry,
token: Token,
interests: Interest,
) -> io::Result<()> {
(&mut **self).reregister(registry, token, interests)
}
fn deregister(&mut self, registry: &Registry) -> io::Result<()> {
(&mut **self).deregister(registry)
}
}
| register | identifier_name |
source.rs | use crate::{Interest, Registry, Token};
use std::io;
/// An event source that may be registered with [`Registry`].
///
/// Types that implement `event::Source` can be registered with
/// `Registry`. Users of Mio **should not** use the `event::Source` trait
/// functions directly. Instead, the equivalent functions on `Registry` should
/// be used.
///
/// See [`Registry`] for more details.
///
/// [`Registry`]: ../struct.Registry.html
///
/// # Implementing `event::Source`
///
/// Event sources are always backed by system handles, such as sockets or other
/// system handles. These `event::Source`s will be monitored by the system
/// selector. An implementation of `Source` will almost always delegate to a
/// lower level handle. Examples of this are [`TcpStream`]s, or the *unix only*
/// [`SourceFd`].
///
/// [`TcpStream`]: ../net/struct.TcpStream.html
/// [`SourceFd`]: ../unix/struct.SourceFd.html
///
/// # Dropping `event::Source`s
///
/// All `event::Source`s, unless otherwise specified, need to be [deregistered]
/// before being dropped for them to not leak resources. This goes against the
/// normal drop behaviour of types in Rust which clean up after themselves, e.g.
/// a `File` will close itself. However since deregistering needs access to
/// [`Registry`] this cannot be done while being dropped.
///
/// [deregistered]: ../struct.Registry.html#method.deregister
///
/// # Examples
///
/// Implementing `Source` on a struct containing a socket:
///
#[cfg_attr(all(feature = "os-poll", feature = "net"), doc = "```")]
#[cfg_attr(not(all(feature = "os-poll", feature = "net")), doc = "```ignore")]
/// use mio::{Interest, Registry, Token};
/// use mio::event::Source;
/// use mio::net::TcpStream;
///
/// use std::io;
///
/// # #[allow(dead_code)]
/// pub struct MySource {
/// socket: TcpStream,
/// }
///
/// impl Source for MySource {
/// fn register(&mut self, registry: &Registry, token: Token, interests: Interest)
/// -> io::Result<()>
/// {
/// // Delegate the `register` call to `socket`
/// self.socket.register(registry, token, interests)
/// }
///
/// fn reregister(&mut self, registry: &Registry, token: Token, interests: Interest)
/// -> io::Result<()>
/// {
/// // Delegate the `reregister` call to `socket`
/// self.socket.reregister(registry, token, interests)
/// }
///
/// fn deregister(&mut self, registry: &Registry) -> io::Result<()> {
/// // Delegate the `deregister` call to `socket`
/// self.socket.deregister(registry)
/// }
/// }
/// ```
pub trait Source {
/// Register `self` with the given `Registry` instance.
///
/// This function should not be called directly. Use [`Registry::register`]
/// instead. Implementors should handle registration by delegating the call
/// to another `Source` type.
///
/// [`Registry::register`]: ../struct.Registry.html#method.register
fn register(
&mut self,
registry: &Registry,
token: Token,
interests: Interest,
) -> io::Result<()>;
/// Re-register `self` with the given `Registry` instance.
///
/// This function should not be called directly. Use
/// [`Registry::reregister`] instead. Implementors should handle
    /// re-registration by delegating the call to another `Source` type.
///
/// [`Registry::reregister`]: ../struct.Registry.html#method.reregister
fn reregister(
&mut self,
registry: &Registry,
token: Token,
interests: Interest,
) -> io::Result<()>;
/// Deregister `self` from the given `Registry` instance.
///
/// This function should not be called directly. Use
/// [`Registry::deregister`] instead. Implementors should handle
/// deregistration by delegating the call to another `Source` type.
///
/// [`Registry::deregister`]: ../struct.Registry.html#method.deregister
fn deregister(&mut self, registry: &Registry) -> io::Result<()>;
}
impl<T> Source for Box<T>
where
T: Source + ?Sized,
{
fn register(
&mut self,
registry: &Registry,
token: Token,
interests: Interest,
) -> io::Result<()> {
(&mut **self).register(registry, token, interests)
}
fn reregister(
&mut self,
registry: &Registry,
token: Token,
interests: Interest,
) -> io::Result<()> {
(&mut **self).reregister(registry, token, interests)
}
fn deregister(&mut self, registry: &Registry) -> io::Result<()> |
}
| {
(&mut **self).deregister(registry)
} | identifier_body |
random.rs | use rand::{
distributions::Alphanumeric,
prelude::{Rng, SeedableRng, StdRng},
};
const OPERATORS: &[char] = &[
'+', '-', '<', '>', '(', ')', '*', '/', '&', '|', '!', ',', '.',
];
pub struct | (StdRng);
impl Rand {
pub fn new(seed: usize) -> Self {
Rand(StdRng::seed_from_u64(seed as u64))
}
pub fn unsigned(&mut self, max: usize) -> usize {
self.0.gen_range(0..max + 1)
}
pub fn words(&mut self, max_count: usize) -> Vec<u8> {
let mut result = Vec::new();
let word_count = self.unsigned(max_count);
for i in 0..word_count {
if i > 0 {
if self.unsigned(5) == 0 {
result.push('\n' as u8);
} else {
result.push(' ' as u8);
}
}
if self.unsigned(3) == 0 {
let index = self.unsigned(OPERATORS.len() - 1);
result.push(OPERATORS[index] as u8);
} else {
for _ in 0..self.unsigned(8) {
result.push(self.0.sample(Alphanumeric) as u8);
}
}
}
result
}
}
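// Editor's note (illustrative, not part of the original module): `words`
// returns raw ASCII bytes, so a caller typically decodes them for display;
// the seed and word count below are arbitrary.
//
//     let mut rng = Rand::new(42);
//     let text = String::from_utf8(rng.words(10)).unwrap(); // ASCII only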
| Rand | identifier_name |
random.rs | use rand::{
distributions::Alphanumeric,
prelude::{Rng, SeedableRng, StdRng},
};
const OPERATORS: &[char] = &[
'+', '-', '<', '>', '(', ')', '*', '/', '&', '|', '!', ',', '.',
];
pub struct Rand(StdRng);
impl Rand {
pub fn new(seed: usize) -> Self {
Rand(StdRng::seed_from_u64(seed as u64))
}
pub fn unsigned(&mut self, max: usize) -> usize {
self.0.gen_range(0..max + 1)
} | for i in 0..word_count {
if i > 0 {
if self.unsigned(5) == 0 {
result.push('\n' as u8);
} else {
result.push(' ' as u8);
}
}
if self.unsigned(3) == 0 {
let index = self.unsigned(OPERATORS.len() - 1);
result.push(OPERATORS[index] as u8);
} else {
for _ in 0..self.unsigned(8) {
result.push(self.0.sample(Alphanumeric) as u8);
}
}
}
result
}
} |
pub fn words(&mut self, max_count: usize) -> Vec<u8> {
let mut result = Vec::new();
let word_count = self.unsigned(max_count); | random_line_split |
random.rs | use rand::{
distributions::Alphanumeric,
prelude::{Rng, SeedableRng, StdRng},
};
const OPERATORS: &[char] = &[
'+', '-', '<', '>', '(', ')', '*', '/', '&', '|', '!', ',', '.',
];
pub struct Rand(StdRng);
impl Rand {
pub fn new(seed: usize) -> Self {
Rand(StdRng::seed_from_u64(seed as u64))
}
pub fn unsigned(&mut self, max: usize) -> usize {
self.0.gen_range(0..max + 1)
}
pub fn words(&mut self, max_count: usize) -> Vec<u8> {
let mut result = Vec::new();
let word_count = self.unsigned(max_count);
for i in 0..word_count {
if i > 0 {
if self.unsigned(5) == 0 {
result.push('\n' as u8);
} else {
result.push(' ' as u8);
}
}
if self.unsigned(3) == 0 {
let index = self.unsigned(OPERATORS.len() - 1);
result.push(OPERATORS[index] as u8);
} else |
}
result
}
}
| {
for _ in 0..self.unsigned(8) {
result.push(self.0.sample(Alphanumeric) as u8);
}
} | conditional_block |
generate_testdata.py | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Generate some standard test data for debugging TensorBoard.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import bisect
import math
import os
import os.path
import random
import shutil
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
tf.flags.DEFINE_string("target", None, """The directoy where serialized data
will be written""")
tf.flags.DEFINE_boolean("overwrite", False, """Whether to remove and overwrite
TARGET if it already exists.""")
FLAGS = tf.flags.FLAGS
# Hardcode a start time and reseed so the script always generates the same data.
_start_time = 0
random.seed(0)
def _MakeHistogramBuckets():
v = 1E-12
buckets = []
neg_buckets = []
while v < 1E20:
buckets.append(v)
neg_buckets.append(-v)
v *= 1.1
# Should include DBL_MAX, but won't bother for test data.
return neg_buckets[::-1] + [0] + buckets
def _MakeHistogram(values):
"""Convert values into a histogram proto using logic from histogram.cc."""
limits = _MakeHistogramBuckets()
counts = [0] * len(limits)
for v in values:
idx = bisect.bisect_left(limits, v)
counts[idx] += 1
limit_counts = [(limits[i], counts[i]) for i in xrange(len(limits))
if counts[i]]
bucket_limit = [lc[0] for lc in limit_counts]
bucket = [lc[1] for lc in limit_counts]
sum_sq = sum(v * v for v in values)
return tf.HistogramProto(min=min(values),
max=max(values),
num=len(values),
sum=sum(values),
sum_squares=sum_sq,
bucket_limit=bucket_limit,
bucket=bucket)
def WriteScalarSeries(writer, tag, f, n=5):
"""Write a series of scalar events to writer, using f to create values."""
step = 0
wall_time = _start_time
for i in xrange(n):
v = f(i)
value = tf.Summary.Value(tag=tag, simple_value=v)
summary = tf.Summary(value=[value])
event = tf.Event(wall_time=wall_time, step=step, summary=summary)
writer.add_event(event)
step += 1
wall_time += 10
def WriteHistogramSeries(writer, tag, mu_sigma_tuples, n=20):
"""Write a sequence of normally distributed histograms to writer."""
step = 0
wall_time = _start_time
for [mean, stddev] in mu_sigma_tuples:
data = [random.normalvariate(mean, stddev) for _ in xrange(n)]
histo = _MakeHistogram(data)
summary = tf.Summary(value=[tf.Summary.Value(tag=tag, histo=histo)])
event = tf.Event(wall_time=wall_time, step=step, summary=summary)
writer.add_event(event)
step += 10
wall_time += 100
def WriteImageSeries(writer, tag, n_images=1):
"""Write a few dummy images to writer."""
step = 0
session = tf.Session()
p = tf.placeholder("uint8", (1, 4, 4, 3))
s = tf.image_summary(tag, p)
for _ in xrange(n_images):
im = np.random.random_integers(0, 255, (1, 4, 4, 3))
summ = session.run(s, feed_dict={p: im})
writer.add_summary(summ, step)
step += 20
session.close()
def WriteAudioSeries(writer, tag, n_audio=1):
"""Write a few dummy audio clips to writer."""
step = 0
session = tf.Session()
min_frequency_hz = 440
max_frequency_hz = 880
sample_rate = 4000
duration_frames = sample_rate * 0.5 # 0.5 seconds.
frequencies_per_run = 1
num_channels = 2
p = tf.placeholder("float32", (frequencies_per_run, duration_frames,
num_channels))
s = tf.audio_summary(tag, p, sample_rate)
for _ in xrange(n_audio):
# Generate a different frequency for each channel to show stereo works.
frequencies = np.random.random_integers(
min_frequency_hz, max_frequency_hz,
size=(frequencies_per_run, num_channels))
tiled_frequencies = np.tile(frequencies, (1, duration_frames))
tiled_increments = np.tile(
np.arange(0, duration_frames), (num_channels, 1)).T.reshape(
1, duration_frames * num_channels)
tones = np.sin(2.0 * np.pi * tiled_frequencies * tiled_increments /
sample_rate)
tones = tones.reshape(frequencies_per_run, duration_frames, num_channels)
summ = session.run(s, feed_dict={p: tones})
writer.add_summary(summ, step)
step += 20
session.close()
def GenerateTestData(path):
|
def main(unused_argv=None):
target = FLAGS.target
if not target:
print("The --target flag is required.")
return -1
if os.path.exists(target):
if FLAGS.overwrite:
if os.path.isdir(target):
shutil.rmtree(target)
else:
os.remove(target)
else:
print("Refusing to overwrite target %s without --overwrite" % target)
return -2
GenerateTestData(target)
if __name__ == "__main__":
tf.app.run()
| """Generates the test data directory."""
run1_path = os.path.join(path, "run1")
os.makedirs(run1_path)
writer1 = tf.train.SummaryWriter(run1_path)
WriteScalarSeries(writer1, "foo/square", lambda x: x * x)
WriteScalarSeries(writer1, "bar/square", lambda x: x * x)
WriteScalarSeries(writer1, "foo/sin", math.sin)
WriteScalarSeries(writer1, "foo/cos", math.cos)
WriteHistogramSeries(writer1, "histo1", [[0, 1], [0.3, 1], [0.5, 1], [0.7, 1],
[1, 1]])
WriteImageSeries(writer1, "im1")
WriteImageSeries(writer1, "im2")
WriteAudioSeries(writer1, "au1")
run2_path = os.path.join(path, "run2")
os.makedirs(run2_path)
writer2 = tf.train.SummaryWriter(run2_path)
WriteScalarSeries(writer2, "foo/square", lambda x: x * x * 2)
WriteScalarSeries(writer2, "bar/square", lambda x: x * x * 3)
WriteScalarSeries(writer2, "foo/cos", lambda x: math.cos(x) * 2)
WriteHistogramSeries(writer2, "histo1", [[0, 2], [0.3, 2], [0.5, 2], [0.7, 2],
[1, 2]])
WriteHistogramSeries(writer2, "histo2", [[0, 1], [0.3, 1], [0.5, 1], [0.7, 1],
[1, 1]])
WriteImageSeries(writer2, "im1")
WriteAudioSeries(writer2, "au2")
graph_def = tf.GraphDef()
node1 = graph_def.node.add()
node1.name = "a"
node1.op = "matmul"
node2 = graph_def.node.add()
node2.name = "b"
node2.op = "matmul"
node2.input.extend(["a:0"])
writer1.add_graph(graph_def)
node3 = graph_def.node.add()
node3.name = "c"
node3.op = "matmul"
node3.input.extend(["a:0", "b:0"])
writer2.add_graph(graph_def)
writer1.close()
writer2.close() | identifier_body |
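For reference, `_MakeHistogramBuckets` builds a two-sided geometric grid and `_MakeHistogram` places each value with `bisect_left`, i.e. into the first bucket whose limit is at least the value:

$$ \mathrm{limits} = \{-10^{-12}\cdot 1.1^{k}\}_{k=K-1,\dots,0} \;\cup\; \{0\} \;\cup\; \{10^{-12}\cdot 1.1^{k}\}_{k=0,\dots,K-1}, \qquad v \mapsto \min\{\, i : \mathrm{limits}[i] \ge v \,\} $$

where $K$ is the smallest $k$ with $10^{-12}\cdot 1.1^{k} \ge 10^{20}$ — roughly 774 limits on each side of zero.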
generate_testdata.py | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Generate some standard test data for debugging TensorBoard.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import bisect
import math
import os
import os.path
import random
import shutil
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
tf.flags.DEFINE_string("target", None, """The directoy where serialized data
will be written""")
tf.flags.DEFINE_boolean("overwrite", False, """Whether to remove and overwrite
TARGET if it already exists.""")
FLAGS = tf.flags.FLAGS
# Hardcode a start time and reseed so the script always generates the same data.
_start_time = 0
random.seed(0)
def _MakeHistogramBuckets():
v = 1E-12
buckets = []
neg_buckets = []
while v < 1E20:
buckets.append(v)
neg_buckets.append(-v)
v *= 1.1
# Should include DBL_MAX, but won't bother for test data.
return neg_buckets[::-1] + [0] + buckets
def _MakeHistogram(values):
"""Convert values into a histogram proto using logic from histogram.cc."""
limits = _MakeHistogramBuckets()
counts = [0] * len(limits)
for v in values:
idx = bisect.bisect_left(limits, v)
counts[idx] += 1
limit_counts = [(limits[i], counts[i]) for i in xrange(len(limits))
if counts[i]]
bucket_limit = [lc[0] for lc in limit_counts]
bucket = [lc[1] for lc in limit_counts]
sum_sq = sum(v * v for v in values)
return tf.HistogramProto(min=min(values),
max=max(values),
num=len(values),
sum=sum(values),
sum_squares=sum_sq,
bucket_limit=bucket_limit,
bucket=bucket)
def WriteScalarSeries(writer, tag, f, n=5):
"""Write a series of scalar events to writer, using f to create values."""
step = 0
wall_time = _start_time
for i in xrange(n):
v = f(i)
value = tf.Summary.Value(tag=tag, simple_value=v)
summary = tf.Summary(value=[value])
event = tf.Event(wall_time=wall_time, step=step, summary=summary)
writer.add_event(event)
step += 1
wall_time += 10
def WriteHistogramSeries(writer, tag, mu_sigma_tuples, n=20):
"""Write a sequence of normally distributed histograms to writer."""
step = 0
wall_time = _start_time
for [mean, stddev] in mu_sigma_tuples:
data = [random.normalvariate(mean, stddev) for _ in xrange(n)]
histo = _MakeHistogram(data)
summary = tf.Summary(value=[tf.Summary.Value(tag=tag, histo=histo)])
event = tf.Event(wall_time=wall_time, step=step, summary=summary)
writer.add_event(event)
step += 10
wall_time += 100
def WriteImageSeries(writer, tag, n_images=1):
"""Write a few dummy images to writer."""
step = 0
session = tf.Session()
p = tf.placeholder("uint8", (1, 4, 4, 3))
s = tf.image_summary(tag, p)
for _ in xrange(n_images):
im = np.random.random_integers(0, 255, (1, 4, 4, 3))
summ = session.run(s, feed_dict={p: im})
writer.add_summary(summ, step)
step += 20
session.close()
def WriteAudioSeries(writer, tag, n_audio=1):
"""Write a few dummy audio clips to writer."""
step = 0
session = tf.Session()
min_frequency_hz = 440
max_frequency_hz = 880
sample_rate = 4000
duration_frames = sample_rate * 0.5 # 0.5 seconds.
frequencies_per_run = 1
num_channels = 2
p = tf.placeholder("float32", (frequencies_per_run, duration_frames,
num_channels))
s = tf.audio_summary(tag, p, sample_rate)
for _ in xrange(n_audio):
# Generate a different frequency for each channel to show stereo works.
|
session.close()
def GenerateTestData(path):
"""Generates the test data directory."""
run1_path = os.path.join(path, "run1")
os.makedirs(run1_path)
writer1 = tf.train.SummaryWriter(run1_path)
WriteScalarSeries(writer1, "foo/square", lambda x: x * x)
WriteScalarSeries(writer1, "bar/square", lambda x: x * x)
WriteScalarSeries(writer1, "foo/sin", math.sin)
WriteScalarSeries(writer1, "foo/cos", math.cos)
WriteHistogramSeries(writer1, "histo1", [[0, 1], [0.3, 1], [0.5, 1], [0.7, 1],
[1, 1]])
WriteImageSeries(writer1, "im1")
WriteImageSeries(writer1, "im2")
WriteAudioSeries(writer1, "au1")
run2_path = os.path.join(path, "run2")
os.makedirs(run2_path)
writer2 = tf.train.SummaryWriter(run2_path)
WriteScalarSeries(writer2, "foo/square", lambda x: x * x * 2)
WriteScalarSeries(writer2, "bar/square", lambda x: x * x * 3)
WriteScalarSeries(writer2, "foo/cos", lambda x: math.cos(x) * 2)
WriteHistogramSeries(writer2, "histo1", [[0, 2], [0.3, 2], [0.5, 2], [0.7, 2],
[1, 2]])
WriteHistogramSeries(writer2, "histo2", [[0, 1], [0.3, 1], [0.5, 1], [0.7, 1],
[1, 1]])
WriteImageSeries(writer2, "im1")
WriteAudioSeries(writer2, "au2")
graph_def = tf.GraphDef()
node1 = graph_def.node.add()
node1.name = "a"
node1.op = "matmul"
node2 = graph_def.node.add()
node2.name = "b"
node2.op = "matmul"
node2.input.extend(["a:0"])
writer1.add_graph(graph_def)
node3 = graph_def.node.add()
node3.name = "c"
node3.op = "matmul"
node3.input.extend(["a:0", "b:0"])
writer2.add_graph(graph_def)
writer1.close()
writer2.close()
def main(unused_argv=None):
target = FLAGS.target
if not target:
print("The --target flag is required.")
return -1
if os.path.exists(target):
if FLAGS.overwrite:
if os.path.isdir(target):
shutil.rmtree(target)
else:
os.remove(target)
else:
print("Refusing to overwrite target %s without --overwrite" % target)
return -2
GenerateTestData(target)
if __name__ == "__main__":
tf.app.run()
| frequencies = np.random.random_integers(
min_frequency_hz, max_frequency_hz,
size=(frequencies_per_run, num_channels))
tiled_frequencies = np.tile(frequencies, (1, duration_frames))
tiled_increments = np.tile(
np.arange(0, duration_frames), (num_channels, 1)).T.reshape(
1, duration_frames * num_channels)
tones = np.sin(2.0 * np.pi * tiled_frequencies * tiled_increments /
sample_rate)
tones = tones.reshape(frequencies_per_run, duration_frames, num_channels)
summ = session.run(s, feed_dict={p: tones})
writer.add_summary(summ, step)
step += 20 | conditional_block |
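The filled-in block above synthesizes each clip as one pure sine per channel: with sample rate $f_s = 4000$ Hz, a 0.5 s duration, and a per-channel frequency $f_c$ drawn uniformly from 440–880 Hz, sample $n$ of channel $c$ is

$$ x_c[n] = \sin\!\left(\frac{2\pi f_c\, n}{f_s}\right), \qquad n = 0,\dots,\tfrac{f_s}{2}-1. $$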
generate_testdata.py | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Generate some standard test data for debugging TensorBoard.
"""
from __future__ import absolute_import
from __future__ import division | import os
import os.path
import random
import shutil
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
tf.flags.DEFINE_string("target", None, """The directoy where serialized data
will be written""")
tf.flags.DEFINE_boolean("overwrite", False, """Whether to remove and overwrite
TARGET if it already exists.""")
FLAGS = tf.flags.FLAGS
# Hardcode a start time and reseed so the script always generates the same data.
_start_time = 0
random.seed(0)
def _MakeHistogramBuckets():
v = 1E-12
buckets = []
neg_buckets = []
while v < 1E20:
buckets.append(v)
neg_buckets.append(-v)
v *= 1.1
# Should include DBL_MAX, but won't bother for test data.
return neg_buckets[::-1] + [0] + buckets
def _MakeHistogram(values):
"""Convert values into a histogram proto using logic from histogram.cc."""
limits = _MakeHistogramBuckets()
counts = [0] * len(limits)
for v in values:
idx = bisect.bisect_left(limits, v)
counts[idx] += 1
limit_counts = [(limits[i], counts[i]) for i in xrange(len(limits))
if counts[i]]
bucket_limit = [lc[0] for lc in limit_counts]
bucket = [lc[1] for lc in limit_counts]
sum_sq = sum(v * v for v in values)
return tf.HistogramProto(min=min(values),
max=max(values),
num=len(values),
sum=sum(values),
sum_squares=sum_sq,
bucket_limit=bucket_limit,
bucket=bucket)
def WriteScalarSeries(writer, tag, f, n=5):
"""Write a series of scalar events to writer, using f to create values."""
step = 0
wall_time = _start_time
for i in xrange(n):
v = f(i)
value = tf.Summary.Value(tag=tag, simple_value=v)
summary = tf.Summary(value=[value])
event = tf.Event(wall_time=wall_time, step=step, summary=summary)
writer.add_event(event)
step += 1
wall_time += 10
def WriteHistogramSeries(writer, tag, mu_sigma_tuples, n=20):
"""Write a sequence of normally distributed histograms to writer."""
step = 0
wall_time = _start_time
for [mean, stddev] in mu_sigma_tuples:
data = [random.normalvariate(mean, stddev) for _ in xrange(n)]
histo = _MakeHistogram(data)
summary = tf.Summary(value=[tf.Summary.Value(tag=tag, histo=histo)])
event = tf.Event(wall_time=wall_time, step=step, summary=summary)
writer.add_event(event)
step += 10
wall_time += 100
def WriteImageSeries(writer, tag, n_images=1):
"""Write a few dummy images to writer."""
step = 0
session = tf.Session()
p = tf.placeholder("uint8", (1, 4, 4, 3))
s = tf.image_summary(tag, p)
for _ in xrange(n_images):
im = np.random.random_integers(0, 255, (1, 4, 4, 3))
summ = session.run(s, feed_dict={p: im})
writer.add_summary(summ, step)
step += 20
session.close()
def WriteAudioSeries(writer, tag, n_audio=1):
"""Write a few dummy audio clips to writer."""
step = 0
session = tf.Session()
min_frequency_hz = 440
max_frequency_hz = 880
sample_rate = 4000
duration_frames = sample_rate * 0.5 # 0.5 seconds.
frequencies_per_run = 1
num_channels = 2
p = tf.placeholder("float32", (frequencies_per_run, duration_frames,
num_channels))
s = tf.audio_summary(tag, p, sample_rate)
for _ in xrange(n_audio):
# Generate a different frequency for each channel to show stereo works.
frequencies = np.random.random_integers(
min_frequency_hz, max_frequency_hz,
size=(frequencies_per_run, num_channels))
tiled_frequencies = np.tile(frequencies, (1, duration_frames))
tiled_increments = np.tile(
np.arange(0, duration_frames), (num_channels, 1)).T.reshape(
1, duration_frames * num_channels)
tones = np.sin(2.0 * np.pi * tiled_frequencies * tiled_increments /
sample_rate)
tones = tones.reshape(frequencies_per_run, duration_frames, num_channels)
summ = session.run(s, feed_dict={p: tones})
writer.add_summary(summ, step)
step += 20
session.close()
def GenerateTestData(path):
"""Generates the test data directory."""
run1_path = os.path.join(path, "run1")
os.makedirs(run1_path)
writer1 = tf.train.SummaryWriter(run1_path)
WriteScalarSeries(writer1, "foo/square", lambda x: x * x)
WriteScalarSeries(writer1, "bar/square", lambda x: x * x)
WriteScalarSeries(writer1, "foo/sin", math.sin)
WriteScalarSeries(writer1, "foo/cos", math.cos)
WriteHistogramSeries(writer1, "histo1", [[0, 1], [0.3, 1], [0.5, 1], [0.7, 1],
[1, 1]])
WriteImageSeries(writer1, "im1")
WriteImageSeries(writer1, "im2")
WriteAudioSeries(writer1, "au1")
run2_path = os.path.join(path, "run2")
os.makedirs(run2_path)
writer2 = tf.train.SummaryWriter(run2_path)
WriteScalarSeries(writer2, "foo/square", lambda x: x * x * 2)
WriteScalarSeries(writer2, "bar/square", lambda x: x * x * 3)
WriteScalarSeries(writer2, "foo/cos", lambda x: math.cos(x) * 2)
WriteHistogramSeries(writer2, "histo1", [[0, 2], [0.3, 2], [0.5, 2], [0.7, 2],
[1, 2]])
WriteHistogramSeries(writer2, "histo2", [[0, 1], [0.3, 1], [0.5, 1], [0.7, 1],
[1, 1]])
WriteImageSeries(writer2, "im1")
WriteAudioSeries(writer2, "au2")
graph_def = tf.GraphDef()
node1 = graph_def.node.add()
node1.name = "a"
node1.op = "matmul"
node2 = graph_def.node.add()
node2.name = "b"
node2.op = "matmul"
node2.input.extend(["a:0"])
writer1.add_graph(graph_def)
node3 = graph_def.node.add()
node3.name = "c"
node3.op = "matmul"
node3.input.extend(["a:0", "b:0"])
writer2.add_graph(graph_def)
writer1.close()
writer2.close()
def main(unused_argv=None):
target = FLAGS.target
if not target:
print("The --target flag is required.")
return -1
if os.path.exists(target):
if FLAGS.overwrite:
if os.path.isdir(target):
shutil.rmtree(target)
else:
os.remove(target)
else:
print("Refusing to overwrite target %s without --overwrite" % target)
return -2
GenerateTestData(target)
if __name__ == "__main__":
tf.app.run() | from __future__ import print_function
import bisect
import math | random_line_split |
generate_testdata.py | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Generate some standard test data for debugging TensorBoard.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import bisect
import math
import os
import os.path
import random
import shutil
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
tf.flags.DEFINE_string("target", None, """The directoy where serialized data
will be written""")
tf.flags.DEFINE_boolean("overwrite", False, """Whether to remove and overwrite
TARGET if it already exists.""")
FLAGS = tf.flags.FLAGS
# Hardcode a start time and reseed so the script always generates the same data.
_start_time = 0
random.seed(0)
def | ():
v = 1E-12
buckets = []
neg_buckets = []
while v < 1E20:
buckets.append(v)
neg_buckets.append(-v)
v *= 1.1
# Should include DBL_MAX, but won't bother for test data.
return neg_buckets[::-1] + [0] + buckets
def _MakeHistogram(values):
"""Convert values into a histogram proto using logic from histogram.cc."""
limits = _MakeHistogramBuckets()
counts = [0] * len(limits)
for v in values:
idx = bisect.bisect_left(limits, v)
counts[idx] += 1
limit_counts = [(limits[i], counts[i]) for i in xrange(len(limits))
if counts[i]]
bucket_limit = [lc[0] for lc in limit_counts]
bucket = [lc[1] for lc in limit_counts]
sum_sq = sum(v * v for v in values)
return tf.HistogramProto(min=min(values),
max=max(values),
num=len(values),
sum=sum(values),
sum_squares=sum_sq,
bucket_limit=bucket_limit,
bucket=bucket)
def WriteScalarSeries(writer, tag, f, n=5):
"""Write a series of scalar events to writer, using f to create values."""
step = 0
wall_time = _start_time
for i in xrange(n):
v = f(i)
value = tf.Summary.Value(tag=tag, simple_value=v)
summary = tf.Summary(value=[value])
event = tf.Event(wall_time=wall_time, step=step, summary=summary)
writer.add_event(event)
step += 1
wall_time += 10
def WriteHistogramSeries(writer, tag, mu_sigma_tuples, n=20):
"""Write a sequence of normally distributed histograms to writer."""
step = 0
wall_time = _start_time
for [mean, stddev] in mu_sigma_tuples:
data = [random.normalvariate(mean, stddev) for _ in xrange(n)]
histo = _MakeHistogram(data)
summary = tf.Summary(value=[tf.Summary.Value(tag=tag, histo=histo)])
event = tf.Event(wall_time=wall_time, step=step, summary=summary)
writer.add_event(event)
step += 10
wall_time += 100
def WriteImageSeries(writer, tag, n_images=1):
"""Write a few dummy images to writer."""
step = 0
session = tf.Session()
p = tf.placeholder("uint8", (1, 4, 4, 3))
s = tf.image_summary(tag, p)
for _ in xrange(n_images):
im = np.random.random_integers(0, 255, (1, 4, 4, 3))
summ = session.run(s, feed_dict={p: im})
writer.add_summary(summ, step)
step += 20
session.close()
def WriteAudioSeries(writer, tag, n_audio=1):
"""Write a few dummy audio clips to writer."""
step = 0
session = tf.Session()
min_frequency_hz = 440
max_frequency_hz = 880
sample_rate = 4000
duration_frames = sample_rate * 0.5 # 0.5 seconds.
frequencies_per_run = 1
num_channels = 2
p = tf.placeholder("float32", (frequencies_per_run, duration_frames,
num_channels))
s = tf.audio_summary(tag, p, sample_rate)
for _ in xrange(n_audio):
# Generate a different frequency for each channel to show stereo works.
frequencies = np.random.random_integers(
min_frequency_hz, max_frequency_hz,
size=(frequencies_per_run, num_channels))
tiled_frequencies = np.tile(frequencies, (1, duration_frames))
tiled_increments = np.tile(
np.arange(0, duration_frames), (num_channels, 1)).T.reshape(
1, duration_frames * num_channels)
tones = np.sin(2.0 * np.pi * tiled_frequencies * tiled_increments /
sample_rate)
tones = tones.reshape(frequencies_per_run, duration_frames, num_channels)
summ = session.run(s, feed_dict={p: tones})
writer.add_summary(summ, step)
step += 20
session.close()
def GenerateTestData(path):
"""Generates the test data directory."""
run1_path = os.path.join(path, "run1")
os.makedirs(run1_path)
writer1 = tf.train.SummaryWriter(run1_path)
WriteScalarSeries(writer1, "foo/square", lambda x: x * x)
WriteScalarSeries(writer1, "bar/square", lambda x: x * x)
WriteScalarSeries(writer1, "foo/sin", math.sin)
WriteScalarSeries(writer1, "foo/cos", math.cos)
WriteHistogramSeries(writer1, "histo1", [[0, 1], [0.3, 1], [0.5, 1], [0.7, 1],
[1, 1]])
WriteImageSeries(writer1, "im1")
WriteImageSeries(writer1, "im2")
WriteAudioSeries(writer1, "au1")
run2_path = os.path.join(path, "run2")
os.makedirs(run2_path)
writer2 = tf.train.SummaryWriter(run2_path)
WriteScalarSeries(writer2, "foo/square", lambda x: x * x * 2)
WriteScalarSeries(writer2, "bar/square", lambda x: x * x * 3)
WriteScalarSeries(writer2, "foo/cos", lambda x: math.cos(x) * 2)
WriteHistogramSeries(writer2, "histo1", [[0, 2], [0.3, 2], [0.5, 2], [0.7, 2],
[1, 2]])
WriteHistogramSeries(writer2, "histo2", [[0, 1], [0.3, 1], [0.5, 1], [0.7, 1],
[1, 1]])
WriteImageSeries(writer2, "im1")
WriteAudioSeries(writer2, "au2")
graph_def = tf.GraphDef()
node1 = graph_def.node.add()
node1.name = "a"
node1.op = "matmul"
node2 = graph_def.node.add()
node2.name = "b"
node2.op = "matmul"
node2.input.extend(["a:0"])
writer1.add_graph(graph_def)
node3 = graph_def.node.add()
node3.name = "c"
node3.op = "matmul"
node3.input.extend(["a:0", "b:0"])
writer2.add_graph(graph_def)
writer1.close()
writer2.close()
def main(unused_argv=None):
target = FLAGS.target
if not target:
print("The --target flag is required.")
return -1
if os.path.exists(target):
if FLAGS.overwrite:
if os.path.isdir(target):
shutil.rmtree(target)
else:
os.remove(target)
else:
print("Refusing to overwrite target %s without --overwrite" % target)
return -2
GenerateTestData(target)
if __name__ == "__main__":
tf.app.run()
| _MakeHistogramBuckets | identifier_name |
version.rs | /*!
Querying SDL Version
*/
use std::ffi::CStr;
use std::fmt;
use crate::sys;
/// A structure that contains information about the version of SDL in use.
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub struct Version {
/// major version
pub major: u8,
/// minor version
pub minor: u8,
/// update version (patchlevel)
pub patch: u8,
}
impl Version {
/// Convert a raw *SDL_version to Version.
pub fn from_ll(v: sys::SDL_version) -> Version {
Version {
major: v.major,
minor: v.minor,
patch: v.patch,
}
}
}
impl fmt::Display for Version {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}.{}.{}", self.major, self.minor, self.patch) |
/// Get the version of SDL that is linked against your program.
#[doc(alias = "SDL_GetVersion")]
pub fn version() -> Version {
unsafe {
let mut cver = sys::SDL_version {
major: 0,
minor: 0,
patch: 0,
};
sys::SDL_GetVersion(&mut cver);
Version::from_ll(cver)
}
}
/// Get the code revision of SDL that is linked against your program.
#[doc(alias = "SDL_GetRevision")]
pub fn revision() -> String {
unsafe {
let rev = sys::SDL_GetRevision();
CStr::from_ptr(rev as *const _).to_str().unwrap().to_owned()
}
}
/// Get the revision number of SDL that is linked against your program.
#[doc(alias = "SDL_GetRevisionNumber")]
pub fn revision_number() -> i32 {
unsafe { sys::SDL_GetRevisionNumber() }
} | }
} | random_line_split |
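A short usage sketch — a hypothetical `main`, assuming this module's free functions are in scope:

```rust
fn main() {
    let v = version();
    // `{}` uses the Display impl defined above.
    println!("linked SDL: {} (rev {}, #{})", v, revision(), revision_number());
    if v.major < 2 {
        eprintln!("SDL 2.x or newer expected, found {}", v);
    }
}
```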
version.rs | /*!
Querying SDL Version
*/
use std::ffi::CStr;
use std::fmt;
use crate::sys;
/// A structure that contains information about the version of SDL in use.
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub struct Version {
/// major version
pub major: u8,
/// minor version
pub minor: u8,
/// update version (patchlevel)
pub patch: u8,
}
impl Version {
/// Convert a raw *SDL_version to Version.
pub fn from_ll(v: sys::SDL_version) -> Version {
Version {
major: v.major,
minor: v.minor,
patch: v.patch,
}
}
}
impl fmt::Display for Version {
fn | (&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}.{}.{}", self.major, self.minor, self.patch)
}
}
/// Get the version of SDL that is linked against your program.
#[doc(alias = "SDL_GetVersion")]
pub fn version() -> Version {
unsafe {
let mut cver = sys::SDL_version {
major: 0,
minor: 0,
patch: 0,
};
sys::SDL_GetVersion(&mut cver);
Version::from_ll(cver)
}
}
/// Get the code revision of SDL that is linked against your program.
#[doc(alias = "SDL_GetRevision")]
pub fn revision() -> String {
unsafe {
let rev = sys::SDL_GetRevision();
CStr::from_ptr(rev as *const _).to_str().unwrap().to_owned()
}
}
/// Get the revision number of SDL that is linked against your program.
#[doc(alias = "SDL_GetRevisionNumber")]
pub fn revision_number() -> i32 {
unsafe { sys::SDL_GetRevisionNumber() }
}
| fmt | identifier_name |
version.rs | /*!
Querying SDL Version
*/
use std::ffi::CStr;
use std::fmt;
use crate::sys;
/// A structure that contains information about the version of SDL in use.
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub struct Version {
/// major version
pub major: u8,
/// minor version
pub minor: u8,
/// update version (patchlevel)
pub patch: u8,
}
impl Version {
/// Convert a raw *SDL_version to Version.
pub fn from_ll(v: sys::SDL_version) -> Version {
Version {
major: v.major,
minor: v.minor,
patch: v.patch,
}
}
}
impl fmt::Display for Version {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result |
}
/// Get the version of SDL that is linked against your program.
#[doc(alias = "SDL_GetVersion")]
pub fn version() -> Version {
unsafe {
let mut cver = sys::SDL_version {
major: 0,
minor: 0,
patch: 0,
};
sys::SDL_GetVersion(&mut cver);
Version::from_ll(cver)
}
}
/// Get the code revision of SDL that is linked against your program.
#[doc(alias = "SDL_GetRevision")]
pub fn revision() -> String {
unsafe {
let rev = sys::SDL_GetRevision();
CStr::from_ptr(rev as *const _).to_str().unwrap().to_owned()
}
}
/// Get the revision number of SDL that is linked against your program.
#[doc(alias = "SDL_GetRevisionNumber")]
pub fn revision_number() -> i32 {
unsafe { sys::SDL_GetRevisionNumber() }
}
| {
write!(f, "{}.{}.{}", self.major, self.minor, self.patch)
} | identifier_body |
SHT21.py | from __future__ import print_function
from numpy import int16
import time
def connect(route,**args):
'''
route can either be I.I2C , or a radioLink instance
'''
return SHT21(route,**args)
class SHT21():
RESET = 0xFE
TEMP_ADDRESS = 0xF3
HUMIDITY_ADDRESS = 0xF5
selected=0xF3 | PLOTNAMES = ['Data']
ADDRESS = 0x40
name = 'Humidity/Temperature'
def __init__(self,I2C,**args):
self.I2C=I2C
self.ADDRESS = args.get('address',self.ADDRESS)
self.name = 'Humidity/Temperature'
'''
try:
print ('switching baud to 400k')
self.I2C.configI2C(400e3)
except:
print ('FAILED TO CHANGE BAUD RATE')
'''
self.params={'selectParameter':['temperature','humidity']}
self.init('')
def init(self,x):
self.I2C.writeBulk(self.ADDRESS,[self.RESET]) #soft reset
time.sleep(0.1)
def rawToTemp(self,vals):
if vals:
if len(vals):
v = (vals[0]<<8)|(vals[1]&0xFC) #make integer & remove status bits
v*=175.72; v/= (1<<16); v-=46.85
return [v]
return False
def rawToRH(self,vals):
if vals:
if len(vals):
v = (vals[0]<<8)|(vals[1]&0xFC) #make integer & remove status bits
v*=125.; v/= (1<<16); v-=6
return [v]
return False
@staticmethod
def _calculate_checksum(data, number_of_bytes):
"""5.7 CRC Checksum using the polynomial given in the datasheet
Credits: https://github.com/jaques/sht21_python/blob/master/sht21.py
"""
# CRC
POLYNOMIAL = 0x131 # //P(x)=x^8+x^5+x^4+1 = 100110001
crc = 0
# calculates 8-Bit checksum with given polynomial
for byteCtr in range(number_of_bytes):
crc ^= (data[byteCtr])
for bit in range(8, 0, -1):
if crc & 0x80:
crc = (crc << 1) ^ POLYNOMIAL
else:
crc = (crc << 1)
return crc
def selectParameter(self,param):
if param=='temperature':self.selected=self.TEMP_ADDRESS
elif param=='humidity':self.selected=self.HUMIDITY_ADDRESS
def getRaw(self):
self.I2C.writeBulk(self.ADDRESS,[self.selected])
if self.selected==self.TEMP_ADDRESS:time.sleep(0.1)
elif self.selected==self.HUMIDITY_ADDRESS:time.sleep(0.05)
vals = self.I2C.simpleRead(self.ADDRESS,3)
if vals:
if self._calculate_checksum(vals,2)!=vals[2]:
return False
print (vals)
if self.selected==self.TEMP_ADDRESS:return self.rawToTemp(vals)
elif self.selected==self.HUMIDITY_ADDRESS:return self.rawToRH(vals) | NUMPLOTS=1 | random_line_split |
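For reference, `rawToTemp` and `rawToRH` are the standard SHT21 datasheet conversions: after masking off the two status bits (`& 0xFC`), the 16-bit raw readings $S_T$ and $S_{RH}$ map to

$$ T = -46.85 + 175.72\cdot\frac{S_T}{2^{16}}\ [^\circ\mathrm{C}], \qquad RH = -6 + 125\cdot\frac{S_{RH}}{2^{16}}\ [\%\mathrm{RH}]. $$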
SHT21.py | from __future__ import print_function
from numpy import int16
import time
def connect(route,**args):
'''
route can either be I.I2C, or a radioLink instance
'''
return SHT21(route,**args)
class SHT21():
| RESET = 0xFE
TEMP_ADDRESS = 0xF3
HUMIDITY_ADDRESS = 0xF5
selected=0xF3
NUMPLOTS=1
PLOTNAMES = ['Data']
ADDRESS = 0x40
name = 'Humidity/Temperature'
def __init__(self,I2C,**args):
self.I2C=I2C
self.ADDRESS = args.get('address',self.ADDRESS)
self.name = 'Humidity/Temperature'
'''
try:
print ('switching baud to 400k')
self.I2C.configI2C(400e3)
except:
print ('FAILED TO CHANGE BAUD RATE')
'''
self.params={'selectParameter':['temperature','humidity']}
self.init('')
def init(self,x):
self.I2C.writeBulk(self.ADDRESS,[self.RESET]) #soft reset
time.sleep(0.1)
def rawToTemp(self,vals):
if vals:
if len(vals):
v = (vals[0]<<8)|(vals[1]&0xFC) #make integer & remove status bits
v*=175.72; v/= (1<<16); v-=46.85
return [v]
return False
def rawToRH(self,vals):
if vals:
if len(vals):
v = (vals[0]<<8)|(vals[1]&0xFC) #make integer & remove status bits
v*=125.; v/= (1<<16); v-=6
return [v]
return False
@staticmethod
def _calculate_checksum(data, number_of_bytes):
"""5.7 CRC Checksum using the polynomial given in the datasheet
Credits: https://github.com/jaques/sht21_python/blob/master/sht21.py
"""
# CRC
POLYNOMIAL = 0x131 # //P(x)=x^8+x^5+x^4+1 = 100110001
crc = 0
# calculates 8-Bit checksum with given polynomial
for byteCtr in range(number_of_bytes):
crc ^= (data[byteCtr])
for bit in range(8, 0, -1):
if crc & 0x80:
crc = (crc << 1) ^ POLYNOMIAL
else:
crc = (crc << 1)
return crc
def selectParameter(self,param):
if param=='temperature':self.selected=self.TEMP_ADDRESS
elif param=='humidity':self.selected=self.HUMIDITY_ADDRESS
def getRaw(self):
self.I2C.writeBulk(self.ADDRESS,[self.selected])
if self.selected==self.TEMP_ADDRESS:time.sleep(0.1)
elif self.selected==self.HUMIDITY_ADDRESS:time.sleep(0.05)
vals = self.I2C.simpleRead(self.ADDRESS,3)
if vals:
if self._calculate_checksum(vals,2)!=vals[2]:
return False
print (vals)
if self.selected==self.TEMP_ADDRESS:return self.rawToTemp(vals)
elif self.selected==self.HUMIDITY_ADDRESS:return self.rawToRH(vals) | identifier_body |
|
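`_calculate_checksum` is a plain MSB-first CRC-8 with a zero initial value; the constant `0x131` encodes the datasheet polynomial, with each set bit marking a term:

$$ P(x) = x^8 + x^5 + x^4 + 1 \;\equiv\; 1\,0011\,0001_2 \;=\; \mathtt{0x131}. $$

Each data byte is XORed into the register, then eight shift-and-conditionally-XOR steps reduce it modulo $P(x)$; `getRaw` discards the reading when this checksum does not match the third byte.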
SHT21.py | from __future__ import print_function
from numpy import int16
import time
def connect(route,**args):
'''
route can either be I.I2C, or a radioLink instance
'''
return SHT21(route,**args)
class SHT21():
RESET = 0xFE
TEMP_ADDRESS = 0xF3
HUMIDITY_ADDRESS = 0xF5
selected=0xF3
NUMPLOTS=1
PLOTNAMES = ['Data']
ADDRESS = 0x40
name = 'Humidity/Temperature'
def __init__(self,I2C,**args):
self.I2C=I2C
self.ADDRESS = args.get('address',self.ADDRESS)
self.name = 'Humidity/Temperature'
'''
try:
print ('switching baud to 400k')
self.I2C.configI2C(400e3)
except:
print ('FAILED TO CHANGE BAUD RATE')
'''
self.params={'selectParameter':['temperature','humidity']}
self.init('')
def init(self,x):
self.I2C.writeBulk(self.ADDRESS,[self.RESET]) #soft reset
time.sleep(0.1)
def rawToTemp(self,vals):
if vals:
if len(vals):
v = (vals[0]<<8)|(vals[1]&0xFC) #make integer & remove status bits
v*=175.72; v/= (1<<16); v-=46.85
return [v]
return False
def rawToRH(self,vals):
if vals:
if len(vals):
v = (vals[0]<<8)|(vals[1]&0xFC) #make integer & remove status bits
v*=125.; v/= (1<<16); v-=6
return [v]
return False
@staticmethod
def _calculate_checksum(data, number_of_bytes):
"""5.7 CRC Checksum using the polynomial given in the datasheet
Credits: https://github.com/jaques/sht21_python/blob/master/sht21.py
"""
# CRC
POLYNOMIAL = 0x131 # //P(x)=x^8+x^5+x^4+1 = 100110001
crc = 0
# calculates 8-Bit checksum with given polynomial
for byteCtr in range(number_of_bytes):
crc ^= (data[byteCtr])
for bit in range(8, 0, -1):
if crc & 0x80:
crc = (crc << 1) ^ POLYNOMIAL
else:
crc = (crc << 1)
return crc
def selectParameter(self,param):
if param=='temperature':self.selected=self.TEMP_ADDRESS
elif param=='humidity':self.selected=self.HUMIDITY_ADDRESS
def getRaw(self):
self.I2C.writeBulk(self.ADDRESS,[self.selected])
if self.selected==self.TEMP_ADDRESS:time.sleep(0.1)
elif self.selected==self.HUMIDITY_ADDRESS:time.sleep(0.05)
vals = self.I2C.simpleRead(self.ADDRESS,3)
if vals:
|
if self.selected==self.TEMP_ADDRESS:return self.rawToTemp(vals)
elif self.selected==self.HUMIDITY_ADDRESS:return self.rawToRH(vals)
| if self._calculate_checksum(vals,2)!=vals[2]:
return False
print (vals) | conditional_block |
SHT21.py | from __future__ import print_function
from numpy import int16
import time
def | (route,**args):
'''
route can either be I.I2C, or a radioLink instance
'''
return SHT21(route,**args)
class SHT21():
RESET = 0xFE
TEMP_ADDRESS = 0xF3
HUMIDITY_ADDRESS = 0xF5
selected=0xF3
NUMPLOTS=1
PLOTNAMES = ['Data']
ADDRESS = 0x40
name = 'Humidity/Temperature'
def __init__(self,I2C,**args):
self.I2C=I2C
self.ADDRESS = args.get('address',self.ADDRESS)
self.name = 'Humidity/Temperature'
'''
try:
print ('switching baud to 400k')
self.I2C.configI2C(400e3)
except:
print ('FAILED TO CHANGE BAUD RATE')
'''
self.params={'selectParameter':['temperature','humidity']}
self.init('')
def init(self,x):
self.I2C.writeBulk(self.ADDRESS,[self.RESET]) #soft reset
time.sleep(0.1)
def rawToTemp(self,vals):
if vals:
if len(vals):
v = (vals[0]<<8)|(vals[1]&0xFC) #make integer & remove status bits
v*=175.72; v/= (1<<16); v-=46.85
return [v]
return False
def rawToRH(self,vals):
if vals:
if len(vals):
v = (vals[0]<<8)|(vals[1]&0xFC) #make integer & remove status bits
v*=125.; v/= (1<<16); v-=6
return [v]
return False
@staticmethod
def _calculate_checksum(data, number_of_bytes):
"""5.7 CRC Checksum using the polynomial given in the datasheet
Credits: https://github.com/jaques/sht21_python/blob/master/sht21.py
"""
# CRC
POLYNOMIAL = 0x131 # //P(x)=x^8+x^5+x^4+1 = 100110001
crc = 0
# calculates 8-Bit checksum with given polynomial
for byteCtr in range(number_of_bytes):
crc ^= (data[byteCtr])
for bit in range(8, 0, -1):
if crc & 0x80:
crc = (crc << 1) ^ POLYNOMIAL
else:
crc = (crc << 1)
return crc
def selectParameter(self,param):
if param=='temperature':self.selected=self.TEMP_ADDRESS
elif param=='humidity':self.selected=self.HUMIDITY_ADDRESS
def getRaw(self):
self.I2C.writeBulk(self.ADDRESS,[self.selected])
if self.selected==self.TEMP_ADDRESS:time.sleep(0.1)
elif self.selected==self.HUMIDITY_ADDRESS:time.sleep(0.05)
vals = self.I2C.simpleRead(self.ADDRESS,3)
if vals:
if self._calculate_checksum(vals,2)!=vals[2]:
return False
print (vals)
if self.selected==self.TEMP_ADDRESS:return self.rawToTemp(vals)
elif self.selected==self.HUMIDITY_ADDRESS:return self.rawToRH(vals)
| connect | identifier_name |
forceGraph0-0-1.ts | module powerbi.visuals {
export class ForceGraph implements IVisual {
public static capabilities: VisualCapabilities = {
dataRoles: [
{
name: 'Values',
kind: VisualDataRoleKind.GroupingOrMeasure,
},
],
objects: {
general: {
properties: {
formatString: {
type: { formatting: { formatString: true } },
},
},
}
},
dataViewMappings: [{
table: {
rows: {
for: { in: 'Values' },
dataReductionAlgorithm: { window: {} }
},
rowCount: { preferred: { min: 1 } }
},
}],
suppressDefaultTitle: true,
};
private root: D3.Selection;
private dataView: DataView;
// converts data from Values to two dimensional array
// expected order: MemberFrom MemberTo Value Value2 (optional - for coloring)
public static converter(dataView: DataView): any {
var nodes = {};
var minFiles = Number.MAX_VALUE;
var maxFiles = 0;
var linkedByName = {};
//var links = [
// { "source": "john", "target": "joe", "filecount": 50 },
// { "source": "john", "target": "bob", "filecount": 150 },
// { "source": "mary", "target": "joe", "filecount": 80 },
// { "source": "bob", "target": "mary", "filecount": 70 },
// { "source": "joe", "target": "bob", "filecount": 20 },
//];
//links.forEach(function (link) {
// link.source = nodes[link.source] ||
// (nodes[link.source] = { name: link.source });
// link.target = nodes[link.target] ||
// (nodes[link.target] = { name: link.target });
// //link.value = +link.filecount;
// if (link.filecount < minFiles) { minFiles = link.filecount };
// if (link.filecount > maxFiles) { maxFiles = link.filecount };
// linkedByName[link.source.name + "," + link.target.name] = 1;
//});
var links = [];
//var rows = [
// ["Harry", "Sally", 4631],
// ["Harry", "Mario", 4018]
//];
if (dataView && dataView.table) {
var rows = dataView.table.rows;
rows.forEach(function (item) {
linkedByName[item[0] + "," + item[1]] = 1;
var link = {
"source": nodes[item[0]] || (nodes[item[0]] = { name: item[0] }),
"target": nodes[item[1]] || (nodes[item[1]] = { name: item[1] }),
"filecount": item[2]
};
if (link.filecount < minFiles) { minFiles = link.filecount };
if (link.filecount > maxFiles) { maxFiles = link.filecount };
links.push(link);
});
}; | var data = {
"nodes": nodes, "links": links, "minFiles": minFiles, "maxFiles": maxFiles, "linkedByName": linkedByName
};
return data;
}
public init(options: VisualInitOptions): void {
this.root = d3.select(options.element.get(0));
}
public update(options: VisualUpdateOptions) {
if (!options.dataViews || (options.dataViews.length < 1)) return;
var data = ForceGraph.converter(this.dataView = options.dataViews[0]);
var viewport = options.viewport;
var w = viewport.width,
h = viewport.height;
var k = Math.sqrt(Object.keys(data.nodes).length / (w * h));
this.root.selectAll("svg").remove();
var svg = this.root
.append("svg")
.attr("width", w)
.attr("height", h);
var force = d3.layout.force()
.gravity(100 * k)
.nodes(d3.values(data.nodes))
.links(data.links)
.size([w, h])
.linkDistance(100)
.charge(-15 / k)
.on("tick", tick)
.start();
var scale0to100 = d3.scale.linear().domain([data.minFiles, data.maxFiles]).range([2, 10]).clamp(true);
var path = svg.selectAll(".link")
.data(force.links())
.enter().append("path")
.attr("class", "link")
.attr("stroke-width", function (d) {
return scale0to100(d.filecount);
})
.on("mouseover", fadePath(.3))
.on("mouseout", fadePath(1))
;
path.append("title")
.text(function (d) { return d.source.name + "-" + d.target.name + ":" + d.filecount });
// define the nodes
var node = svg.selectAll(".node")
.data(force.nodes())
.enter().append("g")
.attr("class", "node")
.call(force.drag)
.on("mouseover", fadeNode(.3))
.on("mouseout", fadeNode(1))
.on("mousedown", function () { d3.event.stopPropagation(); })
;
// add the nodes
node.append("circle")
.attr("r", function (d) {
return d.weight < 10 ? 10 : d.weight;
});
// add the text
node.append("text")
.attr("x", 12)
.attr("dy", ".35em")
.text(function (d) {
return d.name;
});
function isConnected(a, b) {
return data.linkedByName[a.name + "," + b.name] || data.linkedByName[b.name + "," + a.name] || a.name == b.name;
}
// add the curvy lines
function tick() {
path.each(function () { this.parentNode.insertBefore(this, this); });
path.attr("d", function (d) {
var dx = d.target.x - d.source.x,
dy = d.target.y - d.source.y,
dr = Math.sqrt(dx * dx + dy * dy);
return "M" +
d.source.x + "," +
d.source.y + "A" +
dr + "," + dr + " 0 0,1 " +
d.target.x + "," +
d.target.y;
});
node
.attr("transform", function (d) {
return "translate(" + d.x + "," + d.y + ")";
});
};
function fadeNode(opacity) {
return function (d) {
node.style("stroke-opacity", function (o) {
var thisOpacity = isConnected(d, o) ? 1 : opacity;
this.setAttribute('fill-opacity', thisOpacity);
return thisOpacity;
});
path.style("stroke-opacity", function (o) {
return o.source === d || o.target === d ? 1 : opacity;
});
path.style("stroke", function (o) {
return o.source === d || o.target === d ? "#f00" : "#bbb";
});
};
}
function fadePath(opacity) {
return function (d) {
path.style("stroke-opacity", function (o) {
return o.source === d.source && o.target === d.target ? 1 : opacity;
});
path.style("stroke", function (o) {
return o.source === d.source && o.target === d.target ? "#f00" : "#bbb";
});
};
}
}
public destroy(): void {
this.root = null;
}
}
} | random_line_split |
|
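Two small formulas drive the visual above: the clamped linear scale mapping a link's file count onto a stroke width in $[2, 10]$, and the density factor $k$ that tunes gravity and charge to the node count $N$ and the viewport $W \times H$:

$$ \mathrm{width}(f) = \mathrm{clamp}\!\left(2 + 8\cdot\frac{f - f_{\min}}{f_{\max} - f_{\min}},\; 2,\; 10\right), \qquad k = \sqrt{\frac{N}{W\cdot H}},\quad \mathrm{gravity} = 100k,\quad \mathrm{charge} = -\frac{15}{k}. $$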
forceGraph0-0-1.ts | module powerbi.visuals {
export class ForceGraph implements IVisual {
public static capabilities: VisualCapabilities = {
dataRoles: [
{
name: 'Values',
kind: VisualDataRoleKind.GroupingOrMeasure,
},
],
objects: {
general: {
properties: {
formatString: {
type: { formatting: { formatString: true } },
},
},
}
},
dataViewMappings: [{
table: {
rows: {
for: { in: 'Values' },
dataReductionAlgorithm: { window: {} }
},
rowCount: { preferred: { min: 1 } }
},
}],
suppressDefaultTitle: true,
};
private root: D3.Selection;
private dataView: DataView;
// converts data from Values to two dimensional array
// expected order: MemberFrom MemberTo Value Value2 (optional - for coloring)
public static converter(dataView: DataView): any {
var nodes = {};
var minFiles = Number.MAX_VALUE;
var maxFiles = 0;
var linkedByName = {};
//var links = [
// { "source": "john", "target": "joe", "filecount": 50 },
// { "source": "john", "target": "bob", "filecount": 150 },
// { "source": "mary", "target": "joe", "filecount": 80 },
// { "source": "bob", "target": "mary", "filecount": 70 },
// { "source": "joe", "target": "bob", "filecount": 20 },
//];
//links.forEach(function (link) {
// link.source = nodes[link.source] ||
// (nodes[link.source] = { name: link.source });
// link.target = nodes[link.target] ||
// (nodes[link.target] = { name: link.target });
// //link.value = +link.filecount;
// if (link.filecount < minFiles) { minFiles = link.filecount };
// if (link.filecount > maxFiles) { maxFiles = link.filecount };
// linkedByName[link.source.name + "," + link.target.name] = 1;
//});
var links = [];
//var rows = [
// ["Harry", "Sally", 4631],
// ["Harry", "Mario", 4018]
//];
if (dataView && dataView.table) {
var rows = dataView.table.rows;
rows.forEach(function (item) {
linkedByName[item[0] + "," + item[1]] = 1;
var link = {
"source": nodes[item[0]] || (nodes[item[0]] = { name: item[0] }),
"target": nodes[item[1]] || (nodes[item[1]] = { name: item[1] }),
"filecount": item[2]
};
if (link.filecount < minFiles) { minFiles = link.filecount };
if (link.filecount > maxFiles) { maxFiles = link.filecount };
links.push(link);
});
};
var data = {
"nodes": nodes, "links": links, "minFiles": minFiles, "maxFiles": maxFiles, "linkedByName": linkedByName
};
return data;
}
public init(options: VisualInitOptions): void {
this.root = d3.select(options.element.get(0));
}
public update(options: VisualUpdateOptions) {
if (!options.dataViews || (options.dataViews.length < 1)) return;
var data = ForceGraph.converter(this.dataView = options.dataViews[0]);
var viewport = options.viewport;
var w = viewport.width,
h = viewport.height;
var k = Math.sqrt(Object.keys(data.nodes).length / (w * h));
this.root.selectAll("svg").remove();
var svg = this.root
.append("svg")
.attr("width", w)
.attr("height", h);
var force = d3.layout.force()
.gravity(100 * k)
.nodes(d3.values(data.nodes))
.links(data.links)
.size([w, h])
.linkDistance(100)
.charge(-15 / k)
.on("tick", tick)
.start();
var scale0to100 = d3.scale.linear().domain([data.minFiles, data.maxFiles]).range([2, 10]).clamp(true);
var path = svg.selectAll(".link")
.data(force.links())
.enter().append("path")
.attr("class", "link")
.attr("stroke-width", function (d) {
return scale0to100(d.filecount);
})
.on("mouseover", fadePath(.3))
.on("mouseout", fadePath(1))
;
path.append("title")
.text(function (d) { return d.source.name + "-" + d.target.name + ":" + d.filecount });
// define the nodes
var node = svg.selectAll(".node")
.data(force.nodes())
.enter().append("g")
.attr("class", "node")
.call(force.drag)
.on("mouseover", fadeNode(.3))
.on("mouseout", fadeNode(1))
.on("mousedown", function () { d3.event.stopPropagation(); })
;
// add the nodes
node.append("circle")
.attr("r", function (d) {
return d.weight < 10 ? 10 : d.weight;
});
// add the text
node.append("text")
.attr("x", 12)
.attr("dy", ".35em")
.text(function (d) {
return d.name;
});
function isConnected(a, b) |
// add the curvy lines
function tick() {
path.each(function () { this.parentNode.insertBefore(this, this); });
path.attr("d", function (d) {
var dx = d.target.x - d.source.x,
dy = d.target.y - d.source.y,
dr = Math.sqrt(dx * dx + dy * dy);
return "M" +
d.source.x + "," +
d.source.y + "A" +
dr + "," + dr + " 0 0,1 " +
d.target.x + "," +
d.target.y;
});
node
.attr("transform", function (d) {
return "translate(" + d.x + "," + d.y + ")";
});
};
function fadeNode(opacity) {
return function (d) {
node.style("stroke-opacity", function (o) {
var thisOpacity = isConnected(d, o) ? 1 : opacity;
this.setAttribute('fill-opacity', thisOpacity);
return thisOpacity;
});
path.style("stroke-opacity", function (o) {
return o.source === d || o.target === d ? 1 : opacity;
});
path.style("stroke", function (o) {
return o.source === d || o.target === d ? "#f00" : "#bbb";
});
};
}
function fadePath(opacity) {
return function (d) {
path.style("stroke-opacity", function (o) {
return o.source === d.source && o.target === d.target ? 1 : opacity;
});
path.style("stroke", function (o) {
return o.source === d.source && o.target === d.target ? "#f00" : "#bbb";
});
};
}
}
public destroy(): void {
this.root = null;
}
}
} | {
return data.linkedByName[a.name + "," + b.name] || data.linkedByName[b.name + "," + a.name] || a.name == b.name;
} | identifier_body |
forceGraph0-0-1.ts | module powerbi.visuals {
export class ForceGraph implements IVisual {
public static capabilities: VisualCapabilities = {
dataRoles: [
{
name: 'Values',
kind: VisualDataRoleKind.GroupingOrMeasure,
},
],
objects: {
general: {
properties: {
formatString: {
type: { formatting: { formatString: true } },
},
},
}
},
dataViewMappings: [{
table: {
rows: {
for: { in: 'Values' },
dataReductionAlgorithm: { window: {} }
},
rowCount: { preferred: { min: 1 } }
},
}],
suppressDefaultTitle: true,
};
private root: D3.Selection;
private dataView: DataView;
// converts data from Values to two dimensional array
// expected order: MemberFrom MemberTo Value Value2 (optional - for coloring)
public static converter(dataView: DataView): any {
var nodes = {};
var minFiles = Number.MAX_VALUE;
var maxFiles = 0;
var linkedByName = {};
//var links = [
// { "source": "john", "target": "joe", "filecount": 50 },
// { "source": "john", "target": "bob", "filecount": 150 },
// { "source": "mary", "target": "joe", "filecount": 80 },
// { "source": "bob", "target": "mary", "filecount": 70 },
// { "source": "joe", "target": "bob", "filecount": 20 },
//];
//links.forEach(function (link) {
// link.source = nodes[link.source] ||
// (nodes[link.source] = { name: link.source });
// link.target = nodes[link.target] ||
// (nodes[link.target] = { name: link.target });
// //link.value = +link.filecount;
// if (link.filecount < minFiles) { minFiles = link.filecount };
// if (link.filecount > maxFiles) { maxFiles = link.filecount };
// linkedByName[link.source.name + "," + link.target.name] = 1;
//});
var links = [];
//var rows = [
// ["Harry", "Sally", 4631],
// ["Harry", "Mario", 4018]
//];
if (dataView && dataView.table) {
var rows = dataView.table.rows;
rows.forEach(function (item) {
linkedByName[item[0] + "," + item[1]] = 1;
var link = {
"source": nodes[item[0]] || (nodes[item[0]] = { name: item[0] }),
"target": nodes[item[1]] || (nodes[item[1]] = { name: item[1] }),
"filecount": item[2]
};
if (link.filecount < minFiles) { minFiles = link.filecount };
if (link.filecount > maxFiles) { maxFiles = link.filecount };
links.push(link);
});
};
var data = {
"nodes": nodes, "links": links, "minFiles": minFiles, "maxFiles": maxFiles, "linkedByName": linkedByName
};
return data;
}
public init(options: VisualInitOptions): void {
this.root = d3.select(options.element.get(0));
}
public update(options: VisualUpdateOptions) {
if (!options.dataViews || (options.dataViews.length < 1)) return;
var data = ForceGraph.converter(this.dataView = options.dataViews[0]);
var viewport = options.viewport;
var w = viewport.width,
h = viewport.height;
var k = Math.sqrt(Object.keys(data.nodes).length / (w * h));
this.root.selectAll("svg").remove();
var svg = this.root
.append("svg")
.attr("width", w)
.attr("height", h);
var force = d3.layout.force()
.gravity(100 * k)
.nodes(d3.values(data.nodes))
.links(data.links)
.size([w, h])
.linkDistance(100)
.charge(-15 / k)
.on("tick", tick)
.start();
var scale0to100 = d3.scale.linear().domain([data.minFiles, data.maxFiles]).range([2, 10]).clamp(true);
var path = svg.selectAll(".link")
.data(force.links())
.enter().append("path")
.attr("class", "link")
.attr("stroke-width", function (d) {
return scale0to100(d.filecount);
})
.on("mouseover", fadePath(.3))
.on("mouseout", fadePath(1))
;
path.append("title")
.text(function (d) { return d.source.name + "-" + d.target.name + ":" + d.filecount });
// define the nodes
var node = svg.selectAll(".node")
.data(force.nodes())
.enter().append("g")
.attr("class", "node")
.call(force.drag)
.on("mouseover", fadeNode(.3))
.on("mouseout", fadeNode(1))
.on("mousedown", function () { d3.event.stopPropagation(); })
;
// add the nodes
node.append("circle")
.attr("r", function (d) {
return d.weight < 10 ? 10 : d.weight;
});
// add the text
node.append("text")
.attr("x", 12)
.attr("dy", ".35em")
.text(function (d) {
return d.name;
});
function isConnected(a, b) {
return data.linkedByName[a.name + "," + b.name] || data.linkedByName[b.name + "," + a.name] || a.name == b.name;
}
// add the curvy lines
function tick() {
path.each(function () { this.parentNode.insertBefore(this, this); });
path.attr("d", function (d) {
var dx = d.target.x - d.source.x,
dy = d.target.y - d.source.y,
dr = Math.sqrt(dx * dx + dy * dy);
return "M" +
d.source.x + "," +
d.source.y + "A" +
dr + "," + dr + " 0 0,1 " +
d.target.x + "," +
d.target.y;
});
node
.attr("transform", function (d) {
return "translate(" + d.x + "," + d.y + ")";
});
};
function | (opacity) {
return function (d) {
node.style("stroke-opacity", function (o) {
var thisOpacity = isConnected(d, o) ? 1 : opacity;
this.setAttribute('fill-opacity', thisOpacity);
return thisOpacity;
});
path.style("stroke-opacity", function (o) {
return o.source === d || o.target === d ? 1 : opacity;
});
path.style("stroke", function (o) {
return o.source === d || o.target === d ? "#f00" : "#bbb";
});
};
}
function fadePath(opacity) {
return function (d) {
path.style("stroke-opacity", function (o) {
return o.source === d.source && o.target === d.target ? 1 : opacity;
});
path.style("stroke", function (o) {
return o.source === d.source && o.target === d.target ? "#f00" : "#bbb";
});
};
}
}
public destroy(): void {
this.root = null;
}
}
} | fadeNode | identifier_name |
events.spec.ts | /* tslint:disable:component-selector */
import {
Component, NgZone
} from '@angular/core';
import {
TestBed
} from '@angular/core/testing';
import {
DxDataGridModule
} from 'devextreme-angular';
import readyCallbacks from 'devextreme/core/utils/ready_callbacks';
import { on } from 'devextreme/events';
@Component({
selector: 'test-container-component',
template: ''
})
class | {
}
describe('global events', () => {
it('should be subscribed within Angular Zone', () => {
let readyCallbacksCalls = 0;
readyCallbacks.fire();
readyCallbacks.add(() => {
readyCallbacksCalls++;
NgZone.assertInAngularZone();
});
TestBed.configureTestingModule({
declarations: [TestContainerComponent],
imports: [DxDataGridModule]
});
TestBed.overrideComponent(TestContainerComponent, {
set: { template: `` }
});
TestBed.createComponent(TestContainerComponent);
expect(readyCallbacksCalls).toBe(1);
readyCallbacks.add(() => {
readyCallbacksCalls++;
NgZone.assertInAngularZone();
});
expect(readyCallbacksCalls).toBe(2);
});
});
describe('events', () => {
it('should be fired within Angular Zone', () => {
TestBed.configureTestingModule({
declarations: [TestContainerComponent],
imports: [DxDataGridModule]
});
TestBed.overrideComponent(TestContainerComponent, {
set: { template: `
<div class="elem"></div>
` }
});
const fixture = TestBed.createComponent(TestContainerComponent);
fixture.detectChanges();
const element = fixture.nativeElement.querySelector('.elem');
let counter = 0;
fixture.ngZone.runOutsideAngular(() => {
on(element, 'click', () => {
expect(NgZone.isInAngularZone()).toBe(true);
counter++;
});
});
element.click();
expect(counter).toBe(1);
});
});
| TestContainerComponent | identifier_name |
events.spec.ts | /* tslint:disable:component-selector */
import {
Component, NgZone
} from '@angular/core';
import {
TestBed
} from '@angular/core/testing';
import {
DxDataGridModule
} from 'devextreme-angular';
import readyCallbacks from 'devextreme/core/utils/ready_callbacks';
import { on } from 'devextreme/events';
@Component({
selector: 'test-container-component',
template: ''
})
class TestContainerComponent {
}
describe('global events', () => {
it('should be subscribed within Angular Zone', () => {
let readyCallbacksCalls = 0;
readyCallbacks.fire();
readyCallbacks.add(() => {
readyCallbacksCalls++;
NgZone.assertInAngularZone();
}); | declarations: [TestContainerComponent],
imports: [DxDataGridModule]
});
TestBed.overrideComponent(TestContainerComponent, {
set: { template: `` }
});
TestBed.createComponent(TestContainerComponent);
expect(readyCallbacksCalls).toBe(1);
readyCallbacks.add(() => {
readyCallbacksCalls++;
NgZone.assertInAngularZone();
});
expect(readyCallbacksCalls).toBe(2);
});
});
describe('events', () => {
it('should be fired within Angular Zone', () => {
TestBed.configureTestingModule({
declarations: [TestContainerComponent],
imports: [DxDataGridModule]
});
TestBed.overrideComponent(TestContainerComponent, {
set: { template: `
<div class="elem"></div>
` }
});
const fixture = TestBed.createComponent(TestContainerComponent);
fixture.detectChanges();
const element = fixture.nativeElement.querySelector('.elem');
let counter = 0;
fixture.ngZone.runOutsideAngular(() => {
on(element, 'click', () => {
expect(NgZone.isInAngularZone()).toBe(true);
counter++;
});
});
element.click();
expect(counter).toBe(1);
});
}); |
TestBed.configureTestingModule({ | random_line_split |
parseUrl.js | export default (original) => {
const url = getHashUrl(original);
let [path, params] = url.split('?');
if (path.length >= 2) {
path = path.replace(/\/$/, '');
}
if (params) {
params = parseSearchParams(params);
} else {
params = {};
}
const actual = path + joinSearchParams(params);
return {
path, | }
const getHashUrl = (original) => {
let url = original.split('#');
if (url.length >= 2) {
url = url[1];
} else {
url = '/';
}
if (url === '') {
url = '/';
}
if (url[0] !== '/') {
url = '/' + url;
}
return url;
}
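// parse "a=1&b=2" into { a: '1', b: '2' }; keys and values are URI-decoded, pairs with empty keys are skipped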
const parseSearchParams = (searchString) => {
let pairSplit;
return (searchString || '').replace(/^\?/, '').split('&').reduce((p, pair) => {
pairSplit = pair.split('=');
if (pairSplit.length >= 1 && pairSplit[0].length >= 1) {
p[decodeURIComponent(pairSplit[0])] = decodeURIComponent(pairSplit[1]) || '';
}
return p;
}, {});
}
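// inverse of parseSearchParams: rebuild "?a=1&b=2" from the object, or '' when it is empty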
const joinSearchParams = (searchParams) => {
const searchString = Object
.keys(searchParams)
.reduce((p, paramKey) => p += `&${paramKey}=${searchParams[paramKey]}`, '?');
if (searchString.length <= 1) {
return '';
}
return searchString.replace('?&', '?');
} | params,
original,
actual
}; | random_line_split |
parseUrl.js | export default (original) => {
const url = getHashUrl(original);
let [path, params] = url.split('?');
if (path.length >= 2) {
path = path.replace(/\/$/, '');
}
if (params) {
params = parseSearchParams(params);
} else {
params = {};
}
const actual = path + joinSearchParams(params);
return {
path,
params,
original,
actual
};
}
const getHashUrl = (original) => {
let url = original.split('#');
if (url.length >= 2) | else {
url = '/';
}
if (url === '') {
url = '/';
}
if (url[0] !== '/') {
url = '/' + url;
}
return url;
}
const parseSearchParams = (searchString) => {
let pairSplit;
return (searchString || '').replace(/^\?/, '').split('&').reduce((p, pair) => {
pairSplit = pair.split('=');
if (pairSplit.length >= 1 && pairSplit[0].length >= 1) {
p[decodeURIComponent(pairSplit[0])] = decodeURIComponent(pairSplit[1]) || '';
}
return p;
}, {});
}
const joinSearchParams = (searchParams) => {
const searchString = Object
.keys(searchParams)
.reduce((p, paramKey) => p += `&${paramKey}=${searchParams[paramKey]}`, '?');
if (searchString.length <= 1) {
return '';
}
return searchString.replace('?&', '?');
}
| {
url = url[1];
} | conditional_block |
app.rs | use actix::prelude::*;
use actix_web::*;
use actors::{ForwardA2AMsg, GetEndpoint};
use actors::forward_agent::ForwardAgent;
use bytes::Bytes;
use domain::config::AppConfig;
use futures::*;
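// maximum accepted request body: 105_906_176 bytes = 101 MiB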
const MAX_PAYLOAD_SIZE: usize = 105_906_176;
pub struct AppState {
pub forward_agent: Addr<ForwardAgent>,
}
pub fn | (config: AppConfig, forward_agent: Addr<ForwardAgent>) -> App<AppState> {
App::with_state(AppState { forward_agent })
.prefix(config.prefix)
.middleware(middleware::Logger::default()) // enable logger
.resource("", |r| r.method(http::Method::GET).with(_get_endpoint_details))
.resource("/msg", |r| r.method(http::Method::POST).with(_forward_message))
}
fn _get_endpoint_details(state: State<AppState>) -> FutureResponse<HttpResponse> {
state.forward_agent
.send(GetEndpoint {})
.from_err()
.map(|res| match res {
Ok(endpoint) => HttpResponse::Ok().json(&endpoint),
Err(err) => HttpResponse::InternalServerError().body(format!("{:?}", err)).into(), // FIXME: Better error
})
.responder()
}
fn _forward_message((state, req): (State<AppState>, HttpRequest<AppState>)) -> FutureResponse<HttpResponse> {
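// buffer the full body (up to MAX_PAYLOAD_SIZE), then hand the raw bytes to the forward agent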
req
.body()
.limit(MAX_PAYLOAD_SIZE)
.from_err()
.and_then(move |body| {
state.forward_agent
.send(ForwardA2AMsg(body.to_vec()))
.from_err()
.and_then(|res| match res {
Ok(msg) => Ok(Bytes::from(msg).into()),
Err(err) => Ok(HttpResponse::InternalServerError().body(format!("{:?}", err)).into()), // FIXME: Better error
})
})
.responder()
}
| new | identifier_name |
app.rs | use actix::prelude::*;
use actix_web::*;
use actors::{ForwardA2AMsg, GetEndpoint};
use actors::forward_agent::ForwardAgent;
use bytes::Bytes;
use domain::config::AppConfig;
use futures::*;
const MAX_PAYLOAD_SIZE: usize = 105_906_176;
pub struct AppState {
pub forward_agent: Addr<ForwardAgent>,
}
pub fn new(config: AppConfig, forward_agent: Addr<ForwardAgent>) -> App<AppState> |
fn _get_endpoint_details(state: State<AppState>) -> FutureResponse<HttpResponse> {
state.forward_agent
.send(GetEndpoint {})
.from_err()
.map(|res| match res {
Ok(endpoint) => HttpResponse::Ok().json(&endpoint),
Err(err) => HttpResponse::InternalServerError().body(format!("{:?}", err)).into(), // FIXME: Better error
})
.responder()
}
fn _forward_message((state, req): (State<AppState>, HttpRequest<AppState>)) -> FutureResponse<HttpResponse> {
req
.body()
.limit(MAX_PAYLOAD_SIZE)
.from_err()
.and_then(move |body| {
state.forward_agent
.send(ForwardA2AMsg(body.to_vec()))
.from_err()
.and_then(|res| match res {
Ok(msg) => Ok(Bytes::from(msg).into()),
Err(err) => Ok(HttpResponse::InternalServerError().body(format!("{:?}", err)).into()), // FIXME: Better error
})
})
.responder()
}
| {
App::with_state(AppState { forward_agent })
.prefix(config.prefix)
.middleware(middleware::Logger::default()) // enable logger
.resource("", |r| r.method(http::Method::GET).with(_get_endpoint_details))
.resource("/msg", |r| r.method(http::Method::POST).with(_forward_message))
} | identifier_body |
app.rs | use actix::prelude::*;
use actix_web::*;
use actors::{ForwardA2AMsg, GetEndpoint};
use actors::forward_agent::ForwardAgent;
use bytes::Bytes;
use domain::config::AppConfig;
use futures::*;
const MAX_PAYLOAD_SIZE: usize = 105_906_176;
pub struct AppState {
pub forward_agent: Addr<ForwardAgent>,
}
pub fn new(config: AppConfig, forward_agent: Addr<ForwardAgent>) -> App<AppState> {
App::with_state(AppState { forward_agent })
.prefix(config.prefix)
.middleware(middleware::Logger::default()) // enable logger
.resource("", |r| r.method(http::Method::GET).with(_get_endpoint_details))
.resource("/msg", |r| r.method(http::Method::POST).with(_forward_message))
}
fn _get_endpoint_details(state: State<AppState>) -> FutureResponse<HttpResponse> {
state.forward_agent
.send(GetEndpoint {})
.from_err()
.map(|res| match res {
Ok(endpoint) => HttpResponse::Ok().json(&endpoint),
Err(err) => HttpResponse::InternalServerError().body(format!("{:?}", err)).into(), // FIXME: Better error | })
.responder()
}
fn _forward_message((state, req): (State<AppState>, HttpRequest<AppState>)) -> FutureResponse<HttpResponse> {
req
.body()
.limit(MAX_PAYLOAD_SIZE)
.from_err()
.and_then(move |body| {
state.forward_agent
.send(ForwardA2AMsg(body.to_vec()))
.from_err()
.and_then(|res| match res {
Ok(msg) => Ok(Bytes::from(msg).into()),
Err(err) => Ok(HttpResponse::InternalServerError().body(format!("{:?}", err)).into()), // FIXME: Better error
})
})
.responder()
} | random_line_split |
|
lib.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![feature(ascii)]
#![feature(as_unsafe_cell)]
#![feature(borrow_state)]
#![feature(box_syntax)]
#![feature(cell_extras)]
#![feature(const_fn)]
#![feature(core_intrinsics)]
#![feature(custom_attribute)]
#![feature(custom_derive)]
#![feature(fnbox)]
#![feature(hashmap_hasher)]
#![feature(iter_arith)]
#![feature(mpsc_select)]
#![feature(nonzero)]
#![feature(on_unimplemented)]
#![feature(peekable_is_empty)]
#![feature(plugin)]
#![feature(slice_patterns)]
#![feature(str_utf16)]
#![feature(unicode)]
#![deny(unsafe_code)]
#![allow(non_snake_case)]
#![doc = "The script crate contains all matters DOM."]
#![plugin(plugins)]
extern crate angle;
extern crate app_units;
#[macro_use]
extern crate bitflags;
extern crate canvas;
extern crate canvas_traits;
extern crate caseless;
extern crate core;
extern crate cssparser;
extern crate devtools_traits;
extern crate encoding;
extern crate euclid;
extern crate fnv;
extern crate html5ever;
extern crate hyper;
extern crate image;
extern crate ipc_channel;
extern crate js;
extern crate libc;
#[macro_use]
extern crate log;
extern crate msg;
extern crate net_traits;
extern crate num;
extern crate offscreen_gl_context;
#[macro_use]
extern crate profile_traits;
extern crate rand;
extern crate ref_slice;
extern crate rustc_serialize;
extern crate rustc_unicode;
extern crate script_traits;
#[macro_use(state_pseudo_classes)] extern crate selectors;
extern crate serde;
extern crate smallvec;
#[macro_use(atom, ns)] extern crate string_cache;
#[macro_use]
extern crate style;
extern crate style_traits;
extern crate tendril;
extern crate time;
extern crate unicase;
extern crate url;
#[macro_use]
extern crate util;
extern crate uuid;
extern crate websocket;
extern crate xml5ever;
pub mod clipboard_provider;
pub mod cors;
mod devtools;
pub mod document_loader;
#[macro_use]
pub mod dom;
pub mod layout_interface;
mod mem;
mod network_listener;
pub mod page;
pub mod parse;
pub mod reporter;
#[allow(unsafe_code)]
pub mod script_task;
pub mod textinput;
mod timers;
mod unpremultiplytable;
mod webdriver_handlers;
use dom::bindings::codegen::RegisterBindings;
use js::jsapi::SetDOMProxyInformation;
use std::ptr;
#[cfg(target_os = "linux")]
#[allow(unsafe_code)]
fn perform_platform_specific_initialization() {
use std::mem;
// 4096 is default max on many linux systems
const MAX_FILE_LIMIT: libc::rlim_t = 4096;
// Bump up our number of file descriptors to save us from impending doom caused by an onslaught
// of iframes.
unsafe {
let mut rlim: libc::rlimit = mem::uninitialized();
match libc::getrlimit(libc::RLIMIT_NOFILE, &mut rlim) {
0 => {
if rlim.rlim_cur >= MAX_FILE_LIMIT {
// we have more than enough
return;
}
rlim.rlim_cur = match rlim.rlim_max {
libc::RLIM_INFINITY => MAX_FILE_LIMIT,
_ => {
if rlim.rlim_max < MAX_FILE_LIMIT | else {
MAX_FILE_LIMIT
}
}
};
match libc::setrlimit(libc::RLIMIT_NOFILE, &mut rlim) {
0 => (),
_ => warn!("Failed to set file count limit"),
};
},
_ => warn!("Failed to get file count limit"),
};
}
}
#[cfg(not(target_os = "linux"))]
fn perform_platform_specific_initialization() {}
#[allow(unsafe_code)]
pub fn init() {
unsafe {
assert_eq!(js::jsapi::JS_Init(), true);
SetDOMProxyInformation(ptr::null(), 0, Some(script_task::shadow_check_callback));
}
// Create the global vtables used by the (generated) DOM
// bindings to implement JS proxies.
RegisterBindings::RegisterProxyHandlers();
perform_platform_specific_initialization();
}
| {
rlim.rlim_max
} | conditional_block |
lib.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![feature(ascii)]
#![feature(as_unsafe_cell)]
#![feature(borrow_state)]
#![feature(box_syntax)]
#![feature(cell_extras)]
#![feature(const_fn)]
#![feature(core_intrinsics)]
#![feature(custom_attribute)]
#![feature(custom_derive)]
#![feature(fnbox)]
#![feature(hashmap_hasher)]
#![feature(iter_arith)]
#![feature(mpsc_select)]
#![feature(nonzero)]
#![feature(on_unimplemented)]
#![feature(peekable_is_empty)]
#![feature(plugin)]
#![feature(slice_patterns)]
#![feature(str_utf16)]
#![feature(unicode)]
#![deny(unsafe_code)]
#![allow(non_snake_case)]
#![doc = "The script crate contains all matters DOM."]
#![plugin(plugins)]
extern crate angle;
extern crate app_units;
#[macro_use]
extern crate bitflags;
extern crate canvas;
extern crate canvas_traits;
extern crate caseless;
extern crate core;
extern crate cssparser;
extern crate devtools_traits;
extern crate encoding;
extern crate euclid;
extern crate fnv;
extern crate html5ever;
extern crate hyper;
extern crate image;
extern crate ipc_channel;
extern crate js;
extern crate libc;
#[macro_use]
extern crate log;
extern crate msg;
extern crate net_traits;
extern crate num;
extern crate offscreen_gl_context;
#[macro_use]
extern crate profile_traits;
extern crate rand;
extern crate ref_slice;
extern crate rustc_serialize;
extern crate rustc_unicode;
extern crate script_traits;
#[macro_use(state_pseudo_classes)] extern crate selectors;
extern crate serde;
extern crate smallvec;
#[macro_use(atom, ns)] extern crate string_cache;
#[macro_use]
extern crate style;
extern crate style_traits;
extern crate tendril;
extern crate time;
extern crate unicase;
extern crate url;
#[macro_use]
extern crate util;
extern crate uuid;
extern crate websocket;
extern crate xml5ever;
pub mod clipboard_provider;
pub mod cors;
mod devtools;
pub mod document_loader;
#[macro_use]
pub mod dom;
pub mod layout_interface;
mod mem;
mod network_listener;
pub mod page;
pub mod parse;
pub mod reporter;
#[allow(unsafe_code)]
pub mod script_task;
pub mod textinput;
mod timers;
mod unpremultiplytable;
mod webdriver_handlers;
use dom::bindings::codegen::RegisterBindings;
use js::jsapi::SetDOMProxyInformation;
use std::ptr;
#[cfg(target_os = "linux")]
#[allow(unsafe_code)]
fn perform_platform_specific_initialization() {
use std::mem;
// 4096 is default max on many linux systems | // Bump up our number of file descriptors to save us from impending doom caused by an onslaught
// of iframes.
unsafe {
let mut rlim: libc::rlimit = mem::uninitialized();
match libc::getrlimit(libc::RLIMIT_NOFILE, &mut rlim) {
0 => {
if rlim.rlim_cur >= MAX_FILE_LIMIT {
// we have more than enough
return;
}
rlim.rlim_cur = match rlim.rlim_max {
libc::RLIM_INFINITY => MAX_FILE_LIMIT,
_ => {
if rlim.rlim_max < MAX_FILE_LIMIT {
rlim.rlim_max
} else {
MAX_FILE_LIMIT
}
}
};
match libc::setrlimit(libc::RLIMIT_NOFILE, &mut rlim) {
0 => (),
_ => warn!("Failed to set file count limit"),
};
},
_ => warn!("Failed to get file count limit"),
};
}
}
#[cfg(not(target_os = "linux"))]
fn perform_platform_specific_initialization() {}
#[allow(unsafe_code)]
pub fn init() {
unsafe {
assert_eq!(js::jsapi::JS_Init(), true);
SetDOMProxyInformation(ptr::null(), 0, Some(script_task::shadow_check_callback));
}
// Create the global vtables used by the (generated) DOM
// bindings to implement JS proxies.
RegisterBindings::RegisterProxyHandlers();
perform_platform_specific_initialization();
} | const MAX_FILE_LIMIT: libc::rlim_t = 4096;
| random_line_split |
lib.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![feature(ascii)]
#![feature(as_unsafe_cell)]
#![feature(borrow_state)]
#![feature(box_syntax)]
#![feature(cell_extras)]
#![feature(const_fn)]
#![feature(core_intrinsics)]
#![feature(custom_attribute)]
#![feature(custom_derive)]
#![feature(fnbox)]
#![feature(hashmap_hasher)]
#![feature(iter_arith)]
#![feature(mpsc_select)]
#![feature(nonzero)]
#![feature(on_unimplemented)]
#![feature(peekable_is_empty)]
#![feature(plugin)]
#![feature(slice_patterns)]
#![feature(str_utf16)]
#![feature(unicode)]
#![deny(unsafe_code)]
#![allow(non_snake_case)]
#![doc = "The script crate contains all matters DOM."]
#![plugin(plugins)]
extern crate angle;
extern crate app_units;
#[macro_use]
extern crate bitflags;
extern crate canvas;
extern crate canvas_traits;
extern crate caseless;
extern crate core;
extern crate cssparser;
extern crate devtools_traits;
extern crate encoding;
extern crate euclid;
extern crate fnv;
extern crate html5ever;
extern crate hyper;
extern crate image;
extern crate ipc_channel;
extern crate js;
extern crate libc;
#[macro_use]
extern crate log;
extern crate msg;
extern crate net_traits;
extern crate num;
extern crate offscreen_gl_context;
#[macro_use]
extern crate profile_traits;
extern crate rand;
extern crate ref_slice;
extern crate rustc_serialize;
extern crate rustc_unicode;
extern crate script_traits;
#[macro_use(state_pseudo_classes)] extern crate selectors;
extern crate serde;
extern crate smallvec;
#[macro_use(atom, ns)] extern crate string_cache;
#[macro_use]
extern crate style;
extern crate style_traits;
extern crate tendril;
extern crate time;
extern crate unicase;
extern crate url;
#[macro_use]
extern crate util;
extern crate uuid;
extern crate websocket;
extern crate xml5ever;
pub mod clipboard_provider;
pub mod cors;
mod devtools;
pub mod document_loader;
#[macro_use]
pub mod dom;
pub mod layout_interface;
mod mem;
mod network_listener;
pub mod page;
pub mod parse;
pub mod reporter;
#[allow(unsafe_code)]
pub mod script_task;
pub mod textinput;
mod timers;
mod unpremultiplytable;
mod webdriver_handlers;
use dom::bindings::codegen::RegisterBindings;
use js::jsapi::SetDOMProxyInformation;
use std::ptr;
#[cfg(target_os = "linux")]
#[allow(unsafe_code)]
fn | () {
use std::mem;
// 4096 is default max on many linux systems
const MAX_FILE_LIMIT: libc::rlim_t = 4096;
// Bump up our number of file descriptors to save us from impending doom caused by an onslaught
// of iframes.
unsafe {
let mut rlim: libc::rlimit = mem::uninitialized();
match libc::getrlimit(libc::RLIMIT_NOFILE, &mut rlim) {
0 => {
if rlim.rlim_cur >= MAX_FILE_LIMIT {
// we have more than enough
return;
}
rlim.rlim_cur = match rlim.rlim_max {
libc::RLIM_INFINITY => MAX_FILE_LIMIT,
_ => {
if rlim.rlim_max < MAX_FILE_LIMIT {
rlim.rlim_max
} else {
MAX_FILE_LIMIT
}
}
};
match libc::setrlimit(libc::RLIMIT_NOFILE, &mut rlim) {
0 => (),
_ => warn!("Failed to set file count limit"),
};
},
_ => warn!("Failed to get file count limit"),
};
}
}
#[cfg(not(target_os = "linux"))]
fn perform_platform_specific_initialization() {}
#[allow(unsafe_code)]
pub fn init() {
unsafe {
assert_eq!(js::jsapi::JS_Init(), true);
SetDOMProxyInformation(ptr::null(), 0, Some(script_task::shadow_check_callback));
}
// Create the global vtables used by the (generated) DOM
// bindings to implement JS proxies.
RegisterBindings::RegisterProxyHandlers();
perform_platform_specific_initialization();
}
| perform_platform_specific_initialization | identifier_name |
lib.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![feature(ascii)]
#![feature(as_unsafe_cell)]
#![feature(borrow_state)]
#![feature(box_syntax)]
#![feature(cell_extras)]
#![feature(const_fn)]
#![feature(core_intrinsics)]
#![feature(custom_attribute)]
#![feature(custom_derive)]
#![feature(fnbox)]
#![feature(hashmap_hasher)]
#![feature(iter_arith)]
#![feature(mpsc_select)]
#![feature(nonzero)]
#![feature(on_unimplemented)]
#![feature(peekable_is_empty)]
#![feature(plugin)]
#![feature(slice_patterns)]
#![feature(str_utf16)]
#![feature(unicode)]
#![deny(unsafe_code)]
#![allow(non_snake_case)]
#![doc = "The script crate contains all matters DOM."]
#![plugin(plugins)]
extern crate angle;
extern crate app_units;
#[macro_use]
extern crate bitflags;
extern crate canvas;
extern crate canvas_traits;
extern crate caseless;
extern crate core;
extern crate cssparser;
extern crate devtools_traits;
extern crate encoding;
extern crate euclid;
extern crate fnv;
extern crate html5ever;
extern crate hyper;
extern crate image;
extern crate ipc_channel;
extern crate js;
extern crate libc;
#[macro_use]
extern crate log;
extern crate msg;
extern crate net_traits;
extern crate num;
extern crate offscreen_gl_context;
#[macro_use]
extern crate profile_traits;
extern crate rand;
extern crate ref_slice;
extern crate rustc_serialize;
extern crate rustc_unicode;
extern crate script_traits;
#[macro_use(state_pseudo_classes)] extern crate selectors;
extern crate serde;
extern crate smallvec;
#[macro_use(atom, ns)] extern crate string_cache;
#[macro_use]
extern crate style;
extern crate style_traits;
extern crate tendril;
extern crate time;
extern crate unicase;
extern crate url;
#[macro_use]
extern crate util;
extern crate uuid;
extern crate websocket;
extern crate xml5ever;
pub mod clipboard_provider;
pub mod cors;
mod devtools;
pub mod document_loader;
#[macro_use]
pub mod dom;
pub mod layout_interface;
mod mem;
mod network_listener;
pub mod page;
pub mod parse;
pub mod reporter;
#[allow(unsafe_code)]
pub mod script_task;
pub mod textinput;
mod timers;
mod unpremultiplytable;
mod webdriver_handlers;
use dom::bindings::codegen::RegisterBindings;
use js::jsapi::SetDOMProxyInformation;
use std::ptr;
#[cfg(target_os = "linux")]
#[allow(unsafe_code)]
fn perform_platform_specific_initialization() |
#[cfg(not(target_os = "linux"))]
fn perform_platform_specific_initialization() {}
#[allow(unsafe_code)]
pub fn init() {
unsafe {
assert_eq!(js::jsapi::JS_Init(), true);
SetDOMProxyInformation(ptr::null(), 0, Some(script_task::shadow_check_callback));
}
// Create the global vtables used by the (generated) DOM
// bindings to implement JS proxies.
RegisterBindings::RegisterProxyHandlers();
perform_platform_specific_initialization();
}
| {
use std::mem;
// 4096 is default max on many linux systems
const MAX_FILE_LIMIT: libc::rlim_t = 4096;
// Bump up our number of file descriptors to save us from impending doom caused by an onslaught
// of iframes.
unsafe {
let mut rlim: libc::rlimit = mem::uninitialized();
match libc::getrlimit(libc::RLIMIT_NOFILE, &mut rlim) {
0 => {
if rlim.rlim_cur >= MAX_FILE_LIMIT {
// we have more than enough
return;
}
rlim.rlim_cur = match rlim.rlim_max {
libc::RLIM_INFINITY => MAX_FILE_LIMIT,
_ => {
if rlim.rlim_max < MAX_FILE_LIMIT {
rlim.rlim_max
} else {
MAX_FILE_LIMIT
}
}
};
match libc::setrlimit(libc::RLIMIT_NOFILE, &mut rlim) {
0 => (),
_ => warn!("Failed to set file count limit"),
};
},
_ => warn!("Failed to get file count limit"),
};
}
} | identifier_body |
events.rs | use std::sync::mpsc::{Sender, Receiver, channel};
use std::iter::Iterator;
use std::error::Error;
use error;
use frame::events::{ServerEvent as FrameServerEvent, SimpleServerEvent as FrameSimpleServerEvent,
SchemaChange as FrameSchemaChange};
use frame::parser::parse_frame;
use compression::Compression;
use transport::CDRSTransport;
/// Full Server Event which includes all details about the occurred change.
pub type ServerEvent = FrameServerEvent;
/// Simplified Server event. It should be used to represent an event
/// which a consumer wants to listen to.
pub type SimpleServerEvent = FrameSimpleServerEvent;
/// Reexport of `FrameSchemaChange`.
pub type SchemaChange = FrameSchemaChange;
/// Factory function which returns a `Listener` and a related `EventStream`.
///
/// `Listener` provides only one function `start` to start listening. It
/// blocks the calling thread, so it should be run on a separate thread to
/// avoid blocking the main one.
///
/// `EventStream` is an iterator which returns new events once they come.
/// It is similar to `Receiver::iter`.
pub fn new_listener<X>(transport: X) -> (Listener<X>, EventStream) {
let (tx, rx) = channel();
let listener = Listener {
transport: transport,
tx: tx,
};
let stream = EventStream { rx: rx };
(listener, stream)
}
/// `Listener` provides only one function `start` to start listening. It
/// blocks the calling thread, so it should be run on a separate thread to
/// avoid blocking the main one.
pub struct Listener<X> {
transport: X,
tx: Sender<ServerEvent>,
}
impl<X: CDRSTransport> Listener<X> {
/// It starts a process of listening to new events, blocking on each frame.
pub fn start(&mut self, compressor: &Compression) -> error::Result<()> {
loop {
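// blocks on the next frame from the transport; non-event frames are skipped via the continue below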
let event_opt = try!(parse_frame(&mut self.transport, compressor))
.get_body()?
.into_server_event();
let event = if event_opt.is_some() | else {
continue;
};
match self.tx.send(event) {
Err(err) => return Err(error::Error::General(err.description().to_string())),
_ => continue,
}
}
}
}
/// `EventStream` is an iterator which returns new events once they come.
/// It is similar to `Receiver::iter`.
pub struct EventStream {
rx: Receiver<ServerEvent>,
}
impl Iterator for EventStream {
type Item = ServerEvent;
fn next(&mut self) -> Option<Self::Item> {
self.rx.recv().ok()
}
}
| {
// unwrap is safe as we've checked that event_opt.is_some()
event_opt.unwrap().event as ServerEvent
} | conditional_block |
events.rs | use std::sync::mpsc::{Sender, Receiver, channel};
use std::iter::Iterator;
use std::error::Error;
use error;
use frame::events::{ServerEvent as FrameServerEvent, SimpleServerEvent as FrameSimpleServerEvent,
SchemaChange as FrameSchemaChange};
use frame::parser::parse_frame;
use compression::Compression;
use transport::CDRSTransport;
/// Full Server Event which includes all details about the occurred change.
pub type ServerEvent = FrameServerEvent;
/// Simplified Server event. It should be used to represent an event
/// which a consumer wants to listen to.
pub type SimpleServerEvent = FrameSimpleServerEvent;
/// Reexport of `FrameSchemaChange`.
pub type SchemaChange = FrameSchemaChange;
/// Factory function which returns a `Listener` and a related `EventStream`.
///
/// `Listener` provides only one function `start` to start listening. It
/// blocks a thread so should be moved into a separate one to no release | /// It is similar to `Receiver::iter`.
pub fn new_listener<X>(transport: X) -> (Listener<X>, EventStream) {
let (tx, rx) = channel();
let listener = Listener {
transport: transport,
tx: tx,
};
let stream = EventStream { rx: rx };
(listener, stream)
}
/// `Listener` provides only one function `start` to start listening. It
/// blocks a thread so should be moved into a separate one to no release
/// main thread.
pub struct Listener<X> {
transport: X,
tx: Sender<ServerEvent>,
}
impl<X: CDRSTransport> Listener<X> {
/// It starts a process of listening to new events. Locks a frame.
pub fn start(&mut self, compressor: &Compression) -> error::Result<()> {
loop {
let event_opt = try!(parse_frame(&mut self.transport, compressor))
.get_body()?
.into_server_event();
let event = if event_opt.is_some() {
// unwrap is safe as we've checked that event_opt.is_some()
event_opt.unwrap().event as ServerEvent
} else {
continue;
};
match self.tx.send(event) {
Err(err) => return Err(error::Error::General(err.description().to_string())),
_ => continue,
}
}
}
}
/// `EventStream` is an iterator which returns new events once they come.
/// It is similar to `Receiver::iter`.
pub struct EventStream {
rx: Receiver<ServerEvent>,
}
impl Iterator for EventStream {
type Item = ServerEvent;
fn next(&mut self) -> Option<Self::Item> {
self.rx.recv().ok()
}
} | /// main thread.
///
/// `EventStream` is an iterator which returns new events once they come. | random_line_split |
events.rs | use std::sync::mpsc::{Sender, Receiver, channel};
use std::iter::Iterator;
use std::error::Error;
use error;
use frame::events::{ServerEvent as FrameServerEvent, SimpleServerEvent as FrameSimpleServerEvent,
SchemaChange as FrameSchemaChange};
use frame::parser::parse_frame;
use compression::Compression;
use transport::CDRSTransport;
/// Full Server Event which includes all details about the occurred change.
pub type ServerEvent = FrameServerEvent;
/// Simplified Server event. It should be used to represent an event
/// which a consumer wants to listen to.
pub type SimpleServerEvent = FrameSimpleServerEvent;
/// Reexport of `FrameSchemaChange`.
pub type SchemaChange = FrameSchemaChange;
/// Factory function which returns a `Listener` and a related `EventStream`.
///
/// `Listener` provides only one function `start` to start listening. It
/// blocks the calling thread, so it should be run on a separate thread to
/// avoid blocking the main one.
///
/// `EventStream` is an iterator which returns new events once they come.
/// It is similar to `Receiver::iter`.
pub fn new_listener<X>(transport: X) -> (Listener<X>, EventStream) {
let (tx, rx) = channel();
let listener = Listener {
transport: transport,
tx: tx,
};
let stream = EventStream { rx: rx };
(listener, stream)
}
/// `Listener` provides only one function `start` to start listening. It
/// blocks the calling thread, so it should be run on a separate thread to
/// avoid blocking the main one.
pub struct Listener<X> {
transport: X,
tx: Sender<ServerEvent>,
}
impl<X: CDRSTransport> Listener<X> {
/// It starts a process of listening to new events, blocking on each frame.
pub fn start(&mut self, compressor: &Compression) -> error::Result<()> |
}
/// `EventStream` is an iterator which returns new events once they come.
/// It is similar to `Receiver::iter`.
pub struct EventStream {
rx: Receiver<ServerEvent>,
}
impl Iterator for EventStream {
type Item = ServerEvent;
fn next(&mut self) -> Option<Self::Item> {
self.rx.recv().ok()
}
}
| {
loop {
let event_opt = try!(parse_frame(&mut self.transport, compressor))
.get_body()?
.into_server_event();
let event = if event_opt.is_some() {
// unwrap is safe as we've checked that event_opt.is_some()
event_opt.unwrap().event as ServerEvent
} else {
continue;
};
match self.tx.send(event) {
Err(err) => return Err(error::Error::General(err.description().to_string())),
_ => continue,
}
}
} | identifier_body |
events.rs | use std::sync::mpsc::{Sender, Receiver, channel};
use std::iter::Iterator;
use std::error::Error;
use error;
use frame::events::{ServerEvent as FrameServerEvent, SimpleServerEvent as FrameSimpleServerEvent,
SchemaChange as FrameSchemaChange};
use frame::parser::parse_frame;
use compression::Compression;
use transport::CDRSTransport;
/// Full Server Event which includes all details about the occurred change.
pub type ServerEvent = FrameServerEvent;
/// Simplified Server event. It should be used to represent an event
/// which a consumer wants to listen to.
pub type SimpleServerEvent = FrameSimpleServerEvent;
/// Reexport of `FrameSchemaChange`.
pub type SchemaChange = FrameSchemaChange;
/// Factory function which returns a `Listener` and a related `EventStream`.
///
/// `Listener` provides only one function `start` to start listening. It
/// blocks the calling thread, so it should be run on a separate thread to
/// avoid blocking the main one.
///
/// `EventStream` is an iterator which returns new events once they come.
/// It is similar to `Receiver::iter`.
pub fn new_listener<X>(transport: X) -> (Listener<X>, EventStream) {
let (tx, rx) = channel();
let listener = Listener {
transport: transport,
tx: tx,
};
let stream = EventStream { rx: rx };
(listener, stream)
}
/// `Listener` provides only one function `start` to start listening. It
/// blocks the calling thread, so it should be run on a separate thread to
/// avoid blocking the main one.
pub struct | <X> {
transport: X,
tx: Sender<ServerEvent>,
}
impl<X: CDRSTransport> Listener<X> {
/// It starts a process of listening to new events, blocking on each frame.
pub fn start(&mut self, compressor: &Compression) -> error::Result<()> {
loop {
let event_opt = try!(parse_frame(&mut self.transport, compressor))
.get_body()?
.into_server_event();
let event = if event_opt.is_some() {
// unwrap is safe as we've checked that event_opt.is_some()
event_opt.unwrap().event as ServerEvent
} else {
continue;
};
match self.tx.send(event) {
Err(err) => return Err(error::Error::General(err.description().to_string())),
_ => continue,
}
}
}
}
/// `EventStream` is an iterator which returns new events once they come.
/// It is similar to `Receiver::iter`.
pub struct EventStream {
rx: Receiver<ServerEvent>,
}
impl Iterator for EventStream {
type Item = ServerEvent;
fn next(&mut self) -> Option<Self::Item> {
self.rx.recv().ok()
}
}
| Listener | identifier_name |
checkout.py | # Copyright 2006 John Duda
# This file is part of Infoshopkeeper.
# Infoshopkeeper is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or any later version.
# Infoshopkeeper is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Infoshopkeeper; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301
# USA
from wxPython.wx import *
import os
import datetime
from objects.emprunt import Emprunt
from popups.members import AddMemberPanel, ShowMembersPanel
class CheckoutPopup(wxDialog):
def __init__(self, parent):
self.parent=parent |
wxDialog.__init__(self, parent,-1,"Check out items")
self.mastersizer = wxBoxSizer(wxVERTICAL)
self.static1 = wxStaticText(self, -1, "Check out to :")
self.mastersizer.Add(self.static1)
self.notebook = wxNotebook(self, -1, style=wxNB_TOP)
self.new_member_panel = AddMemberPanel(parent=self.notebook, main_window=parent,
on_successful_add=self.Borrow, cancel=self.Close)
self.notebook.AddPage(self.new_member_panel, "New member")
self.show_member_panel = ShowMembersPanel(parent=self.notebook, main_window=parent, motherDialog=self, on_select=self.Borrow)
self.notebook.AddPage(self.show_member_panel, "Existing member")
self.mastersizer.Add(self.notebook)
self.SetSizer(self.mastersizer)
for i in self.parent.orderbox.items:
print i.database_id, "... ", i.id
#self.b = wxButton(self, -1, "Checkout", (15, 80))
#EVT_BUTTON(self, self.b.GetId(), self.Checkout)
#self.b.SetDefault()
self.mastersizer.SetSizeHints(self)
def Borrow(self, id):
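# create one Emprunt (loan) record per item in the order box, then mark the box borrowed and clear it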
borrower = self.parent.membersList.get(id)
print borrower
for i in self.parent.orderbox.items:
# Check if this works on sqlobject 0.7... I got
# lots of problems on 0.6.1, and itemID __isn't__
# defined in emprunt, which is plain weirdness
e = Emprunt(borrower = id, itemID=i.database_id)
print i.database_id
self.parent.orderbox.setBorrowed()
self.parent.orderbox.void()
self.Close()
def OnCancel(self,event):
self.EndModal(1)
def Checkout(self,event):
borrower=self.borrower.GetValue()
if len(borrower)>0:
today="%s" % datetime.date.today()
self.parent.orderbox.change_status(today+"-"+borrower)
self.parent.orderbox.void()
self.Close() | random_line_split |
|
checkout.py | # Copyright 2006 John Duda
# This file is part of Infoshopkeeper.
# Infoshopkeeper is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or any later version.
# Infoshopkeeper is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Infoshopkeeper; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301
# USA
from wxPython.wx import *
import os
import datetime
from objects.emprunt import Emprunt
from popups.members import AddMemberPanel, ShowMembersPanel
class CheckoutPopup(wxDialog):
def __init__(self, parent):
|
def Borrow(self, id):
borrower = self.parent.membersList.get(id)
print borrower
for i in self.parent.orderbox.items:
# Check if this works on sqlobject 0.7... I got
# lots of problems on 0.6.1, and itemID __isn't__
# defined in emprunt, which is plain weirdness
e = Emprunt(borrower = id, itemID=i.database_id)
print i.database_id
self.parent.orderbox.setBorrowed()
self.parent.orderbox.void()
self.Close()
def OnCancel(self,event):
self.EndModal(1)
def Checkout(self,event):
borrower=self.borrower.GetValue()
if len(borrower)>0:
today="%s" % datetime.date.today()
self.parent.orderbox.change_status(today+"-"+borrower)
self.parent.orderbox.void()
self.Close()
| self.parent=parent
wxDialog.__init__(self, parent,-1,"Check out items")
self.mastersizer = wxBoxSizer(wxVERTICAL)
self.static1 = wxStaticText(self, -1, "Check out to :")
self.mastersizer.Add(self.static1)
self.notebook = wxNotebook(self, -1, style=wxNB_TOP)
self.new_member_panel = AddMemberPanel(parent=self.notebook, main_window=parent,
on_successful_add=self.Borrow, cancel=self.Close)
self.notebook.AddPage(self.new_member_panel, "New member")
self.show_member_panel = ShowMembersPanel(parent=self.notebook, main_window=parent, motherDialog=self, on_select=self.Borrow)
self.notebook.AddPage(self.show_member_panel, "Existing member")
self.mastersizer.Add(self.notebook)
self.SetSizer(self.mastersizer)
for i in self.parent.orderbox.items:
print i.database_id, "... ", i.id
#self.b = wxButton(self, -1, "Checkout", (15, 80))
#EVT_BUTTON(self, self.b.GetId(), self.Checkout)
#self.b.SetDefault()
self.mastersizer.SetSizeHints(self) | identifier_body |
checkout.py | # Copyright 2006 John Duda
# This file is part of Infoshopkeeper.
# Infoshopkeeper is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or any later version.
# Infoshopkeeper is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Infoshopkeeper; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301
# USA
from wxPython.wx import *
import os
import datetime
from objects.emprunt import Emprunt
from popups.members import AddMemberPanel, ShowMembersPanel
class CheckoutPopup(wxDialog):
def __init__(self, parent):
self.parent=parent
wxDialog.__init__(self, parent,-1,"Check out items")
self.mastersizer = wxBoxSizer(wxVERTICAL)
self.static1 = wxStaticText(self, -1, "Check out to :")
self.mastersizer.Add(self.static1)
self.notebook = wxNotebook(self, -1, style=wxNB_TOP)
self.new_member_panel = AddMemberPanel(parent=self.notebook, main_window=parent,
on_successful_add=self.Borrow, cancel=self.Close)
self.notebook.AddPage(self.new_member_panel, "New member")
self.show_member_panel = ShowMembersPanel(parent=self.notebook, main_window=parent, motherDialog=self, on_select=self.Borrow)
self.notebook.AddPage(self.show_member_panel, "Existing member")
self.mastersizer.Add(self.notebook)
self.SetSizer(self.mastersizer)
for i in self.parent.orderbox.items:
print i.database_id, "... ", i.id
#self.b = wxButton(self, -1, "Checkout", (15, 80))
#EVT_BUTTON(self, self.b.GetId(), self.Checkout)
#self.b.SetDefault()
self.mastersizer.SetSizeHints(self)
def Borrow(self, id):
borrower = self.parent.membersList.get(id)
print borrower
for i in self.parent.orderbox.items:
# Check if this works on sqlobject 0.7... I got
# lots of problems on 0.6.1, and itemID __isn't__
# defined in emprunt, which is plain weirdness
e = Emprunt(borrower = id, itemID=i.database_id)
print i.database_id
self.parent.orderbox.setBorrowed()
self.parent.orderbox.void()
self.Close()
def | (self,event):
self.EndModal(1)
def Checkout(self,event):
borrower=self.borrower.GetValue()
if len(borrower)>0:
today="%s" % datetime.date.today()
self.parent.orderbox.change_status(today+"-"+borrower)
self.parent.orderbox.void()
self.Close()
| OnCancel | identifier_name |
checkout.py | # Copyright 2006 John Duda
# This file is part of Infoshopkeeper.
# Infoshopkeeper is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or any later version.
# Infoshopkeeper is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Infoshopkeeper; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301
# USA
from wxPython.wx import *
import os
import datetime
from objects.emprunt import Emprunt
from popups.members import AddMemberPanel, ShowMembersPanel
class CheckoutPopup(wxDialog):
def __init__(self, parent):
self.parent=parent
wxDialog.__init__(self, parent,-1,"Check out items")
self.mastersizer = wxBoxSizer(wxVERTICAL)
self.static1 = wxStaticText(self, -1, "Check out to :")
self.mastersizer.Add(self.static1)
self.notebook = wxNotebook(self, -1, style=wxNB_TOP)
self.new_member_panel = AddMemberPanel(parent=self.notebook, main_window=parent,
on_successful_add=self.Borrow, cancel=self.Close)
self.notebook.AddPage(self.new_member_panel, "New member")
self.show_member_panel = ShowMembersPanel(parent=self.notebook, main_window=parent, motherDialog=self, on_select=self.Borrow)
self.notebook.AddPage(self.show_member_panel, "Existing member")
self.mastersizer.Add(self.notebook)
self.SetSizer(self.mastersizer)
for i in self.parent.orderbox.items:
print i.database_id, "... ", i.id
#self.b = wxButton(self, -1, "Checkout", (15, 80))
#EVT_BUTTON(self, self.b.GetId(), self.Checkout)
#self.b.SetDefault()
self.mastersizer.SetSizeHints(self)
def Borrow(self, id):
borrower = self.parent.membersList.get(id)
print borrower
for i in self.parent.orderbox.items:
# Check if this works on sqlobject 0.7... I got
# lots of problems on 0.6.1, and itemID __isn't__
# defined in emprunt, which is plain weirdness
|
self.parent.orderbox.setBorrowed()
self.parent.orderbox.void()
self.Close()
def OnCancel(self,event):
self.EndModal(1)
def Checkout(self,event):
borrower=self.borrower.GetValue()
if len(borrower)>0:
today="%s" % datetime.date.today()
self.parent.orderbox.change_status(today+"-"+borrower)
self.parent.orderbox.void()
self.Close()
| e = Emprunt(borrower = id, itemID=i.database_id)
print i.database_id | conditional_block |
bitten.py | import codecs
unicode_string = "Hello Python 3 String"
bytes_object = b"Hello Python 3 Bytes"
print(unicode_string, type(unicode_string))
print(bytes_object, type(bytes_object))
#decode to unicode_string
ux = str(object=bytes_object, encoding="utf-8", errors="strict")
print(ux, type(ux))
ux = bytes_object.decode(encoding="utf-8", errors="strict")
print(ux, type(ux))
hex_bytes = codecs.encode(b"Binary Object", "hex_codec")
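# bin() of the big-endian integer form of the text, e.g. 'hi' -> '0b110100001101001'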
def string_to_bytes( text ):
return bin(int.from_bytes(text.encode(), 'big'))
def bytes_to_string( btext ):
#btext = int('0b110100001100101011011000110110001101111', 2)
return btext.to_bytes((btext.bit_length() + 7) // 8, 'big').decode()
def char_to_bytes(char):
return bin(ord(char))
def encodes(text):
bext = text.encode(encoding="utf-8")
enc_bext = codecs.encode(bext, "hex_codec")
return enc_bext.decode("utf-8")
def decodes():
|
if __name__ == "__main__":
print( encodes("walla") )
| pass | identifier_body |
bitten.py | import codecs
unicode_string = "Hello Python 3 String"
bytes_object = b"Hello Python 3 Bytes"
print(unicode_string, type(unicode_string))
print(bytes_object, type(bytes_object))
#decode to unicode_string
ux = str(object=bytes_object, encoding="utf-8", errors="strict")
print(ux, type(ux))
ux = bytes_object.decode(encoding="utf-8", errors="strict")
print(ux, type(ux))
hex_bytes = codecs.encode(b"Binary Object", "hex_codec")
def string_to_bytes( text ):
return bin(int.from_bytes(text.encode(), 'big'))
def | ( btext ):
#btext = int('0b110100001100101011011000110110001101111', 2)
return btext.to_bytes((btext.bit_length() + 7) // 8, 'big').decode()
def char_to_bytes(char):
return bin(ord(char))
def encodes(text):
bext = text.encode(encoding="utf-8")
enc_bext = codecs.encode(bext, "hex_codec")
return enc_bext.decode("utf-8")
def decodes():
pass
if __name__ == "__main__":
print( encodes("walla") )
| bytes_to_string | identifier_name |
bitten.py | import codecs
unicode_string = "Hello Python 3 String"
bytes_object = b"Hello Python 3 Bytes"
print(unicode_string, type(unicode_string))
print(bytes_object, type(bytes_object))
#decode to unicode_string
ux = str(object=bytes_object, encoding="utf-8", errors="strict")
print(ux, type(ux))
ux = bytes_object.decode(encoding="utf-8", errors="strict")
print(ux, type(ux))
hex_bytes = codecs.encode(b"Binary Object", "hex_codec")
def string_to_bytes( text ):
return bin(int.from_bytes(text.encode(), 'big'))
| #btext = int('0b110100001100101011011000110110001101111', 2)
return btext.to_bytes((btext.bit_length() + 7) // 8, 'big').decode()
def char_to_bytes(char):
return bin(ord(char))
def encodes(text):
bext = text.encode(encoding="utf-8")
enc_bext = codecs.encode(bext, "hex_codec")
return enc_bext.decode("utf-8")
def decodes():
pass
if __name__ == "__main__":
print( encodes("walla") ) | def bytes_to_string( btext ): | random_line_split |
bitten.py | import codecs
unicode_string = "Hello Python 3 String"
bytes_object = b"Hello Python 3 Bytes"
print(unicode_string, type(unicode_string))
print(bytes_object, type(bytes_object))
#decode to unicode_string
ux = str(object=bytes_object, encoding="utf-8", errors="strict")
print(ux, type(ux))
ux = bytes_object.decode(encoding="utf-8", errors="strict")
print(ux, type(ux))
hex_bytes = codecs.encode(b"Binary Object", "hex_codec")
def string_to_bytes( text ):
return bin(int.from_bytes(text.encode(), 'big'))
def bytes_to_string( btext ):
#btext = int('0b110100001100101011011000110110001101111', 2)
return btext.to_bytes((btext.bit_length() + 7) // 8, 'big').decode()
def char_to_bytes(char):
return bin(ord(char))
def encodes(text):
bext = text.encode(encoding="utf-8")
enc_bext = codecs.encode(bext, "hex_codec")
return enc_bext.decode("utf-8")
def decodes():
pass
if __name__ == "__main__":
| print( encodes("walla") ) | conditional_block |
|
css-tag_test.ts | /**
* @license
* Copyright 2020 Google LLC
* SPDX-License-Identifier: BSD-3-Clause
*/
import {css, CSSResult, unsafeCSS} from '../css-tag.js';
import {assert} from '@esm-bundle/chai';
suite('Styling', () => {
suite('css tag', () => {
test('CSSResults always produce the same stylesheet', () => {
// Alias avoids syntax highlighting issues in editors
const cssValue = css;
const makeStyle = () => cssValue`foo`;
const style1 = makeStyle();
assert.equal(
(style1 as CSSResult).styleSheet,
(style1 as CSSResult).styleSheet
);
const style2 = makeStyle();
assert.equal(
(style1 as CSSResult).styleSheet,
(style2 as CSSResult).styleSheet
);
});
test('css with same values always produce the same stylesheet', () => {
// Alias avoids syntax highlighting issues in editors
const cssValue = css;
const makeStyle = () => cssValue`background: ${cssValue`blue`}`;
const style1 = makeStyle();
assert.equal(
(style1 as CSSResult).styleSheet,
(style1 as CSSResult).styleSheet
);
const style2 = makeStyle();
assert.equal(
(style1 as CSSResult).styleSheet,
(style2 as CSSResult).styleSheet
);
});
test('unsafeCSS() CSSResults always produce the same stylesheet', () => {
// Alias avoids syntax highlighting issues in editors
const makeStyle = () => unsafeCSS(`foo`);
const style1 = makeStyle();
assert.equal(
(style1 as CSSResult).styleSheet,
(style1 as CSSResult).styleSheet
);
const style2 = makeStyle();
assert.equal(
(style1 as CSSResult).styleSheet,
(style2 as CSSResult).styleSheet
);
});
test('`css` get styles throws when unsafe values are used', async () => {
assert.throws(() => {
css`
div {
border: ${`2px solid blue;` as any};
}
`;
});
});
test('`css` allows real JavaScript numbers', async () => {
const spacer = 2;
// Alias avoids syntax highlighting issues in editors
const cssValue = css;
const result = cssValue`
div {
margin: ${spacer * 2}px;
}
`;
assert.equal(
(result as CSSResult).cssText.replace(/\s/g, ''),
'div{margin:4px;}'
);
});
test('`CSSResult` cannot be constructed', async () => {
// Note, this is done for security, instead use `css` or `unsafeCSS`
assert.throws(() => {
new (CSSResult as any)('throw', Symbol());
});
});
test('`CSSResult` allows for String type coercion via toString()', async () => {
const cssModule = css`
.my-module { | // Example use case: apply cssModule as global page styles at
// document.body level.
const bodyStyles = `${cssModule}`;
assert.equal(bodyStyles.replace(/\s/g, ''), '.my-module{color:yellow;}');
});
});
}); | color: yellow;
}
`;
// Coercion allows for reusage of css-tag outcomes in regular strings. | random_line_split |
index.js | (function() {
'use strict';
let EE = require('./ee');
let ES6 = require('./es6');
let log = require('ee-log');
let iterations = 100;
let start;
let ee = 0;
let es6 = 0;
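// totals accumulate hrtime()[1], the nanosecond field only -- assumes each timed batch stays under one second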
for (let k = 0; k < 1000; k++) {
start = process.hrtime();
for (let i = 0; i < iterations; i++) {
new ES6({
name: "fabian"
, age: 15 | log.success('ES6: '+(process.hrtime(start)[1]));
start = process.hrtime();
for (let i = 0; i < iterations; i++) {
new EE({
name: "fabian"
, age: 15
, isAlive: true
}).describe();
}
ee += process.hrtime(start)[1];
log.success('EE: '+(process.hrtime(start)[1]));
}
log.warn('EE: '+(ee/1000/1000));
log.warn('ES6: '+(es6/1000/1000));
})(); | , isAlive: true
}).describe();
}
es6 += process.hrtime(start)[1]; | random_line_split |
index.js | (function() {
'use strict';
let EE = require('./ee');
let ES6 = require('./es6');
let log = require('ee-log');
let iterations = 100;
let start;
let ee = 0;
let es6 = 0;
for (let k = 0; k < 1000; k++) {
start = process.hrtime();
for (let i = 0; i < iterations; i++) |
es6 += process.hrtime(start)[1];
log.success('ES6: '+(process.hrtime(start)[1]));
start = process.hrtime();
for (let i = 0; i < iterations; i++) {
new EE({
name: "fabian"
, age: 15
, isAlive: true
}).describe();
}
ee += process.hrtime(start)[1];
log.success('EE: '+(process.hrtime(start)[1]));
}
log.warn('EE: '+(ee/1000/1000));
log.warn('ES6: '+(es6/1000/1000));
})(); | {
new ES6({
name: "fabian"
, age: 15
, isAlive: true
}).describe();
} | conditional_block |
cache.py |
# Copyright (C) 2014 - Oscar Campos <[email protected]>
# This program is Free Software see LICENSE file for details
import os
import json
import platform
from collections import defaultdict
from anaconda_go.lib import go
from anaconda_go.lib.plugin import typing
cachepath = {
'linux': os.path.join('~', '.local', 'share', 'anaconda', 'cache'),
'darwin': os.path.join('~', 'Library', 'Cache', 'anaconda'),
'windows': os.path.join(os.getenv('APPDATA') or '~', 'Anaconda', 'Cache')
}
cache_directory = os.path.expanduser(
cachepath.get(platform.system().lower())
)
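# in-memory package metadata, keyed by the active GOROOT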
PACKAGES_CACHE = defaultdict(lambda: [])
def append(package: typing.Dict) -> None:
"""Append the given package into the cache
"""
global PACKAGES_CACHE
if not package_in_cache(package):
PACKAGES_CACHE[go.GOROOT].append(package)
def package_in_cache(package: typing.Dict) -> bool:
"""Look for the given package in the cache and return true if is there
"""
for pkg in PACKAGES_CACHE[go.GOROOT]:
if pkg['ImportPath'] == package['ImportPath']:
return True
return False
def lookup(node_name: str='') -> typing.Dict:
"""Lookup the given node_name in the cache and return it back
"""
node = {}
if node_name == '':
node = PACKAGES_CACHE[go.GOROOT]
else:
for pkg in PACKAGES_CACHE[go.GOROOT]:
guru = pkg.get('Guru')
if guru is None:
continue
path = guru['package'].get('path')
if path is not None and path == node_name:
|
for member in guru['package'].get('members', []):
if member.get('name') == node_name:
node = member
break
for method in member.get('methods', []):
if method['name'] == node_name:
node = method
break
return node
def persist_package_cache() -> None:
"""Write the contents of the package cache for this GOROOT into the disk
"""
gopath = go.GOPATH.replace(os.path.sep, '_')
cachefile = os.path.join(cache_directory, gopath, 'packages.cache')
if not os.path.exists(os.path.dirname(cachefile)):
os.makedirs(os.path.dirname(cachefile))
with open(cachefile, 'w') as fd:
json.dump(PACKAGES_CACHE[go.GOROOT], fd)
def load_package_cache() -> typing.List:
"""Load a previously stores package cache file
"""
global PACKAGES_CACHE
gopath = go.GOPATH.replace(os.path.sep, '_')
cachefile = os.path.join(cache_directory, gopath, 'packages.cache')
try:
with open(cachefile, 'r') as fd:
PACKAGES_CACHE[go.GOROOT] = json.load(fd)
except FileNotFoundError:
pass
| node = guru
break | conditional_block |
cache.py |
# Copyright (C) 2014 - Oscar Campos <[email protected]>
# This program is Free Software see LICENSE file for details
import os
import json
import platform
from collections import defaultdict
from anaconda_go.lib import go
from anaconda_go.lib.plugin import typing
cachepath = {
'linux': os.path.join('~', '.local', 'share', 'anaconda', 'cache'),
'darwin': os.path.join('~', 'Library', 'Cache', 'anaconda'),
'windows': os.path.join(os.getenv('APPDATA') or '~', 'Anaconda', 'Cache')
}
cache_directory = os.path.expanduser(
cachepath.get(platform.system().lower())
)
PACKAGES_CACHE = defaultdict(lambda: [])
def append(package: typing.Dict) -> None:
"""Append the given package into the cache
"""
global PACKAGES_CACHE
if not package_in_cache(package):
PACKAGES_CACHE[go.GOROOT].append(package)
def package_in_cache(package: typing.Dict) -> bool:
"""Look for the given package in the cache and return true if is there
"""
for pkg in PACKAGES_CACHE[go.GOROOT]:
if pkg['ImportPath'] == package['ImportPath']:
return True
return False
def lookup(node_name: str='') -> typing.Dict:
|
def persist_package_cache() -> None:
"""Write the contents of the package cache for this GOROOT into the disk
"""
gopath = go.GOPATH.replace(os.path.sep, '_')
cachefile = os.path.join(cache_directory, gopath, 'packages.cache')
if not os.path.exists(os.path.dirname(cachefile)):
os.makedirs(os.path.dirname(cachefile))
with open(cachefile, 'w') as fd:
json.dump(PACKAGES_CACHE[go.GOROOT], fd)
def load_package_cache() -> typing.List:
"""Load a previously stores package cache file
"""
global PACKAGES_CACHE
gopath = go.GOPATH.replace(os.path.sep, '_')
cachefile = os.path.join(cache_directory, gopath, 'packages.cache')
try:
with open(cachefile, 'r') as fd:
PACKAGES_CACHE[go.GOROOT] = json.load(fd)
except FileNotFoundError:
pass
| """Lookup the given node_name in the cache and return it back
"""
node = {}
if node_name == '':
node = PACKAGES_CACHE[go.GOROOT]
else:
for pkg in PACKAGES_CACHE[go.GOROOT]:
guru = pkg.get('Guru')
if guru is None:
continue
path = guru['package'].get('path')
if path is not None and path == node_name:
node = guru
break
for member in guru['package'].get('members', []):
if member.get('name') == node_name:
node = member
break
for method in member.get('methods', []):
if method['name'] == node_name:
node = method
break
return node | identifier_body |
cache.py | # Copyright (C) 2014 - Oscar Campos <[email protected]>
# This program is Free Software see LICENSE file for details
import os
import json
import platform
from collections import defaultdict
from anaconda_go.lib import go
from anaconda_go.lib.plugin import typing
cachepath = {
'linux': os.path.join('~', '.local', 'share', 'anaconda', 'cache'),
'darwin': os.path.join('~', 'Library', 'Cache', 'anaconda'),
'windows': os.path.join(os.getenv('APPDATA') or '~', 'Anaconda', 'Cache')
}
cache_directory = os.path.expanduser(
cachepath.get(platform.system().lower())
)
PACKAGES_CACHE = defaultdict(lambda: [])
def append(package: typing.Dict) -> None:
"""Append the given package into the cache
"""
global PACKAGES_CACHE
if not package_in_cache(package):
PACKAGES_CACHE[go.GOROOT].append(package)
def package_in_cache(package: typing.Dict) -> bool:
"""Look for the given package in the cache and return true if is there
"""
for pkg in PACKAGES_CACHE[go.GOROOT]:
if pkg['ImportPath'] == package['ImportPath']:
return True
return False
def lookup(node_name: str='') -> typing.Dict:
"""Lookup the given node_name in the cache and return it back
"""
node = {}
if node_name == '':
node = PACKAGES_CACHE[go.GOROOT]
else:
for pkg in PACKAGES_CACHE[go.GOROOT]:
guru = pkg.get('Guru')
if guru is None:
continue
path = guru['package'].get('path')
if path is not None and path == node_name:
node = guru
break
for member in guru['package'].get('members', []):
if member.get('name') == node_name:
node = member
break
for method in member.get('methods', []):
if method['name'] == node_name:
node = method
break
return node
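# Hedged usage sketch (not part of the original module): the helper name and
# the Guru payload below are illustrative assumptions, not real guru output.
def _demo_cache_roundtrip():
    append({
        'ImportPath': 'fmt',
        'Guru': {'package': {'path': 'fmt', 'members': []}},
    })
    persist_package_cache()  # writes <cache_directory>/<gopath>/packages.cache
    load_package_cache()     # restores PACKAGES_CACHE from that file
    return lookup('fmt')     # returns the stored Guru dict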
def persist_package_cache() -> None:
"""Write the contents of the package cache for this GOROOT into the disk
"""
gopath = go.GOPATH.replace(os.path.sep, '_')
cachefile = os.path.join(cache_directory, gopath, 'packages.cache')
if not os.path.exists(os.path.dirname(cachefile)):
os.makedirs(os.path.dirname(cachefile))
with open(cachefile, 'w') as fd:
json.dump(PACKAGES_CACHE[go.GOROOT], fd)
def load_package_cache() -> typing.List:
"""Load a previously stores package cache file
"""
global PACKAGES_CACHE
gopath = go.GOPATH.replace(os.path.sep, '_')
cachefile = os.path.join(cache_directory, gopath, 'packages.cache')
try:
with open(cachefile, 'r') as fd:
PACKAGES_CACHE[go.GOROOT] = json.load(fd) | except FileNotFoundError:
pass | random_line_split |
|
cache.py |
# Copyright (C) 2014 - Oscar Campos <[email protected]>
# This program is Free Software see LICENSE file for details
import os
import json
import platform
from collections import defaultdict
from anaconda_go.lib import go
from anaconda_go.lib.plugin import typing
cachepath = {
'linux': os.path.join('~', '.local', 'share', 'anaconda', 'cache'),
'darwin': os.path.join('~', 'Library', 'Cache', 'anaconda'),
'windows': os.path.join(os.getenv('APPDATA') or '~', 'Anaconda', 'Cache')
}
cache_directory = os.path.expanduser(
cachepath.get(platform.system().lower())
)
PACKAGES_CACHE = defaultdict(lambda: [])
def append(package: typing.Dict) -> None:
"""Append the given package into the cache
"""
global PACKAGES_CACHE
if not package_in_cache(package):
PACKAGES_CACHE[go.GOROOT].append(package)
def | (package: typing.Dict) -> bool:
"""Look for the given package in the cache and return true if is there
"""
for pkg in PACKAGES_CACHE[go.GOROOT]:
if pkg['ImportPath'] == package['ImportPath']:
return True
return False
def lookup(node_name: str='') -> typing.Dict:
"""Lookup the given node_name in the cache and return it back
"""
node = {}
if node_name == '':
node = PACKAGES_CACHE[go.GOROOT]
else:
for pkg in PACKAGES_CACHE[go.GOROOT]:
guru = pkg.get('Guru')
if guru is None:
continue
path = guru['package'].get('path')
if path is not None and path == node_name:
node = guru
break
for member in guru['package'].get('members', []):
if member.get('name') == node_name:
node = member
break
for method in member.get('methods', []):
if method['name'] == node_name:
node = method
break
return node
def persist_package_cache() -> None:
"""Write the contents of the package cache for this GOROOT into the disk
"""
gopath = go.GOPATH.replace(os.path.sep, '_')
cachefile = os.path.join(cache_directory, gopath, 'packages.cache')
if not os.path.exists(os.path.dirname(cachefile)):
os.makedirs(os.path.dirname(cachefile))
with open(cachefile, 'w') as fd:
json.dump(PACKAGES_CACHE[go.GOROOT], fd)
def load_package_cache() -> typing.List:
"""Load a previously stores package cache file
"""
global PACKAGES_CACHE
gopath = go.GOPATH.replace(os.path.sep, '_')
cachefile = os.path.join(cache_directory, gopath, 'packages.cache')
try:
with open(cachefile, 'r') as fd:
PACKAGES_CACHE[go.GOROOT] = json.load(fd)
except FileNotFoundError:
pass
| package_in_cache | identifier_name |
pay.js | 'use strict';
let lib = {
"_id": "5e409c94c5a59210a815262c",
"name": "pay",
"description": "MS pay service for marketplace",
"type": "service",
"configuration": {
"subType": "ecommerce",
"port": 4102,
"group": "Marketplace",
"requestTimeout": 30,
"requestTimeoutRenewal": 5,
"maintenance": { | "readiness": "/heartbeat"
}
},
"versions": [
{
"version": "1",
"extKeyRequired": true,
"oauth": true,
"provision_ACL": false,
"tenant_Profile": false,
"urac": false,
"urac_ACL": false,
"urac_Config": false,
"urac_GroupConfig": false,
"urac_Profile": false,
"apis": [
{
l: "pay items",
v: "/pay",
m: "post",
group: "Pay"
},
{
l: "Get all pay ",
v: "/pays",
m: "get",
group: "Pay"
}
],
"documentation": {}
}
],
"metadata": {
"tags": ["order", "ecommerce"],
"program": ["marketplace"]
},
"ui": {
"main": "Gateway",
"sub": ""
},
"settings": {
"acl": {
"public": {
"ro": true
}
},
"recipes": [],
"environments": {}
},
"src": {
"provider": "github",
"owner": "ht",
"repo": "mkpl.order"
}
};
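// Hedged usage sketch (not part of the original file): a consumer of this
// service description might read it like so; the require path is assumed.
//
//   const pay = require('./pay.js');
//   pay.configuration.port;              // 4102
//   pay.versions[0].apis.map(a => a.v);  // ['/pay', '/pays']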
module.exports = lib; | "port": {
"type": "maintenance"
}, | random_line_split |
sample.rs | use std::collections::HashMap; | /// total number of events.
pub struct Sample<T> {
pub counts: HashMap<T, usize>,
pub total: usize,
}
impl<T: Eq + Hash> Sample<T> {
/// Creates a new Sample.
pub fn new() -> Sample<T> {
Sample {
counts: HashMap::new(),
total: 0,
}
}
/// Add an event to a sample.
pub fn add(&mut self, event: T) {
let count = self.counts.entry(event).or_insert(0);
*count += 1;
self.total += 1;
}
/// The probability of an event in a sample.
pub fn p(&self, event: &T) -> f64 {
let c = *self.counts.get(event).unwrap_or(&0);
(c as f64) / (self.total as f64)
}
}
// ---------------------------
impl<T: Eq + Hash> Extend<T> for Sample<T> {
fn extend<I: IntoIterator<Item=T>>(&mut self, iter: I) {
for k in iter { self.add(k); }
}
}
impl<T: Eq + Hash> FromIterator<T> for Sample<T> {
fn from_iter<I: IntoIterator<Item=T>>(iterable: I) -> Sample<T> {
let mut sample = Sample::new();
sample.extend( iterable.into_iter() );
sample
}
} | use std::hash::Hash;
use std::iter::FromIterator;
/// A collection of events, with a running total of counts for each event and | random_line_split |
sample.rs | use std::collections::HashMap;
use std::hash::Hash;
use std::iter::FromIterator;
/// A collection of events, with a running total of counts for each event and
/// total number of events.
pub struct Sample<T> {
pub counts: HashMap<T, usize>,
pub total: usize,
}
impl<T: Eq + Hash> Sample<T> {
/// Creates a new Sample.
pub fn new() -> Sample<T> {
Sample {
counts: HashMap::new(),
total: 0,
}
}
/// Add an event to a sample.
pub fn add(&mut self, event: T) {
let count = self.counts.entry(event).or_insert(0);
*count += 1;
self.total += 1;
}
/// The probability of an event in a sample.
pub fn | (&self, event: &T) -> f64 {
let c = *self.counts.get(event).unwrap_or(&0);
(c as f64) / (self.total as f64)
}
}
// ---------------------------
impl<T: Eq + Hash> Extend<T> for Sample<T> {
fn extend<I: IntoIterator<Item=T>>(&mut self, iter: I) {
for k in iter { self.add(k); }
}
}
impl<T: Eq + Hash> FromIterator<T> for Sample<T> {
fn from_iter<I: IntoIterator<Item=T>>(iterable: I) -> Sample<T> {
let mut sample = Sample::new();
sample.extend( iterable.into_iter() );
sample
}
}
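// Hedged usage sketch (not part of the original file); it assumes the elided
// method above is the probability accessor `p`.
#[cfg(test)]
mod demo_tests {
    use super::Sample;

    #[test]
    fn empirical_probability() {
        let s: Sample<&str> = vec!["h", "t", "h", "h"].into_iter().collect();
        assert_eq!(s.total, 4);
        assert!((s.p(&"h") - 0.75).abs() < 1e-9);
    }
}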
| p | identifier_name |
htmlhrelement.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use cssparser::RGBA;
use dom::bindings::codegen::Bindings::HTMLHRElementBinding::{self, HTMLHRElementMethods};
use dom::bindings::inheritance::Castable;
use dom::bindings::js::{LayoutJS, Root};
use dom::bindings::str::DOMString;
use dom::document::Document;
use dom::element::{Element, RawLayoutElementHelpers};
use dom::htmlelement::HTMLElement;
use dom::node::Node;
use dom::virtualmethods::VirtualMethods;
use dom_struct::dom_struct;
use html5ever_atoms::LocalName;
use style::attr::{AttrValue, LengthOrPercentageOrAuto};
#[dom_struct]
pub struct HTMLHRElement {
htmlelement: HTMLElement,
}
impl HTMLHRElement {
fn new_inherited(local_name: LocalName, prefix: Option<DOMString>, document: &Document) -> HTMLHRElement {
HTMLHRElement {
htmlelement: HTMLElement::new_inherited(local_name, prefix, document)
}
}
#[allow(unrooted_must_root)]
pub fn new(local_name: LocalName,
prefix: Option<DOMString>,
document: &Document) -> Root<HTMLHRElement> {
Node::reflect_node(box HTMLHRElement::new_inherited(local_name, prefix, document),
document,
HTMLHRElementBinding::Wrap)
}
}
impl HTMLHRElementMethods for HTMLHRElement {
// https://html.spec.whatwg.org/multipage/#dom-hr-align
make_getter!(Align, "align");
// https://html.spec.whatwg.org/multipage/#dom-hr-align
make_atomic_setter!(SetAlign, "align");
// https://html.spec.whatwg.org/multipage/#dom-hr-color
make_getter!(Color, "color");
// https://html.spec.whatwg.org/multipage/#dom-hr-color
make_legacy_color_setter!(SetColor, "color");
// https://html.spec.whatwg.org/multipage/#dom-hr-width
make_getter!(Width, "width");
// https://html.spec.whatwg.org/multipage/#dom-hr-width
make_dimension_setter!(SetWidth, "width");
}
pub trait HTMLHRLayoutHelpers {
fn get_color(&self) -> Option<RGBA>;
fn get_width(&self) -> LengthOrPercentageOrAuto;
}
impl HTMLHRLayoutHelpers for LayoutJS<HTMLHRElement> {
#[allow(unsafe_code)]
fn get_color(&self) -> Option<RGBA> {
unsafe {
(&*self.upcast::<Element>().unsafe_get())
.get_attr_for_layout(&ns!(), &local_name!("color"))
.and_then(AttrValue::as_color)
.cloned()
}
}
#[allow(unsafe_code)]
fn get_width(&self) -> LengthOrPercentageOrAuto {
unsafe {
(&*self.upcast::<Element>().unsafe_get())
.get_attr_for_layout(&ns!(), &local_name!("width"))
.map(AttrValue::as_dimension)
.cloned()
.unwrap_or(LengthOrPercentageOrAuto::Auto)
}
}
}
impl VirtualMethods for HTMLHRElement {
fn | (&self) -> Option<&VirtualMethods> {
Some(self.upcast::<HTMLElement>() as &VirtualMethods)
}
fn parse_plain_attribute(&self, name: &LocalName, value: DOMString) -> AttrValue {
match name {
&local_name!("align") => AttrValue::from_dimension(value.into()),
&local_name!("color") => AttrValue::from_legacy_color(value.into()),
&local_name!("width") => AttrValue::from_dimension(value.into()),
_ => self.super_type().unwrap().parse_plain_attribute(name, value),
}
}
}
| super_type | identifier_name |
htmlhrelement.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use cssparser::RGBA;
use dom::bindings::codegen::Bindings::HTMLHRElementBinding::{self, HTMLHRElementMethods};
use dom::bindings::inheritance::Castable;
use dom::bindings::js::{LayoutJS, Root};
use dom::bindings::str::DOMString;
use dom::document::Document;
use dom::element::{Element, RawLayoutElementHelpers};
use dom::htmlelement::HTMLElement;
use dom::node::Node;
use dom::virtualmethods::VirtualMethods;
use dom_struct::dom_struct;
use html5ever_atoms::LocalName;
use style::attr::{AttrValue, LengthOrPercentageOrAuto};
#[dom_struct]
pub struct HTMLHRElement {
htmlelement: HTMLElement,
}
impl HTMLHRElement {
fn new_inherited(local_name: LocalName, prefix: Option<DOMString>, document: &Document) -> HTMLHRElement {
HTMLHRElement {
htmlelement: HTMLElement::new_inherited(local_name, prefix, document)
}
}
#[allow(unrooted_must_root)]
pub fn new(local_name: LocalName,
prefix: Option<DOMString>,
document: &Document) -> Root<HTMLHRElement> {
Node::reflect_node(box HTMLHRElement::new_inherited(local_name, prefix, document),
document,
HTMLHRElementBinding::Wrap)
}
}
| impl HTMLHRElementMethods for HTMLHRElement {
// https://html.spec.whatwg.org/multipage/#dom-hr-align
make_getter!(Align, "align");
// https://html.spec.whatwg.org/multipage/#dom-hr-align
make_atomic_setter!(SetAlign, "align");
// https://html.spec.whatwg.org/multipage/#dom-hr-color
make_getter!(Color, "color");
// https://html.spec.whatwg.org/multipage/#dom-hr-color
make_legacy_color_setter!(SetColor, "color");
// https://html.spec.whatwg.org/multipage/#dom-hr-width
make_getter!(Width, "width");
// https://html.spec.whatwg.org/multipage/#dom-hr-width
make_dimension_setter!(SetWidth, "width");
}
pub trait HTMLHRLayoutHelpers {
fn get_color(&self) -> Option<RGBA>;
fn get_width(&self) -> LengthOrPercentageOrAuto;
}
impl HTMLHRLayoutHelpers for LayoutJS<HTMLHRElement> {
#[allow(unsafe_code)]
fn get_color(&self) -> Option<RGBA> {
unsafe {
(&*self.upcast::<Element>().unsafe_get())
.get_attr_for_layout(&ns!(), &local_name!("color"))
.and_then(AttrValue::as_color)
.cloned()
}
}
#[allow(unsafe_code)]
fn get_width(&self) -> LengthOrPercentageOrAuto {
unsafe {
(&*self.upcast::<Element>().unsafe_get())
.get_attr_for_layout(&ns!(), &local_name!("width"))
.map(AttrValue::as_dimension)
.cloned()
.unwrap_or(LengthOrPercentageOrAuto::Auto)
}
}
}
impl VirtualMethods for HTMLHRElement {
fn super_type(&self) -> Option<&VirtualMethods> {
Some(self.upcast::<HTMLElement>() as &VirtualMethods)
}
fn parse_plain_attribute(&self, name: &LocalName, value: DOMString) -> AttrValue {
match name {
&local_name!("align") => AttrValue::from_dimension(value.into()),
&local_name!("color") => AttrValue::from_legacy_color(value.into()),
&local_name!("width") => AttrValue::from_dimension(value.into()),
_ => self.super_type().unwrap().parse_plain_attribute(name, value),
}
}
} | random_line_split |
|
sig.rs | use std::collections::HashMap; |
use crypto::hmac::Hmac;
use crypto::mac::Mac;
use crypto::sha1::Sha1;
use super::util;
fn transform_payload<K, V>(d: &HashMap<K, V>) -> String
where K: AsRef<str> + Eq + Hash,
V: AsRef<str> + Eq + Hash
{
let mut kv_list: Vec<_> = d.iter().map(|(k, v)| (k.as_ref(), v.as_ref())).collect();
kv_list.sort_by(|a, b| a.0.cmp(b.0));
let mut result = String::new();
for (i, (k, v)) in kv_list.into_iter().enumerate() {
if i > 0 {
result.push('&');
}
result.push_str(k);
result.push('=');
result.push_str(v);
}
result
}
pub fn sign<K, V, S>(d: &HashMap<K, V>, secret: S) -> String
where K: AsRef<str> + Eq + Hash,
V: AsRef<str> + Eq + Hash,
S: AsRef<str>
{
let payload = transform_payload(d);
let mut hmac = Hmac::new(Sha1::new(), secret.as_ref().as_bytes());
hmac.input(payload.as_bytes());
util::b64encode(hmac.result().code())
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_transform_payload() {
let x = {
let mut tmp: HashMap<&str, String> = HashMap::new();
tmp.insert("k2", "v2".to_string());
tmp.insert("k3", "v3".to_string());
tmp.insert("k1", "v1".to_string());
tmp
};
assert_eq!(transform_payload(&x), "k1=v1&k2=v2&k3=v3");
}
#[test]
fn test_sign() {
let x = {
let mut tmp: HashMap<&str, String> = HashMap::new();
tmp.insert("k2", "v2".to_string());
tmp.insert("k3", "v3".to_string());
tmp.insert("k1", "v1".to_string());
tmp
};
assert_eq!(sign(&x, "012345"), "iAKpGb9i8EKY8q4HPfiMdfb27OM=");
}
} | use std::hash::Hash; | random_line_split |
sig.rs | use std::collections::HashMap;
use std::hash::Hash;
use crypto::hmac::Hmac;
use crypto::mac::Mac;
use crypto::sha1::Sha1;
use super::util;
fn transform_payload<K, V>(d: &HashMap<K, V>) -> String
where K: AsRef<str> + Eq + Hash,
V: AsRef<str> + Eq + Hash
{
let mut kv_list: Vec<_> = d.iter().map(|(k, v)| (k.as_ref(), v.as_ref())).collect();
kv_list.sort_by(|a, b| a.0.cmp(b.0));
let mut result = String::new();
for (i, (k, v)) in kv_list.into_iter().enumerate() {
if i > 0 {
result.push('&');
}
result.push_str(k);
result.push('=');
result.push_str(v);
}
result
}
pub fn | <K, V, S>(d: &HashMap<K, V>, secret: S) -> String
where K: AsRef<str> + Eq + Hash,
V: AsRef<str> + Eq + Hash,
S: AsRef<str>
{
let payload = transform_payload(d);
let mut hmac = Hmac::new(Sha1::new(), secret.as_ref().as_bytes());
hmac.input(payload.as_bytes());
util::b64encode(hmac.result().code())
}
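// Hedged usage sketch (not part of the original module): the parameter names
// and the secret below are illustrative assumptions only.
#[allow(dead_code)]
fn demo_sign() -> String {
    let mut params: HashMap<&str, &str> = HashMap::new();
    params.insert("user", "alice");
    params.insert("ts", "1700000000");
    // Keys are sorted first, so the signed payload is "ts=1700000000&user=alice".
    sign(&params, "shared-secret")
}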
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_transform_payload() {
let x = {
let mut tmp: HashMap<&str, String> = HashMap::new();
tmp.insert("k2", "v2".to_string());
tmp.insert("k3", "v3".to_string());
tmp.insert("k1", "v1".to_string());
tmp
};
assert_eq!(transform_payload(&x), "k1=v1&k2=v2&k3=v3");
}
#[test]
fn test_sign() {
let x = {
let mut tmp: HashMap<&str, String> = HashMap::new();
tmp.insert("k2", "v2".to_string());
tmp.insert("k3", "v3".to_string());
tmp.insert("k1", "v1".to_string());
tmp
};
assert_eq!(sign(&x, "012345"), "iAKpGb9i8EKY8q4HPfiMdfb27OM=");
}
}
| sign | identifier_name |
sig.rs | use std::collections::HashMap;
use std::hash::Hash;
use crypto::hmac::Hmac;
use crypto::mac::Mac;
use crypto::sha1::Sha1;
use super::util;
fn transform_payload<K, V>(d: &HashMap<K, V>) -> String
where K: AsRef<str> + Eq + Hash,
V: AsRef<str> + Eq + Hash
{
let mut kv_list: Vec<_> = d.iter().map(|(k, v)| (k.as_ref(), v.as_ref())).collect();
kv_list.sort_by(|a, b| a.0.cmp(b.0));
let mut result = String::new();
for (i, (k, v)) in kv_list.into_iter().enumerate() {
if i > 0 |
result.push_str(k);
result.push('=');
result.push_str(v);
}
result
}
pub fn sign<K, V, S>(d: &HashMap<K, V>, secret: S) -> String
where K: AsRef<str> + Eq + Hash,
V: AsRef<str> + Eq + Hash,
S: AsRef<str>
{
let payload = transform_payload(d);
let mut hmac = Hmac::new(Sha1::new(), secret.as_ref().as_bytes());
hmac.input(payload.as_bytes());
util::b64encode(hmac.result().code())
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_transform_payload() {
let x = {
let mut tmp: HashMap<&str, String> = HashMap::new();
tmp.insert("k2", "v2".to_string());
tmp.insert("k3", "v3".to_string());
tmp.insert("k1", "v1".to_string());
tmp
};
assert_eq!(transform_payload(&x), "k1=v1&k2=v2&k3=v3");
}
#[test]
fn test_sign() {
let x = {
let mut tmp: HashMap<&str, String> = HashMap::new();
tmp.insert("k2", "v2".to_string());
tmp.insert("k3", "v3".to_string());
tmp.insert("k1", "v1".to_string());
tmp
};
assert_eq!(sign(&x, "012345"), "iAKpGb9i8EKY8q4HPfiMdfb27OM=");
}
}
| {
result.push('&');
} | conditional_block |
sig.rs | use std::collections::HashMap;
use std::hash::Hash;
use crypto::hmac::Hmac;
use crypto::mac::Mac;
use crypto::sha1::Sha1;
use super::util;
fn transform_payload<K, V>(d: &HashMap<K, V>) -> String
where K: AsRef<str> + Eq + Hash,
V: AsRef<str> + Eq + Hash
{
let mut kv_list: Vec<_> = d.iter().map(|(k, v)| (k.as_ref(), v.as_ref())).collect();
kv_list.sort_by(|a, b| a.0.cmp(b.0));
let mut result = String::new();
for (i, (k, v)) in kv_list.into_iter().enumerate() {
if i > 0 {
result.push('&');
}
result.push_str(k);
result.push('=');
result.push_str(v);
}
result
}
pub fn sign<K, V, S>(d: &HashMap<K, V>, secret: S) -> String
where K: AsRef<str> + Eq + Hash,
V: AsRef<str> + Eq + Hash,
S: AsRef<str>
{
let payload = transform_payload(d);
let mut hmac = Hmac::new(Sha1::new(), secret.as_ref().as_bytes());
hmac.input(payload.as_bytes());
util::b64encode(hmac.result().code())
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_transform_payload() {
let x = {
let mut tmp: HashMap<&str, String> = HashMap::new();
tmp.insert("k2", "v2".to_string());
tmp.insert("k3", "v3".to_string());
tmp.insert("k1", "v1".to_string());
tmp
};
assert_eq!(transform_payload(&x), "k1=v1&k2=v2&k3=v3");
}
#[test]
fn test_sign() |
}
| {
let x = {
let mut tmp: HashMap<&str, String> = HashMap::new();
tmp.insert("k2", "v2".to_string());
tmp.insert("k3", "v3".to_string());
tmp.insert("k1", "v1".to_string());
tmp
};
assert_eq!(sign(&x, "012345"), "iAKpGb9i8EKY8q4HPfiMdfb27OM=");
} | identifier_body |
closeDialog.js | // -*- mode: js; js-indent-level: 4; indent-tabs-mode: nil -*-
const Clutter = imports.gi.Clutter;
const Gio = imports.gi.Gio;
const GLib = imports.gi.GLib;
const GObject = imports.gi.GObject;
const Lang = imports.lang;
const Meta = imports.gi.Meta;
const Shell = imports.gi.Shell;
const Dialog = imports.ui.dialog;
const Main = imports.ui.main;
const Tweener = imports.ui.tweener;
var FROZEN_WINDOW_BRIGHTNESS = -0.3;
var DIALOG_TRANSITION_TIME = 0.15;
var ALIVE_TIMEOUT = 5000;
var CloseDialog = new Lang.Class({
Name: 'CloseDialog',
Extends: GObject.Object,
Implements: [ Meta.CloseDialog ],
Properties: {
'window': GObject.ParamSpec.override('window', Meta.CloseDialog)
},
_init(window) {
this.parent();
this._window = window;
this._dialog = null;
this._timeoutId = 0;
},
get window() {
return this._window;
},
set window(window) {
this._window = window;
},
_createDialogContent() {
let tracker = Shell.WindowTracker.get_default();
let windowApp = tracker.get_window_app(this._window);
/* Translators: %s is an application name */
let title = _("“%s” is not responding.").format(windowApp.get_name());
let subtitle = _("You may choose to wait a short while for it to " +
"continue or force the application to quit entirely.");
let icon = new Gio.ThemedIcon({ name: 'dialog-warning-symbolic' });
return new Dialog.MessageDialogContent({ icon, title, subtitle });
},
_initDialog() {
if (this._dialog)
return;
let windowActor = this._window.get_compositor_private();
this._dialog = new Dialog.Dialog(windowActor, 'close-dialog');
this._dialog.width = windowActor.width;
this._dialog.height = windowActor.height;
this._dialog.addContent(this._createDialogContent());
this._dialog.addButton({ label: _('Force Quit'),
action: this._onClose.bind(this),
default: true });
this._dialog.addButton({ label: _('Wait'),
action: this._onWait.bind(this),
key: Clutter.Escape });
global.focus_manager.add_group(this._dialog);
},
_addWindowEffect() {
// We set the effect on the surface actor, so the dialog itself
// (which is a child of the MetaWindowActor) does not get the
// effect applied itself.
let windowActor = this._window.get_compositor_private();
let surfaceActor = windowActor.get_first_child();
let effect = new Clutter.BrightnessContrastEffect();
effect.set_brightness(FROZEN_WINDOW_BRIGHTNESS);
surfaceActor.add_effect_with_name("gnome-shell-frozen-window", effect);
},
_removeWindowEffect() {
let windowActor = this._window.get_compositor_private();
let surfaceActor = windowActor.get_first_child();
surfaceActor.remove_effect_by_name("gnome-shell-frozen-window");
},
_onWait() {
this.response(Meta.CloseDialogResponse.WAIT);
},
_onClose() {
this.response(Meta.CloseDialogResponse.FORCE_CLOSE);
},
vfunc_show() {
if (this._dialog != null)
return;
Meta.disable_unredirect_for_display(global.display);
this._timeoutId = GLib.timeout_add(GLib.PRIORITY_DEFAULT, ALIVE_TIMEOUT,
() => {
this._window.check_alive(global.display.get_current_time_roundtrip());
return GLib.SOURCE_CONTINUE;
}); | this._addWindowEffect();
this._initDialog();
this._dialog.scale_y = 0;
this._dialog.set_pivot_point(0.5, 0.5);
Tweener.addTween(this._dialog,
{ scale_y: 1,
transition: 'linear',
time: DIALOG_TRANSITION_TIME,
onComplete: () => {
Main.layoutManager.trackChrome(this._dialog, { affectsInputRegion: true });
}
});
},
vfunc_hide() {
if (this._dialog == null)
return;
Meta.enable_unredirect_for_display(global.display);
GLib.source_remove(this._timeoutId);
this._timeoutId = 0;
let dialog = this._dialog;
this._dialog = null;
this._removeWindowEffect();
Tweener.addTween(dialog,
{ scale_y: 0,
transition: 'linear',
time: DIALOG_TRANSITION_TIME,
onComplete: () => {
dialog.destroy();
}
});
},
vfunc_focus() {
if (this._dialog)
this._dialog.grab_key_focus();
}
}); | random_line_split |
|
closeDialog.js | // -*- mode: js; js-indent-level: 4; indent-tabs-mode: nil -*-
const Clutter = imports.gi.Clutter;
const Gio = imports.gi.Gio;
const GLib = imports.gi.GLib;
const GObject = imports.gi.GObject;
const Lang = imports.lang;
const Meta = imports.gi.Meta;
const Shell = imports.gi.Shell;
const Dialog = imports.ui.dialog;
const Main = imports.ui.main;
const Tweener = imports.ui.tweener;
var FROZEN_WINDOW_BRIGHTNESS = -0.3;
var DIALOG_TRANSITION_TIME = 0.15;
var ALIVE_TIMEOUT = 5000;
var CloseDialog = new Lang.Class({
Name: 'CloseDialog',
Extends: GObject.Object,
Implements: [ Meta.CloseDialog ],
Properties: {
'window': GObject.ParamSpec.override('window', Meta.CloseDialog)
},
_init(window) {
this.parent();
this._window = window;
this._dialog = null;
this._timeoutId = 0;
},
get window() {
return this._window;
},
set window(window) | ,
_createDialogContent() {
let tracker = Shell.WindowTracker.get_default();
let windowApp = tracker.get_window_app(this._window);
/* Translators: %s is an application name */
let title = _("“%s” is not responding.").format(windowApp.get_name());
let subtitle = _("You may choose to wait a short while for it to " +
"continue or force the application to quit entirely.");
let icon = new Gio.ThemedIcon({ name: 'dialog-warning-symbolic' });
return new Dialog.MessageDialogContent({ icon, title, subtitle });
},
_initDialog() {
if (this._dialog)
return;
let windowActor = this._window.get_compositor_private();
this._dialog = new Dialog.Dialog(windowActor, 'close-dialog');
this._dialog.width = windowActor.width;
this._dialog.height = windowActor.height;
this._dialog.addContent(this._createDialogContent());
this._dialog.addButton({ label: _('Force Quit'),
action: this._onClose.bind(this),
default: true });
this._dialog.addButton({ label: _('Wait'),
action: this._onWait.bind(this),
key: Clutter.Escape });
global.focus_manager.add_group(this._dialog);
},
_addWindowEffect() {
// We set the effect on the surface actor, so the dialog itself
// (which is a child of the MetaWindowActor) does not get the
// effect applied itself.
let windowActor = this._window.get_compositor_private();
let surfaceActor = windowActor.get_first_child();
let effect = new Clutter.BrightnessContrastEffect();
effect.set_brightness(FROZEN_WINDOW_BRIGHTNESS);
surfaceActor.add_effect_with_name("gnome-shell-frozen-window", effect);
},
_removeWindowEffect() {
let windowActor = this._window.get_compositor_private();
let surfaceActor = windowActor.get_first_child();
surfaceActor.remove_effect_by_name("gnome-shell-frozen-window");
},
_onWait() {
this.response(Meta.CloseDialogResponse.WAIT);
},
_onClose() {
this.response(Meta.CloseDialogResponse.FORCE_CLOSE);
},
vfunc_show() {
if (this._dialog != null)
return;
Meta.disable_unredirect_for_display(global.display);
this._timeoutId = GLib.timeout_add(GLib.PRIORITY_DEFAULT, ALIVE_TIMEOUT,
() => {
this._window.check_alive(global.display.get_current_time_roundtrip());
return GLib.SOURCE_CONTINUE;
});
this._addWindowEffect();
this._initDialog();
this._dialog.scale_y = 0;
this._dialog.set_pivot_point(0.5, 0.5);
Tweener.addTween(this._dialog,
{ scale_y: 1,
transition: 'linear',
time: DIALOG_TRANSITION_TIME,
onComplete: () => {
Main.layoutManager.trackChrome(this._dialog, { affectsInputRegion: true });
}
});
},
vfunc_hide() {
if (this._dialog == null)
return;
Meta.enable_unredirect_for_display(global.display);
GLib.source_remove(this._timeoutId);
this._timeoutId = 0;
let dialog = this._dialog;
this._dialog = null;
this._removeWindowEffect();
Tweener.addTween(dialog,
{ scale_y: 0,
transition: 'linear',
time: DIALOG_TRANSITION_TIME,
onComplete: () => {
dialog.destroy();
}
});
},
vfunc_focus() {
if (this._dialog)
this._dialog.grab_key_focus();
}
});
| {
this._window = window;
} | identifier_body |
closeDialog.js | // -*- mode: js; js-indent-level: 4; indent-tabs-mode: nil -*-
const Clutter = imports.gi.Clutter;
const Gio = imports.gi.Gio;
const GLib = imports.gi.GLib;
const GObject = imports.gi.GObject;
const Lang = imports.lang;
const Meta = imports.gi.Meta;
const Shell = imports.gi.Shell;
const Dialog = imports.ui.dialog;
const Main = imports.ui.main;
const Tweener = imports.ui.tweener;
var FROZEN_WINDOW_BRIGHTNESS = -0.3;
var DIALOG_TRANSITION_TIME = 0.15;
var ALIVE_TIMEOUT = 5000;
var CloseDialog = new Lang.Class({
Name: 'CloseDialog',
Extends: GObject.Object,
Implements: [ Meta.CloseDialog ],
Properties: {
'window': GObject.ParamSpec.override('window', Meta.CloseDialog)
},
_init(window) {
this.parent();
this._window = window;
this._dialog = null;
this._timeoutId = 0;
},
get window() {
return this._window;
},
set | (window) {
this._window = window;
},
_createDialogContent() {
let tracker = Shell.WindowTracker.get_default();
let windowApp = tracker.get_window_app(this._window);
/* Translators: %s is an application name */
let title = _("“%s” is not responding.").format(windowApp.get_name());
let subtitle = _("You may choose to wait a short while for it to " +
"continue or force the application to quit entirely.");
let icon = new Gio.ThemedIcon({ name: 'dialog-warning-symbolic' });
return new Dialog.MessageDialogContent({ icon, title, subtitle });
},
_initDialog() {
if (this._dialog)
return;
let windowActor = this._window.get_compositor_private();
this._dialog = new Dialog.Dialog(windowActor, 'close-dialog');
this._dialog.width = windowActor.width;
this._dialog.height = windowActor.height;
this._dialog.addContent(this._createDialogContent());
this._dialog.addButton({ label: _('Force Quit'),
action: this._onClose.bind(this),
default: true });
this._dialog.addButton({ label: _('Wait'),
action: this._onWait.bind(this),
key: Clutter.Escape });
global.focus_manager.add_group(this._dialog);
},
_addWindowEffect() {
// We set the effect on the surface actor, so the dialog itself
// (which is a child of the MetaWindowActor) does not get the
// effect applied itself.
let windowActor = this._window.get_compositor_private();
let surfaceActor = windowActor.get_first_child();
let effect = new Clutter.BrightnessContrastEffect();
effect.set_brightness(FROZEN_WINDOW_BRIGHTNESS);
surfaceActor.add_effect_with_name("gnome-shell-frozen-window", effect);
},
_removeWindowEffect() {
let windowActor = this._window.get_compositor_private();
let surfaceActor = windowActor.get_first_child();
surfaceActor.remove_effect_by_name("gnome-shell-frozen-window");
},
_onWait() {
this.response(Meta.CloseDialogResponse.WAIT);
},
_onClose() {
this.response(Meta.CloseDialogResponse.FORCE_CLOSE);
},
vfunc_show() {
if (this._dialog != null)
return;
Meta.disable_unredirect_for_display(global.display);
this._timeoutId = GLib.timeout_add(GLib.PRIORITY_DEFAULT, ALIVE_TIMEOUT,
() => {
this._window.check_alive(global.display.get_current_time_roundtrip());
return GLib.SOURCE_CONTINUE;
});
this._addWindowEffect();
this._initDialog();
this._dialog.scale_y = 0;
this._dialog.set_pivot_point(0.5, 0.5);
Tweener.addTween(this._dialog,
{ scale_y: 1,
transition: 'linear',
time: DIALOG_TRANSITION_TIME,
onComplete: () => {
Main.layoutManager.trackChrome(this._dialog, { affectsInputRegion: true });
}
});
},
vfunc_hide() {
if (this._dialog == null)
return;
Meta.enable_unredirect_for_display(global.display);
GLib.source_remove(this._timeoutId);
this._timeoutId = 0;
let dialog = this._dialog;
this._dialog = null;
this._removeWindowEffect();
Tweener.addTween(dialog,
{ scale_y: 0,
transition: 'linear',
time: DIALOG_TRANSITION_TIME,
onComplete: () => {
dialog.destroy();
}
});
},
vfunc_focus() {
if (this._dialog)
this._dialog.grab_key_focus();
}
});
| window | identifier_name |
deprecated.js | if ( 'undefined' !== typeof(jQuery.fn.bxSlider) ) {
jQuery( '.bxslider' ).each( function () {
var $slider = jQuery( this );
$slider.bxSlider( $slider.data( 'settings' ) );
} );
}
if ( 'undefined' !== typeof(window.Swiper) ) {
jQuery( '.swiper-container' ).each( function () {
var $this = jQuery( this ),
my_swiper,
max_slide_size = 0,
options = jQuery( this ).data( 'settings' );
if ( 'vertical' === options.mode ) {
$this.find( '.swiper-slide' ).each( function () {
var height = jQuery( this ).outerHeight( true );
if ( height > max_slide_size ) {
max_slide_size = height;
}
} ); | }
jQuery( window ).resize( function () {
$this.find( '.swiper-slide' ).each( function () {
var height = jQuery( this ).outerHeight( true );
if ( height > max_slide_size ) {
max_slide_size = height;
}
} );
$this.height( max_slide_size );
} );
my_swiper = jQuery( this ).swiper( jQuery.extend( options, {
onFirstInit: function ( swiper ) {
if ( 2 > swiper.slides.length ) {
$this.find( '.vc_arrow-left,.vc_arrow-right' ).hide();
} else if ( 0 === swiper.activeIndex && true !== swiper.params.loop ) {
$this.find( '.vc_arrow-left' ).hide();
} else {
$this.find( '.vc_arrow-left' ).show();
}
},
onSlideChangeStart: function ( swiper ) {
if ( 1 < swiper.slides.length && true !== swiper.params.loop ) {
if ( 0 === swiper.activeIndex ) {
$this.find( '.vc_arrow-left' ).hide();
} else {
$this.find( '.vc_arrow-left' ).show();
}
if ( swiper.slides.length - 1 === swiper.activeIndex ) {
$this.find( '.vc_arrow-right' ).hide();
} else {
$this.find( '.vc_arrow-right' ).show();
}
}
}
} ) );
$this.find( '.vc_arrow-left' ).click( function ( e ) {
e.preventDefault();
my_swiper.swipePrev();
} );
$this.find( '.vc_arrow-right' ).click( function ( e ) {
e.preventDefault();
my_swiper.swipeNext();
} );
my_swiper.reInit();
} );
} | $this.height( max_slide_size );
$this.css( 'overflow', 'hidden' ); | random_line_split |
BlurFilter.js | //////////////////////////////////////////////////////////////////////////////////////
//
// Copyright (c) 2014-2015, Egret Technology Inc.
// All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * Neither the name of the Egret nor the
// names of its contributors may be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY EGRET AND CONTRIBUTORS "AS IS" AND ANY EXPRESS
// OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
// IN NO EVENT SHALL EGRET AND CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;LOSS OF USE, DATA,
// OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
//////////////////////////////////////////////////////////////////////////////////////
var egret;
(function (egret) {
/**
* @private
*/
var BlurFilter = (function (_super) {
__extends(BlurFilter, _super);
function | (blurX, blurY) {
_super.call(this);
this.blurX = blurX;
this.blurY = blurY;
this.type = "blur";
}
var __egretProto__ = BlurFilter.prototype;
return BlurFilter;
})(egret.Filter);
egret.BlurFilter = BlurFilter;
BlurFilter.prototype.__class__ = "egret.BlurFilter";
})(egret || (egret = {}));
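// Hedged usage sketch (not part of the original file): constructing the filter
// is shown below; attaching it assumes the Flash-style `filters` array on
// egret display objects.
var demoBlur = new egret.BlurFilter(4, 4);
// someDisplayObject.filters = [demoBlur];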
| BlurFilter | identifier_name |
BlurFilter.js | //////////////////////////////////////////////////////////////////////////////////////
//
// Copyright (c) 2014-2015, Egret Technology Inc.
// All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * Neither the name of the Egret nor the
// names of its contributors may be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY EGRET AND CONTRIBUTORS "AS IS" AND ANY EXPRESS
// OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
// IN NO EVENT SHALL EGRET AND CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;LOSS OF USE, DATA,
// OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
//////////////////////////////////////////////////////////////////////////////////////
var egret;
(function (egret) {
/**
* @private
*/
var BlurFilter = (function (_super) {
__extends(BlurFilter, _super);
function BlurFilter(blurX, blurY) |
var __egretProto__ = BlurFilter.prototype;
return BlurFilter;
})(egret.Filter);
egret.BlurFilter = BlurFilter;
BlurFilter.prototype.__class__ = "egret.BlurFilter";
})(egret || (egret = {}));
| {
_super.call(this);
this.blurX = blurX;
this.blurY = blurY;
this.type = "blur";
} | identifier_body |
BlurFilter.js | //////////////////////////////////////////////////////////////////////////////////////
//
// Copyright (c) 2014-2015, Egret Technology Inc.
// All rights reserved. | // modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * Neither the name of the Egret nor the
// names of its contributors may be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY EGRET AND CONTRIBUTORS "AS IS" AND ANY EXPRESS
// OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
// IN NO EVENT SHALL EGRET AND CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;LOSS OF USE, DATA,
// OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
//////////////////////////////////////////////////////////////////////////////////////
var egret;
(function (egret) {
/**
* @private
*/
var BlurFilter = (function (_super) {
__extends(BlurFilter, _super);
function BlurFilter(blurX, blurY) {
_super.call(this);
this.blurX = blurX;
this.blurY = blurY;
this.type = "blur";
}
var __egretProto__ = BlurFilter.prototype;
return BlurFilter;
})(egret.Filter);
egret.BlurFilter = BlurFilter;
BlurFilter.prototype.__class__ = "egret.BlurFilter";
})(egret || (egret = {})); | // Redistribution and use in source and binary forms, with or without | random_line_split |
zdt1.rs | /// ZDT1 bi-objective test function
///
/// Evaluates solution parameters using the ZDT1 [1] synthetic test
/// function to produce two objective values.
///
/// [1] E. Zitzler, K. Deb, and L. Thiele. Comparison of Multiobjective
/// Evolutionary Algorithms: Empirical Results. Evolutionary
/// Computation, 8(2):173-195, 2000
pub fn | (parameters: [f32; 30]) -> [f32; 2] {
// objective function 1
let f1 = parameters[0];
// objective function 2
let mut g = 1_f32;
// g(x)
for i in 1..parameters.len() {
g = g + ((9_f32 / (parameters.len() as f32 - 1_f32)) * parameters[i]);
}
// h(f1, x)
let h = 1_f32 - (f1 / g).sqrt();
// f2(x)
let f2 = g * h;
return [f1, f2];
}
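// Hedged sanity check (not part of the original file); it assumes the elided
// function above is named `zdt1`. At the origin g = 1 and h = 1, so the
// objective vector is exactly [0, 1].
#[test]
fn zdt1_at_origin() {
    let objectives = zdt1([0.0_f32; 30]);
    assert_eq!(objectives, [0.0, 1.0]);
}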
| zdt1 | identifier_name |
zdt1.rs | /// ZDT1 bi-objective test function
///
/// Evaluates solution parameters using the ZDT1 [1] synthetic test
/// function to produce two objective values.
///
/// [1] E. Zitzler, K. Deb, and L. Thiele. Comparison of Multiobjective
/// Evolutionary Algorithms: Empirical Results. Evolutionary
/// Computation, 8(2):173-195, 2000
pub fn zdt1(parameters: [f32; 30]) -> [f32; 2] | {
// objective function 1
let f1 = parameters[0];
// objective function 2
let mut g = 1_f32;
// g(x)
for i in 1..parameters.len() {
g = g + ((9_f32 / (parameters.len() as f32 - 1_f32)) * parameters[i]);
}
// h(f1, x)
let h = 1_f32 - (f1 / g).sqrt();
// f2(x)
let f2 = g * h;
return [f1, f2];
} | identifier_body |
|
zdt1.rs | /// ZDT1 bi-objective test function
///
/// Evaluates solution parameters using the ZDT1 [1] synthetic test
/// function to produce two objective values.
///
/// [1] E. Zitzler, K. Deb, and L. Thiele. Comparison of Multiobjective
/// Evolutionary Algorithms: Empirical Results. Evolutionary
/// Computation, 8(2):173-195, 2000 |
// objective function 1
let f1 = parameters[0];
// objective function 2
let mut g = 1_f32;
// g(x)
for i in 1..parameters.len() {
g = g + ((9_f32 / (parameters.len() as f32 - 1_f32)) * parameters[i]);
}
// h(f1, x)
let h = 1_f32 - (f1 / g).sqrt();
// f2(x)
let f2 = g * h;
return [f1, f2];
} | pub fn zdt1(parameters: [f32; 30]) -> [f32; 2] { | random_line_split |
470 Implement Rand10() Using Rand7().py | #!/usr/bin/python3
"""
Given a function rand7 which generates a uniform random integer in the range 1
to 7, write a function rand10 which generates a uniform random integer in the
range 1 to 10.
Do NOT use system's Math.random().
"""
# The rand7() API is already defined for you.
def rand7():
return 0
class Solution:
def | (self):
"""
generate 7 twice, (rv1, rv2): 49 equally likely combinations
assign 40 of those combinations to 1 through 10, four each
7-ary system
:rtype: int
"""
while True:
rv1 = rand7()
rv2 = rand7()
s = (rv1 - 1) * 7 + (rv2 - 1) # make it start from 0
if s < 40: # s \in [0, 40)
return s % 10 + 1 # since I make it start from 0
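# Hedged correctness note (not part of the original solution): s is uniform
# over the 49 pairs; conditioning on s < 40 keeps 40 equally likely values,
# and s % 10 sends exactly four of them to each residue, so the output is
# uniform on 1..10. The acceptance probability is 40/49, so rand10() makes
# 2 * 49/40 = 2.45 rand7() calls in expectation.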
| rand10 | identifier_name |
470 Implement Rand10() Using Rand7().py | #!/usr/bin/python3
"""
Given a function rand7 which generates a uniform random integer in the range 1
to 7, write a function rand10 which generates a uniform random integer in the
range 1 to 10.
Do NOT use system's Math.random().
"""
| return 0
class Solution:
def rand10(self):
"""
generate 7 twice, (rv1, rv2): 49 equally likely combinations
assign 40 of those combinations to 1 through 10, four each
7-ary system
:rtype: int
"""
while True:
rv1 = rand7()
rv2 = rand7()
s = (rv1 - 1) * 7 + (rv2 - 1) # make it start from 0
if s < 40: # s \in [0, 40)
return s % 10 + 1 # since I make it start from 0 | # The rand7() API is already defined for you.
def rand7(): | random_line_split |
470 Implement Rand10() Using Rand7().py | #!/usr/bin/python3
"""
Given a function rand7 which generates a uniform random integer in the range 1
to 7, write a function rand10 which generates a uniform random integer in the
range 1 to 10.
Do NOT use system's Math.random().
"""
# The rand7() API is already defined for you.
def rand7():
return 0
class Solution:
def rand10(self):
| """
generate 7 twice, (rv1, rv2): 49 equally likely combinations
assign 40 of those combinations to 1 through 10, four each
7-ary system
:rtype: int
"""
while True:
rv1 = rand7()
rv2 = rand7()
s = (rv1 - 1) * 7 + (rv2 - 1) # make it start from 0
if s < 40: # s \in [0, 40)
return s % 10 + 1 # since I make it start from 0 | identifier_body |
|
470 Implement Rand10() Using Rand7().py | #!/usr/bin/python3
"""
Given a function rand7 which generates a uniform random integer in the range 1
to 7, write a function rand10 which generates a uniform random integer in the
range 1 to 10.
Do NOT use system's Math.random().
"""
# The rand7() API is already defined for you.
def rand7():
return 0
class Solution:
def rand10(self):
"""
generate 7 twice, (rv1, rv2): 49 equally likely combinations
assign 40 of those combinations to 1 through 10, four each
7-ary system
:rtype: int
"""
while True:
rv1 = rand7()
rv2 = rand7()
s = (rv1 - 1) * 7 + (rv2 - 1) # make it start from 0
if s < 40: # s \in [0, 40)
| return s % 10 + 1 # since I make it start from 0 | conditional_block |
|
search.py | # Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Search module that uses Google App Engine's full text search."""
__author__ = 'Ellis Michael ([email protected])'
import collections
import gettext
import logging
import math
import mimetypes
import os
import time
import traceback
import jinja2
import messages
import resources
import webapp2
import appengine_config
from common import crypto
from common import safe_dom
from common import schema_fields
from controllers import sites
from controllers import utils
from models import config
from models import counters
from models import courses
from models import custom_modules
from models import jobs
from models import services
from models import transforms
from modules.dashboard import dashboard
from google.appengine.api import namespace_manager
from google.appengine.api import search
from google.appengine.ext import db
MODULE_NAME = 'Full Text Search'
DEPRECATED = config.ConfigProperty(
'gcb_can_index_automatically', bool, safe_dom.Text(
'This property has been deprecated; it is retained so that we '
'will not generate no-such-variable error messages for existing '
'installations that have this property set.'),
default_value=False, label='Automatically index search', deprecated=True)
SEARCH_QUERIES_MADE = counters.PerfCounter(
'gcb-search-queries-made',
'The number of student queries made to the search module.')
SEARCH_RESULTS_RETURNED = counters.PerfCounter(
'gcb-search-results-returned',
'The number of search results returned across all student queries.')
SEARCH_FAILURES = counters.PerfCounter(
'gcb-search-failures',
'The number of search failure messages returned across all student '
'queries.')
INDEX_NAME = 'gcb_search_index_loc_%s'
RESULTS_LIMIT = 10
GCB_SEARCH_FOLDER_NAME = os.path.normpath('/modules/search/')
MAX_RETRIES = 5
# Name of a per-course setting determining whether automatic indexing is enabled
AUTO_INDEX_SETTING = 'auto_index'
# I18N: Message displayed on search results page when error occurs.
SEARCH_ERROR_TEXT = gettext.gettext('Search is currently unavailable.')
class ModuleDisabledException(Exception):
"""Exception thrown when the search module is disabled."""
pass
def get_index(namespace, locale):
assert locale, 'Must have a non-null locale'
return search.Index(name=INDEX_NAME % locale, namespace=namespace)
def index_all_docs(course, incremental):
"""Index all of the docs for a given models.Course object.
Args:
course: models.courses.Course. the course to index.
incremental: boolean. whether or not to index only new or out-of-date
items.
Returns:
A dict with three keys.
'num_indexed_docs' maps to an int, the number of documents added to the
index.
'doc_type' maps to a counter with resource types as keys mapping to the
number of that resource added to the index.
'indexing_time_secs' maps to a float representing the number of seconds
the indexing job took.
Raises:
ModuleDisabledException: The search module is currently disabled.
"""
if not custom_module.enabled:
raise ModuleDisabledException('The search module is disabled.')
start_time = time.time()
index = get_index(
course.app_context.get_namespace_name(),
course.app_context.get_current_locale())
timestamps, doc_types = (_get_index_metadata(index) if incremental
else ({}, {}))
for doc in resources.generate_all_documents(course, timestamps):
retry_count = 0
while retry_count < MAX_RETRIES:
try:
index.put(doc)
timestamps[doc.doc_id] = doc['date'][0].value
doc_types[doc.doc_id] = doc['type'][0].value
break
except search.Error, e:
if e.results[0].code == search.OperationResult.TRANSIENT_ERROR:
retry_count += 1
if retry_count >= MAX_RETRIES:
logging.error(
'Multiple transient errors indexing doc_id: %s',
doc.doc_id)
else:
logging.error('Failed to index doc_id: %s', doc.doc_id)
break
indexed_doc_types = collections.Counter()
for type_name in doc_types.values():
indexed_doc_types[type_name] += 1
return {'num_indexed_docs': len(timestamps),
'doc_types': indexed_doc_types,
'indexing_time_secs': time.time() - start_time}
def clear_index(namespace, locale):
"""Delete all docs in the index for a given models.Course object."""
if not custom_module.enabled:
raise ModuleDisabledException('The search module is disabled.')
index = get_index(namespace, locale)
doc_ids = [document.doc_id for document in index.get_range(ids_only=True)]
total_docs = len(doc_ids)
while doc_ids:
index.delete(doc_ids)
doc_ids = [document.doc_id
for document in index.get_range(ids_only=True)]
return {'deleted_docs': total_docs}
def _get_index_metadata(index):
"""Returns dict from doc_id to timestamp and one from doc_id to doc_type."""
timestamps = []
doc_types = []
cursor = search.Cursor()
while cursor:
options = search.QueryOptions(
limit=1000,
cursor=cursor,
returned_fields=['date', 'type'])
query = search.Query(query_string='', options=options)
current_docs = index.search(query)
cursor = current_docs.cursor
for doc in current_docs:
timestamps.append((doc.doc_id, doc['date'][0].value))
doc_types.append((doc.doc_id, doc['type'][0].value))
return dict(timestamps), dict(doc_types)
def fetch(course, query_string, offset=0, limit=RESULTS_LIMIT):
"""Return an HTML fragment with the results of a search for query_string.
Args:
course: models.courses.Course. the course to search.
query_string: str. the user's specified query.
offset: int. the number of results to skip.
limit: int. the number of results to return.
Returns:
A dict with two keys.
'results' maps to an ordered list of resources.Result objects.
'total_found' maps to the total number of results in the index which
match query_string.
Raises:
ModuleDisabledException: The search module is currently disabled.
"""
if not custom_module.enabled:
raise ModuleDisabledException('The search module is disabled.')
index = get_index(
course.app_context.get_namespace_name(),
course.app_context.get_current_locale())
try:
# TODO(emichael): Don't compute these for every query
returned_fields = resources.get_returned_fields()
snippeted_fields = resources.get_snippeted_fields()
options = search.QueryOptions(
limit=limit,
offset=offset,
returned_fields=returned_fields,
number_found_accuracy=100,
snippeted_fields=snippeted_fields)
query = search.Query(query_string=query_string, options=options)
results = index.search(query)
except search.Error:
logging.info('Failed searching for: %s', query_string)
return {'results': None, 'total_found': 0}
processed_results = resources.process_results(results)
return {'results': processed_results, 'total_found': results.number_found}
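# Hedged usage sketch (not part of the original module): a caller pages
# through results roughly like this; `course` is assumed to be an existing
# models.courses.Course instance.
#
#   response = fetch(course, 'quiz', offset=0, limit=RESULTS_LIMIT)
#   if response['results'] is not None:
#       for result in response['results']:
#           pass  # render each resources.Result
#   total = response['total_found']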
class SearchHandler(utils.BaseHandler):
"""Handler for generating the search results page."""
def get(self):
"""Process GET request."""
# TODO(emichael): move timing to Javascript
if not custom_module.enabled:
self.error(404)
return
student = self.personalize_page_and_get_enrolled(
supports_transient_student=True)
if not student:
return
try:
start = time.time()
# TODO(emichael): Don't use get because it can't handle utf-8
query = self.request.get('query')
offset = self.request.get('offset')
self.template_value['navbar'] = {}
if query:
try:
offset = int(offset)
except (ValueError, TypeError):
offset = 0
self.template_value['query'] = query
SEARCH_QUERIES_MADE.inc()
response = fetch(self.get_course(), query, offset=offset)
response = self.filter(response, student)
self.template_value['time'] = '%.2f' % (time.time() - start)
self.template_value['search_results'] = response['results']
total_found = response['total_found']
if offset + RESULTS_LIMIT < total_found:
self.template_value['next_link'] = (
'search?query=%s&offset=%d' %
(query, offset + RESULTS_LIMIT))
if offset - RESULTS_LIMIT >= 0:
self.template_value['previous_link'] = (
'search?query=%s&offset=%d' %
(query, offset - RESULTS_LIMIT))
self.template_value['page_number'] = offset / RESULTS_LIMIT + 1
self.template_value['total_pages'] = int(math.ceil(
float(total_found) / RESULTS_LIMIT))
if response['results']:
SEARCH_RESULTS_RETURNED.inc(len(response['results']))
# TODO(emichael): Remove this check when the unicode issue is fixed in
# dev_appserver.
except UnicodeEncodeError as e:
SEARCH_FAILURES.inc()
if not appengine_config.PRODUCTION_MODE:
# This message will only be displayed to the course author in
# dev, so it does not need to be I18N'd
self.template_value['search_error'] = (
'There is a known issue in App Engine\'s SDK '
'(code.google.com/p/googleappengine/issues/detail?id=9335) '
'which causes an error when generating search snippets '
'which contain non-ASCII characters. This error does not '
'occur in the production environment, so you can safely '
'run your course with unicode characters on appspot.com.')
logging.error('[Unicode/Dev server issue] Error rendering the '
'search page: %s.', e)
else:
self.template_value['search_error'] = SEARCH_ERROR_TEXT
logging.error('Error rendering the search page: %s. %s',
e, traceback.format_exc())
except Exception as e: # pylint: disable=broad-except
SEARCH_FAILURES.inc()
self.template_value['search_error'] = SEARCH_ERROR_TEXT
logging.error('Error rendering the search page: %s. %s',
e, traceback.format_exc())
finally:
path = sites.abspath(self.app_context.get_home_folder(),
GCB_SEARCH_FOLDER_NAME)
template = self.get_template('search.html', additional_dirs=[path])
self.template_value['navbar'] = {}
self.response.out.write(template.render(self.template_value))
def filter(self, response, student):
if not response['results']:
return response
filtered_results = []
units, lessons = self.get_course().get_track_matching_student(student)
available_unit_ids = set(str(unit.unit_id) for unit in units)
for result in response['results']:
if not result.unit_id or str(result.unit_id) in available_unit_ids:
filtered_results.append(result)
return {
'results': filtered_results,
'total_found': len(filtered_results)
}
class AssetsHandler(webapp2.RequestHandler):
"""Content handler for assets associated with search."""
def get(self):
"""Respond to HTTP GET methods."""
if not custom_module.enabled:
self.error(404)
return
path = self.request.path
if path.startswith('/'):
path = path[1:]
path = os.path.normpath(path)
if os.path.basename(os.path.dirname(path)) != 'assets':
self.error(404)
return
resource_file = os.path.join(appengine_config.BUNDLE_ROOT, path)
mimetype = mimetypes.guess_type(resource_file)[0]
if mimetype is None:
mimetype = 'application/octet-stream'
try:
sites.set_static_resource_cache_control(self)
self.response.status = 200
stream = open(resource_file)
content = stream.read()
self.response.headers['Content-Type'] = mimetype
self.response.write(content)
except IOError:
self.error(404)
def _get_search(handler):
"""Renders course indexing view."""
template_values = {'page_title': handler.format_title('Search')}
mc_template_value = {}
mc_template_value['module_enabled'] = custom_module.enabled
indexing_job = IndexCourse(handler.app_context).load()
if indexing_job:
if indexing_job.status_code in [jobs.STATUS_CODE_STARTED,
jobs.STATUS_CODE_QUEUED]:
mc_template_value['status_message'] = 'Indexing in progress.'
mc_template_value['job_in_progress'] = True
elif indexing_job.status_code == jobs.STATUS_CODE_COMPLETED:
mc_template_value['indexed'] = True
mc_template_value['last_updated'] = (
indexing_job.updated_on.strftime(
utils.HUMAN_READABLE_DATETIME_FORMAT))
mc_template_value['index_info'] = transforms.loads(
indexing_job.output)
elif indexing_job.status_code == jobs.STATUS_CODE_FAILED:
mc_template_value['status_message'] = (
'Indexing job failed with error: %s' % indexing_job.output)
else:
mc_template_value['status_message'] = (
'No indexing job has been run yet.')
mc_template_value['index_course_xsrf_token'] = (
crypto.XsrfTokenManager.create_xsrf_token('index_course'))
template_values['main_content'] = jinja2.Markup(handler.get_template(
'search_dashboard.html', [os.path.dirname(__file__)]
).render(mc_template_value, autoescape=True))
return template_values
def _post_index_course(handler):
"""Submits a new indexing operation."""
try:
check_job_and_submit(handler.app_context, incremental=False)
except db.TransactionFailedError:
        # An indexing job was already submitted (e.g. a double submission
        # from multiple browser tabs); nothing more to do.
        pass
handler.redirect('/dashboard?action=settings_search')
class CronIndexCourse(utils.AbstractAllCoursesCronHandler):
"""Index courses where auto-indexing is enabled.
All jobs should be submitted through the transactional check_job_and_submit
method to prevent multiple index operations from running at the same time.
If an index job is currently running when this cron job attempts to start
one, this operation will be a noop for that course.
"""
URL = '/cron/search/index_courses'
@classmethod
    def is_globally_enabled(cls):
return True
@classmethod
def is_enabled_for_course(cls, app_context):
course_settings = app_context.get_environ().get('course')
return course_settings and course_settings.get(AUTO_INDEX_SETTING)
def cron_action(self, app_context, unused_global_state):
try:
check_job_and_submit(app_context, incremental=True)
logging.info('Index submitted for namespace %s.',
app_context.get_namespace_name())
except db.TransactionFailedError as e:
logging.info(
'Failed to submit re-index job in namespace %s: %s',
app_context.get_namespace_name(), e)
@db.transactional(xg=True)
def check_job_and_submit(app_context, incremental=True):
"""Determines whether an indexing job is running and submits if not."""
    indexing_job = IndexCourse(app_context, incremental=incremental)
job_entity = IndexCourse(app_context).load()
bad_status_codes = [jobs.STATUS_CODE_STARTED, jobs.STATUS_CODE_QUEUED]
if job_entity and job_entity.status_code in bad_status_codes:
raise db.TransactionFailedError('Index job is currently running.')
indexing_job.non_transactional_submit()
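# A usage sketch (hypothetical caller) mirroring the dashboard action and the
# cron handler above; the transactional check raises db.TransactionFailedError
# when a job is already queued or running:
#
#     try:
#         check_job_and_submit(app_context, incremental=False)
#     except db.TransactionFailedError:
#         pass  # an indexing job is already in flight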
class IndexCourse(jobs.DurableJob):
"""A job that indexes the course."""
@staticmethod
def get_description():
return 'course index'
def __init__(self, app_context, incremental=True):
super(IndexCourse, self).__init__(app_context)
self.incremental = incremental
def run(self):
"""Index the course."""
namespace = namespace_manager.get_namespace()
        logging.info('Running indexing job for namespace %s. Incremental: %s',
                     namespace, self.incremental)
app_context = sites.get_app_context_for_namespace(namespace)
        # Set the request path so sites.get_course_for_current_request works.
sites.set_path_info(app_context.slug)
indexing_stats = {
'deleted_docs': 0,
'num_indexed_docs': 0,
'doc_types': collections.Counter(),
'indexing_time_secs': 0,
'locales': []
}
for locale in app_context.get_allowed_locales():
stats = clear_index(namespace, locale)
indexing_stats['deleted_docs'] += stats['deleted_docs']
for locale in app_context.get_allowed_locales():
app_context.set_current_locale(locale)
course = courses.Course(None, app_context=app_context)
stats = index_all_docs(course, self.incremental)
indexing_stats['num_indexed_docs'] += stats['num_indexed_docs']
indexing_stats['doc_types'] += stats['doc_types']
indexing_stats['indexing_time_secs'] += stats['indexing_time_secs']
indexing_stats['locales'].append(locale)
return indexing_stats
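# An illustrative (made-up) return value from IndexCourse.run():
#
#     {'deleted_docs': 12,
#      'num_indexed_docs': 48,
#      'doc_types': collections.Counter({'lesson': 40, 'link': 8}),
#      'indexing_time_secs': 3.7,
#      'locales': ['en_US', 'fr']}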
# Module registration
custom_module = None
def register_module():
"""Registers this module in the registry."""
global_routes = [
('/modules/search/assets/.*', AssetsHandler),
(CronIndexCourse.URL, CronIndexCourse)
]
namespaced_routes = [
('/search', SearchHandler)
]
auto_index_enabled = schema_fields.SchemaField(
'course:' + AUTO_INDEX_SETTING, 'Auto-Index', 'boolean',
description=services.help_urls.make_learn_more_message(
messages.SEARCH_AUTO_INDEX_DESCRIPTION, 'course:auto_index'),
i18n=False, optional=True)
course_settings_fields = [
lambda course: auto_index_enabled
]
def notify_module_enabled():
dashboard.DashboardHandler.add_sub_nav_mapping(
'publish', 'search', 'Search', action='settings_search',
contents=_get_search, placement=1000)
dashboard.DashboardHandler.add_custom_post_action(
'index_course', _post_index_course)
courses.Course.OPTIONS_SCHEMA_PROVIDERS[
courses.Course.SCHEMA_SECTION_COURSE] += course_settings_fields
global custom_module # pylint: disable=global-statement
custom_module = custom_modules.Module(
MODULE_NAME,
'Provides search capabilities for courses',
global_routes, namespaced_routes,
notify_module_enabled=notify_module_enabled)
return custom_module
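# register_module() is normally invoked once at startup by Course Builder's
# module loader; a hypothetical manual invocation:
#
#     module = register_module()
#     # Once the registry enables the module, module.enabled is True, which
#     # the handlers above check before serving any request.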