Columns:
- file_name: large_string, lengths 4 to 69
- prefix: large_string, lengths 0 to 26.7k
- suffix: large_string, lengths 0 to 24.8k
- middle: large_string, lengths 0 to 2.12k
- fim_type: large_string, 4 classes
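Each row below carries a `prefix`, a `suffix`, and the held-out `middle`, plus a `fim_type` label describing how the split point was chosen (`identifier_body`, `identifier_name`, `conditional_block`, or `random_line_split`). As a minimal sketch of how such a row is consumed — the struct and field names are illustrative assumptions mirroring the column names above, not an API shipped with the data — the original source file is recovered by concatenating the three string fields in order:

```rust
/// Hypothetical in-memory view of one dataset row; the fields mirror the
/// columns listed above, but the struct itself is only for illustration.
struct FimRow {
    file_name: String,
    prefix: String,
    suffix: String,
    middle: String,
    fim_type: String,
}

impl FimRow {
    /// Reassemble the original file: prefix + middle + suffix.
    /// The `middle` is the span a fill-in-the-middle model must predict.
    fn reassemble(&self) -> String {
        let mut out = String::with_capacity(
            self.prefix.len() + self.middle.len() + self.suffix.len(),
        );
        out.push_str(&self.prefix);
        out.push_str(&self.middle);
        out.push_str(&self.suffix);
        out
    }
}

fn main() {
    // Toy row (values invented for the example, not taken from the data).
    let row = FimRow {
        file_name: "skin.rs".to_string(),
        prefix: "pub fn name(&self) -> &str ".to_string(),
        suffix: "\n".to_string(),
        middle: "{ self.name.as_slice() }".to_string(),
        fim_type: "identifier_body".to_string(),
    };
    assert_eq!(
        row.reassemble(),
        "pub fn name(&self) -> &str { self.name.as_slice() }\n"
    );
    println!("{} ({})", row.file_name, row.fim_type);
}
```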
skin.rs
// Copyright 2015 Birunthan Mohanathas // // Licensed under the MIT license <http://opensource.org/licenses/MIT>. This // file may not be copied, modified, or distributed except according to those // terms. use measure::Measureable; pub struct Skin<'a> { name: String, measures: Vec<Box<Measureable<'a> + 'a>>, } impl<'a> Skin<'a> { pub fn new(name: &str) -> Skin { Skin { name: name.to_string(), measures: Vec::new(), } } pub fn name(&self) -> &str { self.name.as_slice() } pub fn add_measure(&mut self, measure: Box<Measureable<'a> + 'a>) { self.measures.push(measure); } pub fn measures(&self) -> &Vec<Box<Measureable<'a> + 'a>> { &self.measures } } #[test] fn test_name()
#[test] fn test_add_measure() { use time_measure::TimeMeasure; let mut skin = Skin::new("skin"); skin.add_measure(Box::new(TimeMeasure::new("foo"))); assert_eq!(1, skin.measures().len()); }
{ let skin = Skin::new("skin"); assert_eq!("skin", skin.name()); }
identifier_body
skin.rs
// Copyright 2015 Birunthan Mohanathas // // Licensed under the MIT license <http://opensource.org/licenses/MIT>. This // file may not be copied, modified, or distributed except according to those // terms. use measure::Measureable; pub struct Skin<'a> { name: String, measures: Vec<Box<Measureable<'a> + 'a>>, } impl<'a> Skin<'a> { pub fn new(name: &str) -> Skin { Skin { name: name.to_string(), measures: Vec::new(), } }
pub fn name(&self) -> &str { self.name.as_slice() } pub fn add_measure(&mut self, measure: Box<Measureable<'a> + 'a>) { self.measures.push(measure); } pub fn measures(&self) -> &Vec<Box<Measureable<'a> + 'a>> { &self.measures } } #[test] fn test_name() { let skin = Skin::new("skin"); assert_eq!("skin", skin.name()); } #[test] fn test_add_measure() { use time_measure::TimeMeasure; let mut skin = Skin::new("skin"); skin.add_measure(Box::new(TimeMeasure::new("foo"))); assert_eq!(1, skin.measures().len()); }
random_line_split
mod.rs
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![allow(missing_docs)] use prelude::v1::*; pub mod backtrace; pub mod condvar; pub mod mutex; pub mod net; pub mod io; pub mod poison; pub mod remutex; pub mod rwlock; pub mod stack; pub mod thread; pub mod thread_info; pub mod thread_local; pub mod wtf8; // common error constructors
} /// A trait for viewing representations from std types #[doc(hidden)] pub trait AsInnerMut<Inner:?Sized> { fn as_inner_mut(&mut self) -> &mut Inner; } /// A trait for extracting representations from std types #[doc(hidden)] pub trait IntoInner<Inner> { fn into_inner(self) -> Inner; } /// A trait for creating std types from internal representations #[doc(hidden)] pub trait FromInner<Inner> { fn from_inner(inner: Inner) -> Self; }
/// A trait for viewing representations from std types #[doc(hidden)] pub trait AsInner<Inner: ?Sized> { fn as_inner(&self) -> &Inner;
random_line_split
demo.rs
#!/usr/bin/env run-cargo-script //! ```cargo //! [dependencies] //! unixbar = "0" //! systemstat = "0" //! ``` #[macro_use] extern crate unixbar; extern crate systemstat; use systemstat::{Platform, System}; use unixbar::*; fn main() { UnixBar::new(I3BarFormatter::new()) .add(Volume::new(default_volume(), |volume| match volume.muted { false => bfmt![fmt["Volume {}", (volume.volume * 100.0) as i32]], true => bfmt![fmt["(muted) {}", (volume.volume * 100.0) as i32]] } )) .register_fn("prev", move || { MPRISMusic::new().prev(); }) .register_fn("play_pause", move || { MPRISMusic::new().play_pause(); }) .register_fn("next", move || { MPRISMusic::new().next(); }) .add(Music::new(MPRISMusic::new(), |song| { if let Some(playing) = song.playback.map(|playback| playback.playing)
else { bfmt![click[MouseButton::Left => sh "rhythmbox"] fg["#bbbbbb"] text["[start music]"]] } } )) .add(Text::new(bfmt![click[MouseButton::Left => sh "notify-send hi"] click[MouseButton::Right => sh "notify-send 'what?'"] fg["#11bb55"] text[" Hello World! "]])) // .add(Bspwm::new(|bsp| Format::Concat(bsp.desktops.iter().map(|d| Box::new({ // let bg = if d.focused { "#99aa11" } else { "#111111" }; // bfmt![click[MouseButton::Left => sh format!("bspc desktop -f {}", d.name)] // bg[bg] fmt[" {} ", d.name]] // })).collect()))) .add(Text::new(bfmt![right])) .add(Periodic::new( Duration::from_secs(2), || match System::new().memory() { Ok(mem) => bfmt![bg["#556677"] fmt[" {}/{} RAM ", mem.free.to_string(false).replace(" GB", ""), mem.total]], Err(_) => bfmt![fg["#bb1155"] text["error"]], })) .add(Delayed::new( Duration::from_secs(1), || System::new().cpu_load_aggregate().unwrap(), |res| match res { Ok(cpu) => bfmt![fg["#99aaff"] fmt[" {:04.1}% CPU ", (1.0 - cpu.idle) * 100.0]], Err(_) => bfmt![fg["#bb1155"] text["error"]], })) .add(Wrap::new( |f| bfmt![fg["#bb1155"] f], DateTime::new(" %Y-%m-%d %H:%M:%S %z "))) .run(); }
{ bfmt![ fg["#bbbbbb"] multi[ (click[MouseButton::Left => fn "prev"] no_sep text["[|<]"]), (click[MouseButton::Left => fn "play_pause"] no_sep text[if playing { "[||]" } else { "[>]" }]), (click[MouseButton::Left => sh format!("firefox 'https://musicbrainz.org/artist/{}'", song.musicbrainz_artist.unwrap_or("".to_owned()))] no_sep pad[4] text[song.artist]), (no_sep pad[4] text["-"]), (click[MouseButton::Left => sh format!("firefox 'https://musicbrainz.org/recording/{}'", song.musicbrainz_track.unwrap_or("".to_owned()))] no_sep text[song.title]), (click[MouseButton::Left => fn "next"] text["[>|]"]) ] ] }
conditional_block
demo.rs
#!/usr/bin/env run-cargo-script //! ```cargo //! [dependencies] //! unixbar = "0" //! systemstat = "0" //! ``` #[macro_use] extern crate unixbar; extern crate systemstat; use systemstat::{Platform, System}; use unixbar::*; fn main()
(click[MouseButton::Left => fn "prev"] no_sep text["[|<]"]), (click[MouseButton::Left => fn "play_pause"] no_sep text[if playing { "[||]" } else { "[>]" }]), (click[MouseButton::Left => sh format!("firefox 'https://musicbrainz.org/artist/{}'", song.musicbrainz_artist.unwrap_or("".to_owned()))] no_sep pad[4] text[song.artist]), (no_sep pad[4] text["-"]), (click[MouseButton::Left => sh format!("firefox 'https://musicbrainz.org/recording/{}'", song.musicbrainz_track.unwrap_or("".to_owned()))] no_sep text[song.title]), (click[MouseButton::Left => fn "next"] text["[>|]"]) ] ] } else { bfmt![click[MouseButton::Left => sh "rhythmbox"] fg["#bbbbbb"] text["[start music]"]] } } )) .add(Text::new(bfmt![click[MouseButton::Left => sh "notify-send hi"] click[MouseButton::Right => sh "notify-send 'what?'"] fg["#11bb55"] text[" Hello World! "]])) // .add(Bspwm::new(|bsp| Format::Concat(bsp.desktops.iter().map(|d| Box::new({ // let bg = if d.focused { "#99aa11" } else { "#111111" }; // bfmt![click[MouseButton::Left => sh format!("bspc desktop -f {}", d.name)] // bg[bg] fmt[" {} ", d.name]] // })).collect()))) .add(Text::new(bfmt![right])) .add(Periodic::new( Duration::from_secs(2), || match System::new().memory() { Ok(mem) => bfmt![bg["#556677"] fmt[" {}/{} RAM ", mem.free.to_string(false).replace(" GB", ""), mem.total]], Err(_) => bfmt![fg["#bb1155"] text["error"]], })) .add(Delayed::new( Duration::from_secs(1), || System::new().cpu_load_aggregate().unwrap(), |res| match res { Ok(cpu) => bfmt![fg["#99aaff"] fmt[" {:04.1}% CPU ", (1.0 - cpu.idle) * 100.0]], Err(_) => bfmt![fg["#bb1155"] text["error"]], })) .add(Wrap::new( |f| bfmt![fg["#bb1155"] f], DateTime::new(" %Y-%m-%d %H:%M:%S %z "))) .run(); }
{ UnixBar::new(I3BarFormatter::new()) .add(Volume::new(default_volume(), |volume| match volume.muted { false => bfmt![fmt["Volume {}", (volume.volume * 100.0) as i32]], true => bfmt![fmt["(muted) {}", (volume.volume * 100.0) as i32]] } )) .register_fn("prev", move || { MPRISMusic::new().prev(); }) .register_fn("play_pause", move || { MPRISMusic::new().play_pause(); }) .register_fn("next", move || { MPRISMusic::new().next(); }) .add(Music::new(MPRISMusic::new(), |song| { if let Some(playing) = song.playback.map(|playback| playback.playing) { bfmt![ fg["#bbbbbb"] multi[
identifier_body
demo.rs
#!/usr/bin/env run-cargo-script //! ```cargo //! [dependencies] //! unixbar = "0" //! systemstat = "0" //! ``` #[macro_use] extern crate unixbar; extern crate systemstat; use systemstat::{Platform, System}; use unixbar::*; fn
() { UnixBar::new(I3BarFormatter::new()) .add(Volume::new(default_volume(), |volume| match volume.muted { false => bfmt![fmt["Volume {}", (volume.volume * 100.0) as i32]], true => bfmt![fmt["(muted) {}", (volume.volume * 100.0) as i32]] } )) .register_fn("prev", move || { MPRISMusic::new().prev(); }) .register_fn("play_pause", move || { MPRISMusic::new().play_pause(); }) .register_fn("next", move || { MPRISMusic::new().next(); }) .add(Music::new(MPRISMusic::new(), |song| { if let Some(playing) = song.playback.map(|playback| playback.playing) { bfmt![ fg["#bbbbbb"] multi[ (click[MouseButton::Left => fn "prev"] no_sep text["[|<]"]), (click[MouseButton::Left => fn "play_pause"] no_sep text[if playing { "[||]" } else { "[>]" }]), (click[MouseButton::Left => sh format!("firefox 'https://musicbrainz.org/artist/{}'", song.musicbrainz_artist.unwrap_or("".to_owned()))] no_sep pad[4] text[song.artist]), (no_sep pad[4] text["-"]), (click[MouseButton::Left => sh format!("firefox 'https://musicbrainz.org/recording/{}'", song.musicbrainz_track.unwrap_or("".to_owned()))] no_sep text[song.title]), (click[MouseButton::Left => fn "next"] text["[>|]"]) ] ] } else { bfmt![click[MouseButton::Left => sh "rhythmbox"] fg["#bbbbbb"] text["[start music]"]] } } )) .add(Text::new(bfmt![click[MouseButton::Left => sh "notify-send hi"] click[MouseButton::Right => sh "notify-send 'what?'"] fg["#11bb55"] text[" Hello World! "]])) // .add(Bspwm::new(|bsp| Format::Concat(bsp.desktops.iter().map(|d| Box::new({ // let bg = if d.focused { "#99aa11" } else { "#111111" }; // bfmt![click[MouseButton::Left => sh format!("bspc desktop -f {}", d.name)] // bg[bg] fmt[" {} ", d.name]] // })).collect()))) .add(Text::new(bfmt![right])) .add(Periodic::new( Duration::from_secs(2), || match System::new().memory() { Ok(mem) => bfmt![bg["#556677"] fmt[" {}/{} RAM ", mem.free.to_string(false).replace(" GB", ""), mem.total]], Err(_) => bfmt![fg["#bb1155"] text["error"]], })) .add(Delayed::new( Duration::from_secs(1), || System::new().cpu_load_aggregate().unwrap(), |res| match res { Ok(cpu) => bfmt![fg["#99aaff"] fmt[" {:04.1}% CPU ", (1.0 - cpu.idle) * 100.0]], Err(_) => bfmt![fg["#bb1155"] text["error"]], })) .add(Wrap::new( |f| bfmt![fg["#bb1155"] f], DateTime::new(" %Y-%m-%d %H:%M:%S %z "))) .run(); }
main
identifier_name
demo.rs
#!/usr/bin/env run-cargo-script //! ```cargo //! [dependencies] //! unixbar = "0" //! systemstat = "0" //! ``` #[macro_use] extern crate unixbar; extern crate systemstat; use systemstat::{Platform, System}; use unixbar::*; fn main() { UnixBar::new(I3BarFormatter::new()) .add(Volume::new(default_volume(), |volume| match volume.muted { false => bfmt![fmt["Volume {}", (volume.volume * 100.0) as i32]], true => bfmt![fmt["(muted) {}", (volume.volume * 100.0) as i32]] } )) .register_fn("prev", move || { MPRISMusic::new().prev(); })
.register_fn("play_pause", move || { MPRISMusic::new().play_pause(); }) .register_fn("next", move || { MPRISMusic::new().next(); }) .add(Music::new(MPRISMusic::new(), |song| { if let Some(playing) = song.playback.map(|playback| playback.playing) { bfmt![ fg["#bbbbbb"] multi[ (click[MouseButton::Left => fn "prev"] no_sep text["[|<]"]), (click[MouseButton::Left => fn "play_pause"] no_sep text[if playing { "[||]" } else { "[>]" }]), (click[MouseButton::Left => sh format!("firefox 'https://musicbrainz.org/artist/{}'", song.musicbrainz_artist.unwrap_or("".to_owned()))] no_sep pad[4] text[song.artist]), (no_sep pad[4] text["-"]), (click[MouseButton::Left => sh format!("firefox 'https://musicbrainz.org/recording/{}'", song.musicbrainz_track.unwrap_or("".to_owned()))] no_sep text[song.title]), (click[MouseButton::Left => fn "next"] text["[>|]"]) ] ] } else { bfmt![click[MouseButton::Left => sh "rhythmbox"] fg["#bbbbbb"] text["[start music]"]] } } )) .add(Text::new(bfmt![click[MouseButton::Left => sh "notify-send hi"] click[MouseButton::Right => sh "notify-send 'what?'"] fg["#11bb55"] text[" Hello World! "]])) // .add(Bspwm::new(|bsp| Format::Concat(bsp.desktops.iter().map(|d| Box::new({ // let bg = if d.focused { "#99aa11" } else { "#111111" }; // bfmt![click[MouseButton::Left => sh format!("bspc desktop -f {}", d.name)] // bg[bg] fmt[" {} ", d.name]] // })).collect()))) .add(Text::new(bfmt![right])) .add(Periodic::new( Duration::from_secs(2), || match System::new().memory() { Ok(mem) => bfmt![bg["#556677"] fmt[" {}/{} RAM ", mem.free.to_string(false).replace(" GB", ""), mem.total]], Err(_) => bfmt![fg["#bb1155"] text["error"]], })) .add(Delayed::new( Duration::from_secs(1), || System::new().cpu_load_aggregate().unwrap(), |res| match res { Ok(cpu) => bfmt![fg["#99aaff"] fmt[" {:04.1}% CPU ", (1.0 - cpu.idle) * 100.0]], Err(_) => bfmt![fg["#bb1155"] text["error"]], })) .add(Wrap::new( |f| bfmt![fg["#bb1155"] f], DateTime::new(" %Y-%m-%d %H:%M:%S %z "))) .run(); }
random_line_split
pattern-tyvar-2.rs
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. extern mod extra; enum bar { t1((), Option<~[int]>), t2, } // n.b. my change changes this error message, but I think it's right -- tjc fn foo(t: bar) -> int { match t { t1(_, Some(x)) => { return x * 3; } _ => { fail!(); } } } //~ ERROR binary operation * cannot be applied to fn
() { }
main
identifier_name
pattern-tyvar-2.rs
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. extern mod extra; enum bar { t1((), Option<~[int]>), t2, } // n.b. my change changes this error message, but I think it's right -- tjc fn foo(t: bar) -> int
//~ ERROR binary operation * cannot be applied to fn main() { }
{ match t { t1(_, Some(x)) => { return x * 3; } _ => { fail!(); } } }
identifier_body
pattern-tyvar-2.rs
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. extern mod extra;
enum bar { t1((), Option<~[int]>), t2, } // n.b. my change changes this error message, but I think it's right -- tjc fn foo(t: bar) -> int { match t { t1(_, Some(x)) => { return x * 3; } _ => { fail!(); } } } //~ ERROR binary operation * cannot be applied to fn main() { }
random_line_split
htmlpreelement.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use dom::bindings::codegen::Bindings::HTMLPreElementBinding; use dom::bindings::js::Root; use dom::document::Document; use dom::htmlelement::HTMLElement; use dom::node::Node; use dom_struct::dom_struct; use html5ever::{LocalName, Prefix}; #[dom_struct] pub struct HTMLPreElement { htmlelement: HTMLElement, } impl HTMLPreElement { fn new_inherited(local_name: LocalName, prefix: Option<Prefix>, document: &Document) -> HTMLPreElement { HTMLPreElement { htmlelement: HTMLElement::new_inherited(local_name, prefix, document) } } #[allow(unrooted_must_root)] pub fn new(local_name: LocalName, prefix: Option<Prefix>, document: &Document) -> Root<HTMLPreElement>
}
{ Node::reflect_node(box HTMLPreElement::new_inherited(local_name, prefix, document), document, HTMLPreElementBinding::Wrap) }
identifier_body
htmlpreelement.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use dom::bindings::codegen::Bindings::HTMLPreElementBinding; use dom::bindings::js::Root; use dom::document::Document; use dom::htmlelement::HTMLElement; use dom::node::Node; use dom_struct::dom_struct; use html5ever::{LocalName, Prefix}; #[dom_struct] pub struct HTMLPreElement { htmlelement: HTMLElement, } impl HTMLPreElement { fn new_inherited(local_name: LocalName, prefix: Option<Prefix>, document: &Document) -> HTMLPreElement { HTMLPreElement {
} } #[allow(unrooted_must_root)] pub fn new(local_name: LocalName, prefix: Option<Prefix>, document: &Document) -> Root<HTMLPreElement> { Node::reflect_node(box HTMLPreElement::new_inherited(local_name, prefix, document), document, HTMLPreElementBinding::Wrap) } }
htmlelement: HTMLElement::new_inherited(local_name, prefix, document)
random_line_split
htmlpreelement.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use dom::bindings::codegen::Bindings::HTMLPreElementBinding; use dom::bindings::js::Root; use dom::document::Document; use dom::htmlelement::HTMLElement; use dom::node::Node; use dom_struct::dom_struct; use html5ever::{LocalName, Prefix}; #[dom_struct] pub struct HTMLPreElement { htmlelement: HTMLElement, } impl HTMLPreElement { fn new_inherited(local_name: LocalName, prefix: Option<Prefix>, document: &Document) -> HTMLPreElement { HTMLPreElement { htmlelement: HTMLElement::new_inherited(local_name, prefix, document) } } #[allow(unrooted_must_root)] pub fn
(local_name: LocalName, prefix: Option<Prefix>, document: &Document) -> Root<HTMLPreElement> { Node::reflect_node(box HTMLPreElement::new_inherited(local_name, prefix, document), document, HTMLPreElementBinding::Wrap) } }
new
identifier_name
simple.rs
// SPDX-License-Identifier: MIT use orbclient::{Color, EventOption, GraphicsPath, Mode, Renderer, Window}; fn main()
0, 0, (win_w / 3) as i32, (win_h / 2) as i32, Color::rgb(128, 128, 128), Color::rgb(255, 255, 255), ); // horizontal gradient window.linear_gradient( (win_w / 3) as i32, 0, win_w / 3, win_h, (win_w / 3) as i32, 0, (2 * win_w / 3) as i32, 0, Color::rgb(128, 255, 255), Color::rgb(255, 255, 255), ); // vertical gradient window.linear_gradient( (2 * win_w / 3) as i32, 0, win_w / 3, win_h, (2 * win_w / 3) as i32, 0, (2 * win_w / 3) as i32, win_h as i32, Color::rgb(0, 128, 0), Color::rgb(255, 255, 255), ); window.arc(100, 100, -25, 1 << 0 | 1 << 2, Color::rgb(0, 0, 255)); window.arc(100, 100, -25, 1 << 1 | 1 << 3, Color::rgb(0, 255, 255)); window.arc(100, 100, -25, 1 << 4 | 1 << 6, Color::rgb(255, 0, 255)); window.arc(100, 100, -25, 1 << 5 | 1 << 7, Color::rgb(255, 255, 0)); window.circle(100, 100, 25, Color::rgb(0, 0, 0)); window.circle(100, 101, -25, Color::rgb(0, 255, 0)); window.circle(220, 220, -100, Color::rgba(128, 128, 128, 80)); window.wu_circle(150, 220, 100, Color::rgba(255, 0, 0, 255)); window.line(0, 0, 200, 200, Color::rgb(255, 0, 0)); window.line(0, 200, 200, 0, Color::rgb(128, 255, 0)); // vertical and horizontal line test window.line(100, 0, 100, 200, Color::rgb(0, 0, 255)); window.line(0, 100, 200, 100, Color::rgb(255, 255, 0)); window.wu_line(100, 220, 400, 250, Color::rgba(255, 0, 0, 255)); window.line(100, 230, 400, 260, Color::rgba(255, 0, 0, 255)); // path and bezier curve example draw a cloud let mut cloud_path = GraphicsPath::new(); cloud_path.move_to(170, 80); cloud_path.bezier_curve_to(130, 100, 130, 150, 230, 150); cloud_path.bezier_curve_to(250, 180, 320, 180, 340, 150); cloud_path.bezier_curve_to(420, 150, 420, 120, 390, 100); cloud_path.bezier_curve_to(430, 40, 370, 30, 340, 50); cloud_path.bezier_curve_to(320, 5, 250, 20, 250, 50); cloud_path.bezier_curve_to(200, 5, 150, 20, 170, 80); window.draw_path_stroke(cloud_path, Color::rgb(0, 0, 255)); // path and quadratic curve example draw a balloon let mut balloon_path = GraphicsPath::new(); balloon_path.move_to(75, 25); balloon_path.quadratic_curve_to(25, 25, 25, 62); balloon_path.quadratic_curve_to(25, 100, 50, 100); balloon_path.quadratic_curve_to(50, 120, 30, 125); balloon_path.quadratic_curve_to(60, 120, 65, 100); balloon_path.quadratic_curve_to(125, 100, 125, 62); balloon_path.quadratic_curve_to(125, 25, 75, 25); window.draw_path_stroke(balloon_path, Color::rgb(0, 0, 255)); window.char(200, 200, '═', Color::rgb(0, 0, 0)); window.char(208, 200, '═', Color::rgb(0, 0, 0)); // testing for non existent x,y position : does not panic but returns Color(0,0,0,0) let _non_existent_pixel = window.getpixel(width as i32 + 10, height as i32 + 10); // testing PartialEq for Color if Color::rgb(11, 2, 3) == Color::rgba(1, 2, 3, 100) { println!("Testing colors: they are the same!") } else { println!("Testing colors: they are NOT the same!") } //Draw a transparent rectangle over window content // default mode is Blend window.rect(250, 200, 80, 80, Color::rgba(100, 100, 100, 100)); //Draw an opaque rectangle replacing window content window.mode().set(Mode::Overwrite); // set window drawing mode to Overwrite from now on window.rect(300, 220, 80, 80, Color::rgb(100, 100, 100)); //Draw a hole in the window replacing alpha channel (Only in Orbital, not in SDL2) window.rect(300, 100, 80, 80, Color::rgba(10, 10, 10, 1)); //Draw a transparent rectangle over window content window.mode().set(Mode::Blend); //set mode to Blend fron now on window.rect(200, 230, 80, 80, Color::rgba(100, 100, 100, 100)); //Draw a blured box over window content 
window.box_blur(170, 100, 150, 150, 10); //Draw a shadow around a box window.box_shadow(170, 100, 150, 150, 0, 0, 20, Color::rgba(0, 0, 0, 255)); window.sync(); 'events: loop { for event in window.events() { match event.to_option() { EventOption::Quit(_quit_event) => break 'events, EventOption::Mouse(evt) => println!( "At position {:?} pixel color is : {:?}", (evt.x, evt.y), window.getpixel(evt.x, evt.y) ), event_option => println!("{:?}", event_option), } } } }
{ let (width, height) = orbclient::get_display_size().unwrap(); let mut window = Window::new( (width as i32) / 4, (height as i32) / 4, width / 2, height / 2, "TITLE", ) .unwrap(); let (win_w, win_h) = (width / 2, height / 2); // top left -> bottom rigth window.linear_gradient( 0, 0, win_w / 3, win_h,
identifier_body
simple.rs
// SPDX-License-Identifier: MIT use orbclient::{Color, EventOption, GraphicsPath, Mode, Renderer, Window}; fn
() { let (width, height) = orbclient::get_display_size().unwrap(); let mut window = Window::new( (width as i32) / 4, (height as i32) / 4, width / 2, height / 2, "TITLE", ) .unwrap(); let (win_w, win_h) = (width / 2, height / 2); // top left -> bottom rigth window.linear_gradient( 0, 0, win_w / 3, win_h, 0, 0, (win_w / 3) as i32, (win_h / 2) as i32, Color::rgb(128, 128, 128), Color::rgb(255, 255, 255), ); // horizontal gradient window.linear_gradient( (win_w / 3) as i32, 0, win_w / 3, win_h, (win_w / 3) as i32, 0, (2 * win_w / 3) as i32, 0, Color::rgb(128, 255, 255), Color::rgb(255, 255, 255), ); // vertical gradient window.linear_gradient( (2 * win_w / 3) as i32, 0, win_w / 3, win_h, (2 * win_w / 3) as i32, 0, (2 * win_w / 3) as i32, win_h as i32, Color::rgb(0, 128, 0), Color::rgb(255, 255, 255), ); window.arc(100, 100, -25, 1 << 0 | 1 << 2, Color::rgb(0, 0, 255)); window.arc(100, 100, -25, 1 << 1 | 1 << 3, Color::rgb(0, 255, 255)); window.arc(100, 100, -25, 1 << 4 | 1 << 6, Color::rgb(255, 0, 255)); window.arc(100, 100, -25, 1 << 5 | 1 << 7, Color::rgb(255, 255, 0)); window.circle(100, 100, 25, Color::rgb(0, 0, 0)); window.circle(100, 101, -25, Color::rgb(0, 255, 0)); window.circle(220, 220, -100, Color::rgba(128, 128, 128, 80)); window.wu_circle(150, 220, 100, Color::rgba(255, 0, 0, 255)); window.line(0, 0, 200, 200, Color::rgb(255, 0, 0)); window.line(0, 200, 200, 0, Color::rgb(128, 255, 0)); // vertical and horizontal line test window.line(100, 0, 100, 200, Color::rgb(0, 0, 255)); window.line(0, 100, 200, 100, Color::rgb(255, 255, 0)); window.wu_line(100, 220, 400, 250, Color::rgba(255, 0, 0, 255)); window.line(100, 230, 400, 260, Color::rgba(255, 0, 0, 255)); // path and bezier curve example draw a cloud let mut cloud_path = GraphicsPath::new(); cloud_path.move_to(170, 80); cloud_path.bezier_curve_to(130, 100, 130, 150, 230, 150); cloud_path.bezier_curve_to(250, 180, 320, 180, 340, 150); cloud_path.bezier_curve_to(420, 150, 420, 120, 390, 100); cloud_path.bezier_curve_to(430, 40, 370, 30, 340, 50); cloud_path.bezier_curve_to(320, 5, 250, 20, 250, 50); cloud_path.bezier_curve_to(200, 5, 150, 20, 170, 80); window.draw_path_stroke(cloud_path, Color::rgb(0, 0, 255)); // path and quadratic curve example draw a balloon let mut balloon_path = GraphicsPath::new(); balloon_path.move_to(75, 25); balloon_path.quadratic_curve_to(25, 25, 25, 62); balloon_path.quadratic_curve_to(25, 100, 50, 100); balloon_path.quadratic_curve_to(50, 120, 30, 125); balloon_path.quadratic_curve_to(60, 120, 65, 100); balloon_path.quadratic_curve_to(125, 100, 125, 62); balloon_path.quadratic_curve_to(125, 25, 75, 25); window.draw_path_stroke(balloon_path, Color::rgb(0, 0, 255)); window.char(200, 200, '═', Color::rgb(0, 0, 0)); window.char(208, 200, '═', Color::rgb(0, 0, 0)); // testing for non existent x,y position : does not panic but returns Color(0,0,0,0) let _non_existent_pixel = window.getpixel(width as i32 + 10, height as i32 + 10); // testing PartialEq for Color if Color::rgb(11, 2, 3) == Color::rgba(1, 2, 3, 100) { println!("Testing colors: they are the same!") } else { println!("Testing colors: they are NOT the same!") } //Draw a transparent rectangle over window content // default mode is Blend window.rect(250, 200, 80, 80, Color::rgba(100, 100, 100, 100)); //Draw an opaque rectangle replacing window content window.mode().set(Mode::Overwrite); // set window drawing mode to Overwrite from now on window.rect(300, 220, 80, 80, Color::rgb(100, 100, 100)); //Draw a hole in the window replacing alpha channel (Only 
in Orbital, not in SDL2) window.rect(300, 100, 80, 80, Color::rgba(10, 10, 10, 1)); //Draw a transparent rectangle over window content window.mode().set(Mode::Blend); //set mode to Blend fron now on window.rect(200, 230, 80, 80, Color::rgba(100, 100, 100, 100)); //Draw a blured box over window content window.box_blur(170, 100, 150, 150, 10); //Draw a shadow around a box window.box_shadow(170, 100, 150, 150, 0, 0, 20, Color::rgba(0, 0, 0, 255)); window.sync(); 'events: loop { for event in window.events() { match event.to_option() { EventOption::Quit(_quit_event) => break 'events, EventOption::Mouse(evt) => println!( "At position {:?} pixel color is : {:?}", (evt.x, evt.y), window.getpixel(evt.x, evt.y) ), event_option => println!("{:?}", event_option), } } } }
main
identifier_name
simple.rs
// SPDX-License-Identifier: MIT use orbclient::{Color, EventOption, GraphicsPath, Mode, Renderer, Window}; fn main() { let (width, height) = orbclient::get_display_size().unwrap(); let mut window = Window::new( (width as i32) / 4, (height as i32) / 4, width / 2, height / 2, "TITLE", ) .unwrap(); let (win_w, win_h) = (width / 2, height / 2); // top left -> bottom rigth window.linear_gradient( 0, 0, win_w / 3, win_h, 0, 0, (win_w / 3) as i32, (win_h / 2) as i32, Color::rgb(128, 128, 128), Color::rgb(255, 255, 255), ); // horizontal gradient window.linear_gradient( (win_w / 3) as i32, 0, win_w / 3, win_h, (win_w / 3) as i32, 0, (2 * win_w / 3) as i32, 0, Color::rgb(128, 255, 255), Color::rgb(255, 255, 255), ); // vertical gradient window.linear_gradient( (2 * win_w / 3) as i32, 0, win_w / 3, win_h, (2 * win_w / 3) as i32, 0, (2 * win_w / 3) as i32, win_h as i32, Color::rgb(0, 128, 0), Color::rgb(255, 255, 255), ); window.arc(100, 100, -25, 1 << 0 | 1 << 2, Color::rgb(0, 0, 255)); window.arc(100, 100, -25, 1 << 1 | 1 << 3, Color::rgb(0, 255, 255)); window.arc(100, 100, -25, 1 << 4 | 1 << 6, Color::rgb(255, 0, 255)); window.arc(100, 100, -25, 1 << 5 | 1 << 7, Color::rgb(255, 255, 0)); window.circle(100, 100, 25, Color::rgb(0, 0, 0)); window.circle(100, 101, -25, Color::rgb(0, 255, 0)); window.circle(220, 220, -100, Color::rgba(128, 128, 128, 80)); window.wu_circle(150, 220, 100, Color::rgba(255, 0, 0, 255)); window.line(0, 0, 200, 200, Color::rgb(255, 0, 0)); window.line(0, 200, 200, 0, Color::rgb(128, 255, 0)); // vertical and horizontal line test window.line(100, 0, 100, 200, Color::rgb(0, 0, 255)); window.line(0, 100, 200, 100, Color::rgb(255, 255, 0)); window.wu_line(100, 220, 400, 250, Color::rgba(255, 0, 0, 255)); window.line(100, 230, 400, 260, Color::rgba(255, 0, 0, 255)); // path and bezier curve example draw a cloud let mut cloud_path = GraphicsPath::new(); cloud_path.move_to(170, 80); cloud_path.bezier_curve_to(130, 100, 130, 150, 230, 150); cloud_path.bezier_curve_to(250, 180, 320, 180, 340, 150); cloud_path.bezier_curve_to(420, 150, 420, 120, 390, 100); cloud_path.bezier_curve_to(430, 40, 370, 30, 340, 50); cloud_path.bezier_curve_to(320, 5, 250, 20, 250, 50); cloud_path.bezier_curve_to(200, 5, 150, 20, 170, 80); window.draw_path_stroke(cloud_path, Color::rgb(0, 0, 255)); // path and quadratic curve example draw a balloon let mut balloon_path = GraphicsPath::new(); balloon_path.move_to(75, 25); balloon_path.quadratic_curve_to(25, 25, 25, 62); balloon_path.quadratic_curve_to(25, 100, 50, 100); balloon_path.quadratic_curve_to(50, 120, 30, 125); balloon_path.quadratic_curve_to(60, 120, 65, 100); balloon_path.quadratic_curve_to(125, 100, 125, 62); balloon_path.quadratic_curve_to(125, 25, 75, 25); window.draw_path_stroke(balloon_path, Color::rgb(0, 0, 255)); window.char(200, 200, '═', Color::rgb(0, 0, 0)); window.char(208, 200, '═', Color::rgb(0, 0, 0)); // testing for non existent x,y position : does not panic but returns Color(0,0,0,0) let _non_existent_pixel = window.getpixel(width as i32 + 10, height as i32 + 10); // testing PartialEq for Color if Color::rgb(11, 2, 3) == Color::rgba(1, 2, 3, 100) { println!("Testing colors: they are the same!") } else { println!("Testing colors: they are NOT the same!") } //Draw a transparent rectangle over window content // default mode is Blend window.rect(250, 200, 80, 80, Color::rgba(100, 100, 100, 100)); //Draw an opaque rectangle replacing window content window.mode().set(Mode::Overwrite); // set window drawing mode to Overwrite from now on 
window.rect(300, 220, 80, 80, Color::rgb(100, 100, 100)); //Draw a hole in the window replacing alpha channel (Only in Orbital, not in SDL2) window.rect(300, 100, 80, 80, Color::rgba(10, 10, 10, 1)); //Draw a transparent rectangle over window content window.mode().set(Mode::Blend); //set mode to Blend fron now on window.rect(200, 230, 80, 80, Color::rgba(100, 100, 100, 100)); //Draw a blured box over window content
window.sync(); 'events: loop { for event in window.events() { match event.to_option() { EventOption::Quit(_quit_event) => break 'events, EventOption::Mouse(evt) => println!( "At position {:?} pixel color is : {:?}", (evt.x, evt.y), window.getpixel(evt.x, evt.y) ), event_option => println!("{:?}", event_option), } } } }
window.box_blur(170, 100, 150, 150, 10); //Draw a shadow around a box window.box_shadow(170, 100, 150, 150, 0, 0, 20, Color::rgba(0, 0, 0, 255));
random_line_split
lib.rs
#![warn(missing_docs, missing_debug_implementations)] //! EGL utilities //! //! This module contains bindings to the `libwayland-egl.so` library. //! //! This library is used to interface with the OpenGL stack, and creating //! EGL surfaces from a wayland surface.
use std::os::raw::c_void; use wayland_backend::{client::InvalidId, sys::client::ObjectId}; use wayland_sys::{client::wl_proxy, egl::*, ffi_dispatch}; /// Checks if the wayland-egl lib is available and can be used /// /// Trying to create an `WlEglSurface` while this function returns /// `false` will result in a panic. pub fn is_available() -> bool { is_lib_available() } unsafe impl Send for WlEglSurface {} unsafe impl Sync for WlEglSurface {} /// EGL surface /// /// This object is a simple wrapper around a `WlSurface` to add the EGL /// capabilities. Just use the `ptr` method once this object is created /// to get the window pointer your OpenGL library is needing to initialize the /// EGL context (you'll most likely need the display ptr as well, that you can /// get via the `ptr` method of the `Proxy` trait on the `WlDisplay` object). #[derive(Debug)] pub struct WlEglSurface { ptr: *mut wl_egl_window, } impl WlEglSurface { /// Create an EGL surface from a wayland surface /// /// This method will check that the provided `ObjectId` is still alive and from the /// correct interface (`wl_surface`). pub fn new(surface: ObjectId, width: i32, height: i32) -> Result<WlEglSurface, InvalidId> { if surface.interface().name!= "wl_surface" { return Err(InvalidId); } let ptr = surface.as_ptr(); if ptr.is_null() { Err(InvalidId) } else { Ok(unsafe { WlEglSurface::new_from_raw(ptr, width, height) }) } } /// Create an EGL surface from a raw pointer to a wayland surface /// /// # Safety /// /// The provided pointer must be a valid `wl_surface` pointer from `libwayland-client`. pub unsafe fn new_from_raw(surface: *mut wl_proxy, width: i32, height: i32) -> WlEglSurface { let ptr = ffi_dispatch!(WAYLAND_EGL_HANDLE, wl_egl_window_create, surface, width, height); WlEglSurface { ptr } } /// Fetch current size of the EGL surface pub fn get_size(&self) -> (i32, i32) { let mut w = 0i32; let mut h = 0i32; unsafe { ffi_dispatch!( WAYLAND_EGL_HANDLE, wl_egl_window_get_attached_size, self.ptr, &mut w as *mut i32, &mut h as *mut i32 ); } (w, h) } /// Resize the EGL surface /// /// The two first arguments `(width, height)` are the new size of /// the surface, the two others `(dx, dy)` represent the displacement /// of the top-left corner of the surface. It allows you to control the /// direction of the resizing if necessary. pub fn resize(&self, width: i32, height: i32, dx: i32, dy: i32) { unsafe { ffi_dispatch!(WAYLAND_EGL_HANDLE, wl_egl_window_resize, self.ptr, width, height, dx, dy) } } /// Raw pointer to the EGL surface /// /// You'll need this pointer to initialize the EGL context in your /// favourite OpenGL lib. pub fn ptr(&self) -> *const c_void { self.ptr as *const c_void } } impl Drop for WlEglSurface { fn drop(&mut self) { unsafe { ffi_dispatch!(WAYLAND_EGL_HANDLE, wl_egl_window_destroy, self.ptr); } } }
//! //! See WlEglSurface documentation for details.
random_line_split
lib.rs
#![warn(missing_docs, missing_debug_implementations)] //! EGL utilities //! //! This module contains bindings to the `libwayland-egl.so` library. //! //! This library is used to interface with the OpenGL stack, and creating //! EGL surfaces from a wayland surface. //! //! See WlEglSurface documentation for details. use std::os::raw::c_void; use wayland_backend::{client::InvalidId, sys::client::ObjectId}; use wayland_sys::{client::wl_proxy, egl::*, ffi_dispatch}; /// Checks if the wayland-egl lib is available and can be used /// /// Trying to create an `WlEglSurface` while this function returns /// `false` will result in a panic. pub fn is_available() -> bool { is_lib_available() } unsafe impl Send for WlEglSurface {} unsafe impl Sync for WlEglSurface {} /// EGL surface /// /// This object is a simple wrapper around a `WlSurface` to add the EGL /// capabilities. Just use the `ptr` method once this object is created /// to get the window pointer your OpenGL library is needing to initialize the /// EGL context (you'll most likely need the display ptr as well, that you can /// get via the `ptr` method of the `Proxy` trait on the `WlDisplay` object). #[derive(Debug)] pub struct WlEglSurface { ptr: *mut wl_egl_window, } impl WlEglSurface { /// Create an EGL surface from a wayland surface /// /// This method will check that the provided `ObjectId` is still alive and from the /// correct interface (`wl_surface`). pub fn new(surface: ObjectId, width: i32, height: i32) -> Result<WlEglSurface, InvalidId>
/// Create an EGL surface from a raw pointer to a wayland surface /// /// # Safety /// /// The provided pointer must be a valid `wl_surface` pointer from `libwayland-client`. pub unsafe fn new_from_raw(surface: *mut wl_proxy, width: i32, height: i32) -> WlEglSurface { let ptr = ffi_dispatch!(WAYLAND_EGL_HANDLE, wl_egl_window_create, surface, width, height); WlEglSurface { ptr } } /// Fetch current size of the EGL surface pub fn get_size(&self) -> (i32, i32) { let mut w = 0i32; let mut h = 0i32; unsafe { ffi_dispatch!( WAYLAND_EGL_HANDLE, wl_egl_window_get_attached_size, self.ptr, &mut w as *mut i32, &mut h as *mut i32 ); } (w, h) } /// Resize the EGL surface /// /// The two first arguments `(width, height)` are the new size of /// the surface, the two others `(dx, dy)` represent the displacement /// of the top-left corner of the surface. It allows you to control the /// direction of the resizing if necessary. pub fn resize(&self, width: i32, height: i32, dx: i32, dy: i32) { unsafe { ffi_dispatch!(WAYLAND_EGL_HANDLE, wl_egl_window_resize, self.ptr, width, height, dx, dy) } } /// Raw pointer to the EGL surface /// /// You'll need this pointer to initialize the EGL context in your /// favourite OpenGL lib. pub fn ptr(&self) -> *const c_void { self.ptr as *const c_void } } impl Drop for WlEglSurface { fn drop(&mut self) { unsafe { ffi_dispatch!(WAYLAND_EGL_HANDLE, wl_egl_window_destroy, self.ptr); } } }
{ if surface.interface().name != "wl_surface" { return Err(InvalidId); } let ptr = surface.as_ptr(); if ptr.is_null() { Err(InvalidId) } else { Ok(unsafe { WlEglSurface::new_from_raw(ptr, width, height) }) } }
identifier_body
lib.rs
#![warn(missing_docs, missing_debug_implementations)] //! EGL utilities //! //! This module contains bindings to the `libwayland-egl.so` library. //! //! This library is used to interface with the OpenGL stack, and creating //! EGL surfaces from a wayland surface. //! //! See WlEglSurface documentation for details. use std::os::raw::c_void; use wayland_backend::{client::InvalidId, sys::client::ObjectId}; use wayland_sys::{client::wl_proxy, egl::*, ffi_dispatch}; /// Checks if the wayland-egl lib is available and can be used /// /// Trying to create an `WlEglSurface` while this function returns /// `false` will result in a panic. pub fn is_available() -> bool { is_lib_available() } unsafe impl Send for WlEglSurface {} unsafe impl Sync for WlEglSurface {} /// EGL surface /// /// This object is a simple wrapper around a `WlSurface` to add the EGL /// capabilities. Just use the `ptr` method once this object is created /// to get the window pointer your OpenGL library is needing to initialize the /// EGL context (you'll most likely need the display ptr as well, that you can /// get via the `ptr` method of the `Proxy` trait on the `WlDisplay` object). #[derive(Debug)] pub struct WlEglSurface { ptr: *mut wl_egl_window, } impl WlEglSurface { /// Create an EGL surface from a wayland surface /// /// This method will check that the provided `ObjectId` is still alive and from the /// correct interface (`wl_surface`). pub fn new(surface: ObjectId, width: i32, height: i32) -> Result<WlEglSurface, InvalidId> { if surface.interface().name!= "wl_surface" { return Err(InvalidId); } let ptr = surface.as_ptr(); if ptr.is_null() { Err(InvalidId) } else { Ok(unsafe { WlEglSurface::new_from_raw(ptr, width, height) }) } } /// Create an EGL surface from a raw pointer to a wayland surface /// /// # Safety /// /// The provided pointer must be a valid `wl_surface` pointer from `libwayland-client`. pub unsafe fn new_from_raw(surface: *mut wl_proxy, width: i32, height: i32) -> WlEglSurface { let ptr = ffi_dispatch!(WAYLAND_EGL_HANDLE, wl_egl_window_create, surface, width, height); WlEglSurface { ptr } } /// Fetch current size of the EGL surface pub fn get_size(&self) -> (i32, i32) { let mut w = 0i32; let mut h = 0i32; unsafe { ffi_dispatch!( WAYLAND_EGL_HANDLE, wl_egl_window_get_attached_size, self.ptr, &mut w as *mut i32, &mut h as *mut i32 ); } (w, h) } /// Resize the EGL surface /// /// The two first arguments `(width, height)` are the new size of /// the surface, the two others `(dx, dy)` represent the displacement /// of the top-left corner of the surface. It allows you to control the /// direction of the resizing if necessary. pub fn resize(&self, width: i32, height: i32, dx: i32, dy: i32) { unsafe { ffi_dispatch!(WAYLAND_EGL_HANDLE, wl_egl_window_resize, self.ptr, width, height, dx, dy) } } /// Raw pointer to the EGL surface /// /// You'll need this pointer to initialize the EGL context in your /// favourite OpenGL lib. pub fn
(&self) -> *const c_void { self.ptr as *const c_void } } impl Drop for WlEglSurface { fn drop(&mut self) { unsafe { ffi_dispatch!(WAYLAND_EGL_HANDLE, wl_egl_window_destroy, self.ptr); } } }
ptr
identifier_name
lib.rs
#![warn(missing_docs, missing_debug_implementations)] //! EGL utilities //! //! This module contains bindings to the `libwayland-egl.so` library. //! //! This library is used to interface with the OpenGL stack, and creating //! EGL surfaces from a wayland surface. //! //! See WlEglSurface documentation for details. use std::os::raw::c_void; use wayland_backend::{client::InvalidId, sys::client::ObjectId}; use wayland_sys::{client::wl_proxy, egl::*, ffi_dispatch}; /// Checks if the wayland-egl lib is available and can be used /// /// Trying to create an `WlEglSurface` while this function returns /// `false` will result in a panic. pub fn is_available() -> bool { is_lib_available() } unsafe impl Send for WlEglSurface {} unsafe impl Sync for WlEglSurface {} /// EGL surface /// /// This object is a simple wrapper around a `WlSurface` to add the EGL /// capabilities. Just use the `ptr` method once this object is created /// to get the window pointer your OpenGL library is needing to initialize the /// EGL context (you'll most likely need the display ptr as well, that you can /// get via the `ptr` method of the `Proxy` trait on the `WlDisplay` object). #[derive(Debug)] pub struct WlEglSurface { ptr: *mut wl_egl_window, } impl WlEglSurface { /// Create an EGL surface from a wayland surface /// /// This method will check that the provided `ObjectId` is still alive and from the /// correct interface (`wl_surface`). pub fn new(surface: ObjectId, width: i32, height: i32) -> Result<WlEglSurface, InvalidId> { if surface.interface().name!= "wl_surface" { return Err(InvalidId); } let ptr = surface.as_ptr(); if ptr.is_null() { Err(InvalidId) } else
} /// Create an EGL surface from a raw pointer to a wayland surface /// /// # Safety /// /// The provided pointer must be a valid `wl_surface` pointer from `libwayland-client`. pub unsafe fn new_from_raw(surface: *mut wl_proxy, width: i32, height: i32) -> WlEglSurface { let ptr = ffi_dispatch!(WAYLAND_EGL_HANDLE, wl_egl_window_create, surface, width, height); WlEglSurface { ptr } } /// Fetch current size of the EGL surface pub fn get_size(&self) -> (i32, i32) { let mut w = 0i32; let mut h = 0i32; unsafe { ffi_dispatch!( WAYLAND_EGL_HANDLE, wl_egl_window_get_attached_size, self.ptr, &mut w as *mut i32, &mut h as *mut i32 ); } (w, h) } /// Resize the EGL surface /// /// The two first arguments `(width, height)` are the new size of /// the surface, the two others `(dx, dy)` represent the displacement /// of the top-left corner of the surface. It allows you to control the /// direction of the resizing if necessary. pub fn resize(&self, width: i32, height: i32, dx: i32, dy: i32) { unsafe { ffi_dispatch!(WAYLAND_EGL_HANDLE, wl_egl_window_resize, self.ptr, width, height, dx, dy) } } /// Raw pointer to the EGL surface /// /// You'll need this pointer to initialize the EGL context in your /// favourite OpenGL lib. pub fn ptr(&self) -> *const c_void { self.ptr as *const c_void } } impl Drop for WlEglSurface { fn drop(&mut self) { unsafe { ffi_dispatch!(WAYLAND_EGL_HANDLE, wl_egl_window_destroy, self.ptr); } } }
{ Ok(unsafe { WlEglSurface::new_from_raw(ptr, width, height) }) }
conditional_block
home.rs
///Tässä tiedostossa luodaan routeri "/" polulle. Jos käyttäjä on kirjautuneena, näytetään omat ///tapahtumat, suosituimmat tapahtumat, lippukunnan tapahtumat ja hallinoimat tapahtumat. Jos ei ole, kerrotaan mikä on Silmukka ja ohjataan ///rekisteröitymään use nickel::{Nickel, HttpRouter}; use nickel::router::router::Router; use session::{Session, CookieSession}; use std::collections::HashMap; use ServerData; pub fn route()->Rou
erverData>{ let mut home: Router<ServerData> = Nickel::router(); home.get("/", middleware!{|req, mut vastaus| let mut palautetaan = HashMap::new(); let logged = match *CookieSession::get_mut(req, &mut vastaus){ Some(_) => true, _ => false }; if logged == false{ palautetaan.insert("kirjaudu".to_string(), "Kirjaudu sisään".to_string()); return vastaus.render("assets/out_index.html", &palautetaan); } else{ let mut user = "/user/".to_string(); // otetaan käyttäjänimi {let ref a: Option<String> = *CookieSession::get_mut(req, &mut vastaus); user.push_str(&(a.clone().unwrap()));} palautetaan.insert("kirjaudu".to_string(), "Kirjaudu ulos".to_string()); palautetaan.insert("kayttaja".to_string(), user); return vastaus.render("assets/index.html", &palautetaan); } }); return home; }
ter<S
identifier_name
home.rs
///Tässä tiedostossa luodaan routeri "/" polulle. Jos käyttäjä on kirjautuneena, näytetään omat ///tapahtumat, suosituimmat tapahtumat, lippukunnan tapahtumat ja hallinoimat tapahtumat. Jos ei ole, kerrotaan mikä on Silmukka ja ohjataan ///rekisteröitymään use nickel::{Nickel, HttpRouter}; use nickel::router::router::Router; use session::{Session, CookieSession}; use std::collections::HashMap; use ServerData; pub fn route()->Router<ServerData>{ let mut home: Router<ServerData> = Nickel::router(); home.get("/", middleware!{|req, mut vastaus| let mut palautetaan = HashMap::new(); let logged = match *CookieSession::get_mut(req, &mut vastaus){ Some(_) => true, _ => false };
let mut user = "/user/".to_string(); // otetaan käyttäjänimi {let ref a: Option<String> = *CookieSession::get_mut(req, &mut vastaus); user.push_str(&(a.clone().unwrap()));} palautetaan.insert("kirjaudu".to_string(), "Kirjaudu ulos".to_string()); palautetaan.insert("kayttaja".to_string(), user); return vastaus.render("assets/index.html", &palautetaan); } }); return home; }
if logged == false{ palautetaan.insert("kirjaudu".to_string(), "Kirjaudu sisään".to_string()); return vastaus.render("assets/out_index.html", &palautetaan); } else{
random_line_split
home.rs
///Tässä tiedostossa luodaan routeri "/" polulle. Jos käyttäjä on kirjautuneena, näytetään omat ///tapahtumat, suosituimmat tapahtumat, lippukunnan tapahtumat ja hallinoimat tapahtumat. Jos ei ole, kerrotaan mikä on Silmukka ja ohjataan ///rekisteröitymään use nickel::{Nickel, HttpRouter}; use nickel::router::router::Router; use session::{Session, CookieSession}; use std::collections::HashMap; use ServerData; pub fn route()->Router<ServerData>{ let mu
}); return home; }
t home: Router<ServerData> = Nickel::router(); home.get("/", middleware!{|req, mut vastaus| let mut palautetaan = HashMap::new(); let logged = match *CookieSession::get_mut(req, &mut vastaus){ Some(_) => true, _ => false }; if logged == false{ palautetaan.insert("kirjaudu".to_string(), "Kirjaudu sisään".to_string()); return vastaus.render("assets/out_index.html", &palautetaan); } else{ let mut user = "/user/".to_string(); // otetaan käyttäjänimi {let ref a: Option<String> = *CookieSession::get_mut(req, &mut vastaus); user.push_str(&(a.clone().unwrap()));} palautetaan.insert("kirjaudu".to_string(), "Kirjaudu ulos".to_string()); palautetaan.insert("kayttaja".to_string(), user); return vastaus.render("assets/index.html", &palautetaan); }
identifier_body
mod.rs
// Copyleft (ↄ) meh. <[email protected]> | http://meh.schizofreni.co // // This file is part of cancer. // // cancer is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // cancer is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with cancer. If not, see <http://www.gnu.org/licenses/>. mod iter; pub use self::iter::Iter; pub mod cell; pub use self::cell::Cell; mod row; pub use self::row::Row; mod free; pub use self::free::Free; pub mod touched; pub use self::touched::Touched;
pub mod cursor; pub use self::cursor::Cursor; pub mod grid; pub use self::grid::Grid; mod tabs; pub use self::tabs::Tabs; mod input; pub use self::input::Input; mod sixel; pub use self::sixel::Sixel; mod terminal; pub use self::terminal::Terminal;
pub mod mode; pub use self::mode::Mode;
random_line_split
io.rs
// This file is part of zinc64. // Copyright (c) 2016-2019 Sebastian Jastrzebski. All rights reserved. // Licensed under the GPLv3. See LICENSE file in the project root for full license text. #[cfg(not(feature = "std"))] use alloc::prelude::*; use byteorder::ByteOrder; use core::result; pub type Result<T> = result::Result<T, String>; pub trait Reader { fn read(&mut self, buf: &mut [u8]) -> Result<usize>; fn read_to_end(&mut self, buf: &mut Vec<u8>) -> Result<usize>; fn read_exact(&mut self, buf: &mut [u8]) -> Result<()>; fn consume(&mut self, amt: usize); } pub trait ReadBytesExt: Reader { #[inline] fn read_u8(&mut self) -> Result<u8> { let mut buf = [0; 1]; self.read_exact(&mut buf)?; Ok(buf[0]) } #[inline] fn read_u16<T: ByteOrder>(&mut self) -> Result<u16> { let mut buf = [0; 2]; self.read_exact(&mut buf)?; Ok(T::read_u16(&buf)) } #[inline] fn read_u32<T: ByteOrder>(&mut self) -> Result<u32>
} impl<R: Reader +?Sized> ReadBytesExt for R {}
{ let mut buf = [0; 4]; self.read_exact(&mut buf)?; Ok(T::read_u32(&buf)) }
identifier_body
io.rs
// This file is part of zinc64.
// Copyright (c) 2016-2019 Sebastian Jastrzebski. All rights reserved. // Licensed under the GPLv3. See LICENSE file in the project root for full license text. #[cfg(not(feature = "std"))] use alloc::prelude::*; use byteorder::ByteOrder; use core::result; pub type Result<T> = result::Result<T, String>; pub trait Reader { fn read(&mut self, buf: &mut [u8]) -> Result<usize>; fn read_to_end(&mut self, buf: &mut Vec<u8>) -> Result<usize>; fn read_exact(&mut self, buf: &mut [u8]) -> Result<()>; fn consume(&mut self, amt: usize); } pub trait ReadBytesExt: Reader { #[inline] fn read_u8(&mut self) -> Result<u8> { let mut buf = [0; 1]; self.read_exact(&mut buf)?; Ok(buf[0]) } #[inline] fn read_u16<T: ByteOrder>(&mut self) -> Result<u16> { let mut buf = [0; 2]; self.read_exact(&mut buf)?; Ok(T::read_u16(&buf)) } #[inline] fn read_u32<T: ByteOrder>(&mut self) -> Result<u32> { let mut buf = [0; 4]; self.read_exact(&mut buf)?; Ok(T::read_u32(&buf)) } } impl<R: Reader +?Sized> ReadBytesExt for R {}
random_line_split
io.rs
// This file is part of zinc64. // Copyright (c) 2016-2019 Sebastian Jastrzebski. All rights reserved. // Licensed under the GPLv3. See LICENSE file in the project root for full license text. #[cfg(not(feature = "std"))] use alloc::prelude::*; use byteorder::ByteOrder; use core::result; pub type Result<T> = result::Result<T, String>; pub trait Reader { fn read(&mut self, buf: &mut [u8]) -> Result<usize>; fn read_to_end(&mut self, buf: &mut Vec<u8>) -> Result<usize>; fn read_exact(&mut self, buf: &mut [u8]) -> Result<()>; fn consume(&mut self, amt: usize); } pub trait ReadBytesExt: Reader { #[inline] fn
(&mut self) -> Result<u8> { let mut buf = [0; 1]; self.read_exact(&mut buf)?; Ok(buf[0]) } #[inline] fn read_u16<T: ByteOrder>(&mut self) -> Result<u16> { let mut buf = [0; 2]; self.read_exact(&mut buf)?; Ok(T::read_u16(&buf)) } #[inline] fn read_u32<T: ByteOrder>(&mut self) -> Result<u32> { let mut buf = [0; 4]; self.read_exact(&mut buf)?; Ok(T::read_u32(&buf)) } } impl<R: Reader +?Sized> ReadBytesExt for R {}
read_u8
identifier_name
colors.rs
//! Color definitions. //! //! This module should contain all colors used in the game. Later on, we could //! replace it with a customizeable version, where people could write themes //! (e.g. in TOML or YAML format). //! //! Make sure to contain only semantic names (e.g. "primary" or "background", //! not "red" or "green"). pub const BLACK: [f32; 4] = [0.0, 0.0, 0.0, 1.0]; pub const WHITE: [f32; 4] = [1.0, 1.0, 1.0, 1.0]; pub const TRANSPARENT_WHITE: [f32; 4] = [1.0, 1.0, 1.0, 0.2]; pub const YELLOW: [f32; 4] = [1.0, 1.0, 0.22, 1.0]; pub const ORANGE: [f32; 4] = [1.0, 0.61, 0.22, 1.0]; pub const RED: [f32; 4] = [1.0, 0.22, 0.22, 1.0]; pub const LIGHT_BLUE: [f32; 4] = [0.22, 0.22, 1.0, 1.0]; pub const BLUE: [f32; 4] = [0.0, 0.0, 1.0, 1.0]; pub struct
{ pub primary: [f32; 4], pub secondary: [f32; 4], } pub const PLAYERS: [Player; 3] = [ Player { primary: ORANGE, secondary: YELLOW, }, Player { primary: RED, secondary: ORANGE, }, Player { primary: BLUE, secondary: LIGHT_BLUE, }, ];
Player
identifier_name
colors.rs
//! Color definitions. //! //! This module should contain all colors used in the game. Later on, we could //! replace it with a customizeable version, where people could write themes //! (e.g. in TOML or YAML format). //! //! Make sure to contain only semantic names (e.g. "primary" or "background", //! not "red" or "green"). pub const BLACK: [f32; 4] = [0.0, 0.0, 0.0, 1.0]; pub const WHITE: [f32; 4] = [1.0, 1.0, 1.0, 1.0];
pub const LIGHT_BLUE: [f32; 4] = [0.22, 0.22, 1.0, 1.0]; pub const BLUE: [f32; 4] = [0.0, 0.0, 1.0, 1.0]; pub struct Player { pub primary: [f32; 4], pub secondary: [f32; 4], } pub const PLAYERS: [Player; 3] = [ Player { primary: ORANGE, secondary: YELLOW, }, Player { primary: RED, secondary: ORANGE, }, Player { primary: BLUE, secondary: LIGHT_BLUE, }, ];
pub const TRANSPARENT_WHITE: [f32; 4] = [1.0, 1.0, 1.0, 0.2]; pub const YELLOW: [f32; 4] = [1.0, 1.0, 0.22, 1.0]; pub const ORANGE: [f32; 4] = [1.0, 0.61, 0.22, 1.0]; pub const RED: [f32; 4] = [1.0, 0.22, 0.22, 1.0];
random_line_split
simple_packet.rs
use crate::errors::PcapError; use byteorder::{ByteOrder, ReadBytesExt}; use std::borrow::Cow; use derive_into_owned::IntoOwned; /// The Simple Packet Block (SPB) is a lightweight container for storing the packets coming from the network. /// Its presence is optional. #[derive(Clone, Debug, IntoOwned)] pub struct SimplePacketBlock<'a> { /// Actual length of the packet when it was transmitted on the network. pub original_len: u32, /// The data coming from the network, including link-layer headers. pub data: Cow<'a, [u8]> } impl<'a> SimplePacketBlock<'a> { pub fn from_slice<B: ByteOrder>(mut slice: &'a [u8]) -> Result<(&'a [u8], Self), PcapError> {
if slice.len() < 4 { return Err(PcapError::InvalidField("SimplePacketBlock: block length < 4")); } let original_len = slice.read_u32::<B>()?; let packet = SimplePacketBlock { original_len, data: Cow::Borrowed(slice) }; Ok((&[], packet)) } }
random_line_split
simple_packet.rs
use crate::errors::PcapError; use byteorder::{ByteOrder, ReadBytesExt}; use std::borrow::Cow; use derive_into_owned::IntoOwned; /// The Simple Packet Block (SPB) is a lightweight container for storing the packets coming from the network. /// Its presence is optional. #[derive(Clone, Debug, IntoOwned)] pub struct SimplePacketBlock<'a> { /// Actual length of the packet when it was transmitted on the network. pub original_len: u32, /// The data coming from the network, including link-layer headers. pub data: Cow<'a, [u8]> } impl<'a> SimplePacketBlock<'a> { pub fn
<B: ByteOrder>(mut slice: &'a [u8]) -> Result<(&'a [u8], Self), PcapError> { if slice.len() < 4 { return Err(PcapError::InvalidField("SimplePacketBlock: block length < 4")); } let original_len = slice.read_u32::<B>()?; let packet = SimplePacketBlock { original_len, data: Cow::Borrowed(slice) }; Ok((&[], packet)) } }
from_slice
identifier_name
simple_packet.rs
use crate::errors::PcapError; use byteorder::{ByteOrder, ReadBytesExt}; use std::borrow::Cow; use derive_into_owned::IntoOwned; /// The Simple Packet Block (SPB) is a lightweight container for storing the packets coming from the network. /// Its presence is optional. #[derive(Clone, Debug, IntoOwned)] pub struct SimplePacketBlock<'a> { /// Actual length of the packet when it was transmitted on the network. pub original_len: u32, /// The data coming from the network, including link-layer headers. pub data: Cow<'a, [u8]> } impl<'a> SimplePacketBlock<'a> { pub fn from_slice<B: ByteOrder>(mut slice: &'a [u8]) -> Result<(&'a [u8], Self), PcapError> { if slice.len() < 4
let original_len = slice.read_u32::<B>()?; let packet = SimplePacketBlock { original_len, data: Cow::Borrowed(slice) }; Ok((&[], packet)) } }
{ return Err(PcapError::InvalidField("SimplePacketBlock: block length < 4")); }
conditional_block
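The three `simple_packet.rs` rows above all revolve around the same parsing step: read a fixed-width length field from the front of a byte slice, then hand back the remaining bytes as borrowed packet data (the original stays generic over endianness via the `byteorder` crate). Below is a minimal std-only sketch of that shape; the error type, field names, and choice of big-endian are assumptions made for the illustration, not the crate's actual definitions.

```rust
use std::convert::TryInto;

/// Assumed stand-in for the crate's error type.
#[derive(Debug)]
struct ParseError(&'static str);

/// Borrowed view of a length-prefixed packet, mirroring the shape above.
struct Packet<'a> {
    original_len: u32,
    data: &'a [u8],
}

/// Read a big-endian u32 length, then borrow the rest of the slice as payload.
fn parse_packet(slice: &[u8]) -> Result<Packet<'_>, ParseError> {
    if slice.len() < 4 {
        return Err(ParseError("block length < 4"));
    }
    let (len_bytes, rest) = slice.split_at(4);
    let original_len = u32::from_be_bytes(len_bytes.try_into().unwrap());
    Ok(Packet { original_len, data: rest })
}

fn main() {
    let raw = [0x00, 0x00, 0x00, 0x05, 0xde, 0xad, 0xbe, 0xef];
    let pkt = parse_packet(&raw).unwrap();
    assert_eq!(pkt.original_len, 5);
    assert_eq!(pkt.data, &[0xde, 0xad, 0xbe, 0xef]);
}
```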
reader.rs
//! Represents a way to read a record-based file by mapping each read line to a record. The mapping between //! the data from the file and the record name is made by the `mapper` function. //! //! # Examples //! ```rust //! use rbf::record::{AsciiMode, UTF8Mode}; //! use rbf::layout::Layout; //! use rbf::reader::Reader; //! //! // load our layout //! let layout = Layout::<UTF8Mode>::new("./tests/test.xml"); //! //! // create reader //! let mapper = Box::new(|x: &str| x[0..2].to_string()); //! let mut reader = Reader::<UTF8Mode>::new("./tests/test_utf8.data", layout, mapper); //! //! // useful vars //! let letters = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"; //! let digits = "123456789"; //! let greek = "αβγδεζηθικλμνξοπρστυφχψω"; //! //! // read file and loop through records //! while let Some(rec) = reader.next() { //! match rec.name.as_ref() { //! "LL" => { //! assert_eq!(rec.get_value("ID"), "LL"); //! //! // test every field //! for (i, l) in letters.chars().enumerate() { //! let fname = format!("W{}", i+1); //! assert_eq!(rec.get_value(&fname), l.to_string().repeat(i+1)); //! } //! } //! "NB" => { //! assert_eq!(rec.get_value("ID"), "NB"); //! // test every field //! for (i, n) in digits.chars().enumerate() { //! let fname = format!("N{}", i+1); //! assert_eq!(rec.get_value(&fname), n.to_string().repeat(i+1)); //! } //! }, //! "GL" => { //! assert_eq!(rec.get_value("ID"), "GL"); //! for (i, l) in greek.chars().enumerate() { //! let fname = format!("G{}", i+1); //! assert_eq!(rec.get_value(&fname), l.to_string().repeat(i+1)); //! } //! }, //! "DP" => { //! assert_eq!(rec.get_value("ID"), "DP"); //! assert_eq!(rec.get("F5").unwrap()[0].value(), "AAAAA"); //! assert_eq!(rec.get("F5").unwrap()[1].value(), "BBBBB"); //! assert_eq!(rec.get("F5").unwrap()[2].value(), "CCCCC"); //! assert_eq!(rec.get("F5").unwrap()[3].value(), "DDDDD"); //! }, //! _ => panic!("record name <{}> not found in file <{}>", rec.name, "./tests/test_utf8.data") //! } //! } //! //! ``` use std::error::Error; use std::io::{BufReader,BufRead}; use std::fs::File; use record::{ReadMode, Record}; use layout::Layout; use mapper::RecordHasher; /// This enum defines whether we should stop reading when an unknown record ID is found #[derive(PartialEq)] pub enum ReaderLazyness { /// When set, this panics the reader Stringent, /// When set, ignore unknown reader Lazy, } // function type to get the record ID from the whole line read from the target file //pub type RecordMapper = fn(&str) -> &str; pub struct Reader<T> { /// record-based file to read pub rbf_file: String, /// layout struct describing the file to read pub layout: Layout<T>, /// function to map each line to a record name pub mapper: RecordHasher, /// buffer use when reading the file line by line bufreader: BufReader<File>, /// the line read from file pub line: String, /// lazyness when reading pub lazyness: ReaderLazyness, /// input file size pub file_size: u64, /// number of chars read when reading a line pub chars_read: usize, /// number of lines read so far pub nblines_read: u64, } impl<T> Reader<T> { /// Creates a new reader. /// /// # Arguments /// /// * `rbf_file` - name and path of the record-based file to read /// * `layout`: Layout struct previously created from the XML layout file describing the data file /// * `mapper` function to map each line to a record name /// /// # Panics /// If `rbf_file` could not be read pub fn new(rbf_file: &str, layout: Layout<T>, mapper: RecordHasher) -> Reader<T> { // open file f
line: String::new(), lazyness: ReaderLazyness::Lazy, file_size: metadata.len(), chars_read: 0, nblines_read: 0, } } /// Returns a muta ble reference on the record corresponding to the line read. **next()** returns **None** /// if EOF. /// It allows to read the whole file using the following idiom: /// /// ```rust,ignore /// // loop through records /// while let Some(rec) = reader.next() { /// // do something with rec /// } /// ``` /// # Panics /// If an error is met when reading the file. pub fn next(&mut self) -> Option<&mut Record<T>> where Record<T>: ReadMode { // record ID from line let mut rec_id: String; // try to get a record ID loop { // clear buffer, otherwise buffer is growing self.line.clear(); // read one line of text match self.bufreader.read_line(&mut self.line) { // No bytes read? This is EOF and we must end the iteration Ok(chars_read) => if chars_read == 0 { return None; } else { self.chars_read = chars_read; self.nblines_read += 1; }, // error reading bytes Err(why) => panic!("error {} when reading file {}", why.description(), self.rbf_file), }; // get the record ID using mapper rec_id = (self.mapper)(&self.line); // record ID could not exist match self.layout.contains_record(&rec_id) { true => break, false => if self.lazyness == ReaderLazyness::Stringent { panic!("couldn't find record ID {} in file {}", rec_id, self.rbf_file); } else { continue; } }; } // set value for this record let rec = self.layout.get_mut(&rec_id).unwrap(); // set all field values rec.set_value(&self.line); // return our record return Some(rec); } /// Sets reader lazyness pub fn set_lazyness(&mut self, lazyness: ReaderLazyness) { self.lazyness = lazyness; } }
or reading let bufreader = match File::open(&rbf_file) { // if ok, create a new BufReader to read the file line by line Ok(f) => match layout.rec_length { 0 => BufReader::new(f), _ => BufReader::with_capacity(layout.rec_length+1, f), }, // The `description` method of `io::Error` returns a string that // describes the error Err(why) => panic!("couldn't open {}: {}", rbf_file, why.description()), }; // get file size let metadata = ::std::fs::metadata(&rbf_file).unwrap(); Reader { rbf_file: rbf_file.to_string(), layout: layout, mapper: mapper, bufreader: bufreader,
identifier_body
reader.rs
//! Represents a way to read a record-based file by mapping each read line to a record. The mapping between //! the data from the file and the record name is made by the `mapper` function. //! //! # Examples //! ```rust //! use rbf::record::{AsciiMode, UTF8Mode}; //! use rbf::layout::Layout; //! use rbf::reader::Reader; //! //! // load our layout //! let layout = Layout::<UTF8Mode>::new("./tests/test.xml"); //! //! // create reader //! let mapper = Box::new(|x: &str| x[0..2].to_string()); //! let mut reader = Reader::<UTF8Mode>::new("./tests/test_utf8.data", layout, mapper); //! //! // useful vars //! let letters = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"; //! let digits = "123456789"; //! let greek = "αβγδεζηθικλμνξοπρστυφχψω"; //! //! // read file and loop through records //! while let Some(rec) = reader.next() { //! match rec.name.as_ref() { //! "LL" => { //! assert_eq!(rec.get_value("ID"), "LL"); //! //! // test every field //! for (i, l) in letters.chars().enumerate() { //! let fname = format!("W{}", i+1); //! assert_eq!(rec.get_value(&fname), l.to_string().repeat(i+1)); //! } //! } //! "NB" => { //! assert_eq!(rec.get_value("ID"), "NB"); //! // test every field //! for (i, n) in digits.chars().enumerate() { //! let fname = format!("N{}", i+1); //! assert_eq!(rec.get_value(&fname), n.to_string().repeat(i+1)); //! } //! }, //! "GL" => { //! assert_eq!(rec.get_value("ID"), "GL");
//! for (i, l) in greek.chars().enumerate() { //! let fname = format!("G{}", i+1); //! assert_eq!(rec.get_value(&fname), l.to_string().repeat(i+1)); //! } //! }, //! "DP" => { //! assert_eq!(rec.get_value("ID"), "DP"); //! assert_eq!(rec.get("F5").unwrap()[0].value(), "AAAAA"); //! assert_eq!(rec.get("F5").unwrap()[1].value(), "BBBBB"); //! assert_eq!(rec.get("F5").unwrap()[2].value(), "CCCCC"); //! assert_eq!(rec.get("F5").unwrap()[3].value(), "DDDDD"); //! }, //! _ => panic!("record name <{}> not found in file <{}>", rec.name, "./tests/test_utf8.data") //! } //! } //! //! ``` use std::error::Error; use std::io::{BufReader,BufRead}; use std::fs::File; use record::{ReadMode, Record}; use layout::Layout; use mapper::RecordHasher; /// This enum defines whether we should stop reading when an unknown record ID is found #[derive(PartialEq)] pub enum ReaderLazyness { /// When set, this panics the reader Stringent, /// When set, ignore unknown reader Lazy, } // function type to get the record ID from the whole line read from the target file //pub type RecordMapper = fn(&str) -> &str; pub struct Reader<T> { /// record-based file to read pub rbf_file: String, /// layout struct describing the file to read pub layout: Layout<T>, /// function to map each line to a record name pub mapper: RecordHasher, /// buffer use when reading the file line by line bufreader: BufReader<File>, /// the line read from file pub line: String, /// lazyness when reading pub lazyness: ReaderLazyness, /// input file size pub file_size: u64, /// number of chars read when reading a line pub chars_read: usize, /// number of lines read so far pub nblines_read: u64, } impl<T> Reader<T> { /// Creates a new reader. /// /// # Arguments /// /// * `rbf_file` - name and path of the record-based file to read /// * `layout`: Layout struct previously created from the XML layout file describing the data file /// * `mapper` function to map each line to a record name /// /// # Panics /// If `rbf_file` could not be read pub fn new(rbf_file: &str, layout: Layout<T>, mapper: RecordHasher) -> Reader<T> { // open file for reading let bufreader = match File::open(&rbf_file) { // if ok, create a new BufReader to read the file line by line Ok(f) => match layout.rec_length { 0 => BufReader::new(f), _ => BufReader::with_capacity(layout.rec_length+1, f), }, // The `description` method of `io::Error` returns a string that // describes the error Err(why) => panic!("couldn't open {}: {}", rbf_file, why.description()), }; // get file size let metadata = ::std::fs::metadata(&rbf_file).unwrap(); Reader { rbf_file: rbf_file.to_string(), layout: layout, mapper: mapper, bufreader: bufreader, line: String::new(), lazyness: ReaderLazyness::Lazy, file_size: metadata.len(), chars_read: 0, nblines_read: 0, } } /// Returns a mutable reference on the record corresponding to the line read. **next()** returns **None** /// if EOF. /// It allows to read the whole file using the following idiom: /// /// ```rust,ignore /// // loop through records /// while let Some(rec) = reader.next() { /// // do something with rec /// } /// ``` /// # Panics /// If an error is met when reading the file. pub fn next(&mut self) -> Option<&mut Record<T>> where Record<T>: ReadMode { // record ID from line let mut rec_id: String; // try to get a record ID loop { // clear buffer, otherwise buffer is growing self.line.clear(); // read one line of text match self.bufreader.read_line(&mut self.line) { // No bytes read? 
This is EOF and we must end the iteration Ok(chars_read) => if chars_read == 0 { return None; } else { self.chars_read = chars_read; self.nblines_read += 1; }, // error reading bytes Err(why) => panic!("error {} when reading file {}", why.description(), self.rbf_file), }; // get the record ID using mapper rec_id = (self.mapper)(&self.line); // record ID could not exist match self.layout.contains_record(&rec_id) { true => break, false => if self.lazyness == ReaderLazyness::Stringent { panic!("couldn't find record ID {} in file {}", rec_id, self.rbf_file); } else { continue; } }; } // set value for this record let rec = self.layout.get_mut(&rec_id).unwrap(); // set all field values rec.set_value(&self.line); // return our record return Some(rec); } /// Sets reader lazyness pub fn set_lazyness(&mut self, lazyness: ReaderLazyness) { self.lazyness = lazyness; } }
random_line_split
reader.rs
//! Represents a way to read a record-based file by mapping each read line to a record. The mapping between //! the data from the file and the record name is made by the `mapper` function. //! //! # Examples //! ```rust //! use rbf::record::{AsciiMode, UTF8Mode}; //! use rbf::layout::Layout; //! use rbf::reader::Reader; //! //! // load our layout //! let layout = Layout::<UTF8Mode>::new("./tests/test.xml"); //! //! // create reader //! let mapper = Box::new(|x: &str| x[0..2].to_string()); //! let mut reader = Reader::<UTF8Mode>::new("./tests/test_utf8.data", layout, mapper); //! //! // useful vars //! let letters = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"; //! let digits = "123456789"; //! let greek = "αβγδεζηθικλμνξοπρστυφχψω"; //! //! // read file and loop through records //! while let Some(rec) = reader.next() { //! match rec.name.as_ref() { //! "LL" => { //! assert_eq!(rec.get_value("ID"), "LL"); //! //! // test every field //! for (i, l) in letters.chars().enumerate() { //! let fname = format!("W{}", i+1); //! assert_eq!(rec.get_value(&fname), l.to_string().repeat(i+1)); //! } //! } //! "NB" => { //! assert_eq!(rec.get_value("ID"), "NB"); //! // test every field //! for (i, n) in digits.chars().enumerate() { //! let fname = format!("N{}", i+1); //! assert_eq!(rec.get_value(&fname), n.to_string().repeat(i+1)); //! } //! }, //! "GL" => { //! assert_eq!(rec.get_value("ID"), "GL"); //! for (i, l) in greek.chars().enumerate() { //! let fname = format!("G{}", i+1); //! assert_eq!(rec.get_value(&fname), l.to_string().repeat(i+1)); //! } //! }, //! "DP" => { //! assert_eq!(rec.get_value("ID"), "DP"); //! assert_eq!(rec.get("F5").unwrap()[0].value(), "AAAAA"); //! assert_eq!(rec.get("F5").unwrap()[1].value(), "BBBBB"); //! assert_eq!(rec.get("F5").unwrap()[2].value(), "CCCCC"); //! assert_eq!(rec.get("F5").unwrap()[3].value(), "DDDDD"); //! }, //! _ => panic!("record name <{}> not found in file <{}>", rec.name, "./tests/test_utf8.data") //! } //! } //! //! ``` use std::error::Error; use std::io::{BufReader,BufRead}; use std::fs::File; use record::{ReadMode, Record}; use layout::Layout; use mapper::RecordHasher; /// This enum defines whether we should stop reading when an unknown record ID is found #[derive(PartialEq)] pub enum ReaderLazyness { /// When set, this panics the reader Stringent, /// When set, ignore unknown reader Lazy, } // function type to get the record ID from the whole line read from the target file //pub type RecordMapper = fn(&str) -> &str; pub struct Reader<T> { /// record-based file to read pub rbf_file: String, /// layout struct describing the file to read pub layout: Layout<T>, /// function to map each line to a record name pub mapper: RecordHasher, /// buffer use when reading the file line by line bufreader: BufReader<File>, /// the line read from file pub line: String, /// lazyness when reading pub lazyness: ReaderLazyness, /// input file size pub file_size: u64, /// number of chars read when reading a line pub chars_read: usize, /// number of lines read so far pub nblines_read: u64, } impl<T> Reader<T> { /// Creates a new reader. 
/// /// # Arguments /// /// * `rbf_file` - name and path of the record-based file to read /// * `layout`: Layout struct previously created from the XML layout file describing the data file /// * `mapper` function to map each line to a record name /// /// # Panics /// If `rbf_file` could not be read pub fn new(rbf_file: &str, layout: Layout<T>, mapper: RecordHasher) -> Reader<T> { // open file for reading let bufreader = match File::open(&rbf_file) { // if ok, create a new BufReader to read the file line by line Ok(f) => match layout.rec_length { 0 => BufReader::new(f), _ => BufReader::with_capacity(layout.rec_length+1, f), }, // The `description` method of `io::Error` returns a string that // describes the error Err(why) => panic!("couldn't open {}: {}", rbf_file, why.description()), }; // get file size let metadata = ::std::fs::metadata(&rbf_file).unwrap(); Reader { rbf_file: rbf_file.to_string(), layout: layout, mapper: mapper, bufreader: bufreader, line: String::new(), lazyness: ReaderLazyness::Lazy, file_size: metadata.len(), chars_read: 0, nblines_read: 0, } } /// Returns a mutable reference on the record corresponding to the line read. **next()** returns **None** /// if EOF. /// It allows to read the whole file using the following idiom: /// /// ```rust,ignore /// // loop through records /// while let Some(rec) = reader.next() { /// // do something with rec /// } /// ``` /// # Panics /// If an error is met when reading the file. pub fn next(&mut self) -> Option<&mut Record<T>> where Record<T>: ReadMode { // record ID from line let mut rec_id: String; // try to get a record ID loop { // clear buffer, otherwise buffer is growing self.line.clear(); // read one line of text match self.bufreader.read_line(&mut self.line) { // No bytes read? This is EOF and we must end the iteration Ok(chars_read) => if chars_read == 0 { return None; } else { self.chars_read = chars_read; self.nblines_read += 1; }, // error reading bytes Err(why) => panic!("error {} when reading file {}", why.description(), self.rbf_file), }; // get the record ID using mapper rec_id = (self.mapper)(&self.line); // record ID could not exist match self.layout.contains_record(&rec_id) { true => break, false => if self.lazyness == ReaderLazyness::Stringent { panic!("couldn't find record ID {} in file {}", rec_id, self.rbf_file); } else { continue; } }; } // set value for this record let rec = self.layout.get_mut(&rec_id).unwrap(); // set all field values rec.set_value(&self.line); // return our record return Some(rec); } /// Sets reader lazyness pub fn set_lazyness(&mut self,
aderLazyness) { self.lazyness = lazyness; } }
lazyness: Re
identifier_name
reader.rs
//! Represents a way to read a record-based file by mapping each read line to a record. The mapping between //! the data from the file and the record name is made by the `mapper` function. //! //! # Examples //! ```rust //! use rbf::record::{AsciiMode, UTF8Mode}; //! use rbf::layout::Layout; //! use rbf::reader::Reader; //! //! // load our layout //! let layout = Layout::<UTF8Mode>::new("./tests/test.xml"); //! //! // create reader //! let mapper = Box::new(|x: &str| x[0..2].to_string()); //! let mut reader = Reader::<UTF8Mode>::new("./tests/test_utf8.data", layout, mapper); //! //! // useful vars //! let letters = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"; //! let digits = "123456789"; //! let greek = "αβγδεζηθικλμνξοπρστυφχψω"; //! //! // read file and loop through records //! while let Some(rec) = reader.next() { //! match rec.name.as_ref() { //! "LL" => { //! assert_eq!(rec.get_value("ID"), "LL"); //! //! // test every field //! for (i, l) in letters.chars().enumerate() { //! let fname = format!("W{}", i+1); //! assert_eq!(rec.get_value(&fname), l.to_string().repeat(i+1)); //! } //! } //! "NB" => { //! assert_eq!(rec.get_value("ID"), "NB"); //! // test every field //! for (i, n) in digits.chars().enumerate() { //! let fname = format!("N{}", i+1); //! assert_eq!(rec.get_value(&fname), n.to_string().repeat(i+1)); //! } //! }, //! "GL" => { //! assert_eq!(rec.get_value("ID"), "GL"); //! for (i, l) in greek.chars().enumerate() { //! let fname = format!("G{}", i+1); //! assert_eq!(rec.get_value(&fname), l.to_string().repeat(i+1)); //! } //! }, //! "DP" => { //! assert_eq!(rec.get_value("ID"), "DP"); //! assert_eq!(rec.get("F5").unwrap()[0].value(), "AAAAA"); //! assert_eq!(rec.get("F5").unwrap()[1].value(), "BBBBB"); //! assert_eq!(rec.get("F5").unwrap()[2].value(), "CCCCC"); //! assert_eq!(rec.get("F5").unwrap()[3].value(), "DDDDD"); //! }, //! _ => panic!("record name <{}> not found in file <{}>", rec.name, "./tests/test_utf8.data") //! } //! } //! //! ``` use std::error::Error; use std::io::{BufReader,BufRead}; use std::fs::File; use record::{ReadMode, Record}; use layout::Layout; use mapper::RecordHasher; /// This enum defines whether we should stop reading when an unknown record ID is found #[derive(PartialEq)] pub enum ReaderLazyness { /// When set, this panics the reader Stringent, /// When set, ignore unknown reader Lazy, } // function type to get the record ID from the whole line read from the target file //pub type RecordMapper = fn(&str) -> &str; pub struct Reader<T> { /// record-based file to read pub rbf_file: String, /// layout struct describing the file to read pub layout: Layout<T>, /// function to map each line to a record name pub mapper: RecordHasher, /// buffer use when reading the file line by line bufreader: BufReader<File>, /// the line read from file pub line: String, /// lazyness when reading pub lazyness: ReaderLazyness, /// input file size pub file_size: u64, /// number of chars read when reading a line pub chars_read: usize, /// number of lines read so far pub nblines_read: u64, } impl<T> Reader<T> { /// Creates a new reader. 
/// /// # Arguments /// /// * `rbf_file` - name and path of the record-based file to read /// * `layout`: Layout struct previously created from the XML layout file describing the data file /// * `mapper` function to map each line to a record name /// /// # Panics /// If `rbf_file` could not be read pub fn new(rbf_file: &str, layout: Layout<T>, mapper: RecordHasher) -> Reader<T> { // open file for reading let bufreader = match File::open(&rbf_file) { // if ok, create a new BufReader to read the file line by line Ok(f) => match layout.rec_length { 0 => BufReader::new(f), _ => BufReader::with_capacity(layout.rec_length+1, f), }, // The `description` method of `io::Error` returns a string that // describes the error Err(why) => panic!("couldn't open {}: {}", rbf_file, why.description()), }; // get file size let metadata = ::std::fs::metadata(&rbf_file).unwrap(); Reader { rbf_file: rbf_file.to_string(), layout: layout, mapper: mapper, bufreader: bufreader, line: String::new(), lazyness: ReaderLazyness::Lazy, file_size: metadata.len(), chars_read: 0, nblines_read: 0, } } /// Returns a mutable reference on the record corresponding to the line read. **next()** returns **None** /// if EOF. /// It allows to read the whole file using the following idiom: /// /// ```rust,ignore /// // loop through records /// while let Some(rec) = reader.next() { /// // do something with rec /// } /// ``` /// # Panics /// If an error is met when reading the file. pub fn next(&mut self) -> Option<&mut Record<T>> where Record<T>: ReadMode { // record ID from line let mut rec_id: String; // try to get a record ID loop { // clear buffer, otherwise buffer is growing self.line.clear(); // read one line of text match self.bufreader.read_line(&mut self.line) { // No bytes read? This is EOF and we must end the iteration Ok(chars_read) => if chars_read == 0 {
self.chars_read = chars_read; self.nblines_read += 1; }, // error reading bytes Err(why) => panic!("error {} when reading file {}", why.description(), self.rbf_file), }; // get the record ID using mapper rec_id = (self.mapper)(&self.line); // record ID could not exist match self.layout.contains_record(&rec_id) { true => break, false => if self.lazyness == ReaderLazyness::Stringent { panic!("couldn't find record ID {} in file {}", rec_id, self.rbf_file); } else { continue; } }; } // set value for this record let rec = self.layout.get_mut(&rec_id).unwrap(); // set all field values rec.set_value(&self.line); // return our record return Some(rec); } /// Sets reader lazyness pub fn set_lazyness(&mut self, lazyness: ReaderLazyness) { self.lazyness = lazyness; } }
return None; } else {
conditional_block
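The four `reader.rs` rows are all cut from one file whose doc comment shows the intended usage: read a record-based file line by line and derive each record's name with a small mapper closure (the doc example takes the first two characters). The loop below is a self-contained sketch of that read-and-dispatch pattern using only the standard library; the two-character ID convention comes from the doc example above, while the in-memory input, the "known records" list, and the counting are assumptions added for the demo.

```rust
use std::collections::HashMap;
use std::io::{BufRead, BufReader, Cursor};

fn main() {
    // In-memory stand-in for a record-based data file.
    let data = "LLAAABBB\nNB123456\nLLCCCDDD\nZZignored\n";
    let reader = BufReader::new(Cursor::new(data));

    // Mapper: derive the record ID from the first two characters of a line.
    let mapper = |line: &str| line[0..2].to_string();

    // Records we know about; anything else is skipped, like the "lazy" mode above.
    let known = ["LL", "NB"];
    let mut counts: HashMap<String, u32> = HashMap::new();

    for line in reader.lines() {
        let line = line.expect("read error");
        if line.len() < 2 {
            continue;
        }
        let rec_id = mapper(&line);
        if known.contains(&rec_id.as_str()) {
            *counts.entry(rec_id).or_insert(0) += 1;
        }
    }

    assert_eq!(counts["LL"], 2);
    assert_eq!(counts["NB"], 1);
}
```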
factor.rs
#![crate_name = "uu_factor"] /* * This file is part of the uutils coreutils package. * * (c) T. Jameson Little <[email protected]> * (c) Wiktor Kuropatwa <[email protected]> * 20150223 added Pollard rho method implementation * (c) kwantam <[email protected]> * 20150429 sped up trial division by adding table of prime inverses * * For the full copyright and license information, please view the LICENSE file * that was distributed with this source code. */ extern crate rand; #[macro_use] extern crate uucore; use numeric::*; use prime_table::P_INVS_U64; use rand::distributions::{Range, IndependentSample}; use std::cmp::{max, min}; use std::io::{stdin, BufRead, BufReader, Write}; use std::num::Wrapping; use std::mem::swap; mod numeric; mod prime_table; static SYNTAX: &'static str = "[OPTION] [NUMBER]..."; static SUMMARY: &'static str = "Print the prime factors of the given number(s). If none are specified, read from standard input."; static LONG_HELP: &'static str = ""; fn rho_pollard_pseudorandom_function(x: u64, a: u64, b: u64, num: u64) -> u64 { if num < 1 << 63 { (sm_mul(a, sm_mul(x, x, num), num) + b) % num } else { big_add(big_mul(a, big_mul(x, x, num), num), b, num) } } fn gcd(mut a: u64, mut b: u64) -> u64 { while b > 0 { a %= b; swap(&mut a, &mut b); } a } fn rho_pollard_find_divisor(num: u64) -> u64 { let range = Range::new(1, num); let mut rng = rand::weak_rng(); let mut x = range.ind_sample(&mut rng); let mut y = x; let mut a = range.ind_sample(&mut rng); let mut b = range.ind_sample(&mut rng); loop { x = rho_pollard_pseudorandom_function(x, a, b, num); y = rho_pollard_pseudorandom_function(y, a, b, num); y = rho_pollard_pseudorandom_function(y, a, b, num); let d = gcd(num, max(x, y) - min(x, y)); if d == num { // Failure, retry with diffrent function x = range.ind_sample(&mut rng); y = x; a = range.ind_sample(&mut rng); b = range.ind_sample(&mut rng); } else if d > 1
} } fn rho_pollard_factor(num: u64, factors: &mut Vec<u64>) { if is_prime(num) { factors.push(num); return; } let divisor = rho_pollard_find_divisor(num); rho_pollard_factor(divisor, factors); rho_pollard_factor(num / divisor, factors); } fn table_division(mut num: u64, factors: &mut Vec<u64>) { if num < 2 { return; } while num % 2 == 0 { num /= 2; factors.push(2); } if num == 1 { return; } if is_prime(num) { factors.push(num); return; } for &(prime, inv, ceil) in P_INVS_U64 { if num == 1 { break; } // inv = prime^-1 mod 2^64 // ceil = floor((2^64-1) / prime) // if (num * inv) mod 2^64 <= ceil, then prime divides num // See http://math.stackexchange.com/questions/1251327/ // for a nice explanation. loop { let Wrapping(x) = Wrapping(num) * Wrapping(inv); // x = num * inv mod 2^64 if x <= ceil { num = x; factors.push(prime); if is_prime(num) { factors.push(num); return; } } else { break; } } } // do we still have more factoring to do? // Decide whether to use Pollard Rho or slow divisibility based on // number's size: //if num >= 1 << 63 { // number is too big to use rho pollard without overflowing //trial_division_slow(num, factors); //} else if num > 1 { // number is still greater than 1, but not so big that we have to worry rho_pollard_factor(num, factors); //} } fn print_factors(num: u64) { print!("{}:", num); let mut factors = Vec::new(); // we always start with table division, and go from there table_division(num, &mut factors); factors.sort(); for fac in &factors { print!(" {}", fac); } println!(""); } fn print_factors_str(num_str: &str) { if let Err(e) = num_str.parse::<u64>().and_then(|x| Ok(print_factors(x))) { show_warning!("{}: {}", num_str, e); } } pub fn uumain(args: Vec<String>) -> i32 { let matches = new_coreopts!(SYNTAX, SUMMARY, LONG_HELP) .parse(args); if matches.free.is_empty() { for line in BufReader::new(stdin()).lines() { for number in line.unwrap().split_whitespace() { print_factors_str(number); } } } else { for num_str in &matches.free { print_factors_str(num_str); } } 0 }
{ return d; }
conditional_block
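The `factor.rs` rows combine table-driven trial division with Pollard's rho, and the row above centres on the rho step: a pseudorandom walk modulo the number, a gcd against the difference of two walkers, and a retry with fresh parameters when the walk collapses. The version below is a deliberately compact illustration of that idea with fixed starting values and `u128` intermediates; it is not the dataset's exact implementation, and it assumes its input is composite.

```rust
fn gcd(mut a: u64, mut b: u64) -> u64 {
    while b != 0 {
        let t = a % b;
        a = b;
        b = t;
    }
    a
}

/// Find one non-trivial factor of a composite `n` with Pollard's rho
/// (Floyd cycle detection). Not suitable for prime `n`.
fn pollard_rho(n: u64) -> u64 {
    let step = |x: u64, c: u64| (((x as u128 * x as u128 + c as u128) % n as u128) as u64);
    let mut c = 1u64;
    loop {
        let (mut x, mut y, mut d) = (2u64, 2u64, 1u64);
        while d == 1 {
            x = step(x, c);          // tortoise: one step
            y = step(step(y, c), c); // hare: two steps
            d = gcd(if x > y { x - y } else { y - x }, n);
        }
        if d != n {
            return d; // success: a proper divisor of n
        }
        c += 1; // the walk collapsed onto itself; retry with a different constant
    }
}

fn main() {
    let n = 8051u64; // 83 * 97
    let d = pollard_rho(n);
    assert!(d > 1 && d < n && n % d == 0);
    println!("{} = {} * {}", n, d, n / d);
}
```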
factor.rs
#![crate_name = "uu_factor"] /* * This file is part of the uutils coreutils package. * * (c) T. Jameson Little <[email protected]> * (c) Wiktor Kuropatwa <[email protected]> * 20150223 added Pollard rho method implementation * (c) kwantam <[email protected]> * 20150429 sped up trial division by adding table of prime inverses * * For the full copyright and license information, please view the LICENSE file * that was distributed with this source code. */ extern crate rand; #[macro_use] extern crate uucore; use numeric::*; use prime_table::P_INVS_U64; use rand::distributions::{Range, IndependentSample}; use std::cmp::{max, min}; use std::io::{stdin, BufRead, BufReader, Write}; use std::num::Wrapping; use std::mem::swap; mod numeric; mod prime_table; static SYNTAX: &'static str = "[OPTION] [NUMBER]..."; static SUMMARY: &'static str = "Print the prime factors of the given number(s). If none are specified, read from standard input."; static LONG_HELP: &'static str = ""; fn rho_pollard_pseudorandom_function(x: u64, a: u64, b: u64, num: u64) -> u64 { if num < 1 << 63 { (sm_mul(a, sm_mul(x, x, num), num) + b) % num } else { big_add(big_mul(a, big_mul(x, x, num), num), b, num) } } fn gcd(mut a: u64, mut b: u64) -> u64 { while b > 0 { a %= b; swap(&mut a, &mut b); } a } fn rho_pollard_find_divisor(num: u64) -> u64 { let range = Range::new(1, num); let mut rng = rand::weak_rng(); let mut x = range.ind_sample(&mut rng); let mut y = x; let mut a = range.ind_sample(&mut rng); let mut b = range.ind_sample(&mut rng); loop { x = rho_pollard_pseudorandom_function(x, a, b, num); y = rho_pollard_pseudorandom_function(y, a, b, num); y = rho_pollard_pseudorandom_function(y, a, b, num); let d = gcd(num, max(x, y) - min(x, y)); if d == num { // Failure, retry with diffrent function x = range.ind_sample(&mut rng); y = x; a = range.ind_sample(&mut rng); b = range.ind_sample(&mut rng); } else if d > 1 { return d; } } } fn rho_pollard_factor(num: u64, factors: &mut Vec<u64>) { if is_prime(num) { factors.push(num); return; } let divisor = rho_pollard_find_divisor(num); rho_pollard_factor(divisor, factors); rho_pollard_factor(num / divisor, factors); } fn table_division(mut num: u64, factors: &mut Vec<u64>) { if num < 2 { return; } while num % 2 == 0 { num /= 2; factors.push(2); } if num == 1 { return; } if is_prime(num) { factors.push(num); return; } for &(prime, inv, ceil) in P_INVS_U64 { if num == 1 { break; } // inv = prime^-1 mod 2^64 // ceil = floor((2^64-1) / prime) // if (num * inv) mod 2^64 <= ceil, then prime divides num // See http://math.stackexchange.com/questions/1251327/ // for a nice explanation. loop { let Wrapping(x) = Wrapping(num) * Wrapping(inv); // x = num * inv mod 2^64 if x <= ceil { num = x; factors.push(prime); if is_prime(num) { factors.push(num); return; } } else { break; } } } // do we still have more factoring to do? 
// Decide whether to use Pollard Rho or slow divisibility based on // number's size: //if num >= 1 << 63 { // number is too big to use rho pollard without overflowing //trial_division_slow(num, factors); //} else if num > 1 { // number is still greater than 1, but not so big that we have to worry rho_pollard_factor(num, factors); //} } fn print_factors(num: u64) { print!("{}:", num); let mut factors = Vec::new(); // we always start with table division, and go from there table_division(num, &mut factors); factors.sort(); for fac in &factors { print!(" {}", fac); } println!(""); } fn print_factors_str(num_str: &str) { if let Err(e) = num_str.parse::<u64>().and_then(|x| Ok(print_factors(x))) { show_warning!("{}: {}", num_str, e); } }
let matches = new_coreopts!(SYNTAX, SUMMARY, LONG_HELP) .parse(args); if matches.free.is_empty() { for line in BufReader::new(stdin()).lines() { for number in line.unwrap().split_whitespace() { print_factors_str(number); } } } else { for num_str in &matches.free { print_factors_str(num_str); } } 0 }
pub fn uumain(args: Vec<String>) -> i32 {
random_line_split
factor.rs
#![crate_name = "uu_factor"] /* * This file is part of the uutils coreutils package. * * (c) T. Jameson Little <[email protected]> * (c) Wiktor Kuropatwa <[email protected]> * 20150223 added Pollard rho method implementation * (c) kwantam <[email protected]> * 20150429 sped up trial division by adding table of prime inverses * * For the full copyright and license information, please view the LICENSE file * that was distributed with this source code. */ extern crate rand; #[macro_use] extern crate uucore; use numeric::*; use prime_table::P_INVS_U64; use rand::distributions::{Range, IndependentSample}; use std::cmp::{max, min}; use std::io::{stdin, BufRead, BufReader, Write}; use std::num::Wrapping; use std::mem::swap; mod numeric; mod prime_table; static SYNTAX: &'static str = "[OPTION] [NUMBER]..."; static SUMMARY: &'static str = "Print the prime factors of the given number(s). If none are specified, read from standard input."; static LONG_HELP: &'static str = ""; fn rho_pollard_pseudorandom_function(x: u64, a: u64, b: u64, num: u64) -> u64 { if num < 1 << 63 { (sm_mul(a, sm_mul(x, x, num), num) + b) % num } else { big_add(big_mul(a, big_mul(x, x, num), num), b, num) } } fn
(mut a: u64, mut b: u64) -> u64 { while b > 0 { a %= b; swap(&mut a, &mut b); } a } fn rho_pollard_find_divisor(num: u64) -> u64 { let range = Range::new(1, num); let mut rng = rand::weak_rng(); let mut x = range.ind_sample(&mut rng); let mut y = x; let mut a = range.ind_sample(&mut rng); let mut b = range.ind_sample(&mut rng); loop { x = rho_pollard_pseudorandom_function(x, a, b, num); y = rho_pollard_pseudorandom_function(y, a, b, num); y = rho_pollard_pseudorandom_function(y, a, b, num); let d = gcd(num, max(x, y) - min(x, y)); if d == num { // Failure, retry with diffrent function x = range.ind_sample(&mut rng); y = x; a = range.ind_sample(&mut rng); b = range.ind_sample(&mut rng); } else if d > 1 { return d; } } } fn rho_pollard_factor(num: u64, factors: &mut Vec<u64>) { if is_prime(num) { factors.push(num); return; } let divisor = rho_pollard_find_divisor(num); rho_pollard_factor(divisor, factors); rho_pollard_factor(num / divisor, factors); } fn table_division(mut num: u64, factors: &mut Vec<u64>) { if num < 2 { return; } while num % 2 == 0 { num /= 2; factors.push(2); } if num == 1 { return; } if is_prime(num) { factors.push(num); return; } for &(prime, inv, ceil) in P_INVS_U64 { if num == 1 { break; } // inv = prime^-1 mod 2^64 // ceil = floor((2^64-1) / prime) // if (num * inv) mod 2^64 <= ceil, then prime divides num // See http://math.stackexchange.com/questions/1251327/ // for a nice explanation. loop { let Wrapping(x) = Wrapping(num) * Wrapping(inv); // x = num * inv mod 2^64 if x <= ceil { num = x; factors.push(prime); if is_prime(num) { factors.push(num); return; } } else { break; } } } // do we still have more factoring to do? // Decide whether to use Pollard Rho or slow divisibility based on // number's size: //if num >= 1 << 63 { // number is too big to use rho pollard without overflowing //trial_division_slow(num, factors); //} else if num > 1 { // number is still greater than 1, but not so big that we have to worry rho_pollard_factor(num, factors); //} } fn print_factors(num: u64) { print!("{}:", num); let mut factors = Vec::new(); // we always start with table division, and go from there table_division(num, &mut factors); factors.sort(); for fac in &factors { print!(" {}", fac); } println!(""); } fn print_factors_str(num_str: &str) { if let Err(e) = num_str.parse::<u64>().and_then(|x| Ok(print_factors(x))) { show_warning!("{}: {}", num_str, e); } } pub fn uumain(args: Vec<String>) -> i32 { let matches = new_coreopts!(SYNTAX, SUMMARY, LONG_HELP) .parse(args); if matches.free.is_empty() { for line in BufReader::new(stdin()).lines() { for number in line.unwrap().split_whitespace() { print_factors_str(number); } } } else { for num_str in &matches.free { print_factors_str(num_str); } } 0 }
gcd
identifier_name
factor.rs
#![crate_name = "uu_factor"] /* * This file is part of the uutils coreutils package. * * (c) T. Jameson Little <[email protected]> * (c) Wiktor Kuropatwa <[email protected]> * 20150223 added Pollard rho method implementation * (c) kwantam <[email protected]> * 20150429 sped up trial division by adding table of prime inverses * * For the full copyright and license information, please view the LICENSE file * that was distributed with this source code. */ extern crate rand; #[macro_use] extern crate uucore; use numeric::*; use prime_table::P_INVS_U64; use rand::distributions::{Range, IndependentSample}; use std::cmp::{max, min}; use std::io::{stdin, BufRead, BufReader, Write}; use std::num::Wrapping; use std::mem::swap; mod numeric; mod prime_table; static SYNTAX: &'static str = "[OPTION] [NUMBER]..."; static SUMMARY: &'static str = "Print the prime factors of the given number(s). If none are specified, read from standard input."; static LONG_HELP: &'static str = ""; fn rho_pollard_pseudorandom_function(x: u64, a: u64, b: u64, num: u64) -> u64 { if num < 1 << 63 { (sm_mul(a, sm_mul(x, x, num), num) + b) % num } else { big_add(big_mul(a, big_mul(x, x, num), num), b, num) } } fn gcd(mut a: u64, mut b: u64) -> u64 { while b > 0 { a %= b; swap(&mut a, &mut b); } a } fn rho_pollard_find_divisor(num: u64) -> u64 { let range = Range::new(1, num); let mut rng = rand::weak_rng(); let mut x = range.ind_sample(&mut rng); let mut y = x; let mut a = range.ind_sample(&mut rng); let mut b = range.ind_sample(&mut rng); loop { x = rho_pollard_pseudorandom_function(x, a, b, num); y = rho_pollard_pseudorandom_function(y, a, b, num); y = rho_pollard_pseudorandom_function(y, a, b, num); let d = gcd(num, max(x, y) - min(x, y)); if d == num { // Failure, retry with diffrent function x = range.ind_sample(&mut rng); y = x; a = range.ind_sample(&mut rng); b = range.ind_sample(&mut rng); } else if d > 1 { return d; } } } fn rho_pollard_factor(num: u64, factors: &mut Vec<u64>) { if is_prime(num) { factors.push(num); return; } let divisor = rho_pollard_find_divisor(num); rho_pollard_factor(divisor, factors); rho_pollard_factor(num / divisor, factors); } fn table_division(mut num: u64, factors: &mut Vec<u64>) { if num < 2 { return; } while num % 2 == 0 { num /= 2; factors.push(2); } if num == 1 { return; } if is_prime(num) { factors.push(num); return; } for &(prime, inv, ceil) in P_INVS_U64 { if num == 1 { break; } // inv = prime^-1 mod 2^64 // ceil = floor((2^64-1) / prime) // if (num * inv) mod 2^64 <= ceil, then prime divides num // See http://math.stackexchange.com/questions/1251327/ // for a nice explanation. loop { let Wrapping(x) = Wrapping(num) * Wrapping(inv); // x = num * inv mod 2^64 if x <= ceil { num = x; factors.push(prime); if is_prime(num) { factors.push(num); return; } } else { break; } } } // do we still have more factoring to do? // Decide whether to use Pollard Rho or slow divisibility based on // number's size: //if num >= 1 << 63 { // number is too big to use rho pollard without overflowing //trial_division_slow(num, factors); //} else if num > 1 { // number is still greater than 1, but not so big that we have to worry rho_pollard_factor(num, factors); //} } fn print_factors(num: u64) { print!("{}:", num); let mut factors = Vec::new(); // we always start with table division, and go from there table_division(num, &mut factors); factors.sort(); for fac in &factors { print!(" {}", fac); } println!(""); } fn print_factors_str(num_str: &str)
pub fn uumain(args: Vec<String>) -> i32 { let matches = new_coreopts!(SYNTAX, SUMMARY, LONG_HELP) .parse(args); if matches.free.is_empty() { for line in BufReader::new(stdin()).lines() { for number in line.unwrap().split_whitespace() { print_factors_str(number); } } } else { for num_str in &matches.free { print_factors_str(num_str); } } 0 }
{ if let Err(e) = num_str.parse::<u64>().and_then(|x| Ok(print_factors(x))) { show_warning!("{}: {}", num_str, e); } }
identifier_body
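A comment inside the `factor.rs` rows explains the table-driven divisibility test: for an odd prime p with inv = p^-1 mod 2^64 and ceil = floor((2^64 - 1) / p), p divides n exactly when n * inv (mod 2^64) <= ceil, because the wrapped product is then the exact quotient n / p. The snippet below demonstrates that property on small numbers; the Newton–Hensel inverse routine is an assumed helper added for the demo, since the dataset's code ships a precomputed table of inverses instead.

```rust
/// Inverse of an odd `a` modulo 2^64 via Newton–Hensel iteration.
/// Starting from x = a (already correct mod 8), each step doubles the number
/// of correct low bits, so five steps are enough for 64 bits.
fn inv_mod_2_64(a: u64) -> u64 {
    assert!(a % 2 == 1, "only odd numbers are invertible mod 2^64");
    let mut x = a;
    for _ in 0..5 {
        x = x.wrapping_mul(2u64.wrapping_sub(a.wrapping_mul(x)));
    }
    x
}

/// Does the odd `p` divide `n`? One wrapping multiply and one compare.
fn divisible_by(n: u64, p: u64) -> bool {
    let inv = inv_mod_2_64(p);
    let ceil = u64::MAX / p; // floor((2^64 - 1) / p)
    n.wrapping_mul(inv) <= ceil
}

fn main() {
    assert!(divisible_by(21, 3));  // 21 * inv(3) wraps to 7, the exact quotient
    assert!(!divisible_by(22, 3)); // 22 * inv(3) lands far above the threshold
    assert!(divisible_by(1_000_003 * 7, 1_000_003));
    println!("inverse-multiply divisibility check holds");
}
```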
intrinsicck.rs
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use hir::def::Def; use hir::def_id::DefId; use ty::{self, Ty, TyCtxt}; use ty::layout::{LayoutError, Pointer, SizeSkeleton, VariantIdx}; use rustc_target::spec::abi::Abi::RustIntrinsic; use rustc_data_structures::indexed_vec::Idx; use syntax_pos::Span; use hir::intravisit::{self, Visitor, NestedVisitorMap}; use hir; pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { let mut visitor = ItemVisitor { tcx, }; tcx.hir().krate().visit_all_item_likes(&mut visitor.as_deep_visitor()); } struct ItemVisitor<'a, 'tcx: 'a> { tcx: TyCtxt<'a, 'tcx, 'tcx> } struct ExprVisitor<'a, 'tcx: 'a> { tcx: TyCtxt<'a, 'tcx, 'tcx>, tables: &'tcx ty::TypeckTables<'tcx>, param_env: ty::ParamEnv<'tcx>, } /// If the type is `Option<T>`, it will return `T`, otherwise /// the type itself. Works on most `Option`-like types. fn unpack_option_like<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>) -> Ty<'tcx>
if def.variants[data_idx].fields.len() == 1 { return def.variants[data_idx].fields[0].ty(tcx, substs); } } ty } impl<'a, 'tcx> ExprVisitor<'a, 'tcx> { fn def_id_is_transmute(&self, def_id: DefId) -> bool { self.tcx.fn_sig(def_id).abi() == RustIntrinsic && self.tcx.item_name(def_id) == "transmute" } fn check_transmute(&self, span: Span, from: Ty<'tcx>, to: Ty<'tcx>) { let sk_from = SizeSkeleton::compute(from, self.tcx, self.param_env); let sk_to = SizeSkeleton::compute(to, self.tcx, self.param_env); // Check for same size using the skeletons. if let (Ok(sk_from), Ok(sk_to)) = (sk_from, sk_to) { if sk_from.same_size(sk_to) { return; } // Special-case transmutting from `typeof(function)` and // `Option<typeof(function)>` to present a clearer error. let from = unpack_option_like(self.tcx.global_tcx(), from); if let (&ty::FnDef(..), SizeSkeleton::Known(size_to)) = (&from.sty, sk_to) { if size_to == Pointer.size(&self.tcx) { struct_span_err!(self.tcx.sess, span, E0591, "can't transmute zero-sized type") .note(&format!("source type: {}", from)) .note(&format!("target type: {}", to)) .help("cast with `as` to a pointer instead") .emit(); return; } } } // Try to display a sensible error with as much information as possible. let skeleton_string = |ty: Ty<'tcx>, sk| { match sk { Ok(SizeSkeleton::Known(size)) => { format!("{} bits", size.bits()) } Ok(SizeSkeleton::Pointer { tail,.. }) => { format!("pointer to {}", tail) } Err(LayoutError::Unknown(bad)) => { if bad == ty { "this type's size can vary".to_owned() } else { format!("size can vary because of {}", bad) } } Err(err) => err.to_string() } }; struct_span_err!(self.tcx.sess, span, E0512, "transmute called with types of different sizes") .note(&format!("source type: {} ({})", from, skeleton_string(from, sk_from))) .note(&format!("target type: {} ({})", to, skeleton_string(to, sk_to))) .emit(); } } impl<'a, 'tcx> Visitor<'tcx> for ItemVisitor<'a, 'tcx> { fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> { NestedVisitorMap::None } fn visit_nested_body(&mut self, body_id: hir::BodyId) { let owner_def_id = self.tcx.hir().body_owner_def_id(body_id); let body = self.tcx.hir().body(body_id); let param_env = self.tcx.param_env(owner_def_id); let tables = self.tcx.typeck_tables_of(owner_def_id); ExprVisitor { tcx: self.tcx, param_env, tables }.visit_body(body); self.visit_body(body); } } impl<'a, 'tcx> Visitor<'tcx> for ExprVisitor<'a, 'tcx> { fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> { NestedVisitorMap::None } fn visit_expr(&mut self, expr: &'tcx hir::Expr) { let def = if let hir::ExprKind::Path(ref qpath) = expr.node { self.tables.qpath_def(qpath, expr.hir_id) } else { Def::Err }; if let Def::Fn(did) = def { if self.def_id_is_transmute(did) { let typ = self.tables.node_id_to_type(expr.hir_id); let sig = typ.fn_sig(self.tcx); let from = sig.inputs().skip_binder()[0]; let to = *sig.output().skip_binder(); self.check_transmute(expr.span, from, to); } } intravisit::walk_expr(self, expr); } }
{ let (def, substs) = match ty.sty { ty::Adt(def, substs) => (def, substs), _ => return ty }; if def.variants.len() == 2 && !def.repr.c() && def.repr.int.is_none() { let data_idx; let one = VariantIdx::new(1); let zero = VariantIdx::new(0); if def.variants[zero].fields.is_empty() { data_idx = one; } else if def.variants[one].fields.is_empty() { data_idx = zero; } else { return ty; }
identifier_body
intrinsicck.rs
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use hir::def::Def; use hir::def_id::DefId; use ty::{self, Ty, TyCtxt}; use ty::layout::{LayoutError, Pointer, SizeSkeleton, VariantIdx}; use rustc_target::spec::abi::Abi::RustIntrinsic; use rustc_data_structures::indexed_vec::Idx; use syntax_pos::Span; use hir::intravisit::{self, Visitor, NestedVisitorMap}; use hir; pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { let mut visitor = ItemVisitor { tcx, }; tcx.hir().krate().visit_all_item_likes(&mut visitor.as_deep_visitor()); } struct
<'a, 'tcx: 'a> { tcx: TyCtxt<'a, 'tcx, 'tcx> } struct ExprVisitor<'a, 'tcx: 'a> { tcx: TyCtxt<'a, 'tcx, 'tcx>, tables: &'tcx ty::TypeckTables<'tcx>, param_env: ty::ParamEnv<'tcx>, } /// If the type is `Option<T>`, it will return `T`, otherwise /// the type itself. Works on most `Option`-like types. fn unpack_option_like<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> { let (def, substs) = match ty.sty { ty::Adt(def, substs) => (def, substs), _ => return ty }; if def.variants.len() == 2 &&!def.repr.c() && def.repr.int.is_none() { let data_idx; let one = VariantIdx::new(1); let zero = VariantIdx::new(0); if def.variants[zero].fields.is_empty() { data_idx = one; } else if def.variants[one].fields.is_empty() { data_idx = zero; } else { return ty; } if def.variants[data_idx].fields.len() == 1 { return def.variants[data_idx].fields[0].ty(tcx, substs); } } ty } impl<'a, 'tcx> ExprVisitor<'a, 'tcx> { fn def_id_is_transmute(&self, def_id: DefId) -> bool { self.tcx.fn_sig(def_id).abi() == RustIntrinsic && self.tcx.item_name(def_id) == "transmute" } fn check_transmute(&self, span: Span, from: Ty<'tcx>, to: Ty<'tcx>) { let sk_from = SizeSkeleton::compute(from, self.tcx, self.param_env); let sk_to = SizeSkeleton::compute(to, self.tcx, self.param_env); // Check for same size using the skeletons. if let (Ok(sk_from), Ok(sk_to)) = (sk_from, sk_to) { if sk_from.same_size(sk_to) { return; } // Special-case transmutting from `typeof(function)` and // `Option<typeof(function)>` to present a clearer error. let from = unpack_option_like(self.tcx.global_tcx(), from); if let (&ty::FnDef(..), SizeSkeleton::Known(size_to)) = (&from.sty, sk_to) { if size_to == Pointer.size(&self.tcx) { struct_span_err!(self.tcx.sess, span, E0591, "can't transmute zero-sized type") .note(&format!("source type: {}", from)) .note(&format!("target type: {}", to)) .help("cast with `as` to a pointer instead") .emit(); return; } } } // Try to display a sensible error with as much information as possible. let skeleton_string = |ty: Ty<'tcx>, sk| { match sk { Ok(SizeSkeleton::Known(size)) => { format!("{} bits", size.bits()) } Ok(SizeSkeleton::Pointer { tail,.. 
}) => { format!("pointer to {}", tail) } Err(LayoutError::Unknown(bad)) => { if bad == ty { "this type's size can vary".to_owned() } else { format!("size can vary because of {}", bad) } } Err(err) => err.to_string() } }; struct_span_err!(self.tcx.sess, span, E0512, "transmute called with types of different sizes") .note(&format!("source type: {} ({})", from, skeleton_string(from, sk_from))) .note(&format!("target type: {} ({})", to, skeleton_string(to, sk_to))) .emit(); } } impl<'a, 'tcx> Visitor<'tcx> for ItemVisitor<'a, 'tcx> { fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> { NestedVisitorMap::None } fn visit_nested_body(&mut self, body_id: hir::BodyId) { let owner_def_id = self.tcx.hir().body_owner_def_id(body_id); let body = self.tcx.hir().body(body_id); let param_env = self.tcx.param_env(owner_def_id); let tables = self.tcx.typeck_tables_of(owner_def_id); ExprVisitor { tcx: self.tcx, param_env, tables }.visit_body(body); self.visit_body(body); } } impl<'a, 'tcx> Visitor<'tcx> for ExprVisitor<'a, 'tcx> { fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> { NestedVisitorMap::None } fn visit_expr(&mut self, expr: &'tcx hir::Expr) { let def = if let hir::ExprKind::Path(ref qpath) = expr.node { self.tables.qpath_def(qpath, expr.hir_id) } else { Def::Err }; if let Def::Fn(did) = def { if self.def_id_is_transmute(did) { let typ = self.tables.node_id_to_type(expr.hir_id); let sig = typ.fn_sig(self.tcx); let from = sig.inputs().skip_binder()[0]; let to = *sig.output().skip_binder(); self.check_transmute(expr.span, from, to); } } intravisit::walk_expr(self, expr); } }
ItemVisitor
identifier_name
intrinsicck.rs
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use hir::def::Def; use hir::def_id::DefId; use ty::{self, Ty, TyCtxt}; use ty::layout::{LayoutError, Pointer, SizeSkeleton, VariantIdx}; use rustc_target::spec::abi::Abi::RustIntrinsic; use rustc_data_structures::indexed_vec::Idx; use syntax_pos::Span; use hir::intravisit::{self, Visitor, NestedVisitorMap}; use hir; pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { let mut visitor = ItemVisitor { tcx, }; tcx.hir().krate().visit_all_item_likes(&mut visitor.as_deep_visitor()); } struct ItemVisitor<'a, 'tcx: 'a> { tcx: TyCtxt<'a, 'tcx, 'tcx> } struct ExprVisitor<'a, 'tcx: 'a> { tcx: TyCtxt<'a, 'tcx, 'tcx>, tables: &'tcx ty::TypeckTables<'tcx>, param_env: ty::ParamEnv<'tcx>, } /// If the type is `Option<T>`, it will return `T`, otherwise /// the type itself. Works on most `Option`-like types. fn unpack_option_like<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> { let (def, substs) = match ty.sty { ty::Adt(def, substs) => (def, substs), _ => return ty }; if def.variants.len() == 2 &&!def.repr.c() && def.repr.int.is_none() { let data_idx; let one = VariantIdx::new(1); let zero = VariantIdx::new(0); if def.variants[zero].fields.is_empty() { data_idx = one; } else if def.variants[one].fields.is_empty() { data_idx = zero; } else { return ty; } if def.variants[data_idx].fields.len() == 1 { return def.variants[data_idx].fields[0].ty(tcx, substs); } } ty } impl<'a, 'tcx> ExprVisitor<'a, 'tcx> { fn def_id_is_transmute(&self, def_id: DefId) -> bool { self.tcx.fn_sig(def_id).abi() == RustIntrinsic && self.tcx.item_name(def_id) == "transmute" } fn check_transmute(&self, span: Span, from: Ty<'tcx>, to: Ty<'tcx>) { let sk_from = SizeSkeleton::compute(from, self.tcx, self.param_env); let sk_to = SizeSkeleton::compute(to, self.tcx, self.param_env); // Check for same size using the skeletons. if let (Ok(sk_from), Ok(sk_to)) = (sk_from, sk_to) {
if sk_from.same_size(sk_to) { return; } // Special-case transmutting from `typeof(function)` and // `Option<typeof(function)>` to present a clearer error. let from = unpack_option_like(self.tcx.global_tcx(), from); if let (&ty::FnDef(..), SizeSkeleton::Known(size_to)) = (&from.sty, sk_to) { if size_to == Pointer.size(&self.tcx) { struct_span_err!(self.tcx.sess, span, E0591, "can't transmute zero-sized type") .note(&format!("source type: {}", from)) .note(&format!("target type: {}", to)) .help("cast with `as` to a pointer instead") .emit(); return; } } } // Try to display a sensible error with as much information as possible. let skeleton_string = |ty: Ty<'tcx>, sk| { match sk { Ok(SizeSkeleton::Known(size)) => { format!("{} bits", size.bits()) } Ok(SizeSkeleton::Pointer { tail,.. }) => { format!("pointer to {}", tail) } Err(LayoutError::Unknown(bad)) => { if bad == ty { "this type's size can vary".to_owned() } else { format!("size can vary because of {}", bad) } } Err(err) => err.to_string() } }; struct_span_err!(self.tcx.sess, span, E0512, "transmute called with types of different sizes") .note(&format!("source type: {} ({})", from, skeleton_string(from, sk_from))) .note(&format!("target type: {} ({})", to, skeleton_string(to, sk_to))) .emit(); } } impl<'a, 'tcx> Visitor<'tcx> for ItemVisitor<'a, 'tcx> { fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> { NestedVisitorMap::None } fn visit_nested_body(&mut self, body_id: hir::BodyId) { let owner_def_id = self.tcx.hir().body_owner_def_id(body_id); let body = self.tcx.hir().body(body_id); let param_env = self.tcx.param_env(owner_def_id); let tables = self.tcx.typeck_tables_of(owner_def_id); ExprVisitor { tcx: self.tcx, param_env, tables }.visit_body(body); self.visit_body(body); } } impl<'a, 'tcx> Visitor<'tcx> for ExprVisitor<'a, 'tcx> { fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> { NestedVisitorMap::None } fn visit_expr(&mut self, expr: &'tcx hir::Expr) { let def = if let hir::ExprKind::Path(ref qpath) = expr.node { self.tables.qpath_def(qpath, expr.hir_id) } else { Def::Err }; if let Def::Fn(did) = def { if self.def_id_is_transmute(did) { let typ = self.tables.node_id_to_type(expr.hir_id); let sig = typ.fn_sig(self.tcx); let from = sig.inputs().skip_binder()[0]; let to = *sig.output().skip_binder(); self.check_transmute(expr.span, from, to); } } intravisit::walk_expr(self, expr); } }
random_line_split
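The three `intrinsicck.rs` rows come from the compiler pass that rejects `transmute` calls between types of different sizes (E0512) and special-cases zero-sized function items (E0591). As a user-level illustration of the invariant being enforced, the example below transmutes between two types of equal size and notes what the pass would reject; it is ordinary user code written for this dump, not part of the compiler pass itself.

```rust
use std::mem;

fn main() {
    // u32 and f32 have the same size, so this transmute is accepted.
    assert_eq!(mem::size_of::<u32>(), mem::size_of::<f32>());
    let bits: u32 = 0x3f80_0000; // IEEE-754 bit pattern of 1.0f32
    let float: f32 = unsafe { mem::transmute(bits) };
    assert_eq!(float, 1.0);

    // A size mismatch such as `transmute::<u32, u64>(bits)` would be rejected
    // at compile time with E0512 ("transmute called with types of different
    // sizes"), which is exactly the check implemented in the pass above.
    assert_ne!(mem::size_of::<u32>(), mem::size_of::<u64>());
}
```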
file.rs
use std::path::Path; use std::fs::File; use std::io::{Read, Write}; use encoding::{Encoding, DecoderTrap, EncoderTrap}; use encoding::all::WINDOWS_1252; pub fn
<P: AsRef<Path>>(path: P) -> String { let mut file = File::open(path).unwrap(); let mut data = String::new(); file.read_to_string(&mut data).unwrap(); data } pub fn write_all_text<P: AsRef<Path>>(path: P, text: &str) { let mut file = File::create(path).unwrap(); file.write_all(text.as_bytes()).unwrap(); } pub fn read_all_win_1252<P: AsRef<Path>>(path: P) -> String { let mut file = File::open(path).unwrap(); let mut data = Vec::new(); file.read_to_end(&mut data).unwrap(); WINDOWS_1252.decode(&data, DecoderTrap::Strict).unwrap() } pub fn write_all_win_1252<P: AsRef<Path>>(path: P, text: &str) { let mut file = File::create(path).unwrap(); let data = WINDOWS_1252.encode(&text, EncoderTrap::Strict).unwrap(); file.write_all(&data).unwrap(); }
read_all_text
identifier_name
file.rs
use std::path::Path; use std::fs::File; use std::io::{Read, Write}; use encoding::{Encoding, DecoderTrap, EncoderTrap}; use encoding::all::WINDOWS_1252; pub fn read_all_text<P: AsRef<Path>>(path: P) -> String { let mut file = File::open(path).unwrap(); let mut data = String::new(); file.read_to_string(&mut data).unwrap(); data } pub fn write_all_text<P: AsRef<Path>>(path: P, text: &str) { let mut file = File::create(path).unwrap(); file.write_all(text.as_bytes()).unwrap(); } pub fn read_all_win_1252<P: AsRef<Path>>(path: P) -> String
pub fn write_all_win_1252<P: AsRef<Path>>(path: P, text: &str) { let mut file = File::create(path).unwrap(); let data = WINDOWS_1252.encode(&text, EncoderTrap::Strict).unwrap(); file.write_all(&data).unwrap(); }
{ let mut file = File::open(path).unwrap(); let mut data = Vec::new(); file.read_to_end(&mut data).unwrap(); WINDOWS_1252.decode(&data, DecoderTrap::Strict).unwrap() }
identifier_body
file.rs
use std::path::Path; use std::fs::File; use std::io::{Read, Write}; use encoding::{Encoding, DecoderTrap, EncoderTrap}; use encoding::all::WINDOWS_1252;
let mut data = String::new(); file.read_to_string(&mut data).unwrap(); data } pub fn write_all_text<P: AsRef<Path>>(path: P, text: &str) { let mut file = File::create(path).unwrap(); file.write_all(text.as_bytes()).unwrap(); } pub fn read_all_win_1252<P: AsRef<Path>>(path: P) -> String { let mut file = File::open(path).unwrap(); let mut data = Vec::new(); file.read_to_end(&mut data).unwrap(); WINDOWS_1252.decode(&data, DecoderTrap::Strict).unwrap() } pub fn write_all_win_1252<P: AsRef<Path>>(path: P, text: &str) { let mut file = File::create(path).unwrap(); let data = WINDOWS_1252.encode(&text, EncoderTrap::Strict).unwrap(); file.write_all(&data).unwrap(); }
pub fn read_all_text<P: AsRef<Path>>(path: P) -> String { let mut file = File::open(path).unwrap();
random_line_split
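Taken together, the `file.rs` rows above describe a small I/O helper module: UTF-8 read/write helpers plus Windows-1252 variants built on the `encoding` crate. A minimal round-trip usage sketch, assuming those helpers are in scope; the file paths are placeholders:

```rust
fn main() {
    // Non-ASCII content that Windows-1252 can represent.
    let text = "café, 100%";

    // Write and read back through the legacy code page...
    write_all_win_1252("legacy.txt", text);
    assert_eq!(read_all_win_1252("legacy.txt"), text);

    // ...then re-save the same content as UTF-8.
    write_all_text("copy.txt", text);
    assert_eq!(read_all_text("copy.txt"), text);
}
```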
page.rs
/* * Copyright 2019 Jeehoon Kang * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ use core::mem; use core::ops::*; use core::ptr; use crate::utils::*; pub const PAGE_BITS: usize = 12; pub const PAGE_SIZE: usize = 1 << PAGE_BITS; pub const PAGE_LEVEL_BITS: usize = 9; #[repr(C, align(4096))] pub struct RawPage { inner: [u8; PAGE_SIZE], } const_assert_eq!(mem::align_of::<RawPage>(), PAGE_SIZE); const_assert_eq!(mem::size_of::<RawPage>(), PAGE_SIZE); impl RawPage { pub const fn new() -> Self { Self { inner: [0; PAGE_SIZE], } } pub fn clear(&mut self) { for byte in self.inner.iter_mut() { *byte = 0; } } } impl Deref for RawPage { type Target = [u8; PAGE_SIZE]; fn deref(&self) -> &Self::Target { &self.inner } } impl DerefMut for RawPage { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.inner } } pub struct Page { ptr: *mut RawPage, } impl Page { pub unsafe fn from_raw(ptr: *mut RawPage) -> Self { Self { ptr } } pub fn into_raw(self) -> *mut RawPage { let ptr = self.ptr; mem::forget(self); ptr } } impl Drop for Page { fn drop(&mut self) { panic!("`Page` should not be dropped."); } } impl Deref for Page { type Target = RawPage; fn deref(&self) -> &Self::Target { unsafe { &*(self.ptr as *const Self::Target) } } } impl DerefMut for Page { fn deref_mut(&mut self) -> &mut Self::Target { unsafe { &mut *(self.ptr as *mut Self::Target) } } } pub struct Pages { ptr: *mut [RawPage], } impl Pages { pub unsafe fn from_raw(raw: *mut RawPage, size: usize) -> Self { Self { ptr: ptr::slice_from_raw_parts_mut(raw, size), } } pub unsafe fn from_raw_u8(ptr: *mut u8, size: usize) -> Result<Self, ()> { // Round begin address up, and end address down. let new_begin = round_up(ptr as usize, PAGE_SIZE); let new_end = round_down(ptr as usize + size, PAGE_SIZE); // No pages if there isn't enough room for an entry. if new_begin >= new_end || new_end - new_begin < PAGE_SIZE
Ok(Self::from_raw( new_begin as *mut RawPage, (new_end - new_begin) / PAGE_SIZE, )) } pub fn into_raw(self) -> *mut RawPage { let ptr = self.ptr; mem::forget(self); ptr as *mut _ } pub fn clear(&mut self) { for page in self.iter_mut() { page.clear(); } } } impl Drop for Pages { fn drop(&mut self) { panic!("`Pages` should not be dropped."); } } impl Deref for Pages { type Target = [RawPage]; fn deref(&self) -> &Self::Target { unsafe { &*self.ptr } } } impl DerefMut for Pages { fn deref_mut(&mut self) -> &mut Self::Target { unsafe { &mut *self.ptr } } }
{ return Err(()); }
conditional_block
page.rs
/* * Copyright 2019 Jeehoon Kang * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ use core::mem; use core::ops::*; use core::ptr; use crate::utils::*; pub const PAGE_BITS: usize = 12; pub const PAGE_SIZE: usize = 1 << PAGE_BITS; pub const PAGE_LEVEL_BITS: usize = 9; #[repr(C, align(4096))] pub struct RawPage { inner: [u8; PAGE_SIZE], } const_assert_eq!(mem::align_of::<RawPage>(), PAGE_SIZE); const_assert_eq!(mem::size_of::<RawPage>(), PAGE_SIZE); impl RawPage { pub const fn new() -> Self { Self { inner: [0; PAGE_SIZE], } } pub fn clear(&mut self) { for byte in self.inner.iter_mut() { *byte = 0; } } } impl Deref for RawPage { type Target = [u8; PAGE_SIZE]; fn deref(&self) -> &Self::Target { &self.inner } } impl DerefMut for RawPage { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.inner } } pub struct Page { ptr: *mut RawPage, } impl Page { pub unsafe fn from_raw(ptr: *mut RawPage) -> Self { Self { ptr } } pub fn into_raw(self) -> *mut RawPage { let ptr = self.ptr; mem::forget(self); ptr } } impl Drop for Page { fn drop(&mut self) { panic!("`Page` should not be dropped."); } } impl Deref for Page { type Target = RawPage; fn deref(&self) -> &Self::Target { unsafe { &*(self.ptr as *const Self::Target) } } } impl DerefMut for Page { fn deref_mut(&mut self) -> &mut Self::Target { unsafe { &mut *(self.ptr as *mut Self::Target) } } } pub struct Pages { ptr: *mut [RawPage], } impl Pages { pub unsafe fn from_raw(raw: *mut RawPage, size: usize) -> Self { Self { ptr: ptr::slice_from_raw_parts_mut(raw, size), } } pub unsafe fn from_raw_u8(ptr: *mut u8, size: usize) -> Result<Self, ()> {
if new_begin >= new_end || new_end - new_begin < PAGE_SIZE { return Err(()); } Ok(Self::from_raw( new_begin as *mut RawPage, (new_end - new_begin) / PAGE_SIZE, )) } pub fn into_raw(self) -> *mut RawPage { let ptr = self.ptr; mem::forget(self); ptr as *mut _ } pub fn clear(&mut self) { for page in self.iter_mut() { page.clear(); } } } impl Drop for Pages { fn drop(&mut self) { panic!("`Pages` should not be dropped."); } } impl Deref for Pages { type Target = [RawPage]; fn deref(&self) -> &Self::Target { unsafe { &*self.ptr } } } impl DerefMut for Pages { fn deref_mut(&mut self) -> &mut Self::Target { unsafe { &mut *self.ptr } } }
// Round begin address up, and end address down. let new_begin = round_up(ptr as usize, PAGE_SIZE); let new_end = round_down(ptr as usize + size, PAGE_SIZE); // No pages if there isn't enough room for an entry.
random_line_split
page.rs
/* * Copyright 2019 Jeehoon Kang * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ use core::mem; use core::ops::*; use core::ptr; use crate::utils::*; pub const PAGE_BITS: usize = 12; pub const PAGE_SIZE: usize = 1 << PAGE_BITS; pub const PAGE_LEVEL_BITS: usize = 9; #[repr(C, align(4096))] pub struct RawPage { inner: [u8; PAGE_SIZE], } const_assert_eq!(mem::align_of::<RawPage>(), PAGE_SIZE); const_assert_eq!(mem::size_of::<RawPage>(), PAGE_SIZE); impl RawPage { pub const fn
() -> Self { Self { inner: [0; PAGE_SIZE], } } pub fn clear(&mut self) { for byte in self.inner.iter_mut() { *byte = 0; } } } impl Deref for RawPage { type Target = [u8; PAGE_SIZE]; fn deref(&self) -> &Self::Target { &self.inner } } impl DerefMut for RawPage { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.inner } } pub struct Page { ptr: *mut RawPage, } impl Page { pub unsafe fn from_raw(ptr: *mut RawPage) -> Self { Self { ptr } } pub fn into_raw(self) -> *mut RawPage { let ptr = self.ptr; mem::forget(self); ptr } } impl Drop for Page { fn drop(&mut self) { panic!("`Page` should not be dropped."); } } impl Deref for Page { type Target = RawPage; fn deref(&self) -> &Self::Target { unsafe { &*(self.ptr as *const Self::Target) } } } impl DerefMut for Page { fn deref_mut(&mut self) -> &mut Self::Target { unsafe { &mut *(self.ptr as *mut Self::Target) } } } pub struct Pages { ptr: *mut [RawPage], } impl Pages { pub unsafe fn from_raw(raw: *mut RawPage, size: usize) -> Self { Self { ptr: ptr::slice_from_raw_parts_mut(raw, size), } } pub unsafe fn from_raw_u8(ptr: *mut u8, size: usize) -> Result<Self, ()> { // Round begin address up, and end address down. let new_begin = round_up(ptr as usize, PAGE_SIZE); let new_end = round_down(ptr as usize + size, PAGE_SIZE); // No pages if there isn't enough room for an entry. if new_begin >= new_end || new_end - new_begin < PAGE_SIZE { return Err(()); } Ok(Self::from_raw( new_begin as *mut RawPage, (new_end - new_begin) / PAGE_SIZE, )) } pub fn into_raw(self) -> *mut RawPage { let ptr = self.ptr; mem::forget(self); ptr as *mut _ } pub fn clear(&mut self) { for page in self.iter_mut() { page.clear(); } } } impl Drop for Pages { fn drop(&mut self) { panic!("`Pages` should not be dropped."); } } impl Deref for Pages { type Target = [RawPage]; fn deref(&self) -> &Self::Target { unsafe { &*self.ptr } } } impl DerefMut for Pages { fn deref_mut(&mut self) -> &mut Self::Target { unsafe { &mut *self.ptr } } }
new
identifier_name
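The `Pages::from_raw_u8` branch shown in these rows is plain alignment arithmetic: round the start of the region up to `PAGE_SIZE`, round the end down, and bail out if no whole page is left. A worked sketch with local stand-ins for `crate::utils::round_up`/`round_down` (assumed, as their names suggest, to round to a multiple of the given alignment):

```rust
const PAGE_SIZE: usize = 1 << 12;

fn round_up(x: usize, align: usize) -> usize { (x + align - 1) / align * align }
fn round_down(x: usize, align: usize) -> usize { x / align * align }

fn main() {
    // A 0x3000-byte region starting at an unaligned address yields two pages.
    let (ptr, size) = (0x1010usize, 0x3000usize);
    let begin = round_up(ptr, PAGE_SIZE);        // 0x2000
    let end = round_down(ptr + size, PAGE_SIZE); // 0x4000
    assert_eq!((end - begin) / PAGE_SIZE, 2);

    // A region smaller than one aligned page hits the Err(()) branch.
    let (ptr, size) = (0x1010usize, 0x0800usize);
    let begin = round_up(ptr, PAGE_SIZE);        // 0x2000
    let end = round_down(ptr + size, PAGE_SIZE); // 0x1000
    assert!(begin >= end || end - begin < PAGE_SIZE);
}
```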
pipe-pingpong-bounded.rs
// xfail-fast // Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Ping-pong is a bounded protocol. This is place where I can // experiment with what code the compiler should generate for bounded // protocols. use core::cell::Cell; // This was generated initially by the pipe compiler, but it's been // modified in hopefully straightforward ways. mod pingpong { use core::pipes; use core::pipes::*; use core::ptr; pub struct Packets { ping: Packet<ping>, pong: Packet<pong>, } pub fn init() -> (client::ping, server::ping) { let buffer = ~Buffer { header: BufferHeader(), data: Packets { ping: mk_packet::<ping>(), pong: mk_packet::<pong>() } }; do pipes::entangle_buffer(buffer) |buffer, data| { data.ping.set_buffer(buffer); data.pong.set_buffer(buffer); ptr::addr_of(&(data.ping)) } } pub struct ping(server::pong); pub struct pong(client::ping); pub mod client { use core::pipes; use core::pipes::*; use core::ptr; pub fn ping(+pipe: ping) -> pong { { let b = pipe.reuse_buffer(); let s = SendPacketBuffered(ptr::addr_of(&(b.buffer.data.pong))); let c = RecvPacketBuffered(ptr::addr_of(&(b.buffer.data.pong))); let message = ::pingpong::ping(s); send(pipe, message); c } } pub type ping = pipes::SendPacketBuffered<::pingpong::ping, ::pingpong::Packets>; pub type pong = pipes::RecvPacketBuffered<::pingpong::pong, ::pingpong::Packets>; } pub mod server { use core::pipes; use core::pipes::*; use core::ptr; pub type ping = pipes::RecvPacketBuffered<::pingpong::ping, ::pingpong::Packets>; pub fn pong(+pipe: pong) -> ping { { let b = pipe.reuse_buffer(); let s = SendPacketBuffered(ptr::addr_of(&(b.buffer.data.ping))); let c = RecvPacketBuffered(ptr::addr_of(&(b.buffer.data.ping))); let message = ::pingpong::pong(s); send(pipe, message); c } } pub type pong = pipes::SendPacketBuffered<::pingpong::pong, ::pingpong::Packets>; } } mod test { use core::pipes::recv; use pingpong::{ping, pong}; pub fn client(+chan: ::pingpong::client::ping) { use pingpong::client; let chan = client::ping(chan); return; error!("Sent ping"); let pong(_chan) = recv(chan); error!("Received pong"); } pub fn server(+chan: ::pingpong::server::ping) { use pingpong::server; let ping(chan) = recv(chan); return; error!("Received ping"); let _chan = server::pong(chan); error!("Sent pong"); } } pub fn
() { let (client_, server_) = ::pingpong::init(); let client_ = Cell(client_); let server_ = Cell(server_); do task::spawn { let client__ = client_.take(); test::client(client__); }; do task::spawn { let server__ = server_.take(); test::server(server__); }; }
main
identifier_name
pipe-pingpong-bounded.rs
// xfail-fast // Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Ping-pong is a bounded protocol. This is place where I can // experiment with what code the compiler should generate for bounded // protocols. use core::cell::Cell; // This was generated initially by the pipe compiler, but it's been // modified in hopefully straightforward ways. mod pingpong { use core::pipes; use core::pipes::*; use core::ptr; pub struct Packets { ping: Packet<ping>, pong: Packet<pong>, } pub fn init() -> (client::ping, server::ping) { let buffer = ~Buffer { header: BufferHeader(), data: Packets { ping: mk_packet::<ping>(), pong: mk_packet::<pong>() } }; do pipes::entangle_buffer(buffer) |buffer, data| { data.ping.set_buffer(buffer); data.pong.set_buffer(buffer); ptr::addr_of(&(data.ping)) } } pub struct ping(server::pong); pub struct pong(client::ping); pub mod client { use core::pipes; use core::pipes::*; use core::ptr; pub fn ping(+pipe: ping) -> pong { { let b = pipe.reuse_buffer(); let s = SendPacketBuffered(ptr::addr_of(&(b.buffer.data.pong))); let c = RecvPacketBuffered(ptr::addr_of(&(b.buffer.data.pong))); let message = ::pingpong::ping(s); send(pipe, message); c } } pub type ping = pipes::SendPacketBuffered<::pingpong::ping, ::pingpong::Packets>; pub type pong = pipes::RecvPacketBuffered<::pingpong::pong, ::pingpong::Packets>; } pub mod server { use core::pipes; use core::pipes::*; use core::ptr; pub type ping = pipes::RecvPacketBuffered<::pingpong::ping, ::pingpong::Packets>; pub fn pong(+pipe: pong) -> ping { { let b = pipe.reuse_buffer(); let s = SendPacketBuffered(ptr::addr_of(&(b.buffer.data.ping))); let c = RecvPacketBuffered(ptr::addr_of(&(b.buffer.data.ping))); let message = ::pingpong::pong(s); send(pipe, message); c } } pub type pong = pipes::SendPacketBuffered<::pingpong::pong, ::pingpong::Packets>; } } mod test { use core::pipes::recv; use pingpong::{ping, pong}; pub fn client(+chan: ::pingpong::client::ping) { use pingpong::client; let chan = client::ping(chan); return; error!("Sent ping"); let pong(_chan) = recv(chan); error!("Received pong"); } pub fn server(+chan: ::pingpong::server::ping) { use pingpong::server; let ping(chan) = recv(chan); return; error!("Received ping"); let _chan = server::pong(chan); error!("Sent pong"); } } pub fn main() { let (client_, server_) = ::pingpong::init(); let client_ = Cell(client_); let server_ = Cell(server_); do task::spawn {
let server__ = server_.take(); test::server(server__); }; }
let client__ = client_.take(); test::client(client__); }; do task::spawn {
random_line_split
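The ping-pong rows above are pre-1.0 Rust (`do task::spawn`, `~`-boxes, the old `pipes` API) and no longer compile. As a rough modern analogue only — not the bounded-protocol machinery the original is demonstrating — the same client/server handshake can be sketched with `std::sync::mpsc` channels:

```rust
use std::sync::mpsc;
use std::thread;

fn main() {
    let (ping_tx, ping_rx) = mpsc::channel::<&'static str>();
    let (pong_tx, pong_rx) = mpsc::channel::<&'static str>();

    let client = thread::spawn(move || {
        ping_tx.send("ping").unwrap();               // "Sent ping"
        assert_eq!(pong_rx.recv().unwrap(), "pong"); // "Received pong"
    });

    let server = thread::spawn(move || {
        assert_eq!(ping_rx.recv().unwrap(), "ping"); // "Received ping"
        pong_tx.send("pong").unwrap();               // "Sent pong"
    });

    client.join().unwrap();
    server.join().unwrap();
}
```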
parser.rs
use std::io; use std::path::PathBuf; use super::read_config; use crate::modules::{ModuleResolutionError, ModuleResolutionErrorKind}; use crate::{ErrorKind, Input, Session}; #[test] fn parser_errors_in_submods_are_surfaced() { // See also https://github.com/rust-lang/rustfmt/issues/4126 let filename = "tests/parser/issue-4126/lib.rs"; let input_file = PathBuf::from(filename); let exp_mod_name = "invalid"; let config = read_config(&input_file); let mut session = Session::<io::Stdout>::new(config, None); if let Err(ErrorKind::ModuleResolutionError(ModuleResolutionError { module, kind })) = session.format(Input::File(filename.into())) { assert_eq!(&module, exp_mod_name); if let ModuleResolutionErrorKind::ParseError { file: unparseable_file, } = kind { assert_eq!( unparseable_file, PathBuf::from("tests/parser/issue-4126/invalid.rs"), ); } else { panic!("Expected parser error"); } } else { panic!("Expected ModuleResolution operation error"); } } fn
(filename: &str) { let file = PathBuf::from(filename); let config = read_config(&file); let mut session = Session::<io::Stdout>::new(config, None); let _ = session.format(Input::File(filename.into())).unwrap(); assert!(session.has_parsing_errors()); } #[test] fn parser_creation_errors_on_entry_new_parser_from_file_panic() { // See also https://github.com/rust-lang/rustfmt/issues/4418 let filename = "tests/parser/issue_4418.rs"; assert_parser_error(filename); } #[test] fn crate_parsing_errors_on_unclosed_delims() { // See also https://github.com/rust-lang/rustfmt/issues/4466 let filename = "tests/parser/unclosed-delims/issue_4466.rs"; assert_parser_error(filename); }
assert_parser_error
identifier_name
parser.rs
use std::io; use std::path::PathBuf; use super::read_config; use crate::modules::{ModuleResolutionError, ModuleResolutionErrorKind}; use crate::{ErrorKind, Input, Session}; #[test] fn parser_errors_in_submods_are_surfaced()
panic!("Expected parser error"); } } else { panic!("Expected ModuleResolution operation error"); } } fn assert_parser_error(filename: &str) { let file = PathBuf::from(filename); let config = read_config(&file); let mut session = Session::<io::Stdout>::new(config, None); let _ = session.format(Input::File(filename.into())).unwrap(); assert!(session.has_parsing_errors()); } #[test] fn parser_creation_errors_on_entry_new_parser_from_file_panic() { // See also https://github.com/rust-lang/rustfmt/issues/4418 let filename = "tests/parser/issue_4418.rs"; assert_parser_error(filename); } #[test] fn crate_parsing_errors_on_unclosed_delims() { // See also https://github.com/rust-lang/rustfmt/issues/4466 let filename = "tests/parser/unclosed-delims/issue_4466.rs"; assert_parser_error(filename); }
{ // See also https://github.com/rust-lang/rustfmt/issues/4126 let filename = "tests/parser/issue-4126/lib.rs"; let input_file = PathBuf::from(filename); let exp_mod_name = "invalid"; let config = read_config(&input_file); let mut session = Session::<io::Stdout>::new(config, None); if let Err(ErrorKind::ModuleResolutionError(ModuleResolutionError { module, kind })) = session.format(Input::File(filename.into())) { assert_eq!(&module, exp_mod_name); if let ModuleResolutionErrorKind::ParseError { file: unparseable_file, } = kind { assert_eq!( unparseable_file, PathBuf::from("tests/parser/issue-4126/invalid.rs"), ); } else {
identifier_body
parser.rs
use std::io; use std::path::PathBuf; use super::read_config; use crate::modules::{ModuleResolutionError, ModuleResolutionErrorKind}; use crate::{ErrorKind, Input, Session}; #[test] fn parser_errors_in_submods_are_surfaced() { // See also https://github.com/rust-lang/rustfmt/issues/4126 let filename = "tests/parser/issue-4126/lib.rs"; let input_file = PathBuf::from(filename); let exp_mod_name = "invalid"; let config = read_config(&input_file); let mut session = Session::<io::Stdout>::new(config, None); if let Err(ErrorKind::ModuleResolutionError(ModuleResolutionError { module, kind })) = session.format(Input::File(filename.into())) { assert_eq!(&module, exp_mod_name); if let ModuleResolutionErrorKind::ParseError { file: unparseable_file, } = kind { assert_eq!( unparseable_file, PathBuf::from("tests/parser/issue-4126/invalid.rs"), ); } else { panic!("Expected parser error"); } } else { panic!("Expected ModuleResolution operation error"); } } fn assert_parser_error(filename: &str) { let file = PathBuf::from(filename); let config = read_config(&file); let mut session = Session::<io::Stdout>::new(config, None); let _ = session.format(Input::File(filename.into())).unwrap(); assert!(session.has_parsing_errors()); } #[test] fn parser_creation_errors_on_entry_new_parser_from_file_panic() { // See also https://github.com/rust-lang/rustfmt/issues/4418 let filename = "tests/parser/issue_4418.rs"; assert_parser_error(filename);
#[test] fn crate_parsing_errors_on_unclosed_delims() { // See also https://github.com/rust-lang/rustfmt/issues/4466 let filename = "tests/parser/unclosed-delims/issue_4466.rs"; assert_parser_error(filename); }
}
random_line_split
parser.rs
use std::io; use std::path::PathBuf; use super::read_config; use crate::modules::{ModuleResolutionError, ModuleResolutionErrorKind}; use crate::{ErrorKind, Input, Session}; #[test] fn parser_errors_in_submods_are_surfaced() { // See also https://github.com/rust-lang/rustfmt/issues/4126 let filename = "tests/parser/issue-4126/lib.rs"; let input_file = PathBuf::from(filename); let exp_mod_name = "invalid"; let config = read_config(&input_file); let mut session = Session::<io::Stdout>::new(config, None); if let Err(ErrorKind::ModuleResolutionError(ModuleResolutionError { module, kind })) = session.format(Input::File(filename.into())) { assert_eq!(&module, exp_mod_name); if let ModuleResolutionErrorKind::ParseError { file: unparseable_file, } = kind { assert_eq!( unparseable_file, PathBuf::from("tests/parser/issue-4126/invalid.rs"), ); } else { panic!("Expected parser error"); } } else
} fn assert_parser_error(filename: &str) { let file = PathBuf::from(filename); let config = read_config(&file); let mut session = Session::<io::Stdout>::new(config, None); let _ = session.format(Input::File(filename.into())).unwrap(); assert!(session.has_parsing_errors()); } #[test] fn parser_creation_errors_on_entry_new_parser_from_file_panic() { // See also https://github.com/rust-lang/rustfmt/issues/4418 let filename = "tests/parser/issue_4418.rs"; assert_parser_error(filename); } #[test] fn crate_parsing_errors_on_unclosed_delims() { // See also https://github.com/rust-lang/rustfmt/issues/4466 let filename = "tests/parser/unclosed-delims/issue_4466.rs"; assert_parser_error(filename); }
{ panic!("Expected ModuleResolution operation error"); }
conditional_block
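The `parser.rs` rows above show two test shapes: matching an exact error variant inline, and the reusable `assert_parser_error` helper. A sketch of the obvious module-resolution counterpart — hypothetical, not a helper that exists in the crate, and it assumes the same imports as the tests above:

```rust
// Hypothetical helper in the spirit of `assert_parser_error`: formatting the
// fixture must fail with a ModuleResolutionError naming `exp_mod_name`.
fn assert_module_resolution_error(filename: &str, exp_mod_name: &str) {
    let input_file = PathBuf::from(filename);
    let config = read_config(&input_file);
    let mut session = Session::<io::Stdout>::new(config, None);
    match session.format(Input::File(filename.into())) {
        Err(ErrorKind::ModuleResolutionError(ModuleResolutionError { module, .. })) => {
            assert_eq!(&module, exp_mod_name)
        }
        _ => panic!("expected a ModuleResolutionError for {}", filename),
    }
}
```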
interact.rs
//! Functions for an interactive mode command line (REPL) use crate::command::Run; use eyre::{bail, eyre, Result}; use rustyline::error::ReadlineError; use std::path::Path; use structopt::StructOpt; fn help() -> String { use ansi_term::{ Colour::{Green, Yellow}, Style, }; let mut help = Style::new() .bold() .paint("Stencila CLI interactive mode\n\n") .to_string(); help += &Yellow.paint("ABOUT:").to_string(); help += r#" Interactive mode allows you to interact with the Stencila CLI without having to restart it. This is particularly useful for doing things like exploring the structure of documents using `query`, or running code within them using `execute`. Interactive mode has the concept of a command prefix to save you having to retype the same command and its options. For example, to interactively query the structure of a Markdown document: stencila query report.Rmd --interact You can also print, set and clear the command prefix during the interactive session (see the shortcut keystrokes below). "#; help += &Yellow.paint("SHORTCUTS:\n").to_string(); for (keys, desc) in &[ ("--help", "Get help for the current command prefix"), ("^ ", "Print the current command prefix"), ("> ", "Append arguments to the command prefix"), ("< ", "Remove the last argument from the command prefix"), (">> ", "Set the command prefix"), ("<< ", "Clear the command prefix"), ("$ ", "Ignore the command prefix for this command"), ("↑ ", "Go back through command history"), ("↓ ", "Go forward through command history"), ("? ", "Print this message"), ("Ctrl+C", "Cancel the current task (if any)"), ("Ctrl+D", "Exit interactive session"), ] { help += &format!(" {} {}\n", Green.paint(*keys), desc) } help } /// Run the interactive REPL #[tracing::instrument] pub async fn run<T>(mut prefix: Vec<String>, formats: &[String], history: &Path) -> Result<()> where T: StructOpt + Run + Send + Sync, { let mut rl = editor::new(); if rl.load_history(history).is_err() { tracing::debug!("History file not found") } println!("{}", help()); if!prefix.is_empty() { println!("Starting command prefix is {:?}", prefix); } loop { let readline = rl.readline("> "); match readline { Ok(line) => { rl.add_history_entry(&line); let line = line.trim(); let mut args = line .split_whitespace() .map(str::to_string) .collect::<Vec<String>>(); // Handle prefix inspection / manipulation shortcuts if line.starts_with('^') { tracing::info!("Command prefix is: `{}`", prefix.join(" ")); continue; } else if line.starts_with(">>") { prefix = args[1..].into(); tracing::info!("Command prefix was set to: `{}`", prefix.join(" ")); continue; } else if line.starts_with('>') { prefix = [prefix, args[1..].into()].concat(); tracing::info!("Command prefix was appended to: `{}`", prefix.join(" ")); continue; } else if line.starts_with("<<") { prefix.clear(); tracing::info!("Command prefix was cleared"); continue; } else if line.starts_with('<') { prefix.truncate(std::cmp::max(1, prefix.len()) - 1); tracing::info!("Command prefix was truncated to: `{}`", prefix.join(" ")); continue; } else if line.starts_with('?') { tracing::info!("{}", help()); continue; } // Construct args vector for this line, handling bypassing the prefix and // reordering (and errors) if using the `with` command. 
let mut args = if line.starts_with('$') { args.remove(0); args } else { [prefix.as_slice(), args.as_slice()].concat() }; if args.len() > 1 && args[1] == "with" { if args.len() == 2 { tracing::error!("Using the `with` command without a path; use `>` to append one to the command prefix."); continue; } else if args.len() == 3 { tracing::error!( "Using the `with` command without a subcommand e.g `show`." ); continue; } else if args.len() > 3 { let subcommand = args.remove(3); args[1] = subcommand; } }; // Parse args and run the command match T::clap().get_matches_from_safe(args) { Ok(matches) => { let command = T::from_clap(&matches); command.print(formats).await } Err(error) => { if error.kind == structopt::clap::ErrorKind::VersionDisplayed { print!("{}", error) } else if error.kind == structopt::clap::ErrorKind::HelpDisplayed || error.kind == structopt::clap::ErrorKind::MissingArgumentOrSubcommand { // Remove the unnecessary command / version line at the start let lines = format!("{}\n", error) .to_string() .lines() .skip(1) .map(str::to_string) .collect::<Vec<String>>() .join("\n"); print!("{}", lines) } else { eprintln!("{:?}", eyre!(error)) } } } } Err(ReadlineError::Interrupted) => { tracing::info!( "Ctrl+C pressed, but no active task (use Ctrl+D to end interactive session)" ); } Err(ReadlineError::Eof) => { tracing::info!("Ctrl+D pressed, ending interactive session"); break; } Err(error) => bail!(error), } } rl.save_history(history)?; Ok(()) } /// Module for interactive mode line editor /// /// Implements traits for `rustyline` mod editor { use ansi_term::Colour::{Blue, White, Yellow}; use rustyline::{ completion::{Completer, FilenameCompleter, Pair}, config::OutputStreamType, highlight::{Highlighter, MatchingBracketHighlighter}, hint::{Hinter, HistoryHinter}, validate::{MatchingBracketValidator, Validator}, validate::{ValidationContext, ValidationResult}, CompletionType, Context, EditMode, Editor, Result, }; use rustyline_derive::Helper; use std::borrow::Cow::{self, Owned}; pub fn new() -> Editor<Helper> { let config = rustyline::Config::builder() .history_ignore_space(true) .max_history_size(1000) .completion_type(CompletionType::List) .edit_mode(EditMode::Emacs) .output_stream(OutputStreamType::Stdout) .build(); let mut editor = Editor::with_config(config); let helper = Helper::new(); editor.set_helper(Some(helper)); editor } #[derive(Helper)] pub struct Helper { pub completer: FilenameCompleter, pub hinter: HistoryHinter, pub validator: MatchingBracketValidator, pub highlighter: MatchingBracketHighlighter, } impl Helper { pub fn new() -> Self { Helper { completer: FilenameCompleter::new(), hinter: HistoryHinter {}, validator: MatchingBracketValidator::new(), highlighter: MatchingBracketHighlighter::new(), } } } /// Provides tab-completion candidates /// /// https://github.com/kkawakam/rustyline/blob/master/src/completion.rs impl Completer for Helper { type Candidate = Pair; fn complete( &self, line: &str, pos: usize, ctx: &Context<'_>, ) -> Result<(usize, Vec<Self::Candidate>)> { self.completer.complete(line, pos, ctx) } } /// Provides hints based on the current line /// /// See https://github.com/kkawakam/rustyline/blob/master/src/hint.rs impl Hinter for Helper { type Hint = String; // Takes the currently edited line with the cursor position and returns the string that should be // displayed or None if no hint is available for the text the user currently typed fn hint(&self, line: &str, pos: usize, ctx: &Context<'_>) -> Option<String> { self.hinter.hint(line, pos, ctx) } } /// 
Determines whether the current buffer is a valid command or should continue. /// /// Will not validate unless brackets (round, square and curly) are balanced. impl Validator for Helper { fn validate(&self, ctx: &mut ValidationContext) -> Result<ValidationResult> { self.validator.validate(ctx) } fn validate_while_typing(&self) -> bool { self.validator.validate_while_typing() } } /// Syntax highlighter /// /// Highlights brackets, prompt, hints and completion candidates. /// See https://github.com/kkawakam/rustyline/blob/master/src/highlight.rs impl Highlighter for Helper { fn highlight<'l>(&self, line: &'l str, pos: usize) -> Cow<'l, str> { self.highlighter.highlight(line, pos) } fn highlight_prompt<'b,'s: 'b, 'p: 'b>( &'s self, prompt: &'p str, _default: bool, ) -> Cow<'b, str> { Owned(Blue.paint(prompt).to_string()) } fn highlight_hint<'h>(&self, hint: &'h str) -> Cow<'h, str> { Owned(White.dimmed().paint(hint).to_string()) } fn highlight_candidate<'c>( &self, candidate: &'c str, _completion: CompletionType, ) -> Cow<'c, str> {
fn highlight_char(&self, line: &str, pos: usize) -> bool { self.highlighter.highlight_char(line, pos) } } }
Owned(Yellow.dimmed().paint(candidate).to_string()) }
identifier_body
interact.rs
//! Functions for an interactive mode command line (REPL) use crate::command::Run; use eyre::{bail, eyre, Result}; use rustyline::error::ReadlineError; use std::path::Path; use structopt::StructOpt; fn help() -> String { use ansi_term::{ Colour::{Green, Yellow}, Style, }; let mut help = Style::new() .bold() .paint("Stencila CLI interactive mode\n\n") .to_string(); help += &Yellow.paint("ABOUT:").to_string(); help += r#" Interactive mode allows you to interact with the Stencila CLI without having to restart it. This is particularly useful for doing things like exploring the structure of documents using `query`, or running code within them using `execute`. Interactive mode has the concept of a command prefix to save you having to retype the same command and its options. For example, to interactively query the structure of a Markdown document: stencila query report.Rmd --interact You can also print, set and clear the command prefix during the interactive session (see the shortcut keystrokes below). "#; help += &Yellow.paint("SHORTCUTS:\n").to_string(); for (keys, desc) in &[ ("--help", "Get help for the current command prefix"), ("^ ", "Print the current command prefix"), ("> ", "Append arguments to the command prefix"), ("< ", "Remove the last argument from the command prefix"), (">> ", "Set the command prefix"), ("<< ", "Clear the command prefix"), ("$ ", "Ignore the command prefix for this command"), ("↑ ", "Go back through command history"), ("↓ ", "Go forward through command history"), ("? ", "Print this message"), ("Ctrl+C", "Cancel the current task (if any)"), ("Ctrl+D", "Exit interactive session"), ] { help += &format!(" {} {}\n", Green.paint(*keys), desc) } help } /// Run the interactive REPL #[tracing::instrument] pub async fn run<T>(mut prefix: Vec<String>, formats: &[String], history: &Path) -> Result<()> where T: StructOpt + Run + Send + Sync, { let mut rl = editor::new(); if rl.load_history(history).is_err() { tracing::debug!("History file not found") } println!("{}", help()); if!prefix.is_empty() { println!("Starting command prefix is {:?}", prefix); } loop { let readline = rl.readline("> "); match readline { Ok(line) => { rl.add_history_entry(&line); let line = line.trim(); let mut args = line .split_whitespace() .map(str::to_string) .collect::<Vec<String>>(); // Handle prefix inspection / manipulation shortcuts if line.starts_with('^') { tracing::info!("Command prefix is: `{}`", prefix.join(" ")); continue; } else if line.starts_with(">>") { prefix = args[1..].into(); tracing::info!("Command prefix was set to: `{}`", prefix.join(" ")); continue; } else if line.starts_with('>') { prefix = [prefix, args[1..].into()].concat(); tracing::info!("Command prefix was appended to: `{}`", prefix.join(" ")); continue; } else if line.starts_with("<<") { prefix.clear(); tracing::info!("Command prefix was cleared"); continue; } else if line.starts_with('<') { prefix.truncate(std::cmp::max(1, prefix.len()) - 1); tracing::info!("Command prefix was truncated to: `{}`", prefix.join(" ")); continue; } else if line.starts_with('?') { tracing::info!("{}", help()); continue; } // Construct args vector for this line, handling bypassing the prefix and // reordering (and errors) if using the `with` command. 
let mut args = if line.starts_with('$') { args.remove(0); args } else { [prefix.as_slice(), args.as_slice()].concat() }; if args.len() > 1 && args[1] == "with" { if args.len() == 2 { tracing::error!("Using the `with` command without a path; use `>` to append one to the command prefix."); continue; } else if args.len() == 3 { tracing::error!( "Using the `with` command without a subcommand e.g `show`." ); continue; } else if args.len() > 3 { let subcommand = args.remove(3); args[1] = subcommand; } }; // Parse args and run the command match T::clap().get_matches_from_safe(args) { Ok(matches) => { let command = T::from_clap(&matches); command.print(formats).await } Err(error) => { if error.kind == structopt::clap::ErrorKind::VersionDisplayed { print!("{}", error) } else if error.kind == structopt::clap::ErrorKind::HelpDisplayed || error.kind == structopt::clap::ErrorKind::MissingArgumentOrSubcommand { // Remove the unnecessary command / version line at the start let lines = format!("{}\n", error) .to_string() .lines() .skip(1) .map(str::to_string) .collect::<Vec<String>>() .join("\n"); print!("{}", lines) } else { eprintln!("{:?}", eyre!(error)) } } } } Err(ReadlineError::Interrupted) => { tracing::info!( "Ctrl+C pressed, but no active task (use Ctrl+D to end interactive session)" ); } Err(ReadlineError::Eof) => { tracing::info!("Ctrl+D pressed, ending interactive session"); break; } Err(error) => bail!(error), } } rl.save_history(history)?; Ok(()) } /// Module for interactive mode line editor /// /// Implements traits for `rustyline` mod editor { use ansi_term::Colour::{Blue, White, Yellow}; use rustyline::{ completion::{Completer, FilenameCompleter, Pair}, config::OutputStreamType, highlight::{Highlighter, MatchingBracketHighlighter}, hint::{Hinter, HistoryHinter}, validate::{MatchingBracketValidator, Validator}, validate::{ValidationContext, ValidationResult}, CompletionType, Context, EditMode, Editor, Result, }; use rustyline_derive::Helper; use std::borrow::Cow::{self, Owned}; pub fn new() -> Editor<Helper> { let config = rustyline::Config::builder() .history_ignore_space(true) .max_history_size(1000) .completion_type(CompletionType::List) .edit_mode(EditMode::Emacs) .output_stream(OutputStreamType::Stdout) .build(); let mut editor = Editor::with_config(config); let helper = Helper::new(); editor.set_helper(Some(helper)); editor } #[derive(Helper)] pub struct Helper { pub completer: FilenameCompleter, pub hinter: HistoryHinter, pub validator: MatchingBracketValidator, pub highlighter: MatchingBracketHighlighter, } impl Helper { pub fn new() -> Self { Helper { completer: FilenameCompleter::new(), hinter: HistoryHinter {}, validator: MatchingBracketValidator::new(), highlighter: MatchingBracketHighlighter::new(), } } } /// Provides tab-completion candidates /// /// https://github.com/kkawakam/rustyline/blob/master/src/completion.rs impl Completer for Helper { type Candidate = Pair; fn complete( &self, line: &str, pos: usize, ctx: &Context<'_>, ) -> Result<(usize, Vec<Self::Candidate>)> { self.completer.complete(line, pos, ctx) } } /// Provides hints based on the current line /// /// See https://github.com/kkawakam/rustyline/blob/master/src/hint.rs impl Hinter for Helper { type Hint = String; // Takes the currently edited line with the cursor position and returns the string that should be // displayed or None if no hint is available for the text the user currently typed fn hint(&self, line: &str, pos: usize, ctx: &Context<'_>) -> Option<String> { self.hinter.hint(line, pos, ctx) } } /// 
Determines whether the current buffer is a valid command or should continue. /// /// Will not validate unless brackets (round, square and curly) are balanced. impl Validator for Helper { fn validate(&self, ctx: &mut ValidationContext) -> Result<ValidationResult> { self.validator.validate(ctx) } fn validate_while_typing(&self) -> bool { self.validator.validate_while_typing() } } /// Syntax highlighter /// /// Highlights brackets, prompt, hints and completion candidates. /// See https://github.com/kkawakam/rustyline/blob/master/src/highlight.rs impl Highlighter for Helper { fn highlight<'l>(&self, line: &'l str, pos: usize) -> Cow<'l, str> { self.highlighter.highlight(line, pos) } fn high
's: 'b, 'p: 'b>( &'s self, prompt: &'p str, _default: bool, ) -> Cow<'b, str> { Owned(Blue.paint(prompt).to_string()) } fn highlight_hint<'h>(&self, hint: &'h str) -> Cow<'h, str> { Owned(White.dimmed().paint(hint).to_string()) } fn highlight_candidate<'c>( &self, candidate: &'c str, _completion: CompletionType, ) -> Cow<'c, str> { Owned(Yellow.dimmed().paint(candidate).to_string()) } fn highlight_char(&self, line: &str, pos: usize) -> bool { self.highlighter.highlight_char(line, pos) } } }
light_prompt<'b,
identifier_name
interact.rs
//! Functions for an interactive mode command line (REPL) use crate::command::Run; use eyre::{bail, eyre, Result}; use rustyline::error::ReadlineError; use std::path::Path; use structopt::StructOpt; fn help() -> String { use ansi_term::{ Colour::{Green, Yellow}, Style, }; let mut help = Style::new() .bold() .paint("Stencila CLI interactive mode\n\n") .to_string(); help += &Yellow.paint("ABOUT:").to_string(); help += r#" Interactive mode allows you to interact with the Stencila CLI without having to restart it. This is particularly useful for doing things like exploring the structure of documents using `query`, or running code within them using `execute`. Interactive mode has the concept of a command prefix to save you having to retype the same command and its options. For example, to interactively query the structure of a Markdown document: stencila query report.Rmd --interact You can also print, set and clear the command prefix during the interactive session (see the shortcut keystrokes below). "#; help += &Yellow.paint("SHORTCUTS:\n").to_string(); for (keys, desc) in &[ ("--help", "Get help for the current command prefix"), ("^ ", "Print the current command prefix"), ("> ", "Append arguments to the command prefix"), ("< ", "Remove the last argument from the command prefix"), (">> ", "Set the command prefix"),
("Ctrl+C", "Cancel the current task (if any)"), ("Ctrl+D", "Exit interactive session"), ] { help += &format!(" {} {}\n", Green.paint(*keys), desc) } help } /// Run the interactive REPL #[tracing::instrument] pub async fn run<T>(mut prefix: Vec<String>, formats: &[String], history: &Path) -> Result<()> where T: StructOpt + Run + Send + Sync, { let mut rl = editor::new(); if rl.load_history(history).is_err() { tracing::debug!("History file not found") } println!("{}", help()); if!prefix.is_empty() { println!("Starting command prefix is {:?}", prefix); } loop { let readline = rl.readline("> "); match readline { Ok(line) => { rl.add_history_entry(&line); let line = line.trim(); let mut args = line .split_whitespace() .map(str::to_string) .collect::<Vec<String>>(); // Handle prefix inspection / manipulation shortcuts if line.starts_with('^') { tracing::info!("Command prefix is: `{}`", prefix.join(" ")); continue; } else if line.starts_with(">>") { prefix = args[1..].into(); tracing::info!("Command prefix was set to: `{}`", prefix.join(" ")); continue; } else if line.starts_with('>') { prefix = [prefix, args[1..].into()].concat(); tracing::info!("Command prefix was appended to: `{}`", prefix.join(" ")); continue; } else if line.starts_with("<<") { prefix.clear(); tracing::info!("Command prefix was cleared"); continue; } else if line.starts_with('<') { prefix.truncate(std::cmp::max(1, prefix.len()) - 1); tracing::info!("Command prefix was truncated to: `{}`", prefix.join(" ")); continue; } else if line.starts_with('?') { tracing::info!("{}", help()); continue; } // Construct args vector for this line, handling bypassing the prefix and // reordering (and errors) if using the `with` command. let mut args = if line.starts_with('$') { args.remove(0); args } else { [prefix.as_slice(), args.as_slice()].concat() }; if args.len() > 1 && args[1] == "with" { if args.len() == 2 { tracing::error!("Using the `with` command without a path; use `>` to append one to the command prefix."); continue; } else if args.len() == 3 { tracing::error!( "Using the `with` command without a subcommand e.g `show`." 
); continue; } else if args.len() > 3 { let subcommand = args.remove(3); args[1] = subcommand; } }; // Parse args and run the command match T::clap().get_matches_from_safe(args) { Ok(matches) => { let command = T::from_clap(&matches); command.print(formats).await } Err(error) => { if error.kind == structopt::clap::ErrorKind::VersionDisplayed { print!("{}", error) } else if error.kind == structopt::clap::ErrorKind::HelpDisplayed || error.kind == structopt::clap::ErrorKind::MissingArgumentOrSubcommand { // Remove the unnecessary command / version line at the start let lines = format!("{}\n", error) .to_string() .lines() .skip(1) .map(str::to_string) .collect::<Vec<String>>() .join("\n"); print!("{}", lines) } else { eprintln!("{:?}", eyre!(error)) } } } } Err(ReadlineError::Interrupted) => { tracing::info!( "Ctrl+C pressed, but no active task (use Ctrl+D to end interactive session)" ); } Err(ReadlineError::Eof) => { tracing::info!("Ctrl+D pressed, ending interactive session"); break; } Err(error) => bail!(error), } } rl.save_history(history)?; Ok(()) } /// Module for interactive mode line editor /// /// Implements traits for `rustyline` mod editor { use ansi_term::Colour::{Blue, White, Yellow}; use rustyline::{ completion::{Completer, FilenameCompleter, Pair}, config::OutputStreamType, highlight::{Highlighter, MatchingBracketHighlighter}, hint::{Hinter, HistoryHinter}, validate::{MatchingBracketValidator, Validator}, validate::{ValidationContext, ValidationResult}, CompletionType, Context, EditMode, Editor, Result, }; use rustyline_derive::Helper; use std::borrow::Cow::{self, Owned}; pub fn new() -> Editor<Helper> { let config = rustyline::Config::builder() .history_ignore_space(true) .max_history_size(1000) .completion_type(CompletionType::List) .edit_mode(EditMode::Emacs) .output_stream(OutputStreamType::Stdout) .build(); let mut editor = Editor::with_config(config); let helper = Helper::new(); editor.set_helper(Some(helper)); editor } #[derive(Helper)] pub struct Helper { pub completer: FilenameCompleter, pub hinter: HistoryHinter, pub validator: MatchingBracketValidator, pub highlighter: MatchingBracketHighlighter, } impl Helper { pub fn new() -> Self { Helper { completer: FilenameCompleter::new(), hinter: HistoryHinter {}, validator: MatchingBracketValidator::new(), highlighter: MatchingBracketHighlighter::new(), } } } /// Provides tab-completion candidates /// /// https://github.com/kkawakam/rustyline/blob/master/src/completion.rs impl Completer for Helper { type Candidate = Pair; fn complete( &self, line: &str, pos: usize, ctx: &Context<'_>, ) -> Result<(usize, Vec<Self::Candidate>)> { self.completer.complete(line, pos, ctx) } } /// Provides hints based on the current line /// /// See https://github.com/kkawakam/rustyline/blob/master/src/hint.rs impl Hinter for Helper { type Hint = String; // Takes the currently edited line with the cursor position and returns the string that should be // displayed or None if no hint is available for the text the user currently typed fn hint(&self, line: &str, pos: usize, ctx: &Context<'_>) -> Option<String> { self.hinter.hint(line, pos, ctx) } } /// Determines whether the current buffer is a valid command or should continue. /// /// Will not validate unless brackets (round, square and curly) are balanced. 
impl Validator for Helper { fn validate(&self, ctx: &mut ValidationContext) -> Result<ValidationResult> { self.validator.validate(ctx) } fn validate_while_typing(&self) -> bool { self.validator.validate_while_typing() } } /// Syntax highlighter /// /// Highlights brackets, prompt, hints and completion candidates. /// See https://github.com/kkawakam/rustyline/blob/master/src/highlight.rs impl Highlighter for Helper { fn highlight<'l>(&self, line: &'l str, pos: usize) -> Cow<'l, str> { self.highlighter.highlight(line, pos) } fn highlight_prompt<'b,'s: 'b, 'p: 'b>( &'s self, prompt: &'p str, _default: bool, ) -> Cow<'b, str> { Owned(Blue.paint(prompt).to_string()) } fn highlight_hint<'h>(&self, hint: &'h str) -> Cow<'h, str> { Owned(White.dimmed().paint(hint).to_string()) } fn highlight_candidate<'c>( &self, candidate: &'c str, _completion: CompletionType, ) -> Cow<'c, str> { Owned(Yellow.dimmed().paint(candidate).to_string()) } fn highlight_char(&self, line: &str, pos: usize) -> bool { self.highlighter.highlight_char(line, pos) } } }
("<< ", "Clear the command prefix"), ("$ ", "Ignore the command prefix for this command"), ("↑ ", "Go back through command history"), ("↓ ", "Go forward through command history"), ("? ", "Print this message"),
random_line_split
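Most of the REPL loop above is prefix bookkeeping; the part that decides what actually gets parsed reduces to a small pure function: a leading `$` token bypasses the stored prefix, otherwise the prefix is prepended. A standalone sketch mirroring that logic (the function name is illustrative, not part of the crate):

```rust
fn compose_args(prefix: &[String], line: &str) -> Vec<String> {
    let mut args: Vec<String> = line.split_whitespace().map(str::to_string).collect();
    if line.starts_with('$') {
        args.remove(0); // drop the `$` token itself and ignore the prefix
        args
    } else {
        let mut full = prefix.to_vec();
        full.extend(args); // prefix first, then the typed arguments
        full
    }
}

fn main() {
    let prefix = vec!["query".to_string(), "report.Rmd".to_string()];
    assert_eq!(
        compose_args(&prefix, "structure"),
        vec!["query", "report.Rmd", "structure"]
    );
    assert_eq!(compose_args(&prefix, "$ config dirs"), vec!["config", "dirs"]);
}
```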
mod.rs
//! Kafka producers. //! //! ## The C librdkafka producer //! //! Rust-rdkafka relies on the C librdkafka producer to communicate with Kafka, //! so in order to understand how the Rust producers work it is important to //! understand the basics of the C one as well. //! //! ### Async //! //! The librdkafka producer is completely asynchronous: it maintains a memory //! buffer where messages waiting to be sent or currently in flight are stored. //! Once a message is delivered or an error occurred and the maximum number of //! retries has been reached, the producer will enqueue a delivery event with //! the appropriate delivery result into an internal event queue. //! //! The librdkafka user is responsible for calling the `poll` function at //! regular intervals to process those events; the thread calling `poll` will be //! the one executing the user-specified delivery callback for every delivery //! event. If `poll` is not called, or not frequently enough, the producer will //! return a [`RDKafkaErrorCode::QueueFull`] error and it won't be able to send //! any other message until more delivery events are processed via `poll`. The //! `QueueFull` error can also be returned if Kafka is not able to receive the //! messages quickly enough. //! //! ### Error reporting //! //! The C library will try deal with all the transient errors such as broker //! disconnection, timeouts etc. These errors, called global errors, are //! automatically logged in rust-rdkafka, but they normally don't require any //! handling as they are automatically handled internally. To see the logs, make //! sure you initialize the logger. //! //! As mentioned earlier, errors specific to message production will be reported //! in the delivery callback. //! //! ### Buffering //! //! Buffering is done automatically by librdkafka. When `send` is called, the //! message is enqueued internally and once enough messages have been enqueued, //! or when enough time has passed, they will be sent to Kafka as a single //! batch. You can control the behavior of the buffer by configuring the the //! `queue.buffering.max.*` parameters listed below. //! //! ## `rust-rdkafka` producers //! //! `rust-rdkafka` (rdkafka for brevity) provides two sets of producers: low //! level and high level. //! //! ### Low-level producers //! //! The lowest level producer provided by rdkafka is called [`BaseProducer`]. //! The goal of the `BaseProducer` is to be as close as possible to the C one //! while maintaining a safe Rust interface. In particular, the `BaseProducer` //! needs to be polled at regular intervals to execute any delivery callback //! that might be waiting and to make sure the queue doesn't fill up. //! //! Another low lever producer is the [`ThreadedProducer`], which is a //! `BaseProducer` with a dedicated thread for polling. //! //! The delivery callback can be defined using a `ProducerContext`. See the //! [`base_producer`] module for more information. //! //! ### High-level producer //! //! At the moment the only high level producer implemented is the //! [`FutureProducer`]. The `FutureProducer` doesn't rely on user-defined //! callbacks to notify the delivery or failure of a message; instead, this //! information will be returned in a Future. The `FutureProducer` also uses an //! internal thread that is used for polling, which makes calling poll //! explicitly not necessary. The returned future will contain information about //! the delivered message in case of success, or a copy of the original message //! in case of failure. 
Additional computation can be chained to the returned //! future, and it will executed by the future executor once the value is //! available (for more information, check the documentation of the futures //! crate). //! //! ## Transactions //! //! All rust-rdkafka producers support transactions. Transactional producers //! work together with transaction-aware consumers configured with the default //! `isolation.level` of `read_committed`. //! //! To configure a producer for transactions set `transactional.id` to an //! identifier unique to the application when creating the producer. After //! creating the producer, you must initialize it with //! [`Producer::init_transactions`]. //! //! To start a new transaction use [`Producer::begin_transaction`]. There can be //! **only one ongoing transaction** at a time per producer. All records sent //! after starting a transaction and before committing or aborting it will //! automatically be associated with that transaction. //! //! Once you have initialized transactions on a producer, you are not permitted //! to produce messages outside of a transaction. //! //! Consumer offsets can be sent as part of the ongoing transaction using //! `send_offsets_to_transaction` and will be committed atomically with the //! other records sent in the transaction. //! //! The current transaction can be committed with //! [`Producer::commit_transaction`] or aborted using //! [`Producer::abort_transaction`]. Afterwards, a new transaction can begin. //! //! ### Errors //! //! Errors returned by transaction methods may: //! //! * be retriable ([`RDKafkaError::is_retriable`]), in which case the operation //! that encountered the error may be retried. //! * require abort ([`RDKafkaError::txn_requires_abort`], in which case the //! current transaction must be aborted and a new transaction begun. //! * be fatal ([`RDKafkaError::is_fatal`]), in which case the producer must be //! stopped and the application terminated. //! //! For more details about transactions, see the [Transactional Producer] //! section of the librdkafka introduction. //! //! ## Configuration //! //! ### Producer configuration //! //! For the configuration parameters common to both producers and consumers, //! refer to the documentation in the `config` module. Here are listed the most //! commonly used producer configuration. Click //! [here](https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md) //! for the full list. //! //! - `queue.buffering.max.messages`: Maximum number of messages allowed on the //! producer queue. Default: 100000. //! - `queue.buffering.max.kbytes`: Maximum total message size sum allowed on //! the producer queue. This property has higher priority than //! queue.buffering.max.messages. Default: 4000000. //! - `queue.buffering.max.ms`: Delay in milliseconds to wait for messages in //! the producer queue to accumulate before sending a request to the brokers. //! A higher value allows larger and more effective (less overhead, improved //! compression) batches of messages to accumulate at the expense of increased //! message delivery latency. Default: 0. //! - `message.send.max.retries`: How many times to retry sending a failing //! batch. Note: retrying may cause reordering. Default: 2. //! - `compression.codec`: Compression codec to use for compressing message //! sets. Default: none. //! - `request.required.acks`: This field indicates how many acknowledgements //! the leader broker must receive from ISR brokers before responding to the //! 
request: 0=Broker does not send any response/ack to client, 1=Only the //! leader broker will need to ack the message, -1 or all=broker will block //! until message is committed by all in sync replicas (ISRs) or broker's //! in.sync.replicas setting before sending response. Default: 1. //! - `request.timeout.ms`: The ack timeout of the producer request in //! milliseconds. This value is only enforced by the broker and relies on //! request.required.acks being!= 0. Default: 5000. //! - `message.timeout.ms`: Local message timeout. This value is only enforced //! locally and limits the time a produced message waits for successful //! delivery. A time of 0 is infinite. Default: 300000. //! //! [`RDKafkaErrorCode::QueueFull`]: crate::error::RDKafkaErrorCode::QueueFull //! [`RDKafkaError::is_retriable`]: crate::error::RDKafkaError::is_retriable //! [`RDKafkaError::txn_requires_abort`]: crate::error::RDKafkaError::txn_requires_abort //! [`RDKafkaError::is_fatal`]: crate::error::RDKafkaError::is_fatal //! [Transactional Producer]: https://github.com/edenhill/librdkafka/blob/master/INTRODUCTION.md#transactional-producer use std::sync::Arc; use crate::client::{Client, ClientContext}; use crate::consumer::ConsumerGroupMetadata; use crate::error::KafkaResult; use crate::topic_partition_list::TopicPartitionList; use crate::util::{IntoOpaque, Timeout}; pub mod base_producer; pub mod future_producer; #[doc(inline)] pub use self::base_producer::{BaseProducer, BaseRecord, DeliveryResult, ThreadedProducer}; #[doc(inline)] pub use self::future_producer::{DeliveryFuture, FutureProducer, FutureRecord}; // // ********** PRODUCER CONTEXT ********** // /// Producer-specific context. /// /// This user-defined object can be used to provide custom callbacks for /// producer events. Refer to the list of methods to check which callbacks can /// be specified. /// /// In particular, it can be used to specify the `delivery` callback that will /// be called when the acknowledgement for a delivered message is received. /// /// See also the [`ClientContext`] trait. pub trait ProducerContext: ClientContext { /// A `DeliveryOpaque` is a user-defined structure that will be passed to /// the producer when producing a message, and returned to the `delivery` /// method once the message has been delivered, or failed to. type DeliveryOpaque: IntoOpaque; /// This method will be called once the message has been delivered (or /// failed to). The `DeliveryOpaque` will be the one provided by the user /// when calling send. fn delivery(&self, delivery_result: &DeliveryResult<'_>, delivery_opaque: Self::DeliveryOpaque); } /// An inert producer context that can be used when customizations are not /// required. #[derive(Clone)] pub struct
; impl ClientContext for DefaultProducerContext {} impl ProducerContext for DefaultProducerContext { type DeliveryOpaque = (); fn delivery(&self, _: &DeliveryResult<'_>, _: Self::DeliveryOpaque) {} } /// Common trait for all producers. pub trait Producer<C = DefaultProducerContext> where C: ProducerContext, { /// Returns the [`Client`] underlying this producer. fn client(&self) -> &Client<C>; /// Returns a reference to the [`ProducerContext`] used to create this /// producer. fn context(&self) -> &Arc<C> { self.client().context() } /// Returns the number of messages that are either waiting to be sent or are /// sent but are waiting to be acknowledged. fn in_flight_count(&self) -> i32; /// Flushes any pending messages. /// /// This method should be called before termination to ensure delivery of /// all enqueued messages. It will call `poll()` internally. fn flush<T: Into<Timeout>>(&self, timeout: T) -> KafkaResult<()>; /// Enable sending transactions with this producer. /// /// # Prerequisites /// /// * The configuration used to create the producer must include a /// `transactional.id` setting. /// * You must not have sent any messages or called any of the other /// transaction-related functions. /// /// # Details /// /// This function ensures any transactions initiated by previous producers /// with the same `transactional.id` are completed. Any transactions left /// open by any such previous producers will be aborted. /// /// Once previous transactions have been fenced, this function acquires an /// internal producer ID and epoch that will be used by all transactional /// messages sent by this producer. /// /// If this function returns successfully, messages may only be sent to this /// producer when a transaction is active. See /// [`Producer::begin_transaction`]. /// /// This function may block for the specified `timeout`. fn init_transactions<T: Into<Timeout>>(&self, timeout: T) -> KafkaResult<()>; /// Begins a new transaction. /// /// # Prerequisites /// /// You must have successfully called [`Producer::init_transactions`]. /// /// # Details /// /// This function begins a new transaction, and implicitly associates that /// open transaction with this producer. /// /// After a successful call to this function, any messages sent via this /// producer or any calls to [`Producer::send_offsets_to_transaction`] will /// be implicitly associated with this transaction, until the transaction is /// finished. /// /// Finish the transaction by calling [`Producer::commit_transaction`] or /// [`Producer::abort_transaction`]. /// /// While a transaction is open, you must perform at least one transaction /// operation every `transaction.timeout.ms` to avoid timing out the /// transaction on the broker. fn begin_transaction(&self) -> KafkaResult<()>; /// Associates an offset commit operation with this transaction. /// /// # Prerequisites /// /// The producer must have an open transaction via a call to /// [`Producer::begin_transaction`]. /// /// # Details /// /// Sends a list of topic partition offsets to the consumer group /// coordinator for `cgm`, and marks the offsets as part of the current /// transaction. These offsets will be considered committed only if the /// transaction is committed successfully. /// /// The offsets should be the next message your application will consume, /// i.e., one greater than the the last processed message's offset for each /// partition. 
/// /// Use this method at the end of a consume-transform-produce loop, prior to /// comitting the transaction with [`Producer::commit_transaction`]. /// /// This function may block for the specified `timeout`. /// /// # Hints /// /// To obtain the correct consumer group metadata, call /// [`Consumer::group_metadata`] on the consumer for which offsets are being /// committed. /// /// The consumer must not have automatic commits enabled. /// /// [`Consumer::group_metadata`]: crate::consumer::Consumer::group_metadata fn send_offsets_to_transaction<T: Into<Timeout>>( &self, offsets: &TopicPartitionList, cgm: &ConsumerGroupMetadata, timeout: T, ) -> KafkaResult<()>; /// Commits the current transaction. /// /// # Prerequisites /// /// The producer must have an open transaction via a call to /// [`Producer::begin_transaction`]. /// /// # Details /// /// Any outstanding messages will be flushed (i.e., delivered) before /// actually committing the transaction. /// /// If any of the outstanding messages fail permanently, the current /// transaction will enter an abortable error state and this function will /// return an abortable error. You must then call /// [`Producer::abort_transaction`] before attemping to create another /// transaction. /// /// This function may block for the specified `timeout`. fn commit_transaction<T: Into<Timeout>>(&self, timeout: T) -> KafkaResult<()>; /// Aborts the current transaction. /// /// # Prerequisites /// /// The producer must have an open transaction via a call to /// [`Producer::begin_transaction`]. /// /// # Details /// /// Any oustanding messages will be purged and failed with /// [`RDKafkaErrorCode::PurgeInflight`] or [`RDKafkaErrorCode::PurgeQueue`]. /// /// This function should also be used to recover from non-fatal abortable /// transaction errors. /// /// This function may block for the specified `timeout`. /// /// [`RDKafkaErrorCode::PurgeInflight`]: crate::error::RDKafkaErrorCode::PurgeInflight /// [`RDKafkaErrorCode::PurgeQueue`]: crate::error::RDKafkaErrorCode::PurgeQueue fn abort_transaction<T: Into<Timeout>>(&self, timeout: T) -> KafkaResult<()>; }
DefaultProducerContext
identifier_name
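// Illustrative sketch: the ProducerContext trait above only requires picking a
// DeliveryOpaque type and implementing delivery(), so a minimal custom context
// can look like the code below. Assumptions not stated in the excerpt: the
// crate is `rdkafka`, `DeliveryResult` (re-exported above) is a plain Result,
// and `ClientContext` has no mandatory methods.
use std::sync::atomic::{AtomicUsize, Ordering};

use rdkafka::client::ClientContext;
use rdkafka::producer::{DeliveryResult, ProducerContext};

/// Counts failed deliveries so the application can inspect the total later.
pub struct CountingContext {
    failed_deliveries: AtomicUsize,
}

impl ClientContext for CountingContext {}

impl ProducerContext for CountingContext {
    // Nothing extra is threaded through per message, so use the unit type,
    // just like DefaultProducerContext above.
    type DeliveryOpaque = ();

    fn delivery(&self, result: &DeliveryResult<'_>, _opaque: Self::DeliveryOpaque) {
        if result.is_err() {
            self.failed_deliveries.fetch_add(1, Ordering::Relaxed);
        }
    }
}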
mod.rs
//! Kafka producers. //! //! ## The C librdkafka producer //! //! Rust-rdkafka relies on the C librdkafka producer to communicate with Kafka, //! so in order to understand how the Rust producers work it is important to //! understand the basics of the C one as well. //! //! ### Async //! //! The librdkafka producer is completely asynchronous: it maintains a memory //! buffer where messages waiting to be sent or currently in flight are stored. //! Once a message is delivered or an error occurred and the maximum number of //! retries has been reached, the producer will enqueue a delivery event with //! the appropriate delivery result into an internal event queue. //! //! The librdkafka user is responsible for calling the `poll` function at //! regular intervals to process those events; the thread calling `poll` will be //! the one executing the user-specified delivery callback for every delivery //! event. If `poll` is not called, or not frequently enough, the producer will //! return a [`RDKafkaErrorCode::QueueFull`] error and it won't be able to send //! any other message until more delivery events are processed via `poll`. The //! `QueueFull` error can also be returned if Kafka is not able to receive the //! messages quickly enough. //! //! ### Error reporting //! //! The C library will try deal with all the transient errors such as broker //! disconnection, timeouts etc. These errors, called global errors, are //! automatically logged in rust-rdkafka, but they normally don't require any //! handling as they are automatically handled internally. To see the logs, make //! sure you initialize the logger. //! //! As mentioned earlier, errors specific to message production will be reported //! in the delivery callback. //! //! ### Buffering //! //! Buffering is done automatically by librdkafka. When `send` is called, the //! message is enqueued internally and once enough messages have been enqueued, //! or when enough time has passed, they will be sent to Kafka as a single //! batch. You can control the behavior of the buffer by configuring the the //! `queue.buffering.max.*` parameters listed below. //! //! ## `rust-rdkafka` producers //! //! `rust-rdkafka` (rdkafka for brevity) provides two sets of producers: low //! level and high level. //! //! ### Low-level producers //! //! The lowest level producer provided by rdkafka is called [`BaseProducer`]. //! The goal of the `BaseProducer` is to be as close as possible to the C one //! while maintaining a safe Rust interface. In particular, the `BaseProducer` //! needs to be polled at regular intervals to execute any delivery callback //! that might be waiting and to make sure the queue doesn't fill up. //! //! Another low lever producer is the [`ThreadedProducer`], which is a //! `BaseProducer` with a dedicated thread for polling. //! //! The delivery callback can be defined using a `ProducerContext`. See the //! [`base_producer`] module for more information. //! //! ### High-level producer //! //! At the moment the only high level producer implemented is the //! [`FutureProducer`]. The `FutureProducer` doesn't rely on user-defined //! callbacks to notify the delivery or failure of a message; instead, this //! information will be returned in a Future. The `FutureProducer` also uses an //! internal thread that is used for polling, which makes calling poll //! explicitly not necessary. The returned future will contain information about //! the delivered message in case of success, or a copy of the original message //! in case of failure. 
Additional computation can be chained to the returned //! future, and it will executed by the future executor once the value is //! available (for more information, check the documentation of the futures //! crate). //! //! ## Transactions //! //! All rust-rdkafka producers support transactions. Transactional producers //! work together with transaction-aware consumers configured with the default //! `isolation.level` of `read_committed`. //! //! To configure a producer for transactions set `transactional.id` to an //! identifier unique to the application when creating the producer. After //! creating the producer, you must initialize it with //! [`Producer::init_transactions`]. //! //! To start a new transaction use [`Producer::begin_transaction`]. There can be //! **only one ongoing transaction** at a time per producer. All records sent //! after starting a transaction and before committing or aborting it will //! automatically be associated with that transaction. //! //! Once you have initialized transactions on a producer, you are not permitted //! to produce messages outside of a transaction. //! //! Consumer offsets can be sent as part of the ongoing transaction using //! `send_offsets_to_transaction` and will be committed atomically with the //! other records sent in the transaction. //! //! The current transaction can be committed with //! [`Producer::commit_transaction`] or aborted using //! [`Producer::abort_transaction`]. Afterwards, a new transaction can begin. //! //! ### Errors //! //! Errors returned by transaction methods may: //! //! * be retriable ([`RDKafkaError::is_retriable`]), in which case the operation //! that encountered the error may be retried. //! * require abort ([`RDKafkaError::txn_requires_abort`], in which case the //! current transaction must be aborted and a new transaction begun. //! * be fatal ([`RDKafkaError::is_fatal`]), in which case the producer must be //! stopped and the application terminated. //! //! For more details about transactions, see the [Transactional Producer] //! section of the librdkafka introduction. //! //! ## Configuration //! //! ### Producer configuration //! //! For the configuration parameters common to both producers and consumers, //! refer to the documentation in the `config` module. Here are listed the most //! commonly used producer configuration. Click //! [here](https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md) //! for the full list. //! //! - `queue.buffering.max.messages`: Maximum number of messages allowed on the //! producer queue. Default: 100000. //! - `queue.buffering.max.kbytes`: Maximum total message size sum allowed on //! the producer queue. This property has higher priority than //! queue.buffering.max.messages. Default: 4000000. //! - `queue.buffering.max.ms`: Delay in milliseconds to wait for messages in //! the producer queue to accumulate before sending a request to the brokers. //! A higher value allows larger and more effective (less overhead, improved //! compression) batches of messages to accumulate at the expense of increased //! message delivery latency. Default: 0. //! - `message.send.max.retries`: How many times to retry sending a failing //! batch. Note: retrying may cause reordering. Default: 2. //! - `compression.codec`: Compression codec to use for compressing message //! sets. Default: none. //! - `request.required.acks`: This field indicates how many acknowledgements //! the leader broker must receive from ISR brokers before responding to the //! 
request: 0=Broker does not send any response/ack to client, 1=Only the //! leader broker will need to ack the message, -1 or all=broker will block //! until message is committed by all in sync replicas (ISRs) or broker's //! in.sync.replicas setting before sending response. Default: 1. //! - `request.timeout.ms`: The ack timeout of the producer request in //! milliseconds. This value is only enforced by the broker and relies on //! request.required.acks being!= 0. Default: 5000. //! - `message.timeout.ms`: Local message timeout. This value is only enforced //! locally and limits the time a produced message waits for successful //! delivery. A time of 0 is infinite. Default: 300000. //! //! [`RDKafkaErrorCode::QueueFull`]: crate::error::RDKafkaErrorCode::QueueFull //! [`RDKafkaError::is_retriable`]: crate::error::RDKafkaError::is_retriable //! [`RDKafkaError::txn_requires_abort`]: crate::error::RDKafkaError::txn_requires_abort //! [`RDKafkaError::is_fatal`]: crate::error::RDKafkaError::is_fatal //! [Transactional Producer]: https://github.com/edenhill/librdkafka/blob/master/INTRODUCTION.md#transactional-producer use std::sync::Arc; use crate::client::{Client, ClientContext}; use crate::consumer::ConsumerGroupMetadata; use crate::error::KafkaResult; use crate::topic_partition_list::TopicPartitionList; use crate::util::{IntoOpaque, Timeout}; pub mod base_producer; pub mod future_producer; #[doc(inline)] pub use self::base_producer::{BaseProducer, BaseRecord, DeliveryResult, ThreadedProducer}; #[doc(inline)] pub use self::future_producer::{DeliveryFuture, FutureProducer, FutureRecord}; // // ********** PRODUCER CONTEXT ********** // /// Producer-specific context. /// /// This user-defined object can be used to provide custom callbacks for /// producer events. Refer to the list of methods to check which callbacks can /// be specified. /// /// In particular, it can be used to specify the `delivery` callback that will /// be called when the acknowledgement for a delivered message is received. /// /// See also the [`ClientContext`] trait. pub trait ProducerContext: ClientContext { /// A `DeliveryOpaque` is a user-defined structure that will be passed to
/// failed to). The `DeliveryOpaque` will be the one provided by the user /// when calling send. fn delivery(&self, delivery_result: &DeliveryResult<'_>, delivery_opaque: Self::DeliveryOpaque); } /// An inert producer context that can be used when customizations are not /// required. #[derive(Clone)] pub struct DefaultProducerContext; impl ClientContext for DefaultProducerContext {} impl ProducerContext for DefaultProducerContext { type DeliveryOpaque = (); fn delivery(&self, _: &DeliveryResult<'_>, _: Self::DeliveryOpaque) {} } /// Common trait for all producers. pub trait Producer<C = DefaultProducerContext> where C: ProducerContext, { /// Returns the [`Client`] underlying this producer. fn client(&self) -> &Client<C>; /// Returns a reference to the [`ProducerContext`] used to create this /// producer. fn context(&self) -> &Arc<C> { self.client().context() } /// Returns the number of messages that are either waiting to be sent or are /// sent but are waiting to be acknowledged. fn in_flight_count(&self) -> i32; /// Flushes any pending messages. /// /// This method should be called before termination to ensure delivery of /// all enqueued messages. It will call `poll()` internally. fn flush<T: Into<Timeout>>(&self, timeout: T) -> KafkaResult<()>; /// Enable sending transactions with this producer. /// /// # Prerequisites /// /// * The configuration used to create the producer must include a /// `transactional.id` setting. /// * You must not have sent any messages or called any of the other /// transaction-related functions. /// /// # Details /// /// This function ensures any transactions initiated by previous producers /// with the same `transactional.id` are completed. Any transactions left /// open by any such previous producers will be aborted. /// /// Once previous transactions have been fenced, this function acquires an /// internal producer ID and epoch that will be used by all transactional /// messages sent by this producer. /// /// If this function returns successfully, messages may only be sent to this /// producer when a transaction is active. See /// [`Producer::begin_transaction`]. /// /// This function may block for the specified `timeout`. fn init_transactions<T: Into<Timeout>>(&self, timeout: T) -> KafkaResult<()>; /// Begins a new transaction. /// /// # Prerequisites /// /// You must have successfully called [`Producer::init_transactions`]. /// /// # Details /// /// This function begins a new transaction, and implicitly associates that /// open transaction with this producer. /// /// After a successful call to this function, any messages sent via this /// producer or any calls to [`Producer::send_offsets_to_transaction`] will /// be implicitly associated with this transaction, until the transaction is /// finished. /// /// Finish the transaction by calling [`Producer::commit_transaction`] or /// [`Producer::abort_transaction`]. /// /// While a transaction is open, you must perform at least one transaction /// operation every `transaction.timeout.ms` to avoid timing out the /// transaction on the broker. fn begin_transaction(&self) -> KafkaResult<()>; /// Associates an offset commit operation with this transaction. /// /// # Prerequisites /// /// The producer must have an open transaction via a call to /// [`Producer::begin_transaction`]. /// /// # Details /// /// Sends a list of topic partition offsets to the consumer group /// coordinator for `cgm`, and marks the offsets as part of the current /// transaction. 
These offsets will be considered committed only if the /// transaction is committed successfully. /// /// The offsets should be the next message your application will consume, /// i.e., one greater than the the last processed message's offset for each /// partition. /// /// Use this method at the end of a consume-transform-produce loop, prior to /// comitting the transaction with [`Producer::commit_transaction`]. /// /// This function may block for the specified `timeout`. /// /// # Hints /// /// To obtain the correct consumer group metadata, call /// [`Consumer::group_metadata`] on the consumer for which offsets are being /// committed. /// /// The consumer must not have automatic commits enabled. /// /// [`Consumer::group_metadata`]: crate::consumer::Consumer::group_metadata fn send_offsets_to_transaction<T: Into<Timeout>>( &self, offsets: &TopicPartitionList, cgm: &ConsumerGroupMetadata, timeout: T, ) -> KafkaResult<()>; /// Commits the current transaction. /// /// # Prerequisites /// /// The producer must have an open transaction via a call to /// [`Producer::begin_transaction`]. /// /// # Details /// /// Any outstanding messages will be flushed (i.e., delivered) before /// actually committing the transaction. /// /// If any of the outstanding messages fail permanently, the current /// transaction will enter an abortable error state and this function will /// return an abortable error. You must then call /// [`Producer::abort_transaction`] before attemping to create another /// transaction. /// /// This function may block for the specified `timeout`. fn commit_transaction<T: Into<Timeout>>(&self, timeout: T) -> KafkaResult<()>; /// Aborts the current transaction. /// /// # Prerequisites /// /// The producer must have an open transaction via a call to /// [`Producer::begin_transaction`]. /// /// # Details /// /// Any oustanding messages will be purged and failed with /// [`RDKafkaErrorCode::PurgeInflight`] or [`RDKafkaErrorCode::PurgeQueue`]. /// /// This function should also be used to recover from non-fatal abortable /// transaction errors. /// /// This function may block for the specified `timeout`. /// /// [`RDKafkaErrorCode::PurgeInflight`]: crate::error::RDKafkaErrorCode::PurgeInflight /// [`RDKafkaErrorCode::PurgeQueue`]: crate::error::RDKafkaErrorCode::PurgeQueue fn abort_transaction<T: Into<Timeout>>(&self, timeout: T) -> KafkaResult<()>; }
/// the producer when producing a message, and returned to the `delivery` /// method once the message has been delivered, or failed to. type DeliveryOpaque: IntoOpaque; /// This method will be called once the message has been delivered (or
random_line_split
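// Illustrative sketch: the module docs above walk through the transactional
// flow (init_transactions -> begin_transaction -> send -> commit_transaction);
// the code below strings those calls together on a BaseProducer. Assumptions
// not stated in the excerpt: the crate is `rdkafka`, producers are built via
// `rdkafka::config::ClientConfig`, `BaseProducer::send` hands the record back
// on failure, and the broker address and topic name are placeholders.
use std::time::Duration;

use rdkafka::config::ClientConfig;
use rdkafka::producer::{BaseProducer, BaseRecord, Producer};

fn produce_one_transaction() -> Result<(), Box<dyn std::error::Error>> {
    // `transactional.id` must be configured before init_transactions().
    let producer: BaseProducer = ClientConfig::new()
        .set("bootstrap.servers", "localhost:9092")
        .set("transactional.id", "example-transactional-producer")
        .create()?;

    producer.init_transactions(Duration::from_secs(10))?;
    producer.begin_transaction()?;

    // Records sent between begin_transaction() and commit_transaction() are
    // part of the same transaction.
    producer
        .send(BaseRecord::to("example-topic").key("k1").payload("hello"))
        .map_err(|(err, _record)| err)?;

    // Poll so delivery events are processed and the internal queue drains, as
    // the "Async" section above requires.
    producer.poll(Duration::from_millis(100));

    producer.commit_transaction(Duration::from_secs(10))?;
    Ok(())
}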
armv7s_apple_ios.rs
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use spec::{LinkerFlavor, Target, TargetOptions, TargetResult}; use super::apple_ios_base::{opts, Arch}; pub fn
() -> TargetResult { let base = opts(Arch::Armv7s)?; Ok(Target { llvm_target: "armv7s-apple-ios".to_string(), target_endian: "little".to_string(), target_pointer_width: "32".to_string(), target_c_int_width: "32".to_string(), data_layout: "e-m:o-p:32:32-f64:32:64-v64:32:64-v128:32:128-a:0:32-n32-S32".to_string(), arch: "arm".to_string(), target_os: "ios".to_string(), target_env: String::new(), target_vendor: "apple".to_string(), linker_flavor: LinkerFlavor::Gcc, options: TargetOptions { features: "+v7,+vfp4,+neon".to_string(), max_atomic_width: Some(64), abi_blacklist: super::arm_base::abi_blacklist(), .. base } }) }
target
identifier_name
armv7s_apple_ios.rs
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use spec::{LinkerFlavor, Target, TargetOptions, TargetResult}; use super::apple_ios_base::{opts, Arch}; pub fn target() -> TargetResult { let base = opts(Arch::Armv7s)?; Ok(Target { llvm_target: "armv7s-apple-ios".to_string(), target_endian: "little".to_string(), target_pointer_width: "32".to_string(), target_c_int_width: "32".to_string(), data_layout: "e-m:o-p:32:32-f64:32:64-v64:32:64-v128:32:128-a:0:32-n32-S32".to_string(), arch: "arm".to_string(), target_os: "ios".to_string(),
options: TargetOptions { features: "+v7,+vfp4,+neon".to_string(), max_atomic_width: Some(64), abi_blacklist: super::arm_base::abi_blacklist(), .. base } }) }
target_env: String::new(), target_vendor: "apple".to_string(), linker_flavor: LinkerFlavor::Gcc,
random_line_split
armv7s_apple_ios.rs
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use spec::{LinkerFlavor, Target, TargetOptions, TargetResult}; use super::apple_ios_base::{opts, Arch}; pub fn target() -> TargetResult
}
{ let base = opts(Arch::Armv7s)?; Ok(Target { llvm_target: "armv7s-apple-ios".to_string(), target_endian: "little".to_string(), target_pointer_width: "32".to_string(), target_c_int_width: "32".to_string(), data_layout: "e-m:o-p:32:32-f64:32:64-v64:32:64-v128:32:128-a:0:32-n32-S32".to_string(), arch: "arm".to_string(), target_os: "ios".to_string(), target_env: String::new(), target_vendor: "apple".to_string(), linker_flavor: LinkerFlavor::Gcc, options: TargetOptions { features: "+v7,+vfp4,+neon".to_string(), max_atomic_width: Some(64), abi_blacklist: super::arm_base::abi_blacklist(), .. base } })
identifier_body
unboxed-closures-drop.rs
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // A battery of tests to ensure destructors of unboxed closure environments // run at the right times. #![feature(unboxed_closures)] static mut DROP_COUNT: usize = 0; fn drop_count() -> usize { unsafe { DROP_COUNT } } struct Droppable { x: isize, } impl Droppable { fn new() -> Droppable { Droppable { x: 1 } } } impl Drop for Droppable { fn
(&mut self) { unsafe { DROP_COUNT += 1 } } } fn a<F:Fn(isize, isize) -> isize>(f: F) -> isize { f(1, 2) } fn b<F:FnMut(isize, isize) -> isize>(mut f: F) -> isize { f(3, 4) } fn c<F:FnOnce(isize, isize) -> isize>(f: F) -> isize { f(5, 6) } fn test_fn() { { a(move |a: isize, b| { a + b }); } assert_eq!(drop_count(), 0); { let z = &Droppable::new(); a(move |a: isize, b| { z; a + b }); assert_eq!(drop_count(), 0); } assert_eq!(drop_count(), 1); { let z = &Droppable::new(); let zz = &Droppable::new(); a(move |a: isize, b| { z; zz; a + b }); assert_eq!(drop_count(), 1); } assert_eq!(drop_count(), 3); } fn test_fn_mut() { { b(move |a: isize, b| { a + b }); } assert_eq!(drop_count(), 3); { let z = &Droppable::new(); b(move |a: isize, b| { z; a + b }); assert_eq!(drop_count(), 3); } assert_eq!(drop_count(), 4); { let z = &Droppable::new(); let zz = &Droppable::new(); b(move |a: isize, b| { z; zz; a + b }); assert_eq!(drop_count(), 4); } assert_eq!(drop_count(), 6); } fn test_fn_once() { { c(move |a: isize, b| { a + b }); } assert_eq!(drop_count(), 6); { let z = Droppable::new(); c(move |a: isize, b| { z; a + b }); assert_eq!(drop_count(), 7); } assert_eq!(drop_count(), 7); { let z = Droppable::new(); let zz = Droppable::new(); c(move |a: isize, b| { z; zz; a + b }); assert_eq!(drop_count(), 9); } assert_eq!(drop_count(), 9); } fn main() { test_fn(); test_fn_mut(); test_fn_once(); }
drop
identifier_name
unboxed-closures-drop.rs
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // A battery of tests to ensure destructors of unboxed closure environments // run at the right times. #![feature(unboxed_closures)] static mut DROP_COUNT: usize = 0; fn drop_count() -> usize { unsafe { DROP_COUNT } } struct Droppable { x: isize, } impl Droppable { fn new() -> Droppable { Droppable { x: 1 } } } impl Drop for Droppable { fn drop(&mut self) { unsafe { DROP_COUNT += 1 } } } fn a<F:Fn(isize, isize) -> isize>(f: F) -> isize { f(1, 2) } fn b<F:FnMut(isize, isize) -> isize>(mut f: F) -> isize { f(3, 4) } fn c<F:FnOnce(isize, isize) -> isize>(f: F) -> isize { f(5, 6) } fn test_fn() { { a(move |a: isize, b| { a + b }); } assert_eq!(drop_count(), 0); { let z = &Droppable::new(); a(move |a: isize, b| { z; a + b }); assert_eq!(drop_count(), 0); } assert_eq!(drop_count(), 1); { let z = &Droppable::new(); let zz = &Droppable::new(); a(move |a: isize, b| { z; zz; a + b }); assert_eq!(drop_count(), 1); } assert_eq!(drop_count(), 3); } fn test_fn_mut() { { b(move |a: isize, b| { a + b }); } assert_eq!(drop_count(), 3); { let z = &Droppable::new(); b(move |a: isize, b| { z; a + b }); assert_eq!(drop_count(), 3); } assert_eq!(drop_count(), 4); { let z = &Droppable::new(); let zz = &Droppable::new(); b(move |a: isize, b| { z; zz; a + b }); assert_eq!(drop_count(), 4); } assert_eq!(drop_count(), 6); } fn test_fn_once() { { c(move |a: isize, b| { a + b }); } assert_eq!(drop_count(), 6); { let z = Droppable::new(); c(move |a: isize, b| { z; a + b }); assert_eq!(drop_count(), 7); } assert_eq!(drop_count(), 7); { let z = Droppable::new(); let zz = Droppable::new(); c(move |a: isize, b| { z; zz; a + b }); assert_eq!(drop_count(), 9); } assert_eq!(drop_count(), 9); } fn main()
{ test_fn(); test_fn_mut(); test_fn_once(); }
identifier_body
unboxed-closures-drop.rs
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // A battery of tests to ensure destructors of unboxed closure environments // run at the right times. #![feature(unboxed_closures)] static mut DROP_COUNT: usize = 0; fn drop_count() -> usize { unsafe { DROP_COUNT } } struct Droppable { x: isize, } impl Droppable { fn new() -> Droppable { Droppable { x: 1 } } } impl Drop for Droppable { fn drop(&mut self) { unsafe { DROP_COUNT += 1 } } } fn a<F:Fn(isize, isize) -> isize>(f: F) -> isize { f(1, 2) } fn b<F:FnMut(isize, isize) -> isize>(mut f: F) -> isize { f(3, 4) } fn c<F:FnOnce(isize, isize) -> isize>(f: F) -> isize { f(5, 6) } fn test_fn() { { a(move |a: isize, b| { a + b }); } assert_eq!(drop_count(), 0); { let z = &Droppable::new(); a(move |a: isize, b| { z; a + b }); assert_eq!(drop_count(), 0); } assert_eq!(drop_count(), 1); { let z = &Droppable::new(); let zz = &Droppable::new(); a(move |a: isize, b| { z; zz; a + b }); assert_eq!(drop_count(), 1); } assert_eq!(drop_count(), 3); } fn test_fn_mut() { { b(move |a: isize, b| { a + b }); } assert_eq!(drop_count(), 3); { let z = &Droppable::new(); b(move |a: isize, b| { z; a + b }); assert_eq!(drop_count(), 3); } assert_eq!(drop_count(), 4); { let z = &Droppable::new(); let zz = &Droppable::new(); b(move |a: isize, b| { z; zz; a + b }); assert_eq!(drop_count(), 4); } assert_eq!(drop_count(), 6); } fn test_fn_once() { { c(move |a: isize, b| { a + b }); } assert_eq!(drop_count(), 6); { let z = Droppable::new(); c(move |a: isize, b| { z; a + b }); assert_eq!(drop_count(), 7); }
let zz = Droppable::new(); c(move |a: isize, b| { z; zz; a + b }); assert_eq!(drop_count(), 9); } assert_eq!(drop_count(), 9); } fn main() { test_fn(); test_fn_mut(); test_fn_once(); }
assert_eq!(drop_count(), 7); { let z = Droppable::new();
random_line_split
flock.rs
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Simple file-locking apis for each OS. //! //! This is not meant to be in the standard library, it does nothing with //! green/native threading. This is just a bare-bones enough solution for //! librustdoc, it is not production quality at all. #![allow(non_camel_case_types)] pub use self::imp::Lock; #[cfg(unix)] mod imp { use std::libc; #[cfg(target_os = "linux")] mod os { use std::libc; pub struct flock { l_type: libc::c_short, l_whence: libc::c_short, l_start: libc::off_t, l_len: libc::off_t, l_pid: libc::pid_t, // not actually here, but brings in line with freebsd l_sysid: libc::c_int, } pub static F_WRLCK: libc::c_short = 1; pub static F_UNLCK: libc::c_short = 2; pub static F_SETLK: libc::c_int = 6; pub static F_SETLKW: libc::c_int = 7; } #[cfg(target_os = "freebsd")] mod os { use std::libc; pub struct flock { l_start: libc::off_t, l_len: libc::off_t, l_pid: libc::pid_t, l_type: libc::c_short, l_whence: libc::c_short, l_sysid: libc::c_int, } pub static F_UNLCK: libc::c_short = 2; pub static F_WRLCK: libc::c_short = 3; pub static F_SETLK: libc::c_int = 12; pub static F_SETLKW: libc::c_int = 13; } #[cfg(target_os = "macos")] mod os { use std::libc; pub struct flock { l_start: libc::off_t, l_len: libc::off_t, l_pid: libc::pid_t, l_type: libc::c_short, l_whence: libc::c_short, // not actually here, but brings in line with freebsd l_sysid: libc::c_int, } pub static F_UNLCK: libc::c_short = 2; pub static F_WRLCK: libc::c_short = 3; pub static F_SETLK: libc::c_int = 8; pub static F_SETLKW: libc::c_int = 9; } pub struct Lock { priv fd: libc::c_int, } impl Lock { pub fn new(p: &Path) -> Lock { let fd = p.with_c_str(|s| unsafe { libc::open(s, libc::O_RDWR | libc::O_CREAT, libc::S_IRWXU) }); assert!(fd > 0); let flock = os::flock { l_start: 0, l_len: 0, l_pid: 0, l_whence: libc::SEEK_SET as libc::c_short, l_type: os::F_WRLCK, l_sysid: 0, }; let ret = unsafe { libc::fcntl(fd, os::F_SETLKW, &flock as *os::flock) }; if ret == -1 { unsafe { libc::close(fd); } fail!("could not lock `{}`", p.display()) } Lock { fd: fd } } } impl Drop for Lock { fn drop(&mut self) { let flock = os::flock { l_start: 0, l_len: 0, l_pid: 0, l_whence: libc::SEEK_SET as libc::c_short, l_type: os::F_UNLCK, l_sysid: 0, }; unsafe { libc::fcntl(self.fd, os::F_SETLK, &flock as *os::flock); libc::close(self.fd); } } } } #[cfg(windows)] mod imp { use std::libc; use std::mem; use std::os::win32::as_utf16_p; use std::os; use std::ptr; static LOCKFILE_EXCLUSIVE_LOCK: libc::DWORD = 0x00000002; extern "system" { fn LockFileEx(hFile: libc::HANDLE, dwFlags: libc::DWORD, dwReserved: libc::DWORD, nNumberOfBytesToLockLow: libc::DWORD, nNumberOfBytesToLockHigh: libc::DWORD, lpOverlapped: libc::LPOVERLAPPED) -> libc::BOOL; fn UnlockFileEx(hFile: libc::HANDLE, dwReserved: libc::DWORD, nNumberOfBytesToLockLow: libc::DWORD, nNumberOfBytesToLockHigh: libc::DWORD, lpOverlapped: libc::LPOVERLAPPED) -> libc::BOOL; } pub struct Lock { priv handle: libc::HANDLE, } impl Lock { pub fn new(p: &Path) -> Lock { let handle = as_utf16_p(p.as_str().unwrap(), |p| unsafe { libc::CreateFileW(p, 
libc::FILE_GENERIC_READ | libc::FILE_GENERIC_WRITE, libc::FILE_SHARE_READ | libc::FILE_SHARE_DELETE | libc::FILE_SHARE_WRITE, ptr::mut_null(), libc::CREATE_ALWAYS, libc::FILE_ATTRIBUTE_NORMAL, ptr::mut_null()) }); if handle as uint == libc::INVALID_HANDLE_VALUE as uint { fail!("create file error: {}", os::last_os_error()); } let mut overlapped: libc::OVERLAPPED = unsafe { mem::init() }; let ret = unsafe { LockFileEx(handle, LOCKFILE_EXCLUSIVE_LOCK, 0, 100, 0, &mut overlapped) }; if ret == 0
Lock { handle: handle } } } impl Drop for Lock { fn drop(&mut self) { let mut overlapped: libc::OVERLAPPED = unsafe { mem::init() }; unsafe { UnlockFileEx(self.handle, 0, 100, 0, &mut overlapped); libc::CloseHandle(self.handle); } } } }
{ unsafe { libc::CloseHandle(handle); } fail!("could not lock `{}`: {}", p.display(), os::last_os_error()) }
conditional_block
flock.rs
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Simple file-locking apis for each OS. //! //! This is not meant to be in the standard library, it does nothing with //! green/native threading. This is just a bare-bones enough solution for //! librustdoc, it is not production quality at all. #![allow(non_camel_case_types)] pub use self::imp::Lock; #[cfg(unix)] mod imp { use std::libc; #[cfg(target_os = "linux")] mod os { use std::libc; pub struct flock { l_type: libc::c_short, l_whence: libc::c_short, l_start: libc::off_t, l_len: libc::off_t, l_pid: libc::pid_t, // not actually here, but brings in line with freebsd l_sysid: libc::c_int, } pub static F_WRLCK: libc::c_short = 1; pub static F_UNLCK: libc::c_short = 2; pub static F_SETLK: libc::c_int = 6; pub static F_SETLKW: libc::c_int = 7; } #[cfg(target_os = "freebsd")] mod os { use std::libc; pub struct flock { l_start: libc::off_t, l_len: libc::off_t, l_pid: libc::pid_t, l_type: libc::c_short, l_whence: libc::c_short, l_sysid: libc::c_int, } pub static F_UNLCK: libc::c_short = 2; pub static F_WRLCK: libc::c_short = 3; pub static F_SETLK: libc::c_int = 12; pub static F_SETLKW: libc::c_int = 13; } #[cfg(target_os = "macos")] mod os { use std::libc; pub struct flock { l_start: libc::off_t, l_len: libc::off_t, l_pid: libc::pid_t, l_type: libc::c_short, l_whence: libc::c_short, // not actually here, but brings in line with freebsd l_sysid: libc::c_int, } pub static F_UNLCK: libc::c_short = 2; pub static F_WRLCK: libc::c_short = 3; pub static F_SETLK: libc::c_int = 8; pub static F_SETLKW: libc::c_int = 9; } pub struct Lock { priv fd: libc::c_int, }
let fd = p.with_c_str(|s| unsafe { libc::open(s, libc::O_RDWR | libc::O_CREAT, libc::S_IRWXU) }); assert!(fd > 0); let flock = os::flock { l_start: 0, l_len: 0, l_pid: 0, l_whence: libc::SEEK_SET as libc::c_short, l_type: os::F_WRLCK, l_sysid: 0, }; let ret = unsafe { libc::fcntl(fd, os::F_SETLKW, &flock as *os::flock) }; if ret == -1 { unsafe { libc::close(fd); } fail!("could not lock `{}`", p.display()) } Lock { fd: fd } } } impl Drop for Lock { fn drop(&mut self) { let flock = os::flock { l_start: 0, l_len: 0, l_pid: 0, l_whence: libc::SEEK_SET as libc::c_short, l_type: os::F_UNLCK, l_sysid: 0, }; unsafe { libc::fcntl(self.fd, os::F_SETLK, &flock as *os::flock); libc::close(self.fd); } } } } #[cfg(windows)] mod imp { use std::libc; use std::mem; use std::os::win32::as_utf16_p; use std::os; use std::ptr; static LOCKFILE_EXCLUSIVE_LOCK: libc::DWORD = 0x00000002; extern "system" { fn LockFileEx(hFile: libc::HANDLE, dwFlags: libc::DWORD, dwReserved: libc::DWORD, nNumberOfBytesToLockLow: libc::DWORD, nNumberOfBytesToLockHigh: libc::DWORD, lpOverlapped: libc::LPOVERLAPPED) -> libc::BOOL; fn UnlockFileEx(hFile: libc::HANDLE, dwReserved: libc::DWORD, nNumberOfBytesToLockLow: libc::DWORD, nNumberOfBytesToLockHigh: libc::DWORD, lpOverlapped: libc::LPOVERLAPPED) -> libc::BOOL; } pub struct Lock { priv handle: libc::HANDLE, } impl Lock { pub fn new(p: &Path) -> Lock { let handle = as_utf16_p(p.as_str().unwrap(), |p| unsafe { libc::CreateFileW(p, libc::FILE_GENERIC_READ | libc::FILE_GENERIC_WRITE, libc::FILE_SHARE_READ | libc::FILE_SHARE_DELETE | libc::FILE_SHARE_WRITE, ptr::mut_null(), libc::CREATE_ALWAYS, libc::FILE_ATTRIBUTE_NORMAL, ptr::mut_null()) }); if handle as uint == libc::INVALID_HANDLE_VALUE as uint { fail!("create file error: {}", os::last_os_error()); } let mut overlapped: libc::OVERLAPPED = unsafe { mem::init() }; let ret = unsafe { LockFileEx(handle, LOCKFILE_EXCLUSIVE_LOCK, 0, 100, 0, &mut overlapped) }; if ret == 0 { unsafe { libc::CloseHandle(handle); } fail!("could not lock `{}`: {}", p.display(), os::last_os_error()) } Lock { handle: handle } } } impl Drop for Lock { fn drop(&mut self) { let mut overlapped: libc::OVERLAPPED = unsafe { mem::init() }; unsafe { UnlockFileEx(self.handle, 0, 100, 0, &mut overlapped); libc::CloseHandle(self.handle); } } } }
impl Lock { pub fn new(p: &Path) -> Lock {
random_line_split
flock.rs
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Simple file-locking apis for each OS. //! //! This is not meant to be in the standard library, it does nothing with //! green/native threading. This is just a bare-bones enough solution for //! librustdoc, it is not production quality at all. #![allow(non_camel_case_types)] pub use self::imp::Lock; #[cfg(unix)] mod imp { use std::libc; #[cfg(target_os = "linux")] mod os { use std::libc; pub struct
{ l_type: libc::c_short, l_whence: libc::c_short, l_start: libc::off_t, l_len: libc::off_t, l_pid: libc::pid_t, // not actually here, but brings in line with freebsd l_sysid: libc::c_int, } pub static F_WRLCK: libc::c_short = 1; pub static F_UNLCK: libc::c_short = 2; pub static F_SETLK: libc::c_int = 6; pub static F_SETLKW: libc::c_int = 7; } #[cfg(target_os = "freebsd")] mod os { use std::libc; pub struct flock { l_start: libc::off_t, l_len: libc::off_t, l_pid: libc::pid_t, l_type: libc::c_short, l_whence: libc::c_short, l_sysid: libc::c_int, } pub static F_UNLCK: libc::c_short = 2; pub static F_WRLCK: libc::c_short = 3; pub static F_SETLK: libc::c_int = 12; pub static F_SETLKW: libc::c_int = 13; } #[cfg(target_os = "macos")] mod os { use std::libc; pub struct flock { l_start: libc::off_t, l_len: libc::off_t, l_pid: libc::pid_t, l_type: libc::c_short, l_whence: libc::c_short, // not actually here, but brings in line with freebsd l_sysid: libc::c_int, } pub static F_UNLCK: libc::c_short = 2; pub static F_WRLCK: libc::c_short = 3; pub static F_SETLK: libc::c_int = 8; pub static F_SETLKW: libc::c_int = 9; } pub struct Lock { priv fd: libc::c_int, } impl Lock { pub fn new(p: &Path) -> Lock { let fd = p.with_c_str(|s| unsafe { libc::open(s, libc::O_RDWR | libc::O_CREAT, libc::S_IRWXU) }); assert!(fd > 0); let flock = os::flock { l_start: 0, l_len: 0, l_pid: 0, l_whence: libc::SEEK_SET as libc::c_short, l_type: os::F_WRLCK, l_sysid: 0, }; let ret = unsafe { libc::fcntl(fd, os::F_SETLKW, &flock as *os::flock) }; if ret == -1 { unsafe { libc::close(fd); } fail!("could not lock `{}`", p.display()) } Lock { fd: fd } } } impl Drop for Lock { fn drop(&mut self) { let flock = os::flock { l_start: 0, l_len: 0, l_pid: 0, l_whence: libc::SEEK_SET as libc::c_short, l_type: os::F_UNLCK, l_sysid: 0, }; unsafe { libc::fcntl(self.fd, os::F_SETLK, &flock as *os::flock); libc::close(self.fd); } } } } #[cfg(windows)] mod imp { use std::libc; use std::mem; use std::os::win32::as_utf16_p; use std::os; use std::ptr; static LOCKFILE_EXCLUSIVE_LOCK: libc::DWORD = 0x00000002; extern "system" { fn LockFileEx(hFile: libc::HANDLE, dwFlags: libc::DWORD, dwReserved: libc::DWORD, nNumberOfBytesToLockLow: libc::DWORD, nNumberOfBytesToLockHigh: libc::DWORD, lpOverlapped: libc::LPOVERLAPPED) -> libc::BOOL; fn UnlockFileEx(hFile: libc::HANDLE, dwReserved: libc::DWORD, nNumberOfBytesToLockLow: libc::DWORD, nNumberOfBytesToLockHigh: libc::DWORD, lpOverlapped: libc::LPOVERLAPPED) -> libc::BOOL; } pub struct Lock { priv handle: libc::HANDLE, } impl Lock { pub fn new(p: &Path) -> Lock { let handle = as_utf16_p(p.as_str().unwrap(), |p| unsafe { libc::CreateFileW(p, libc::FILE_GENERIC_READ | libc::FILE_GENERIC_WRITE, libc::FILE_SHARE_READ | libc::FILE_SHARE_DELETE | libc::FILE_SHARE_WRITE, ptr::mut_null(), libc::CREATE_ALWAYS, libc::FILE_ATTRIBUTE_NORMAL, ptr::mut_null()) }); if handle as uint == libc::INVALID_HANDLE_VALUE as uint { fail!("create file error: {}", os::last_os_error()); } let mut overlapped: libc::OVERLAPPED = unsafe { mem::init() }; let ret = unsafe { LockFileEx(handle, LOCKFILE_EXCLUSIVE_LOCK, 0, 100, 0, &mut overlapped) }; if ret == 0 { unsafe { libc::CloseHandle(handle); } fail!("could not lock `{}`: {}", p.display(), os::last_os_error()) } Lock { handle: handle } } } impl Drop for Lock { fn drop(&mut self) { let mut overlapped: libc::OVERLAPPED = unsafe { mem::init() }; unsafe { UnlockFileEx(self.handle, 0, 100, 0, &mut overlapped); libc::CloseHandle(self.handle); } } } }
flock
identifier_name
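// Illustrative sketch: Lock above is an RAII guard — Lock::new() takes the
// lock and the Drop impl releases it — so intended usage is simply to keep the
// value alive for the critical section. Written in the same pre-1.0 dialect as
// flock.rs; the lock-file path is a made-up example.
fn write_docs_exclusively() {
    let _lock = Lock::new(&Path::new("doc/.librustdoc.lock"));
    // ... write output files that must not be touched concurrently ...
}   // _lock is dropped here, which unlocks and closes the underlying file.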
03-literals-and-operators.rs
fn main()
println!("One million is written as {}", 1_000_000u); }
{ // Integer addition println!("1 + 2 = {}", 1u + 2); // Integer subtraction println!("1 - 2 = {}", 1i - 2); // Short-circuiting boolean logic println!("true AND false is {}", true && false); println!("true OR false is {}", true || false); println!("NOT true is {}", !true); // Bitwise operations println!("0011 AND 0101 is {:04t}", 0b0011u & 0b0101); println!("0011 OR 0101 is {:04t}", 0b0011u | 0b0101); println!("0011 XOR 0101 is {:04t}", 0b0011u ^ 0b0101); println!("1 << 5 is {}", 1u << 5); println!("0x80 >> 2 is 0x{:x}", 0x80u >> 2); // Use underscores to improve readability!
identifier_body
03-literals-and-operators.rs
fn
() { // Integer addition println!("1 + 2 = {}", 1u + 2); // Integer subtraction println!("1 - 2 = {}", 1i - 2); // Short-circuiting boolean logic println!("true AND false is {}", true && false); println!("true OR false is {}", true || false); println!("NOT true is {}",!true); // Bitwise operations println!("0011 AND 0101 is {:04t}", 0b0011u & 0b0101); println!("0011 OR 0101 is {:04t}", 0b0011u | 0b0101); println!("0011 XOR 0101 is {:04t}", 0b0011u ^ 0b0101); println!("1 << 5 is {}", 1u << 5); println!("0x80 >> 2 is 0x{:x}", 0x80u >> 2); // Use underscores to improve readability! println!("One million is written as {}", 1_000_000u); }
main
identifier_name
03-literals-and-operators.rs
fn main() { // Integer addition println!("1 + 2 = {}", 1u + 2); // Integer subtraction println!("1 - 2 = {}", 1i - 2);
// Bitwise operations println!("0011 AND 0101 is {:04t}", 0b0011u & 0b0101); println!("0011 OR 0101 is {:04t}", 0b0011u | 0b0101); println!("0011 XOR 0101 is {:04t}", 0b0011u ^ 0b0101); println!("1 << 5 is {}", 1u << 5); println!("0x80 >> 2 is 0x{:x}", 0x80u >> 2); // Use underscores to improve readability! println!("One million is written as {}", 1_000_000u); }
// Short-circuiting boolean logic println!("true AND false is {}", true && false); println!("true OR false is {}", true || false); println!("NOT true is {}", !true);
random_line_split
mimetype.rs
use std::path; use std::io; use std::io::{Error, ErrorKind}; use std::io::BufRead; use std::io::BufReader; use std::fs::File; use server_side::utils; #[derive(Debug)] pub struct Mimetype { mimetype_vec: Vec<MimetypeData>, pub default_mimetype: String } #[derive(Debug)] pub struct MimetypeData { extension: String, mimetype: String } impl Mimetype { pub fn new() -> Result<Mimetype, io::Error> { let mut mimetypes: Vec<MimetypeData> = vec![]; let root_path = utils::get_root_path(); // Process custom mimetype first! // For safety, Use std::path::PathBuf to create path to compatible to cross platform. let custom_mimetype_pathbuf = path::PathBuf::from(utils::to_root_path("/config/custom_mimetype.mt", &root_path)); let custom_mimetype_path: &str = custom_mimetype_pathbuf.to_str().unwrap(); let custom_mimetype_file_handle = match File::open(custom_mimetype_path) { Ok(ok) => ok, Err(e) => return Err(Error::new(ErrorKind::Other, format!("can't find `{:?}`", custom_mimetype_path))) }; let custom_mimetype_bufreader = BufReader::new(&custom_mimetype_file_handle); for line_wrapped in custom_mimetype_bufreader.lines() { let line_raw = line_wrapped.unwrap(); // line_raw.trim(); // line_raw.trim_matches('\r'); // line_raw.trim_matches('\n'); // line_raw.trim_matches('\t'); let line = line_raw.trim(); if line.len() > 0 && line.as_bytes()[0]!= '#' as u8 { let line_splitted = line.split('\t').collect::<Vec<&str>>(); let key = line_splitted[0]; let val = line_splitted[1]; mimetypes.push(MimetypeData::new(key.to_string(), val.to_string())); } } // Then, server mimetype let mimetype_pathbuf = path::PathBuf::from(utils::to_root_path("/config/mimetype.mt", &root_path)); let mimetype_path: &str = mimetype_pathbuf.to_str().unwrap(); let mimetype_file_handle = match File::open(mimetype_path) { Ok(ok) => ok, Err(e) => return Err(Error::new(ErrorKind::Other, format!("can't find `{:?}`", mimetype_path))) }; let mimetype_bufreader = BufReader::new(&mimetype_file_handle); for line_wrapped in mimetype_bufreader.lines() { let line_raw = line_wrapped.unwrap(); // line_raw.trim(); // line_raw.trim_matches('\r'); // line_raw.trim_matches('\n'); // line_raw.trim_matches('\t'); let line = line_raw.trim(); if line.len() > 0 && line.as_bytes()[0]!= '#' as u8 { let line_splitted = line.split('\t').collect::<Vec<&str>>(); let key = line_splitted[0]; let val = line_splitted[1]; mimetypes.push(MimetypeData::new(key.to_string(), val.to_string())); } } Ok(Mimetype{ mimetype_vec: mimetypes, default_mimetype: "text/plain".to_string() }) } pub fn get_mimetype(&self, filename: &String) -> Result<String, io::Error> { let ext: String; if filename.contains(".") { let filename_ext = filename.split(".").last().unwrap().to_string(); ext = ".".to_string() + filename_ext.as_str(); for mt in self.mimetype_vec.iter() { if mt.get_extension() == ext.as_str() { return Ok(mt.get_mimetype()); } } } Err(io::Error::new(io::ErrorKind::Other, "mimetype not found.")) } pub fn get_mimetype_or(&self, filename: &String, default: &str) -> String { match self.get_mimetype(filename) { Ok(mt) => mt, Err(_) => default.to_owned() } } pub fn get_mimetype_default(&self, filename: &String) -> String { match self.get_mimetype(filename) { Ok(mt) => mt, // TODO: Err(_) => self.default_mimetype.to_owned(), } } pub fn to_owned(&self) -> Mimetype { let mut new_mt: Vec<MimetypeData> = vec![]; for mt in self.mimetype_vec.iter() { new_mt.push(MimetypeData{extension: mt.get_extension(), mimetype: mt.get_mimetype()}) } Mimetype { mimetype_vec: new_mt, default_mimetype: 
self.default_mimetype.to_owned() } } pub fn delete(self) {} } impl MimetypeData { pub fn new(e: String, m: String) -> MimetypeData {
} pub fn get_mimetype(&self) -> String { self.mimetype.to_owned() } pub fn set_mimetype(&mut self, e: String) { self.mimetype = e; } pub fn delete(self) {} }
MimetypeData{extension: e, mimetype: m} } pub fn get_extension(&self) -> String { self.extension.to_owned()
random_line_split
mimetype.rs
use std::path; use std::io; use std::io::{Error, ErrorKind}; use std::io::BufRead; use std::io::BufReader; use std::fs::File; use server_side::utils; #[derive(Debug)] pub struct Mimetype { mimetype_vec: Vec<MimetypeData>, pub default_mimetype: String } #[derive(Debug)] pub struct MimetypeData { extension: String, mimetype: String } impl Mimetype { pub fn new() -> Result<Mimetype, io::Error> { let mut mimetypes: Vec<MimetypeData> = vec![]; let root_path = utils::get_root_path(); // Process custom mimetype first! // For safety, Use std::path::PathBuf to create path to compatible to cross platform. let custom_mimetype_pathbuf = path::PathBuf::from(utils::to_root_path("/config/custom_mimetype.mt", &root_path)); let custom_mimetype_path: &str = custom_mimetype_pathbuf.to_str().unwrap(); let custom_mimetype_file_handle = match File::open(custom_mimetype_path) { Ok(ok) => ok, Err(e) => return Err(Error::new(ErrorKind::Other, format!("can't find `{:?}`", custom_mimetype_path))) }; let custom_mimetype_bufreader = BufReader::new(&custom_mimetype_file_handle); for line_wrapped in custom_mimetype_bufreader.lines() { let line_raw = line_wrapped.unwrap(); // line_raw.trim(); // line_raw.trim_matches('\r'); // line_raw.trim_matches('\n'); // line_raw.trim_matches('\t'); let line = line_raw.trim(); if line.len() > 0 && line.as_bytes()[0]!= '#' as u8 { let line_splitted = line.split('\t').collect::<Vec<&str>>(); let key = line_splitted[0]; let val = line_splitted[1]; mimetypes.push(MimetypeData::new(key.to_string(), val.to_string())); } } // Then, server mimetype let mimetype_pathbuf = path::PathBuf::from(utils::to_root_path("/config/mimetype.mt", &root_path)); let mimetype_path: &str = mimetype_pathbuf.to_str().unwrap(); let mimetype_file_handle = match File::open(mimetype_path) { Ok(ok) => ok, Err(e) => return Err(Error::new(ErrorKind::Other, format!("can't find `{:?}`", mimetype_path))) }; let mimetype_bufreader = BufReader::new(&mimetype_file_handle); for line_wrapped in mimetype_bufreader.lines() { let line_raw = line_wrapped.unwrap(); // line_raw.trim(); // line_raw.trim_matches('\r'); // line_raw.trim_matches('\n'); // line_raw.trim_matches('\t'); let line = line_raw.trim(); if line.len() > 0 && line.as_bytes()[0]!= '#' as u8 { let line_splitted = line.split('\t').collect::<Vec<&str>>(); let key = line_splitted[0]; let val = line_splitted[1]; mimetypes.push(MimetypeData::new(key.to_string(), val.to_string())); } } Ok(Mimetype{ mimetype_vec: mimetypes, default_mimetype: "text/plain".to_string() }) } pub fn get_mimetype(&self, filename: &String) -> Result<String, io::Error> { let ext: String; if filename.contains(".") { let filename_ext = filename.split(".").last().unwrap().to_string(); ext = ".".to_string() + filename_ext.as_str(); for mt in self.mimetype_vec.iter() { if mt.get_extension() == ext.as_str() { return Ok(mt.get_mimetype()); } } } Err(io::Error::new(io::ErrorKind::Other, "mimetype not found.")) } pub fn get_mimetype_or(&self, filename: &String, default: &str) -> String { match self.get_mimetype(filename) { Ok(mt) => mt, Err(_) => default.to_owned() } } pub fn get_mimetype_default(&self, filename: &String) -> String { match self.get_mimetype(filename) { Ok(mt) => mt, // TODO: Err(_) => self.default_mimetype.to_owned(), } } pub fn to_owned(&self) -> Mimetype { let mut new_mt: Vec<MimetypeData> = vec![]; for mt in self.mimetype_vec.iter() { new_mt.push(MimetypeData{extension: mt.get_extension(), mimetype: mt.get_mimetype()}) } Mimetype { mimetype_vec: new_mt, default_mimetype: 
self.default_mimetype.to_owned() } } pub fn delete(self) {} } impl MimetypeData { pub fn new(e: String, m: String) -> MimetypeData { MimetypeData{extension: e, mimetype: m} } pub fn get_extension(&self) -> String { self.extension.to_owned() } pub fn
(&self) -> String { self.mimetype.to_owned() } pub fn set_mimetype(&mut self, e: String) { self.mimetype = e; } pub fn delete(self) {} }
get_mimetype
identifier_name
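// Illustrative sketch: judging from the parser above, config/mimetype.mt and
// config/custom_mimetype.mt hold one tab-separated "<.extension><TAB><mimetype>"
// pair per line, with '#' starting a comment line (that format is inferred, not
// documented in the excerpt). Lookups then go through the methods shown above;
// the file name and fallback below are placeholders.
fn example_lookup() -> Result<(), std::io::Error> {
    let mimetypes = Mimetype::new()?;
    let filename = "index.html".to_string();
    // Uses the configured mapping for ".html" if present, else the fallback.
    let content_type = mimetypes.get_mimetype_or(&filename, "application/octet-stream");
    println!("Content-Type: {}", content_type);
    Ok(())
}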
exercise.rs
use regex::Regex; use serde::Deserialize; use std::fmt::{self, Display, Formatter}; use std::fs::{self, remove_file, File}; use std::io::Read; use std::path::PathBuf; use std::process::{self, Command}; const RUSTC_COLOR_ARGS: &[&str] = &["--color", "always"]; const I_AM_DONE_REGEX: &str = r"(?m)^\s*///?\s*I\s+AM\s+NOT\s+DONE"; const CONTEXT: usize = 2; const CLIPPY_CARGO_TOML_PATH: &str = "./exercises/clippy/Cargo.toml"; // Get a temporary file name that is hopefully unique to this process #[inline] fn temp_file() -> String { format!("./temp_{}", process::id()) } // The mode of the exercise. #[derive(Deserialize, Copy, Clone)] #[serde(rename_all = "lowercase")] pub enum Mode { // Indicates that the exercise should be compiled as a binary Compile, // Indicates that the exercise should be compiled as a test harness Test, // Indicates that the exercise should be linted with clippy Clippy, } #[derive(Deserialize)] pub struct ExerciseList { pub exercises: Vec<Exercise>, } // A representation of a rustlings exercise. // This is deserialized from the accompanying info.toml file #[derive(Deserialize)] pub struct Exercise { // Name of the exercise pub name: String, // The path to the file containing the exercise's source code pub path: PathBuf, // The mode of the exercise (Test, Compile, or Clippy) pub mode: Mode, // The hint text associated with the exercise pub hint: String, } // An enum to track of the state of an Exercise. // An Exercise can be either Done or Pending #[derive(PartialEq, Debug)] pub enum State { // The state of the exercise once it's been completed Done, // The state of the exercise while it's not completed yet Pending(Vec<ContextLine>), } // The context information of a pending exercise #[derive(PartialEq, Debug)] pub struct
{ // The source code that is still pending completion pub line: String, // The line number of the source code still pending completion pub number: usize, // Whether or not this is important pub important: bool, } // The result of compiling an exercise pub struct CompiledExercise<'a> { exercise: &'a Exercise, _handle: FileHandle, } impl<'a> CompiledExercise<'a> { // Run the compiled exercise pub fn run(&self) -> Result<ExerciseOutput, ExerciseOutput> { self.exercise.run() } } // A representation of an already executed binary #[derive(Debug)] pub struct ExerciseOutput { // The textual contents of the standard output of the binary pub stdout: String, // The textual contents of the standard error of the binary pub stderr: String, } struct FileHandle; impl Drop for FileHandle { fn drop(&mut self) { clean(); } } impl Exercise { pub fn compile(&self) -> Result<CompiledExercise, ExerciseOutput> { let cmd = match self.mode { Mode::Compile => Command::new("rustc") .args(&[self.path.to_str().unwrap(), "-o", &temp_file()]) .args(RUSTC_COLOR_ARGS) .output(), Mode::Test => Command::new("rustc") .args(&["--test", self.path.to_str().unwrap(), "-o", &temp_file()]) .args(RUSTC_COLOR_ARGS) .output(), Mode::Clippy => { let cargo_toml = format!( r#"[package] name = "{}" version = "0.0.1" edition = "2018" [[bin]] name = "{}" path = "{}.rs""#, self.name, self.name, self.name ); fs::write(CLIPPY_CARGO_TOML_PATH, cargo_toml) .expect("Failed to write 📎 Clippy 📎 Cargo.toml file."); // To support the ability to run the clipy exercises, build // an executable, in addition to running clippy. With a // compilation failure, this would silently fail. But we expect // clippy to reflect the same failure while compiling later. Command::new("rustc") .args(&[self.path.to_str().unwrap(), "-o", &temp_file()]) .args(RUSTC_COLOR_ARGS) .output() .expect("Failed to compile!"); // Due to an issue with Clippy, a cargo clean is required to catch all lints. // See https://github.com/rust-lang/rust-clippy/issues/2604 // This is already fixed on master branch. 
See this issue to track merging into Cargo: // https://github.com/rust-lang/rust-clippy/issues/3837 Command::new("cargo") .args(&["clean", "--manifest-path", CLIPPY_CARGO_TOML_PATH]) .args(RUSTC_COLOR_ARGS) .output() .expect("Failed to run 'cargo clean'"); Command::new("cargo") .args(&["clippy", "--manifest-path", CLIPPY_CARGO_TOML_PATH]) .args(RUSTC_COLOR_ARGS) .args(&["--", "-D", "warnings"]) .output() } } .expect("Failed to run 'compile' command."); if cmd.status.success() { Ok(CompiledExercise { exercise: &self, _handle: FileHandle, }) } else { clean(); Err(ExerciseOutput { stdout: String::from_utf8_lossy(&cmd.stdout).to_string(), stderr: String::from_utf8_lossy(&cmd.stderr).to_string(), }) } } fn run(&self) -> Result<ExerciseOutput, ExerciseOutput> { let arg = match self.mode { Mode::Test => "--show-output", _ => "" }; let cmd = Command::new(&temp_file()).arg(arg) .output() .expect("Failed to run 'run' command"); let output = ExerciseOutput { stdout: String::from_utf8_lossy(&cmd.stdout).to_string(), stderr: String::from_utf8_lossy(&cmd.stderr).to_string(), }; if cmd.status.success() { Ok(output) } else { Err(output) } } pub fn state(&self) -> State { let mut source_file = File::open(&self.path).expect("We were unable to open the exercise file!"); let source = { let mut s = String::new(); source_file .read_to_string(&mut s) .expect("We were unable to read the exercise file!"); s }; let re = Regex::new(I_AM_DONE_REGEX).unwrap(); if!re.is_match(&source) { return State::Done; } let matched_line_index = source .lines() .enumerate() .filter_map(|(i, line)| if re.is_match(line) { Some(i) } else { None }) .next() .expect("This should not happen at all"); let min_line = ((matched_line_index as i32) - (CONTEXT as i32)).max(0) as usize; let max_line = matched_line_index + CONTEXT; let context = source .lines() .enumerate() .filter(|&(i, _)| i >= min_line && i <= max_line) .map(|(i, line)| ContextLine { line: line.to_string(), number: i + 1, important: i == matched_line_index, }) .collect(); State::Pending(context) } } impl Display for Exercise { fn fmt(&self, f: &mut Formatter) -> fmt::Result { write!(f, "{}", self.path.to_str().unwrap()) } } #[inline] fn clean() { let _ignored = remove_file(&temp_file()); } #[cfg(test)] mod test { use super::*; use std::path::Path; #[test] fn test_clean() { File::create(&temp_file()).unwrap(); let exercise = Exercise { name: String::from("example"), path: PathBuf::from("tests/fixture/state/pending_exercise.rs"), mode: Mode::Compile, hint: String::from(""), }; let compiled = exercise.compile().unwrap(); drop(compiled); assert!(!Path::new(&temp_file()).exists()); } #[test] fn test_pending_state() { let exercise = Exercise { name: "pending_exercise".into(), path: PathBuf::from("tests/fixture/state/pending_exercise.rs"), mode: Mode::Compile, hint: String::new(), }; let state = exercise.state(); let expected = vec![ ContextLine { line: "// fake_exercise".to_string(), number: 1, important: false, }, ContextLine { line: "".to_string(), number: 2, important: false, }, ContextLine { line: "// I AM NOT DONE".to_string(), number: 3, important: true, }, ContextLine { line: "".to_string(), number: 4, important: false, }, ContextLine { line: "fn main() {".to_string(), number: 5, important: false, }, ]; assert_eq!(state, State::Pending(expected)); } #[test] fn test_finished_exercise() { let exercise = Exercise { name: "finished_exercise".into(), path: PathBuf::from("tests/fixture/state/finished_exercise.rs"), mode: Mode::Compile, hint: String::new(), }; 
assert_eq!(exercise.state(), State::Done); } #[test] fn test_exercise_with_output() { let exercise = Exercise { name: "finished_exercise".into(), path: PathBuf::from("tests/fixture/success/testSuccess.rs"), mode: Mode::Test, hint: String::new(), }; let out = exercise.compile().unwrap().run().unwrap(); assert!(out.stdout.contains("THIS TEST TOO SHALL PASS")); } }
ContextLine
identifier_name
exercise.rs
use regex::Regex; use serde::Deserialize; use std::fmt::{self, Display, Formatter}; use std::fs::{self, remove_file, File}; use std::io::Read; use std::path::PathBuf; use std::process::{self, Command}; const RUSTC_COLOR_ARGS: &[&str] = &["--color", "always"]; const I_AM_DONE_REGEX: &str = r"(?m)^\s*///?\s*I\s+AM\s+NOT\s+DONE"; const CONTEXT: usize = 2; const CLIPPY_CARGO_TOML_PATH: &str = "./exercises/clippy/Cargo.toml"; // Get a temporary file name that is hopefully unique to this process #[inline] fn temp_file() -> String { format!("./temp_{}", process::id()) } // The mode of the exercise. #[derive(Deserialize, Copy, Clone)] #[serde(rename_all = "lowercase")] pub enum Mode { // Indicates that the exercise should be compiled as a binary Compile, // Indicates that the exercise should be compiled as a test harness Test, // Indicates that the exercise should be linted with clippy Clippy, } #[derive(Deserialize)] pub struct ExerciseList { pub exercises: Vec<Exercise>, } // A representation of a rustlings exercise. // This is deserialized from the accompanying info.toml file #[derive(Deserialize)] pub struct Exercise { // Name of the exercise pub name: String, // The path to the file containing the exercise's source code pub path: PathBuf, // The mode of the exercise (Test, Compile, or Clippy) pub mode: Mode, // The hint text associated with the exercise pub hint: String, } // An enum to track of the state of an Exercise. // An Exercise can be either Done or Pending #[derive(PartialEq, Debug)] pub enum State { // The state of the exercise once it's been completed Done, // The state of the exercise while it's not completed yet Pending(Vec<ContextLine>), } // The context information of a pending exercise #[derive(PartialEq, Debug)] pub struct ContextLine { // The source code that is still pending completion pub line: String, // The line number of the source code still pending completion pub number: usize, // Whether or not this is important pub important: bool, } // The result of compiling an exercise pub struct CompiledExercise<'a> { exercise: &'a Exercise, _handle: FileHandle, } impl<'a> CompiledExercise<'a> { // Run the compiled exercise pub fn run(&self) -> Result<ExerciseOutput, ExerciseOutput> { self.exercise.run() } } // A representation of an already executed binary #[derive(Debug)] pub struct ExerciseOutput { // The textual contents of the standard output of the binary pub stdout: String, // The textual contents of the standard error of the binary pub stderr: String, } struct FileHandle; impl Drop for FileHandle { fn drop(&mut self) { clean(); } } impl Exercise { pub fn compile(&self) -> Result<CompiledExercise, ExerciseOutput> { let cmd = match self.mode { Mode::Compile => Command::new("rustc") .args(&[self.path.to_str().unwrap(), "-o", &temp_file()]) .args(RUSTC_COLOR_ARGS) .output(), Mode::Test => Command::new("rustc") .args(&["--test", self.path.to_str().unwrap(), "-o", &temp_file()]) .args(RUSTC_COLOR_ARGS) .output(), Mode::Clippy => { let cargo_toml = format!( r#"[package] name = "{}" version = "0.0.1" edition = "2018" [[bin]] name = "{}" path = "{}.rs""#, self.name, self.name, self.name ); fs::write(CLIPPY_CARGO_TOML_PATH, cargo_toml) .expect("Failed to write 📎 Clippy 📎 Cargo.toml file."); // To support the ability to run the clipy exercises, build // an executable, in addition to running clippy. With a // compilation failure, this would silently fail. But we expect // clippy to reflect the same failure while compiling later.
.expect("Failed to compile!"); // Due to an issue with Clippy, a cargo clean is required to catch all lints. // See https://github.com/rust-lang/rust-clippy/issues/2604 // This is already fixed on master branch. See this issue to track merging into Cargo: // https://github.com/rust-lang/rust-clippy/issues/3837 Command::new("cargo") .args(&["clean", "--manifest-path", CLIPPY_CARGO_TOML_PATH]) .args(RUSTC_COLOR_ARGS) .output() .expect("Failed to run 'cargo clean'"); Command::new("cargo") .args(&["clippy", "--manifest-path", CLIPPY_CARGO_TOML_PATH]) .args(RUSTC_COLOR_ARGS) .args(&["--", "-D", "warnings"]) .output() } } .expect("Failed to run 'compile' command."); if cmd.status.success() { Ok(CompiledExercise { exercise: &self, _handle: FileHandle, }) } else { clean(); Err(ExerciseOutput { stdout: String::from_utf8_lossy(&cmd.stdout).to_string(), stderr: String::from_utf8_lossy(&cmd.stderr).to_string(), }) } } fn run(&self) -> Result<ExerciseOutput, ExerciseOutput> { let arg = match self.mode { Mode::Test => "--show-output", _ => "" }; let cmd = Command::new(&temp_file()).arg(arg) .output() .expect("Failed to run 'run' command"); let output = ExerciseOutput { stdout: String::from_utf8_lossy(&cmd.stdout).to_string(), stderr: String::from_utf8_lossy(&cmd.stderr).to_string(), }; if cmd.status.success() { Ok(output) } else { Err(output) } } pub fn state(&self) -> State { let mut source_file = File::open(&self.path).expect("We were unable to open the exercise file!"); let source = { let mut s = String::new(); source_file .read_to_string(&mut s) .expect("We were unable to read the exercise file!"); s }; let re = Regex::new(I_AM_DONE_REGEX).unwrap(); if!re.is_match(&source) { return State::Done; } let matched_line_index = source .lines() .enumerate() .filter_map(|(i, line)| if re.is_match(line) { Some(i) } else { None }) .next() .expect("This should not happen at all"); let min_line = ((matched_line_index as i32) - (CONTEXT as i32)).max(0) as usize; let max_line = matched_line_index + CONTEXT; let context = source .lines() .enumerate() .filter(|&(i, _)| i >= min_line && i <= max_line) .map(|(i, line)| ContextLine { line: line.to_string(), number: i + 1, important: i == matched_line_index, }) .collect(); State::Pending(context) } } impl Display for Exercise { fn fmt(&self, f: &mut Formatter) -> fmt::Result { write!(f, "{}", self.path.to_str().unwrap()) } } #[inline] fn clean() { let _ignored = remove_file(&temp_file()); } #[cfg(test)] mod test { use super::*; use std::path::Path; #[test] fn test_clean() { File::create(&temp_file()).unwrap(); let exercise = Exercise { name: String::from("example"), path: PathBuf::from("tests/fixture/state/pending_exercise.rs"), mode: Mode::Compile, hint: String::from(""), }; let compiled = exercise.compile().unwrap(); drop(compiled); assert!(!Path::new(&temp_file()).exists()); } #[test] fn test_pending_state() { let exercise = Exercise { name: "pending_exercise".into(), path: PathBuf::from("tests/fixture/state/pending_exercise.rs"), mode: Mode::Compile, hint: String::new(), }; let state = exercise.state(); let expected = vec![ ContextLine { line: "// fake_exercise".to_string(), number: 1, important: false, }, ContextLine { line: "".to_string(), number: 2, important: false, }, ContextLine { line: "// I AM NOT DONE".to_string(), number: 3, important: true, }, ContextLine { line: "".to_string(), number: 4, important: false, }, ContextLine { line: "fn main() {".to_string(), number: 5, important: false, }, ]; assert_eq!(state, State::Pending(expected)); } #[test] fn 
test_finished_exercise() { let exercise = Exercise { name: "finished_exercise".into(), path: PathBuf::from("tests/fixture/state/finished_exercise.rs"), mode: Mode::Compile, hint: String::new(), }; assert_eq!(exercise.state(), State::Done); } #[test] fn test_exercise_with_output() { let exercise = Exercise { name: "finished_exercise".into(), path: PathBuf::from("tests/fixture/success/testSuccess.rs"), mode: Mode::Test, hint: String::new(), }; let out = exercise.compile().unwrap().run().unwrap(); assert!(out.stdout.contains("THIS TEST TOO SHALL PASS")); } }
Command::new("rustc") .args(&[self.path.to_str().unwrap(), "-o", &temp_file()]) .args(RUSTC_COLOR_ARGS) .output()
random_line_split
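The exercise.rs code in the rows above drives rustc through std::process::Command and inspects the captured output. A minimal, self-contained sketch of that pattern is below; it is an illustration only, not part of the dataset, and the file names "hello.rs" and "./hello_bin" are assumptions.

// Illustrative sketch (assumed file names): compile a source file with rustc,
// then run the produced binary and print its stdout, mirroring the
// Command::new("rustc") / status-check pattern used in exercise.rs.
use std::process::Command;

fn main() {
    let compile = Command::new("rustc")
        .args(&["hello.rs", "-o", "./hello_bin"])
        .output()
        .expect("failed to spawn rustc");
    if !compile.status.success() {
        // Compilation failed: surface rustc's diagnostics and stop.
        eprintln!("{}", String::from_utf8_lossy(&compile.stderr));
        return;
    }
    let run = Command::new("./hello_bin")
        .output()
        .expect("failed to run compiled binary");
    println!("{}", String::from_utf8_lossy(&run.stdout));
}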
exercise.rs
use regex::Regex; use serde::Deserialize; use std::fmt::{self, Display, Formatter}; use std::fs::{self, remove_file, File}; use std::io::Read; use std::path::PathBuf; use std::process::{self, Command}; const RUSTC_COLOR_ARGS: &[&str] = &["--color", "always"]; const I_AM_DONE_REGEX: &str = r"(?m)^\s*///?\s*I\s+AM\s+NOT\s+DONE"; const CONTEXT: usize = 2; const CLIPPY_CARGO_TOML_PATH: &str = "./exercises/clippy/Cargo.toml"; // Get a temporary file name that is hopefully unique to this process #[inline] fn temp_file() -> String { format!("./temp_{}", process::id()) } // The mode of the exercise. #[derive(Deserialize, Copy, Clone)] #[serde(rename_all = "lowercase")] pub enum Mode { // Indicates that the exercise should be compiled as a binary Compile, // Indicates that the exercise should be compiled as a test harness Test, // Indicates that the exercise should be linted with clippy Clippy, } #[derive(Deserialize)] pub struct ExerciseList { pub exercises: Vec<Exercise>, } // A representation of a rustlings exercise. // This is deserialized from the accompanying info.toml file #[derive(Deserialize)] pub struct Exercise { // Name of the exercise pub name: String, // The path to the file containing the exercise's source code pub path: PathBuf, // The mode of the exercise (Test, Compile, or Clippy) pub mode: Mode, // The hint text associated with the exercise pub hint: String, } // An enum to track of the state of an Exercise. // An Exercise can be either Done or Pending #[derive(PartialEq, Debug)] pub enum State { // The state of the exercise once it's been completed Done, // The state of the exercise while it's not completed yet Pending(Vec<ContextLine>), } // The context information of a pending exercise #[derive(PartialEq, Debug)] pub struct ContextLine { // The source code that is still pending completion pub line: String, // The line number of the source code still pending completion pub number: usize, // Whether or not this is important pub important: bool, } // The result of compiling an exercise pub struct CompiledExercise<'a> { exercise: &'a Exercise, _handle: FileHandle, } impl<'a> CompiledExercise<'a> { // Run the compiled exercise pub fn run(&self) -> Result<ExerciseOutput, ExerciseOutput> { self.exercise.run() } } // A representation of an already executed binary #[derive(Debug)] pub struct ExerciseOutput { // The textual contents of the standard output of the binary pub stdout: String, // The textual contents of the standard error of the binary pub stderr: String, } struct FileHandle; impl Drop for FileHandle { fn drop(&mut self)
} impl Exercise { pub fn compile(&self) -> Result<CompiledExercise, ExerciseOutput> { let cmd = match self.mode { Mode::Compile => Command::new("rustc") .args(&[self.path.to_str().unwrap(), "-o", &temp_file()]) .args(RUSTC_COLOR_ARGS) .output(), Mode::Test => Command::new("rustc") .args(&["--test", self.path.to_str().unwrap(), "-o", &temp_file()]) .args(RUSTC_COLOR_ARGS) .output(), Mode::Clippy => { let cargo_toml = format!( r#"[package] name = "{}" version = "0.0.1" edition = "2018" [[bin]] name = "{}" path = "{}.rs""#, self.name, self.name, self.name ); fs::write(CLIPPY_CARGO_TOML_PATH, cargo_toml) .expect("Failed to write 📎 Clippy 📎 Cargo.toml file."); // To support the ability to run the clipy exercises, build // an executable, in addition to running clippy. With a // compilation failure, this would silently fail. But we expect // clippy to reflect the same failure while compiling later. Command::new("rustc") .args(&[self.path.to_str().unwrap(), "-o", &temp_file()]) .args(RUSTC_COLOR_ARGS) .output() .expect("Failed to compile!"); // Due to an issue with Clippy, a cargo clean is required to catch all lints. // See https://github.com/rust-lang/rust-clippy/issues/2604 // This is already fixed on master branch. See this issue to track merging into Cargo: // https://github.com/rust-lang/rust-clippy/issues/3837 Command::new("cargo") .args(&["clean", "--manifest-path", CLIPPY_CARGO_TOML_PATH]) .args(RUSTC_COLOR_ARGS) .output() .expect("Failed to run 'cargo clean'"); Command::new("cargo") .args(&["clippy", "--manifest-path", CLIPPY_CARGO_TOML_PATH]) .args(RUSTC_COLOR_ARGS) .args(&["--", "-D", "warnings"]) .output() } } .expect("Failed to run 'compile' command."); if cmd.status.success() { Ok(CompiledExercise { exercise: &self, _handle: FileHandle, }) } else { clean(); Err(ExerciseOutput { stdout: String::from_utf8_lossy(&cmd.stdout).to_string(), stderr: String::from_utf8_lossy(&cmd.stderr).to_string(), }) } } fn run(&self) -> Result<ExerciseOutput, ExerciseOutput> { let arg = match self.mode { Mode::Test => "--show-output", _ => "" }; let cmd = Command::new(&temp_file()).arg(arg) .output() .expect("Failed to run 'run' command"); let output = ExerciseOutput { stdout: String::from_utf8_lossy(&cmd.stdout).to_string(), stderr: String::from_utf8_lossy(&cmd.stderr).to_string(), }; if cmd.status.success() { Ok(output) } else { Err(output) } } pub fn state(&self) -> State { let mut source_file = File::open(&self.path).expect("We were unable to open the exercise file!"); let source = { let mut s = String::new(); source_file .read_to_string(&mut s) .expect("We were unable to read the exercise file!"); s }; let re = Regex::new(I_AM_DONE_REGEX).unwrap(); if!re.is_match(&source) { return State::Done; } let matched_line_index = source .lines() .enumerate() .filter_map(|(i, line)| if re.is_match(line) { Some(i) } else { None }) .next() .expect("This should not happen at all"); let min_line = ((matched_line_index as i32) - (CONTEXT as i32)).max(0) as usize; let max_line = matched_line_index + CONTEXT; let context = source .lines() .enumerate() .filter(|&(i, _)| i >= min_line && i <= max_line) .map(|(i, line)| ContextLine { line: line.to_string(), number: i + 1, important: i == matched_line_index, }) .collect(); State::Pending(context) } } impl Display for Exercise { fn fmt(&self, f: &mut Formatter) -> fmt::Result { write!(f, "{}", self.path.to_str().unwrap()) } } #[inline] fn clean() { let _ignored = remove_file(&temp_file()); } #[cfg(test)] mod test { use super::*; use std::path::Path; #[test] fn 
test_clean() { File::create(&temp_file()).unwrap(); let exercise = Exercise { name: String::from("example"), path: PathBuf::from("tests/fixture/state/pending_exercise.rs"), mode: Mode::Compile, hint: String::from(""), }; let compiled = exercise.compile().unwrap(); drop(compiled); assert!(!Path::new(&temp_file()).exists()); } #[test] fn test_pending_state() { let exercise = Exercise { name: "pending_exercise".into(), path: PathBuf::from("tests/fixture/state/pending_exercise.rs"), mode: Mode::Compile, hint: String::new(), }; let state = exercise.state(); let expected = vec![ ContextLine { line: "// fake_exercise".to_string(), number: 1, important: false, }, ContextLine { line: "".to_string(), number: 2, important: false, }, ContextLine { line: "// I AM NOT DONE".to_string(), number: 3, important: true, }, ContextLine { line: "".to_string(), number: 4, important: false, }, ContextLine { line: "fn main() {".to_string(), number: 5, important: false, }, ]; assert_eq!(state, State::Pending(expected)); } #[test] fn test_finished_exercise() { let exercise = Exercise { name: "finished_exercise".into(), path: PathBuf::from("tests/fixture/state/finished_exercise.rs"), mode: Mode::Compile, hint: String::new(), }; assert_eq!(exercise.state(), State::Done); } #[test] fn test_exercise_with_output() { let exercise = Exercise { name: "finished_exercise".into(), path: PathBuf::from("tests/fixture/success/testSuccess.rs"), mode: Mode::Test, hint: String::new(), }; let out = exercise.compile().unwrap().run().unwrap(); assert!(out.stdout.contains("THIS TEST TOO SHALL PASS")); } }
{ clean(); }
identifier_body
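The middle of the row above is the body of FileHandle's Drop impl, i.e. an RAII guard that removes the temporary binary whenever the handle goes out of scope. A minimal standalone sketch of that guard idea follows; the path "./temp_demo" is an illustrative assumption, not taken from the dataset.

// Illustrative sketch: a Drop-based guard that deletes a temp file on scope
// exit, even on early returns, like FileHandle/clean() in exercise.rs.
use std::fs::{self, File};

struct TempFileGuard(&'static str);

impl Drop for TempFileGuard {
    fn drop(&mut self) {
        // Ignore the result, as exercise.rs does in clean().
        let _ = fs::remove_file(self.0);
    }
}

fn main() -> std::io::Result<()> {
    let _guard = TempFileGuard("./temp_demo");
    File::create("./temp_demo")?;
    // ... work with the file ...
    Ok(())
    // _guard is dropped here and the file is removed.
}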
exercise.rs
use regex::Regex; use serde::Deserialize; use std::fmt::{self, Display, Formatter}; use std::fs::{self, remove_file, File}; use std::io::Read; use std::path::PathBuf; use std::process::{self, Command}; const RUSTC_COLOR_ARGS: &[&str] = &["--color", "always"]; const I_AM_DONE_REGEX: &str = r"(?m)^\s*///?\s*I\s+AM\s+NOT\s+DONE"; const CONTEXT: usize = 2; const CLIPPY_CARGO_TOML_PATH: &str = "./exercises/clippy/Cargo.toml"; // Get a temporary file name that is hopefully unique to this process #[inline] fn temp_file() -> String { format!("./temp_{}", process::id()) } // The mode of the exercise. #[derive(Deserialize, Copy, Clone)] #[serde(rename_all = "lowercase")] pub enum Mode { // Indicates that the exercise should be compiled as a binary Compile, // Indicates that the exercise should be compiled as a test harness Test, // Indicates that the exercise should be linted with clippy Clippy, } #[derive(Deserialize)] pub struct ExerciseList { pub exercises: Vec<Exercise>, } // A representation of a rustlings exercise. // This is deserialized from the accompanying info.toml file #[derive(Deserialize)] pub struct Exercise { // Name of the exercise pub name: String, // The path to the file containing the exercise's source code pub path: PathBuf, // The mode of the exercise (Test, Compile, or Clippy) pub mode: Mode, // The hint text associated with the exercise pub hint: String, } // An enum to track of the state of an Exercise. // An Exercise can be either Done or Pending #[derive(PartialEq, Debug)] pub enum State { // The state of the exercise once it's been completed Done, // The state of the exercise while it's not completed yet Pending(Vec<ContextLine>), } // The context information of a pending exercise #[derive(PartialEq, Debug)] pub struct ContextLine { // The source code that is still pending completion pub line: String, // The line number of the source code still pending completion pub number: usize, // Whether or not this is important pub important: bool, } // The result of compiling an exercise pub struct CompiledExercise<'a> { exercise: &'a Exercise, _handle: FileHandle, } impl<'a> CompiledExercise<'a> { // Run the compiled exercise pub fn run(&self) -> Result<ExerciseOutput, ExerciseOutput> { self.exercise.run() } } // A representation of an already executed binary #[derive(Debug)] pub struct ExerciseOutput { // The textual contents of the standard output of the binary pub stdout: String, // The textual contents of the standard error of the binary pub stderr: String, } struct FileHandle; impl Drop for FileHandle { fn drop(&mut self) { clean(); } } impl Exercise { pub fn compile(&self) -> Result<CompiledExercise, ExerciseOutput> { let cmd = match self.mode { Mode::Compile => Command::new("rustc") .args(&[self.path.to_str().unwrap(), "-o", &temp_file()]) .args(RUSTC_COLOR_ARGS) .output(), Mode::Test => Command::new("rustc") .args(&["--test", self.path.to_str().unwrap(), "-o", &temp_file()]) .args(RUSTC_COLOR_ARGS) .output(), Mode::Clippy => { let cargo_toml = format!( r#"[package] name = "{}" version = "0.0.1" edition = "2018" [[bin]] name = "{}" path = "{}.rs""#, self.name, self.name, self.name ); fs::write(CLIPPY_CARGO_TOML_PATH, cargo_toml) .expect("Failed to write 📎 Clippy 📎 Cargo.toml file."); // To support the ability to run the clipy exercises, build // an executable, in addition to running clippy. With a // compilation failure, this would silently fail. But we expect // clippy to reflect the same failure while compiling later. 
Command::new("rustc") .args(&[self.path.to_str().unwrap(), "-o", &temp_file()]) .args(RUSTC_COLOR_ARGS) .output() .expect("Failed to compile!"); // Due to an issue with Clippy, a cargo clean is required to catch all lints. // See https://github.com/rust-lang/rust-clippy/issues/2604 // This is already fixed on master branch. See this issue to track merging into Cargo: // https://github.com/rust-lang/rust-clippy/issues/3837 Command::new("cargo") .args(&["clean", "--manifest-path", CLIPPY_CARGO_TOML_PATH]) .args(RUSTC_COLOR_ARGS) .output() .expect("Failed to run 'cargo clean'"); Command::new("cargo") .args(&["clippy", "--manifest-path", CLIPPY_CARGO_TOML_PATH]) .args(RUSTC_COLOR_ARGS) .args(&["--", "-D", "warnings"]) .output() } } .expect("Failed to run 'compile' command."); if cmd.status.success() { Ok(CompiledExercise { exercise: &self, _handle: FileHandle, }) } else { clean(); Err(ExerciseOutput { stdout: String::from_utf8_lossy(&cmd.stdout).to_string(), stderr: String::from_utf8_lossy(&cmd.stderr).to_string(), }) } } fn run(&self) -> Result<ExerciseOutput, ExerciseOutput> { let arg = match self.mode { Mode::Test => "--show-output", _ => "" }; let cmd = Command::new(&temp_file()).arg(arg) .output() .expect("Failed to run 'run' command"); let output = ExerciseOutput { stdout: String::from_utf8_lossy(&cmd.stdout).to_string(), stderr: String::from_utf8_lossy(&cmd.stderr).to_string(), }; if cmd.status.success() { Ok(output) } else { Err(output) } } pub fn state(&self) -> State { let mut source_file = File::open(&self.path).expect("We were unable to open the exercise file!"); let source = { let mut s = String::new(); source_file .read_to_string(&mut s) .expect("We were unable to read the exercise file!"); s }; let re = Regex::new(I_AM_DONE_REGEX).unwrap(); if!re.is_match(&source) {
let matched_line_index = source .lines() .enumerate() .filter_map(|(i, line)| if re.is_match(line) { Some(i) } else { None }) .next() .expect("This should not happen at all"); let min_line = ((matched_line_index as i32) - (CONTEXT as i32)).max(0) as usize; let max_line = matched_line_index + CONTEXT; let context = source .lines() .enumerate() .filter(|&(i, _)| i >= min_line && i <= max_line) .map(|(i, line)| ContextLine { line: line.to_string(), number: i + 1, important: i == matched_line_index, }) .collect(); State::Pending(context) } } impl Display for Exercise { fn fmt(&self, f: &mut Formatter) -> fmt::Result { write!(f, "{}", self.path.to_str().unwrap()) } } #[inline] fn clean() { let _ignored = remove_file(&temp_file()); } #[cfg(test)] mod test { use super::*; use std::path::Path; #[test] fn test_clean() { File::create(&temp_file()).unwrap(); let exercise = Exercise { name: String::from("example"), path: PathBuf::from("tests/fixture/state/pending_exercise.rs"), mode: Mode::Compile, hint: String::from(""), }; let compiled = exercise.compile().unwrap(); drop(compiled); assert!(!Path::new(&temp_file()).exists()); } #[test] fn test_pending_state() { let exercise = Exercise { name: "pending_exercise".into(), path: PathBuf::from("tests/fixture/state/pending_exercise.rs"), mode: Mode::Compile, hint: String::new(), }; let state = exercise.state(); let expected = vec![ ContextLine { line: "// fake_exercise".to_string(), number: 1, important: false, }, ContextLine { line: "".to_string(), number: 2, important: false, }, ContextLine { line: "// I AM NOT DONE".to_string(), number: 3, important: true, }, ContextLine { line: "".to_string(), number: 4, important: false, }, ContextLine { line: "fn main() {".to_string(), number: 5, important: false, }, ]; assert_eq!(state, State::Pending(expected)); } #[test] fn test_finished_exercise() { let exercise = Exercise { name: "finished_exercise".into(), path: PathBuf::from("tests/fixture/state/finished_exercise.rs"), mode: Mode::Compile, hint: String::new(), }; assert_eq!(exercise.state(), State::Done); } #[test] fn test_exercise_with_output() { let exercise = Exercise { name: "finished_exercise".into(), path: PathBuf::from("tests/fixture/success/testSuccess.rs"), mode: Mode::Test, hint: String::new(), }; let out = exercise.compile().unwrap().run().unwrap(); assert!(out.stdout.contains("THIS TEST TOO SHALL PASS")); } }
return State::Done; }
conditional_block
clipboard.rs
use std::string; use SdlResult; use get_error; #[allow(non_camel_case_types)] pub mod ll { use libc::{c_int, c_char}; pub type SDL_bool = c_int; extern "C" { pub fn SDL_SetClipboardText(text: *const c_char) -> c_int; pub fn SDL_GetClipboardText() -> *const c_char; pub fn SDL_HasClipboardText() -> SDL_bool; } } pub fn set_clipboard_text(text: &String) -> SdlResult<()> { unsafe { let result = text.with_c_str(|buff| { ll::SDL_SetClipboardText(buff) }); if result == 0 { Err(get_error()) } else { Ok(()) } } } pub fn
() -> SdlResult<String> { let result = unsafe { let cstr = ll::SDL_GetClipboardText() as *const u8; string::raw::from_buf(cstr) }; if result.len() == 0 { Err(get_error()) } else { Ok(result) } } pub fn has_clipboard_text() -> bool { unsafe { ll::SDL_HasClipboardText() == 1 } }
get_clipboard_text
identifier_name
clipboard.rs
use std::string; use SdlResult; use get_error; #[allow(non_camel_case_types)] pub mod ll { use libc::{c_int, c_char}; pub type SDL_bool = c_int; extern "C" { pub fn SDL_SetClipboardText(text: *const c_char) -> c_int; pub fn SDL_GetClipboardText() -> *const c_char; pub fn SDL_HasClipboardText() -> SDL_bool; } } pub fn set_clipboard_text(text: &String) -> SdlResult<()> { unsafe { let result = text.with_c_str(|buff| { ll::SDL_SetClipboardText(buff) }); if result == 0
else { Ok(()) } } } pub fn get_clipboard_text() -> SdlResult<String> { let result = unsafe { let cstr = ll::SDL_GetClipboardText() as *const u8; string::raw::from_buf(cstr) }; if result.len() == 0 { Err(get_error()) } else { Ok(result) } } pub fn has_clipboard_text() -> bool { unsafe { ll::SDL_HasClipboardText() == 1 } }
{ Err(get_error()) }
conditional_block
clipboard.rs
use std::string; use SdlResult; use get_error; #[allow(non_camel_case_types)] pub mod ll { use libc::{c_int, c_char}; pub type SDL_bool = c_int; extern "C" { pub fn SDL_SetClipboardText(text: *const c_char) -> c_int; pub fn SDL_GetClipboardText() -> *const c_char; pub fn SDL_HasClipboardText() -> SDL_bool; } } pub fn set_clipboard_text(text: &String) -> SdlResult<()> { unsafe { let result = text.with_c_str(|buff| { ll::SDL_SetClipboardText(buff) }); if result == 0 { Err(get_error()) } else { Ok(()) } } } pub fn get_clipboard_text() -> SdlResult<String>
pub fn has_clipboard_text() -> bool { unsafe { ll::SDL_HasClipboardText() == 1 } }
{ let result = unsafe { let cstr = ll::SDL_GetClipboardText() as *const u8; string::raw::from_buf(cstr) }; if result.len() == 0 { Err(get_error()) } else { Ok(result) } }
identifier_body
clipboard.rs
use std::string; use SdlResult; use get_error;
#[allow(non_camel_case_types)] pub mod ll { use libc::{c_int, c_char}; pub type SDL_bool = c_int; extern "C" { pub fn SDL_SetClipboardText(text: *const c_char) -> c_int; pub fn SDL_GetClipboardText() -> *const c_char; pub fn SDL_HasClipboardText() -> SDL_bool; } } pub fn set_clipboard_text(text: &String) -> SdlResult<()> { unsafe { let result = text.with_c_str(|buff| { ll::SDL_SetClipboardText(buff) }); if result == 0 { Err(get_error()) } else { Ok(()) } } } pub fn get_clipboard_text() -> SdlResult<String> { let result = unsafe { let cstr = ll::SDL_GetClipboardText() as *const u8; string::raw::from_buf(cstr) }; if result.len() == 0 { Err(get_error()) } else { Ok(result) } } pub fn has_clipboard_text() -> bool { unsafe { ll::SDL_HasClipboardText() == 1 } }
random_line_split
communication.rs
// Copyright Cryptape Technologies LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use futures::future::Either; use futures::sync::{mpsc, oneshot}; use futures::{Future, Sink, Stream}; use hyper; use libproto::TryInto; use parking_lot::{Mutex, RwLock}; use serde_json; use std::convert::Into; use tokio_core::reactor::{Core, Timeout}; use crate::configuration::UpStream; use cita_types::{H256, U256};
#[derive(Debug)] pub enum Error { BadStatus, Timeout, Parse, } type RpcSender = Mutex<mpsc::Sender<(hyper::Request, oneshot::Sender<Result<hyper::Chunk, Error>>)>>; pub struct RpcClient { sender: RpcSender, uri: RwLock<hyper::Uri>, } impl RpcClient { pub fn create(upstream: &UpStream) -> ::std::sync::Arc<Self> { let tb = ::std::thread::Builder::new().name("RpcClient".to_string()); let uri = upstream.url.parse::<hyper::Uri>().unwrap(); let (tx, rx) = mpsc::channel::<(hyper::Request, oneshot::Sender<Result<hyper::Chunk, Error>>)>(65_535); let timeout_duration = upstream.timeout; let _tb = tb .spawn(move || { let mut core = Core::new().unwrap(); let handle = core.handle(); let client = hyper::Client::configure() .connector(hyper::client::HttpConnector::new(4, &handle)) .keep_alive(false) .build(&handle); let messages = rx.for_each(|(req, sender)| { let timeout = Timeout::new(timeout_duration, &handle).unwrap(); let post = client.request(req).and_then(|res| res.body().concat2()); let work = post.select2(timeout).then(move |res| match res { Ok(Either::A((got, _timeout))) => { let _ = sender.send(Ok(got)); Ok(()) } Ok(Either::B(_)) | Err(_) => { let _ = sender.send(Err(Error::Timeout)); Ok(()) } }); handle.spawn(work); Ok(()) }); core.run(messages).unwrap(); }) .expect("Couldn't spawn a thread."); ::std::sync::Arc::new(RpcClient { sender: Mutex::new(tx), uri: RwLock::new(uri), }) } pub fn do_post(&self, body: &str) -> Result<hyper::Chunk, Error> { let uri = { self.uri.read().clone() }; trace!("Send body {:?} to {:?}.", body, uri); let mut req = hyper::Request::new(hyper::Method::Post, uri); req.headers_mut().set(hyper::header::ContentType::json()); req.set_body(body.to_owned()); let (tx, rx) = oneshot::channel(); { let _ = self.sender.lock().start_send((req, tx)); } match rx.wait() { Ok(res) => { let res = res.map_err(|_| Error::BadStatus)?; trace!("Get response {:?}.", res); Ok(res) } Err(_) => Err(Error::BadStatus), } } } // Pack the result type into a reply type, and parse result from the reply, and return the result. // The user of this macro do NOT have to care about the inner reply type. macro_rules! rpc_send_and_get_result_from_reply { ($upstream:ident, $request:ident, $result_type:path) => {{ define_reply_type!(ReplyType, $result_type); let rpc_cli = RpcClient::create($upstream); let body: String = $request.into(); let data = rpc_cli.do_post(&body)?; let reply: ReplyType = serde_json::from_slice(&data).map_err(|_| { error!( "send {:?} return error: {:?}", &body, ::std::str::from_utf8(&data) ); Error::Parse })?; trace!("get reply {:?}.", reply); reply.result }}; } macro_rules! 
define_reply_type { ($reply_type:ident, $result_type:path) => { #[derive(Debug, Clone, Serialize, Deserialize)] struct $reply_type { pub jsonrpc: Option<rpc_types::Version>, pub id: rpc_types::Id, pub result: $result_type, } }; } pub fn cita_get_transaction_proof(upstream: &UpStream, tx_hash: H256) -> Result<Vec<u8>, Error> { let req = rpc_request::GetTransactionProofParams::new(tx_hash.into()).into_request(1); let result = rpc_send_and_get_result_from_reply!(upstream, req, rpc_types::Data); Ok(result.into()) } pub fn cita_block_number(upstream: &UpStream) -> Result<U256, Error> { let req = rpc_request::BlockNumberParams::new().into_request(1); let result = rpc_send_and_get_result_from_reply!(upstream, req, U256); Ok(result) } pub fn cita_get_metadata(upstream: &UpStream) -> Result<rpc_types::MetaData, Error> { let height = rpc_types::BlockNumber::latest(); let req = rpc_request::GetMetaDataParams::new(height).into_request(1); let result = rpc_send_and_get_result_from_reply!(upstream, req, rpc_types::MetaData); Ok(result) } pub fn cita_send_transaction( upstream: &UpStream, utx: &UnverifiedTransaction, ) -> Result<H256, Error> { let tx_bytes: Vec<u8> = utx.try_into().unwrap(); let req = rpc_request::SendRawTransactionParams::new(tx_bytes.into()).into_request(1); let result = rpc_send_and_get_result_from_reply!(upstream, req, rpc_types::TxResponse); if result.status.to_uppercase() == "OK" { Ok(result.hash) } else { Err(Error::BadStatus) } }
use jsonrpc_types::{rpc_request, rpc_types}; use libproto::blockchain::UnverifiedTransaction;
random_line_split
communication.rs
// Copyright Cryptape Technologies LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use futures::future::Either; use futures::sync::{mpsc, oneshot}; use futures::{Future, Sink, Stream}; use hyper; use libproto::TryInto; use parking_lot::{Mutex, RwLock}; use serde_json; use std::convert::Into; use tokio_core::reactor::{Core, Timeout}; use crate::configuration::UpStream; use cita_types::{H256, U256}; use jsonrpc_types::{rpc_request, rpc_types}; use libproto::blockchain::UnverifiedTransaction; #[derive(Debug)] pub enum Error { BadStatus, Timeout, Parse, } type RpcSender = Mutex<mpsc::Sender<(hyper::Request, oneshot::Sender<Result<hyper::Chunk, Error>>)>>; pub struct RpcClient { sender: RpcSender, uri: RwLock<hyper::Uri>, } impl RpcClient { pub fn create(upstream: &UpStream) -> ::std::sync::Arc<Self> { let tb = ::std::thread::Builder::new().name("RpcClient".to_string()); let uri = upstream.url.parse::<hyper::Uri>().unwrap(); let (tx, rx) = mpsc::channel::<(hyper::Request, oneshot::Sender<Result<hyper::Chunk, Error>>)>(65_535); let timeout_duration = upstream.timeout; let _tb = tb .spawn(move || { let mut core = Core::new().unwrap(); let handle = core.handle(); let client = hyper::Client::configure() .connector(hyper::client::HttpConnector::new(4, &handle)) .keep_alive(false) .build(&handle); let messages = rx.for_each(|(req, sender)| { let timeout = Timeout::new(timeout_duration, &handle).unwrap(); let post = client.request(req).and_then(|res| res.body().concat2()); let work = post.select2(timeout).then(move |res| match res { Ok(Either::A((got, _timeout))) => { let _ = sender.send(Ok(got)); Ok(()) } Ok(Either::B(_)) | Err(_) => { let _ = sender.send(Err(Error::Timeout)); Ok(()) } }); handle.spawn(work); Ok(()) }); core.run(messages).unwrap(); }) .expect("Couldn't spawn a thread."); ::std::sync::Arc::new(RpcClient { sender: Mutex::new(tx), uri: RwLock::new(uri), }) } pub fn do_post(&self, body: &str) -> Result<hyper::Chunk, Error> { let uri = { self.uri.read().clone() }; trace!("Send body {:?} to {:?}.", body, uri); let mut req = hyper::Request::new(hyper::Method::Post, uri); req.headers_mut().set(hyper::header::ContentType::json()); req.set_body(body.to_owned()); let (tx, rx) = oneshot::channel(); { let _ = self.sender.lock().start_send((req, tx)); } match rx.wait() { Ok(res) => { let res = res.map_err(|_| Error::BadStatus)?; trace!("Get response {:?}.", res); Ok(res) } Err(_) => Err(Error::BadStatus), } } } // Pack the result type into a reply type, and parse result from the reply, and return the result. // The user of this macro do NOT have to care about the inner reply type. macro_rules! 
rpc_send_and_get_result_from_reply { ($upstream:ident, $request:ident, $result_type:path) => {{ define_reply_type!(ReplyType, $result_type); let rpc_cli = RpcClient::create($upstream); let body: String = $request.into(); let data = rpc_cli.do_post(&body)?; let reply: ReplyType = serde_json::from_slice(&data).map_err(|_| { error!( "send {:?} return error: {:?}", &body, ::std::str::from_utf8(&data) ); Error::Parse })?; trace!("get reply {:?}.", reply); reply.result }}; } macro_rules! define_reply_type { ($reply_type:ident, $result_type:path) => { #[derive(Debug, Clone, Serialize, Deserialize)] struct $reply_type { pub jsonrpc: Option<rpc_types::Version>, pub id: rpc_types::Id, pub result: $result_type, } }; } pub fn cita_get_transaction_proof(upstream: &UpStream, tx_hash: H256) -> Result<Vec<u8>, Error> { let req = rpc_request::GetTransactionProofParams::new(tx_hash.into()).into_request(1); let result = rpc_send_and_get_result_from_reply!(upstream, req, rpc_types::Data); Ok(result.into()) } pub fn cita_block_number(upstream: &UpStream) -> Result<U256, Error> { let req = rpc_request::BlockNumberParams::new().into_request(1); let result = rpc_send_and_get_result_from_reply!(upstream, req, U256); Ok(result) } pub fn cita_get_metadata(upstream: &UpStream) -> Result<rpc_types::MetaData, Error> { let height = rpc_types::BlockNumber::latest(); let req = rpc_request::GetMetaDataParams::new(height).into_request(1); let result = rpc_send_and_get_result_from_reply!(upstream, req, rpc_types::MetaData); Ok(result) } pub fn cita_send_transaction( upstream: &UpStream, utx: &UnverifiedTransaction, ) -> Result<H256, Error> { let tx_bytes: Vec<u8> = utx.try_into().unwrap(); let req = rpc_request::SendRawTransactionParams::new(tx_bytes.into()).into_request(1); let result = rpc_send_and_get_result_from_reply!(upstream, req, rpc_types::TxResponse); if result.status.to_uppercase() == "OK"
else { Err(Error::BadStatus) } }
{ Ok(result.hash) }
conditional_block
communication.rs
// Copyright Cryptape Technologies LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use futures::future::Either; use futures::sync::{mpsc, oneshot}; use futures::{Future, Sink, Stream}; use hyper; use libproto::TryInto; use parking_lot::{Mutex, RwLock}; use serde_json; use std::convert::Into; use tokio_core::reactor::{Core, Timeout}; use crate::configuration::UpStream; use cita_types::{H256, U256}; use jsonrpc_types::{rpc_request, rpc_types}; use libproto::blockchain::UnverifiedTransaction; #[derive(Debug)] pub enum Error { BadStatus, Timeout, Parse, } type RpcSender = Mutex<mpsc::Sender<(hyper::Request, oneshot::Sender<Result<hyper::Chunk, Error>>)>>; pub struct RpcClient { sender: RpcSender, uri: RwLock<hyper::Uri>, } impl RpcClient { pub fn create(upstream: &UpStream) -> ::std::sync::Arc<Self> { let tb = ::std::thread::Builder::new().name("RpcClient".to_string()); let uri = upstream.url.parse::<hyper::Uri>().unwrap(); let (tx, rx) = mpsc::channel::<(hyper::Request, oneshot::Sender<Result<hyper::Chunk, Error>>)>(65_535); let timeout_duration = upstream.timeout; let _tb = tb .spawn(move || { let mut core = Core::new().unwrap(); let handle = core.handle(); let client = hyper::Client::configure() .connector(hyper::client::HttpConnector::new(4, &handle)) .keep_alive(false) .build(&handle); let messages = rx.for_each(|(req, sender)| { let timeout = Timeout::new(timeout_duration, &handle).unwrap(); let post = client.request(req).and_then(|res| res.body().concat2()); let work = post.select2(timeout).then(move |res| match res { Ok(Either::A((got, _timeout))) => { let _ = sender.send(Ok(got)); Ok(()) } Ok(Either::B(_)) | Err(_) => { let _ = sender.send(Err(Error::Timeout)); Ok(()) } }); handle.spawn(work); Ok(()) }); core.run(messages).unwrap(); }) .expect("Couldn't spawn a thread."); ::std::sync::Arc::new(RpcClient { sender: Mutex::new(tx), uri: RwLock::new(uri), }) } pub fn do_post(&self, body: &str) -> Result<hyper::Chunk, Error> { let uri = { self.uri.read().clone() }; trace!("Send body {:?} to {:?}.", body, uri); let mut req = hyper::Request::new(hyper::Method::Post, uri); req.headers_mut().set(hyper::header::ContentType::json()); req.set_body(body.to_owned()); let (tx, rx) = oneshot::channel(); { let _ = self.sender.lock().start_send((req, tx)); } match rx.wait() { Ok(res) => { let res = res.map_err(|_| Error::BadStatus)?; trace!("Get response {:?}.", res); Ok(res) } Err(_) => Err(Error::BadStatus), } } } // Pack the result type into a reply type, and parse result from the reply, and return the result. // The user of this macro do NOT have to care about the inner reply type. macro_rules! 
rpc_send_and_get_result_from_reply { ($upstream:ident, $request:ident, $result_type:path) => {{ define_reply_type!(ReplyType, $result_type); let rpc_cli = RpcClient::create($upstream); let body: String = $request.into(); let data = rpc_cli.do_post(&body)?; let reply: ReplyType = serde_json::from_slice(&data).map_err(|_| { error!( "send {:?} return error: {:?}", &body, ::std::str::from_utf8(&data) ); Error::Parse })?; trace!("get reply {:?}.", reply); reply.result }}; } macro_rules! define_reply_type { ($reply_type:ident, $result_type:path) => { #[derive(Debug, Clone, Serialize, Deserialize)] struct $reply_type { pub jsonrpc: Option<rpc_types::Version>, pub id: rpc_types::Id, pub result: $result_type, } }; } pub fn cita_get_transaction_proof(upstream: &UpStream, tx_hash: H256) -> Result<Vec<u8>, Error> { let req = rpc_request::GetTransactionProofParams::new(tx_hash.into()).into_request(1); let result = rpc_send_and_get_result_from_reply!(upstream, req, rpc_types::Data); Ok(result.into()) } pub fn cita_block_number(upstream: &UpStream) -> Result<U256, Error> { let req = rpc_request::BlockNumberParams::new().into_request(1); let result = rpc_send_and_get_result_from_reply!(upstream, req, U256); Ok(result) } pub fn
(upstream: &UpStream) -> Result<rpc_types::MetaData, Error> { let height = rpc_types::BlockNumber::latest(); let req = rpc_request::GetMetaDataParams::new(height).into_request(1); let result = rpc_send_and_get_result_from_reply!(upstream, req, rpc_types::MetaData); Ok(result) } pub fn cita_send_transaction( upstream: &UpStream, utx: &UnverifiedTransaction, ) -> Result<H256, Error> { let tx_bytes: Vec<u8> = utx.try_into().unwrap(); let req = rpc_request::SendRawTransactionParams::new(tx_bytes.into()).into_request(1); let result = rpc_send_and_get_result_from_reply!(upstream, req, rpc_types::TxResponse); if result.status.to_uppercase() == "OK" { Ok(result.hash) } else { Err(Error::BadStatus) } }
cita_get_metadata
identifier_name
communication.rs
// Copyright Cryptape Technologies LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use futures::future::Either; use futures::sync::{mpsc, oneshot}; use futures::{Future, Sink, Stream}; use hyper; use libproto::TryInto; use parking_lot::{Mutex, RwLock}; use serde_json; use std::convert::Into; use tokio_core::reactor::{Core, Timeout}; use crate::configuration::UpStream; use cita_types::{H256, U256}; use jsonrpc_types::{rpc_request, rpc_types}; use libproto::blockchain::UnverifiedTransaction; #[derive(Debug)] pub enum Error { BadStatus, Timeout, Parse, } type RpcSender = Mutex<mpsc::Sender<(hyper::Request, oneshot::Sender<Result<hyper::Chunk, Error>>)>>; pub struct RpcClient { sender: RpcSender, uri: RwLock<hyper::Uri>, } impl RpcClient { pub fn create(upstream: &UpStream) -> ::std::sync::Arc<Self> { let tb = ::std::thread::Builder::new().name("RpcClient".to_string()); let uri = upstream.url.parse::<hyper::Uri>().unwrap(); let (tx, rx) = mpsc::channel::<(hyper::Request, oneshot::Sender<Result<hyper::Chunk, Error>>)>(65_535); let timeout_duration = upstream.timeout; let _tb = tb .spawn(move || { let mut core = Core::new().unwrap(); let handle = core.handle(); let client = hyper::Client::configure() .connector(hyper::client::HttpConnector::new(4, &handle)) .keep_alive(false) .build(&handle); let messages = rx.for_each(|(req, sender)| { let timeout = Timeout::new(timeout_duration, &handle).unwrap(); let post = client.request(req).and_then(|res| res.body().concat2()); let work = post.select2(timeout).then(move |res| match res { Ok(Either::A((got, _timeout))) => { let _ = sender.send(Ok(got)); Ok(()) } Ok(Either::B(_)) | Err(_) => { let _ = sender.send(Err(Error::Timeout)); Ok(()) } }); handle.spawn(work); Ok(()) }); core.run(messages).unwrap(); }) .expect("Couldn't spawn a thread."); ::std::sync::Arc::new(RpcClient { sender: Mutex::new(tx), uri: RwLock::new(uri), }) } pub fn do_post(&self, body: &str) -> Result<hyper::Chunk, Error>
} // Pack the result type into a reply type, and parse result from the reply, and return the result. // The user of this macro do NOT have to care about the inner reply type. macro_rules! rpc_send_and_get_result_from_reply { ($upstream:ident, $request:ident, $result_type:path) => {{ define_reply_type!(ReplyType, $result_type); let rpc_cli = RpcClient::create($upstream); let body: String = $request.into(); let data = rpc_cli.do_post(&body)?; let reply: ReplyType = serde_json::from_slice(&data).map_err(|_| { error!( "send {:?} return error: {:?}", &body, ::std::str::from_utf8(&data) ); Error::Parse })?; trace!("get reply {:?}.", reply); reply.result }}; } macro_rules! define_reply_type { ($reply_type:ident, $result_type:path) => { #[derive(Debug, Clone, Serialize, Deserialize)] struct $reply_type { pub jsonrpc: Option<rpc_types::Version>, pub id: rpc_types::Id, pub result: $result_type, } }; } pub fn cita_get_transaction_proof(upstream: &UpStream, tx_hash: H256) -> Result<Vec<u8>, Error> { let req = rpc_request::GetTransactionProofParams::new(tx_hash.into()).into_request(1); let result = rpc_send_and_get_result_from_reply!(upstream, req, rpc_types::Data); Ok(result.into()) } pub fn cita_block_number(upstream: &UpStream) -> Result<U256, Error> { let req = rpc_request::BlockNumberParams::new().into_request(1); let result = rpc_send_and_get_result_from_reply!(upstream, req, U256); Ok(result) } pub fn cita_get_metadata(upstream: &UpStream) -> Result<rpc_types::MetaData, Error> { let height = rpc_types::BlockNumber::latest(); let req = rpc_request::GetMetaDataParams::new(height).into_request(1); let result = rpc_send_and_get_result_from_reply!(upstream, req, rpc_types::MetaData); Ok(result) } pub fn cita_send_transaction( upstream: &UpStream, utx: &UnverifiedTransaction, ) -> Result<H256, Error> { let tx_bytes: Vec<u8> = utx.try_into().unwrap(); let req = rpc_request::SendRawTransactionParams::new(tx_bytes.into()).into_request(1); let result = rpc_send_and_get_result_from_reply!(upstream, req, rpc_types::TxResponse); if result.status.to_uppercase() == "OK" { Ok(result.hash) } else { Err(Error::BadStatus) } }
{ let uri = { self.uri.read().clone() }; trace!("Send body {:?} to {:?}.", body, uri); let mut req = hyper::Request::new(hyper::Method::Post, uri); req.headers_mut().set(hyper::header::ContentType::json()); req.set_body(body.to_owned()); let (tx, rx) = oneshot::channel(); { let _ = self.sender.lock().start_send((req, tx)); } match rx.wait() { Ok(res) => { let res = res.map_err(|_| Error::BadStatus)?; trace!("Get response {:?}.", res); Ok(res) } Err(_) => Err(Error::BadStatus), } }
identifier_body
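The communication.rs rows above rely on a declarative macro (define_reply_type!) to stamp out a typed reply wrapper per result type. The sketch below shows only the macro pattern in isolation; the field types (plain String/u64 instead of the crate's rpc_types) are simplified assumptions, not the crate's real API.

// Illustrative sketch of a macro_rules! macro that generates a reply struct
// parameterised by its result payload type.
macro_rules! define_reply_type {
    ($reply_type:ident, $result_type:ty) => {
        #[derive(Debug)]
        struct $reply_type {
            jsonrpc: Option<String>,
            id: u64,
            result: $result_type,
        }
    };
}

define_reply_type!(ReplyU64, u64);

fn main() {
    let reply = ReplyU64 {
        jsonrpc: Some("2.0".to_string()),
        id: 1,
        result: 42,
    };
    println!("{:?}", reply);
}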
functional-struct-update-noncopyable.rs
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // issue 7327 use std::sync::Arc; struct A { y: Arc<isize>, x: Arc<isize> } impl Drop for A { fn drop(&mut self)
} fn main() { let a = A { y: Arc::new(1), x: Arc::new(2) }; let _b = A { y: Arc::new(3),..a }; //~ ERROR cannot move out of type `A` let _c = a; }
{ println!("x={}", *self.x); }
identifier_body
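The functional-struct-update-noncopyable.rs rows above are a compile-fail test: the `..a` functional update syntax moves the remaining non-Copy fields out of `a`, so `a` cannot be used afterwards. The sketch below shows a compiling variant that clones the Arc instead; the explicit clone is an illustrative workaround, not part of the test.

// Illustrative sketch: avoid moving out of `a` by cloning the shared field
// rather than using `..a` functional update on non-Copy fields.
use std::sync::Arc;

struct A {
    y: Arc<isize>,
    x: Arc<isize>,
}

fn main() {
    let a = A { y: Arc::new(1), x: Arc::new(2) };
    let b = A { y: Arc::new(3), x: Arc::clone(&a.x) };
    let c = a; // still allowed: nothing was moved out of `a` above
    println!("{} {} {}", b.y, b.x, c.y);
}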
functional-struct-update-noncopyable.rs
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // issue 7327 use std::sync::Arc; struct
{ y: Arc<isize>, x: Arc<isize> } impl Drop for A { fn drop(&mut self) { println!("x={}", *self.x); } } fn main() { let a = A { y: Arc::new(1), x: Arc::new(2) }; let _b = A { y: Arc::new(3),..a }; //~ ERROR cannot move out of type `A` let _c = a; }
A
identifier_name
functional-struct-update-noncopyable.rs
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. //
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // issue 7327 use std::sync::Arc; struct A { y: Arc<isize>, x: Arc<isize> } impl Drop for A { fn drop(&mut self) { println!("x={}", *self.x); } } fn main() { let a = A { y: Arc::new(1), x: Arc::new(2) }; let _b = A { y: Arc::new(3),..a }; //~ ERROR cannot move out of type `A` let _c = a; }
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
random_line_split
admin_geofinder.rs
// Copyright © 2016, Canal TP and/or its affiliates. All rights reserved. // // This file is part of Navitia, // the software to build cool stuff with public transport. // // Hope you'll enjoy and contribute to this project, // powered by Canal TP (www.canaltp.fr). // Help us simplify mobility and open public transport: // a non ending quest to the responsive locomotion way of traveling! // // LICENCE: This program is free software; you can redistribute it // and/or modify it under the terms of the GNU Affero General Public // License as published by the Free Software Foundation, either // version 3 of the License, or (at your option) any later version. // // This program is distributed in the hope that it will be useful, but // WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU // Affero General Public License for more details. // // You should have received a copy of the GNU Affero General Public // License along with this program. If not, see // <http://www.gnu.org/licenses/>. // // Stay tuned using // twitter @navitia // IRC #navitia on freenode // https://groups.google.com/d/forum/navitia // www.navitia.io use geo; use geo::contains::Contains; use gst::rtree::{RTree, Rect}; use mimir::Admin; use std; use std::collections::{BTreeMap, BTreeSet}; use std::iter::FromIterator; use std::sync::Arc; /// We want to strip the admin's boundary for the objects referencing it (for performance purpose) /// thus in the `AdminGeoFinder` we store an Admin without the boundary (the option is emptied) /// and we store the boundary aside struct BoundaryAndAdmin(Option<geo::MultiPolygon<f64>>, Arc<Admin>); impl BoundaryAndAdmin { fn new(mut admin: Admin) -> BoundaryAndAdmin { let b = std::mem::replace(&mut admin.boundary, None); let minimal_admin = Arc::new(admin); BoundaryAndAdmin(b, minimal_admin) } } pub struct AdminGeoFinder { admins: RTree<BoundaryAndAdmin>, admin_by_id: BTreeMap<String, Arc<Admin>>, } impl AdminGeoFinder { pub fn insert(&mut self, admin: Admin) { use ordered_float::OrderedFloat; fn min(a: OrderedFloat<f32>, b: f64) -> f32 { a.0.min(down(b as f32)) } fn max(a: OrderedFloat<f32>, b: f64) -> f32 { a.0.max(up(b as f32)) } let rect = { let mut coords = match admin.boundary { Some(ref b) => b.0.iter().flat_map(|poly| (poly.exterior).0.iter()), None => return, }; let first_coord = match coords.next() { Some(c) => c, None => return, }; let first_rect: Rect = { let (x, y) = (first_coord.x() as f32, first_coord.y() as f32); Rect::from_float(down(x), up(x), down(y), up(y)) }; coords.fold(first_rect, |accu, p| { Rect::from_float( min(accu.xmin, p.x()), max(accu.xmax, p.x()), min(accu.ymin, p.y()), max(accu.ymax, p.y()), ) }) }; let bound_admin = BoundaryAndAdmin::new(admin); self.admin_by_id .insert(bound_admin.1.id.clone(), bound_admin.1.clone()); self.admins.insert(rect, bound_admin); } /// Get all Admins overlapping the coordinate pub fn get(&self, coord: &geo::Coordinate<f64>) -> Vec<Arc<Admin>> { let (x, y) = (coord.x as f32, coord.y as f32); let search = Rect::from_float(down(x), up(x), down(y), up(y)); let mut rtree_results = self.admins.get(&search); rtree_results.sort_by_key(|(_, a)| a.1.zone_type); let mut tested_hierarchy = BTreeSet::<String>::new(); let mut added_zone_types = BTreeSet::new(); let mut res = vec![]; for (_, boundary_and_admin) in rtree_results { let boundary = &boundary_and_admin.0; let admin = &boundary_and_admin.1; if tested_hierarchy.contains(&admin.id) { res.push(admin.clone()); } else if 
admin .zone_type .as_ref() .map_or(false, |zt| added_zone_types.contains(zt)) { // we don't want it, we already have this kind of ZoneType } else if boundary .as_ref() .map_or(false, |b| b.contains(&geo::Point(*coord))) { // we found a valid admin, we save it's hierarchy not to have to test their boundaries if let Some(zt) = admin.zone_type { added_zone_types.insert(zt.clone()); } let mut admin_parent_id = admin.parent_id.clone(); while let Some(id) = admin_parent_id { let admin_parent = self.admin_by_id.get(&id); if let Some(zt) = admin_parent.as_ref().and_then(|a| a.zone_type) { added_zone_types.insert(zt.clone()); } tested_hierarchy.insert(id); admin_parent_id = admin_parent.and_then(|a| a.parent_id.clone()); } res.push(admin.clone()); } } res } /// Iterates on all the admins with a not None boundary. pub fn admins<'a>(&'a self) -> Box<Iterator<Item = Admin> + 'a> { let iter = self .admins .get(&Rect::from_float( std::f32::NEG_INFINITY, std::f32::INFINITY, std::f32::NEG_INFINITY, std::f32::INFINITY, )).into_iter() .map(|(_, a)| { let mut admin = (*a.1).clone(); admin.boundary = a.0.clone(); admin }); Box::new(iter) } /// Iterates on all the `Rc<Admin>` in the structure as returned by `get`. pub fn admins_without_boundary<'a>(&'a self) -> Box<Iterator<Item = Arc<Admin>> + 'a> { let iter = self .admins .get(&Rect::from_float( std::f32::NEG_INFINITY, std::f32::INFINITY, std::f32::NEG_INFINITY, std::f32::INFINITY, )).into_iter() .map(|(_, a)| a.1.clone()); Box::new(iter) } } impl Default for AdminGeoFinder { fn default() -> Self { AdminGeoFinder { admins: RTree::new(), admin_by_id: BTreeMap::new(), } } } impl FromIterator<Admin> for AdminGeoFinder { fn from_iter<I: IntoIterator<Item = Admin>>(admins: I) -> Self { let mut geofinder = AdminGeoFinder::default(); for admin in admins { geofinder.insert(admin); } geofinder } } // the goal is that f in [down(f as f32) as f64, up(f as f32) as f64] fn down(f: f32) -> f32 { f - (f * ::std::f32::EPSILON).abs() } fn up(f: f32) -> f32 { f + (f * ::std::f32::EPSILON).abs() } #[test] fn test_up_down() { for &f in [1.0f64, 0., -0., -1., 0.1, -0.1, 0.9, -0.9, 42., -42.].iter() { let small_f = f as f32; assert!( down(small_f) as f64 <= f, format!("{} <= {}", down(small_f) as f64, f) ); assert!( f <= up(small_f) as f64, format!("{} <= {}", f, up(small_f) as f64) ); } } #[cfg(test)] mod tests { use super::*; use cosmogony::ZoneType; use geo::prelude::BoundingBox; fn p(x: f64, y: f64) -> ::geo::Point<f64> { ::geo::Point(::geo::Coordinate { x: x, y: y }) } fn make_admin(offset: f64, zt: Option<ZoneType>) -> ::mimir::Admin { make_complex_admin(&format!("admin:offset:{}", offset,), offset, zt, 1., None) } fn make_complex_admin( id: &str, offset: f64, zt: Option<ZoneType>, zone_size: f64, parent_offset: Option<&str>, ) -> ::mimir::Admin { // the boundary is a big octogon // the zone_size param is used to control the area of the zone let shape = ::geo::Polygon::new( ::geo::LineString(vec![ p(3. * zone_size + offset, 0. * zone_size + offset), p(6. * zone_size + offset, 0. * zone_size + offset), p(9. * zone_size + offset, 3. * zone_size + offset), p(9. * zone_size + offset, 6. * zone_size + offset), p(6. * zone_size + offset, 9. * zone_size + offset), p(3. * zone_size + offset, 9. * zone_size + offset), p(0. * zone_size + offset, 6. * zone_size + offset), p(0. * zone_size + offset, 3. * zone_size + offset), p(3. * zone_size + offset, 0. 
* zone_size + offset), ]), vec![], ); let boundary = ::geo::MultiPolygon(vec![shape]); ::mimir::Admin { id: id.into(), level: 8, name: "city".to_string(), label: format!("city {}", offset), zip_codes: vec!["421337".to_string()], weight: 0f64, coord: ::mimir::Coord::new(4.0 + offset, 4.0 + offset), bbox: boundary.bbox(), boundary: Some(boundary), insee: "outlook".to_string(), zone_type: zt, parent_id: parent_offset.map(|id| id.into()), codes: vec![], } } #[test] fn test_two_fake_admins() { let mut finder = AdminGeoFinder::default(); finder.insert(make_admin(40., Some(ZoneType::City))); finder.insert(make_admin(43., Some(ZoneType::State))); // outside for coord in [p(48., 41.), p(411., 41.), p(51., 54.), p(53., 53.)].iter() { assert!(finder.get(&coord.0).is_empty()); } // inside one let admins = finder.get(&p(44., 44.).0); assert_eq!(admins.len(), 1); assert_eq!(admins[0].id, "admin:offset:40"); let admins = finder.get(&p(48., 48.).0); assert_eq!(admins.len(), 1); assert_eq!(admins[0].id, "admin:offset:43"); // inside two let mut admins = finder.get(&p(46., 46.).0); admins.sort_by(|a, b| a.id.cmp(&b.id)); assert_eq!(admins.len(), 2); assert_eq!(admins[0].id, "admin:offset:40"); assert_eq!(admins[1].id, "admin:offset:43"); } #[test] fn test_two_admin_same_zone_type() { // a point can be associated to only 1 admin type // so a point is in 2 city, it is associated to only one let mut finder = AdminGeoFinder::default(); finder.insert(make_admin(40., Some(ZoneType::City))); finder.insert(make_admin(43., Some(ZoneType::City))); let admins = finder.get(&p(46., 46.).0); assert_eq!(admins.len(), 1); } #[test] fn test_two_no_zone_type() { // a point can be associated to only 1 admin type // but a point can be associated to multiple admin without zone_type // (for retrocompatibility of the data imported without cosmogony) let mut finder = AdminGeoFinder::default(); finder.insert(make_admin(40., None)); finder.insert(make_admin(43., None)); let admins = finder.get(&p(46., 46.).0); assert_eq!(admins.len(), 2); } #[test] fn test_hierarchy() { let mut finder = AdminGeoFinder::default(); finder.insert(make_complex_admin( "bob_city", 40., Some(ZoneType::City), 1., Some("bob_state"), )); finder.insert(make_complex_admin( "bob_state", 40., Some(ZoneType::StateDistrict), 2., Some("bob_country"), )); finder.insert(make_complex_admin( "bob_country", 40., Some(ZoneType::Country), 3., None, )); let admins = finder.get(&p(46., 46.).0); assert_eq!(admins.len(), 3); assert_eq!(admins[0].id, "bob_city"); assert_eq!(admins[1].id, "bob_state"); assert_eq!(admins[2].id, "bob_country"); } #[test] fn test_hierarchy_orphan() { let mut finder = AdminGeoFinder::default(); finder.insert(make_complex_admin( "bob_city", 40., Some(ZoneType::City), 1., Some("bob_state"), )); finder.insert(make_complex_admin( "bob_state", 40., Some(ZoneType::StateDistrict), 2., Some("bob_country"), )); finder.insert(make_complex_admin( "bob_country", 40., Some(ZoneType::Country), 3., None, )); // another_state also contains the point, but the geofinder look for only 1 admin by type (it needs only 1 state) // since bob_city has been tester first, it's hierarchy has been added automatically // so [46., 46.] 
will not be associated to another_state finder.insert(make_complex_admin( "another_state", 40., Some(ZoneType::StateDistrict), 2., Some("bob_country"), )); let admins = finder.get(&p(46., 46.).0); assert_eq!(admins.len(), 3); assert_eq!(admins[0].id, "bob_city"); assert_eq!(admins[1].id, "bob_state"); assert_eq!(admins[2].id, "bob_country"); } #[test] fn test_hierarchy_and_not_typed_zone() { let mut finder = AdminGeoFinder::default(); finder.insert(make_complex_admin( "bob_city", 40., Some(ZoneType::City), 1., Some("bob_state"), )); finder.insert(make_complex_admin( "bob_state",
Some("bob_country"), )); finder.insert(make_complex_admin( "bob_country", 40., Some(ZoneType::Country), 3., None, )); // not_typed zone is outside the hierarchy, but since it contains the point and it has no type it is added finder.insert(make_complex_admin("no_typed_zone", 40., None, 2., None)); let admins = finder.get(&p(46., 46.).0); assert_eq!(admins.len(), 4); assert_eq!(admins[0].id, "no_typed_zone"); assert_eq!(admins[1].id, "bob_city"); assert_eq!(admins[2].id, "bob_state"); assert_eq!(admins[3].id, "bob_country"); } }
40., Some(ZoneType::StateDistrict), 2.,
random_line_split
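As a standalone sketch of the selection rule the admin_geofinder.rs samples above implement inside `get()` — keep at most one admin per zone type, but always keep untyped admins — the following uses a stand-in `ZoneType` enum and made-up admin names, and assumes nothing beyond the standard library:

```rust
use std::collections::BTreeSet;

// Stand-in zone types; the real code uses cosmogony::ZoneType.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
enum ZoneType { City, StateDistrict, Country }

fn main() {
    // Hypothetical candidates, already sorted by zone type as in the R-tree query.
    let candidates = [
        ("bob_city", Some(ZoneType::City)),
        ("other_city", Some(ZoneType::City)),   // same type already seen: skipped
        ("bob_state", Some(ZoneType::StateDistrict)),
        ("bob_country", Some(ZoneType::Country)),
        ("untyped_zone", None),                 // no zone type: always kept
    ];

    let mut seen_types = BTreeSet::new();
    let mut kept = Vec::new();
    for &(name, zone_type) in candidates.iter() {
        if zone_type.map_or(false, |zt| seen_types.contains(&zt)) {
            continue; // we already have an admin of this zone type
        }
        if let Some(zt) = zone_type {
            seen_types.insert(zt);
        }
        kept.push(name);
    }

    assert_eq!(kept, vec!["bob_city", "bob_state", "bob_country", "untyped_zone"]);
    println!("kept admins: {:?}", kept);
}
```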
admin_geofinder.rs
// Copyright © 2016, Canal TP and/or its affiliates. All rights reserved. // // This file is part of Navitia, // the software to build cool stuff with public transport. // // Hope you'll enjoy and contribute to this project, // powered by Canal TP (www.canaltp.fr). // Help us simplify mobility and open public transport: // a non ending quest to the responsive locomotion way of traveling! // // LICENCE: This program is free software; you can redistribute it // and/or modify it under the terms of the GNU Affero General Public // License as published by the Free Software Foundation, either // version 3 of the License, or (at your option) any later version. // // This program is distributed in the hope that it will be useful, but // WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU // Affero General Public License for more details. // // You should have received a copy of the GNU Affero General Public // License along with this program. If not, see // <http://www.gnu.org/licenses/>. // // Stay tuned using // twitter @navitia // IRC #navitia on freenode // https://groups.google.com/d/forum/navitia // www.navitia.io use geo; use geo::contains::Contains; use gst::rtree::{RTree, Rect}; use mimir::Admin; use std; use std::collections::{BTreeMap, BTreeSet}; use std::iter::FromIterator; use std::sync::Arc; /// We want to strip the admin's boundary for the objects referencing it (for performance purpose) /// thus in the `AdminGeoFinder` we store an Admin without the boundary (the option is emptied) /// and we store the boundary aside struct BoundaryAndAdmin(Option<geo::MultiPolygon<f64>>, Arc<Admin>); impl BoundaryAndAdmin { fn new(mut admin: Admin) -> BoundaryAndAdmin { let b = std::mem::replace(&mut admin.boundary, None); let minimal_admin = Arc::new(admin); BoundaryAndAdmin(b, minimal_admin) } } pub struct AdminGeoFinder { admins: RTree<BoundaryAndAdmin>, admin_by_id: BTreeMap<String, Arc<Admin>>, } impl AdminGeoFinder { pub fn i
&mut self, admin: Admin) { use ordered_float::OrderedFloat; fn min(a: OrderedFloat<f32>, b: f64) -> f32 { a.0.min(down(b as f32)) } fn max(a: OrderedFloat<f32>, b: f64) -> f32 { a.0.max(up(b as f32)) } let rect = { let mut coords = match admin.boundary { Some(ref b) => b.0.iter().flat_map(|poly| (poly.exterior).0.iter()), None => return, }; let first_coord = match coords.next() { Some(c) => c, None => return, }; let first_rect: Rect = { let (x, y) = (first_coord.x() as f32, first_coord.y() as f32); Rect::from_float(down(x), up(x), down(y), up(y)) }; coords.fold(first_rect, |accu, p| { Rect::from_float( min(accu.xmin, p.x()), max(accu.xmax, p.x()), min(accu.ymin, p.y()), max(accu.ymax, p.y()), ) }) }; let bound_admin = BoundaryAndAdmin::new(admin); self.admin_by_id .insert(bound_admin.1.id.clone(), bound_admin.1.clone()); self.admins.insert(rect, bound_admin); } /// Get all Admins overlapping the coordinate pub fn get(&self, coord: &geo::Coordinate<f64>) -> Vec<Arc<Admin>> { let (x, y) = (coord.x as f32, coord.y as f32); let search = Rect::from_float(down(x), up(x), down(y), up(y)); let mut rtree_results = self.admins.get(&search); rtree_results.sort_by_key(|(_, a)| a.1.zone_type); let mut tested_hierarchy = BTreeSet::<String>::new(); let mut added_zone_types = BTreeSet::new(); let mut res = vec![]; for (_, boundary_and_admin) in rtree_results { let boundary = &boundary_and_admin.0; let admin = &boundary_and_admin.1; if tested_hierarchy.contains(&admin.id) { res.push(admin.clone()); } else if admin .zone_type .as_ref() .map_or(false, |zt| added_zone_types.contains(zt)) { // we don't want it, we already have this kind of ZoneType } else if boundary .as_ref() .map_or(false, |b| b.contains(&geo::Point(*coord))) { // we found a valid admin, we save it's hierarchy not to have to test their boundaries if let Some(zt) = admin.zone_type { added_zone_types.insert(zt.clone()); } let mut admin_parent_id = admin.parent_id.clone(); while let Some(id) = admin_parent_id { let admin_parent = self.admin_by_id.get(&id); if let Some(zt) = admin_parent.as_ref().and_then(|a| a.zone_type) { added_zone_types.insert(zt.clone()); } tested_hierarchy.insert(id); admin_parent_id = admin_parent.and_then(|a| a.parent_id.clone()); } res.push(admin.clone()); } } res } /// Iterates on all the admins with a not None boundary. pub fn admins<'a>(&'a self) -> Box<Iterator<Item = Admin> + 'a> { let iter = self .admins .get(&Rect::from_float( std::f32::NEG_INFINITY, std::f32::INFINITY, std::f32::NEG_INFINITY, std::f32::INFINITY, )).into_iter() .map(|(_, a)| { let mut admin = (*a.1).clone(); admin.boundary = a.0.clone(); admin }); Box::new(iter) } /// Iterates on all the `Rc<Admin>` in the structure as returned by `get`. 
pub fn admins_without_boundary<'a>(&'a self) -> Box<Iterator<Item = Arc<Admin>> + 'a> { let iter = self .admins .get(&Rect::from_float( std::f32::NEG_INFINITY, std::f32::INFINITY, std::f32::NEG_INFINITY, std::f32::INFINITY, )).into_iter() .map(|(_, a)| a.1.clone()); Box::new(iter) } } impl Default for AdminGeoFinder { fn default() -> Self { AdminGeoFinder { admins: RTree::new(), admin_by_id: BTreeMap::new(), } } } impl FromIterator<Admin> for AdminGeoFinder { fn from_iter<I: IntoIterator<Item = Admin>>(admins: I) -> Self { let mut geofinder = AdminGeoFinder::default(); for admin in admins { geofinder.insert(admin); } geofinder } } // the goal is that f in [down(f as f32) as f64, up(f as f32) as f64] fn down(f: f32) -> f32 { f - (f * ::std::f32::EPSILON).abs() } fn up(f: f32) -> f32 { f + (f * ::std::f32::EPSILON).abs() } #[test] fn test_up_down() { for &f in [1.0f64, 0., -0., -1., 0.1, -0.1, 0.9, -0.9, 42., -42.].iter() { let small_f = f as f32; assert!( down(small_f) as f64 <= f, format!("{} <= {}", down(small_f) as f64, f) ); assert!( f <= up(small_f) as f64, format!("{} <= {}", f, up(small_f) as f64) ); } } #[cfg(test)] mod tests { use super::*; use cosmogony::ZoneType; use geo::prelude::BoundingBox; fn p(x: f64, y: f64) -> ::geo::Point<f64> { ::geo::Point(::geo::Coordinate { x: x, y: y }) } fn make_admin(offset: f64, zt: Option<ZoneType>) -> ::mimir::Admin { make_complex_admin(&format!("admin:offset:{}", offset,), offset, zt, 1., None) } fn make_complex_admin( id: &str, offset: f64, zt: Option<ZoneType>, zone_size: f64, parent_offset: Option<&str>, ) -> ::mimir::Admin { // the boundary is a big octogon // the zone_size param is used to control the area of the zone let shape = ::geo::Polygon::new( ::geo::LineString(vec![ p(3. * zone_size + offset, 0. * zone_size + offset), p(6. * zone_size + offset, 0. * zone_size + offset), p(9. * zone_size + offset, 3. * zone_size + offset), p(9. * zone_size + offset, 6. * zone_size + offset), p(6. * zone_size + offset, 9. * zone_size + offset), p(3. * zone_size + offset, 9. * zone_size + offset), p(0. * zone_size + offset, 6. * zone_size + offset), p(0. * zone_size + offset, 3. * zone_size + offset), p(3. * zone_size + offset, 0. 
* zone_size + offset), ]), vec![], ); let boundary = ::geo::MultiPolygon(vec![shape]); ::mimir::Admin { id: id.into(), level: 8, name: "city".to_string(), label: format!("city {}", offset), zip_codes: vec!["421337".to_string()], weight: 0f64, coord: ::mimir::Coord::new(4.0 + offset, 4.0 + offset), bbox: boundary.bbox(), boundary: Some(boundary), insee: "outlook".to_string(), zone_type: zt, parent_id: parent_offset.map(|id| id.into()), codes: vec![], } } #[test] fn test_two_fake_admins() { let mut finder = AdminGeoFinder::default(); finder.insert(make_admin(40., Some(ZoneType::City))); finder.insert(make_admin(43., Some(ZoneType::State))); // outside for coord in [p(48., 41.), p(411., 41.), p(51., 54.), p(53., 53.)].iter() { assert!(finder.get(&coord.0).is_empty()); } // inside one let admins = finder.get(&p(44., 44.).0); assert_eq!(admins.len(), 1); assert_eq!(admins[0].id, "admin:offset:40"); let admins = finder.get(&p(48., 48.).0); assert_eq!(admins.len(), 1); assert_eq!(admins[0].id, "admin:offset:43"); // inside two let mut admins = finder.get(&p(46., 46.).0); admins.sort_by(|a, b| a.id.cmp(&b.id)); assert_eq!(admins.len(), 2); assert_eq!(admins[0].id, "admin:offset:40"); assert_eq!(admins[1].id, "admin:offset:43"); } #[test] fn test_two_admin_same_zone_type() { // a point can be associated to only 1 admin type // so a point is in 2 city, it is associated to only one let mut finder = AdminGeoFinder::default(); finder.insert(make_admin(40., Some(ZoneType::City))); finder.insert(make_admin(43., Some(ZoneType::City))); let admins = finder.get(&p(46., 46.).0); assert_eq!(admins.len(), 1); } #[test] fn test_two_no_zone_type() { // a point can be associated to only 1 admin type // but a point can be associated to multiple admin without zone_type // (for retrocompatibility of the data imported without cosmogony) let mut finder = AdminGeoFinder::default(); finder.insert(make_admin(40., None)); finder.insert(make_admin(43., None)); let admins = finder.get(&p(46., 46.).0); assert_eq!(admins.len(), 2); } #[test] fn test_hierarchy() { let mut finder = AdminGeoFinder::default(); finder.insert(make_complex_admin( "bob_city", 40., Some(ZoneType::City), 1., Some("bob_state"), )); finder.insert(make_complex_admin( "bob_state", 40., Some(ZoneType::StateDistrict), 2., Some("bob_country"), )); finder.insert(make_complex_admin( "bob_country", 40., Some(ZoneType::Country), 3., None, )); let admins = finder.get(&p(46., 46.).0); assert_eq!(admins.len(), 3); assert_eq!(admins[0].id, "bob_city"); assert_eq!(admins[1].id, "bob_state"); assert_eq!(admins[2].id, "bob_country"); } #[test] fn test_hierarchy_orphan() { let mut finder = AdminGeoFinder::default(); finder.insert(make_complex_admin( "bob_city", 40., Some(ZoneType::City), 1., Some("bob_state"), )); finder.insert(make_complex_admin( "bob_state", 40., Some(ZoneType::StateDistrict), 2., Some("bob_country"), )); finder.insert(make_complex_admin( "bob_country", 40., Some(ZoneType::Country), 3., None, )); // another_state also contains the point, but the geofinder look for only 1 admin by type (it needs only 1 state) // since bob_city has been tester first, it's hierarchy has been added automatically // so [46., 46.] 
will not be associated to another_state finder.insert(make_complex_admin( "another_state", 40., Some(ZoneType::StateDistrict), 2., Some("bob_country"), )); let admins = finder.get(&p(46., 46.).0); assert_eq!(admins.len(), 3); assert_eq!(admins[0].id, "bob_city"); assert_eq!(admins[1].id, "bob_state"); assert_eq!(admins[2].id, "bob_country"); } #[test] fn test_hierarchy_and_not_typed_zone() { let mut finder = AdminGeoFinder::default(); finder.insert(make_complex_admin( "bob_city", 40., Some(ZoneType::City), 1., Some("bob_state"), )); finder.insert(make_complex_admin( "bob_state", 40., Some(ZoneType::StateDistrict), 2., Some("bob_country"), )); finder.insert(make_complex_admin( "bob_country", 40., Some(ZoneType::Country), 3., None, )); // not_typed zone is outside the hierarchy, but since it contains the point and it has no type it is added finder.insert(make_complex_admin("no_typed_zone", 40., None, 2., None)); let admins = finder.get(&p(46., 46.).0); assert_eq!(admins.len(), 4); assert_eq!(admins[0].id, "no_typed_zone"); assert_eq!(admins[1].id, "bob_city"); assert_eq!(admins[2].id, "bob_state"); assert_eq!(admins[3].id, "bob_country"); } }
nsert(
identifier_name
admin_geofinder.rs
// Copyright © 2016, Canal TP and/or its affiliates. All rights reserved. // // This file is part of Navitia, // the software to build cool stuff with public transport. // // Hope you'll enjoy and contribute to this project, // powered by Canal TP (www.canaltp.fr). // Help us simplify mobility and open public transport: // a non ending quest to the responsive locomotion way of traveling! // // LICENCE: This program is free software; you can redistribute it // and/or modify it under the terms of the GNU Affero General Public // License as published by the Free Software Foundation, either // version 3 of the License, or (at your option) any later version. // // This program is distributed in the hope that it will be useful, but // WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU // Affero General Public License for more details. // // You should have received a copy of the GNU Affero General Public // License along with this program. If not, see // <http://www.gnu.org/licenses/>. // // Stay tuned using // twitter @navitia // IRC #navitia on freenode // https://groups.google.com/d/forum/navitia // www.navitia.io use geo; use geo::contains::Contains; use gst::rtree::{RTree, Rect}; use mimir::Admin; use std; use std::collections::{BTreeMap, BTreeSet}; use std::iter::FromIterator; use std::sync::Arc; /// We want to strip the admin's boundary for the objects referencing it (for performance purpose) /// thus in the `AdminGeoFinder` we store an Admin without the boundary (the option is emptied) /// and we store the boundary aside struct BoundaryAndAdmin(Option<geo::MultiPolygon<f64>>, Arc<Admin>); impl BoundaryAndAdmin { fn new(mut admin: Admin) -> BoundaryAndAdmin { let b = std::mem::replace(&mut admin.boundary, None); let minimal_admin = Arc::new(admin); BoundaryAndAdmin(b, minimal_admin) } } pub struct AdminGeoFinder { admins: RTree<BoundaryAndAdmin>, admin_by_id: BTreeMap<String, Arc<Admin>>, } impl AdminGeoFinder { pub fn insert(&mut self, admin: Admin) { use ordered_float::OrderedFloat; fn min(a: OrderedFloat<f32>, b: f64) -> f32 { a.0.min(down(b as f32)) } fn max(a: OrderedFloat<f32>, b: f64) -> f32 { a.0.max(up(b as f32)) } let rect = { let mut coords = match admin.boundary { Some(ref b) => b.0.iter().flat_map(|poly| (poly.exterior).0.iter()), None => return, }; let first_coord = match coords.next() { Some(c) => c, None => return, }; let first_rect: Rect = { let (x, y) = (first_coord.x() as f32, first_coord.y() as f32); Rect::from_float(down(x), up(x), down(y), up(y)) }; coords.fold(first_rect, |accu, p| { Rect::from_float( min(accu.xmin, p.x()), max(accu.xmax, p.x()), min(accu.ymin, p.y()), max(accu.ymax, p.y()), ) }) }; let bound_admin = BoundaryAndAdmin::new(admin); self.admin_by_id .insert(bound_admin.1.id.clone(), bound_admin.1.clone()); self.admins.insert(rect, bound_admin); } /// Get all Admins overlapping the coordinate pub fn get(&self, coord: &geo::Coordinate<f64>) -> Vec<Arc<Admin>> { let (x, y) = (coord.x as f32, coord.y as f32); let search = Rect::from_float(down(x), up(x), down(y), up(y)); let mut rtree_results = self.admins.get(&search); rtree_results.sort_by_key(|(_, a)| a.1.zone_type); let mut tested_hierarchy = BTreeSet::<String>::new(); let mut added_zone_types = BTreeSet::new(); let mut res = vec![]; for (_, boundary_and_admin) in rtree_results { let boundary = &boundary_and_admin.0; let admin = &boundary_and_admin.1; if tested_hierarchy.contains(&admin.id) { res.push(admin.clone()); } else if 
admin .zone_type .as_ref() .map_or(false, |zt| added_zone_types.contains(zt)) { // we don't want it, we already have this kind of ZoneType } else if boundary .as_ref() .map_or(false, |b| b.contains(&geo::Point(*coord))) { // we found a valid admin, we save it's hierarchy not to have to test their boundaries if let Some(zt) = admin.zone_type { added_zone_types.insert(zt.clone()); } let mut admin_parent_id = admin.parent_id.clone(); while let Some(id) = admin_parent_id { let admin_parent = self.admin_by_id.get(&id); if let Some(zt) = admin_parent.as_ref().and_then(|a| a.zone_type) { added_zone_types.insert(zt.clone()); } tested_hierarchy.insert(id); admin_parent_id = admin_parent.and_then(|a| a.parent_id.clone()); } res.push(admin.clone()); } } res } /// Iterates on all the admins with a not None boundary. pub fn admins<'a>(&'a self) -> Box<Iterator<Item = Admin> + 'a> { let iter = self .admins .get(&Rect::from_float( std::f32::NEG_INFINITY, std::f32::INFINITY, std::f32::NEG_INFINITY, std::f32::INFINITY, )).into_iter() .map(|(_, a)| { let mut admin = (*a.1).clone(); admin.boundary = a.0.clone(); admin }); Box::new(iter) } /// Iterates on all the `Rc<Admin>` in the structure as returned by `get`. pub fn admins_without_boundary<'a>(&'a self) -> Box<Iterator<Item = Arc<Admin>> + 'a> { let iter = self .admins .get(&Rect::from_float( std::f32::NEG_INFINITY, std::f32::INFINITY, std::f32::NEG_INFINITY, std::f32::INFINITY, )).into_iter() .map(|(_, a)| a.1.clone()); Box::new(iter) } } impl Default for AdminGeoFinder { fn default() -> Self { AdminGeoFinder { admins: RTree::new(), admin_by_id: BTreeMap::new(), } } } impl FromIterator<Admin> for AdminGeoFinder { fn from_iter<I: IntoIterator<Item = Admin>>(admins: I) -> Self {
} // the goal is that f in [down(f as f32) as f64, up(f as f32) as f64] fn down(f: f32) -> f32 { f - (f * ::std::f32::EPSILON).abs() } fn up(f: f32) -> f32 { f + (f * ::std::f32::EPSILON).abs() } #[test] fn test_up_down() { for &f in [1.0f64, 0., -0., -1., 0.1, -0.1, 0.9, -0.9, 42., -42.].iter() { let small_f = f as f32; assert!( down(small_f) as f64 <= f, format!("{} <= {}", down(small_f) as f64, f) ); assert!( f <= up(small_f) as f64, format!("{} <= {}", f, up(small_f) as f64) ); } } #[cfg(test)] mod tests { use super::*; use cosmogony::ZoneType; use geo::prelude::BoundingBox; fn p(x: f64, y: f64) -> ::geo::Point<f64> { ::geo::Point(::geo::Coordinate { x: x, y: y }) } fn make_admin(offset: f64, zt: Option<ZoneType>) -> ::mimir::Admin { make_complex_admin(&format!("admin:offset:{}", offset,), offset, zt, 1., None) } fn make_complex_admin( id: &str, offset: f64, zt: Option<ZoneType>, zone_size: f64, parent_offset: Option<&str>, ) -> ::mimir::Admin { // the boundary is a big octogon // the zone_size param is used to control the area of the zone let shape = ::geo::Polygon::new( ::geo::LineString(vec![ p(3. * zone_size + offset, 0. * zone_size + offset), p(6. * zone_size + offset, 0. * zone_size + offset), p(9. * zone_size + offset, 3. * zone_size + offset), p(9. * zone_size + offset, 6. * zone_size + offset), p(6. * zone_size + offset, 9. * zone_size + offset), p(3. * zone_size + offset, 9. * zone_size + offset), p(0. * zone_size + offset, 6. * zone_size + offset), p(0. * zone_size + offset, 3. * zone_size + offset), p(3. * zone_size + offset, 0. * zone_size + offset), ]), vec![], ); let boundary = ::geo::MultiPolygon(vec![shape]); ::mimir::Admin { id: id.into(), level: 8, name: "city".to_string(), label: format!("city {}", offset), zip_codes: vec!["421337".to_string()], weight: 0f64, coord: ::mimir::Coord::new(4.0 + offset, 4.0 + offset), bbox: boundary.bbox(), boundary: Some(boundary), insee: "outlook".to_string(), zone_type: zt, parent_id: parent_offset.map(|id| id.into()), codes: vec![], } } #[test] fn test_two_fake_admins() { let mut finder = AdminGeoFinder::default(); finder.insert(make_admin(40., Some(ZoneType::City))); finder.insert(make_admin(43., Some(ZoneType::State))); // outside for coord in [p(48., 41.), p(411., 41.), p(51., 54.), p(53., 53.)].iter() { assert!(finder.get(&coord.0).is_empty()); } // inside one let admins = finder.get(&p(44., 44.).0); assert_eq!(admins.len(), 1); assert_eq!(admins[0].id, "admin:offset:40"); let admins = finder.get(&p(48., 48.).0); assert_eq!(admins.len(), 1); assert_eq!(admins[0].id, "admin:offset:43"); // inside two let mut admins = finder.get(&p(46., 46.).0); admins.sort_by(|a, b| a.id.cmp(&b.id)); assert_eq!(admins.len(), 2); assert_eq!(admins[0].id, "admin:offset:40"); assert_eq!(admins[1].id, "admin:offset:43"); } #[test] fn test_two_admin_same_zone_type() { // a point can be associated to only 1 admin type // so a point is in 2 city, it is associated to only one let mut finder = AdminGeoFinder::default(); finder.insert(make_admin(40., Some(ZoneType::City))); finder.insert(make_admin(43., Some(ZoneType::City))); let admins = finder.get(&p(46., 46.).0); assert_eq!(admins.len(), 1); } #[test] fn test_two_no_zone_type() { // a point can be associated to only 1 admin type // but a point can be associated to multiple admin without zone_type // (for retrocompatibility of the data imported without cosmogony) let mut finder = AdminGeoFinder::default(); finder.insert(make_admin(40., None)); finder.insert(make_admin(43., None)); let admins = 
finder.get(&p(46., 46.).0); assert_eq!(admins.len(), 2); } #[test] fn test_hierarchy() { let mut finder = AdminGeoFinder::default(); finder.insert(make_complex_admin( "bob_city", 40., Some(ZoneType::City), 1., Some("bob_state"), )); finder.insert(make_complex_admin( "bob_state", 40., Some(ZoneType::StateDistrict), 2., Some("bob_country"), )); finder.insert(make_complex_admin( "bob_country", 40., Some(ZoneType::Country), 3., None, )); let admins = finder.get(&p(46., 46.).0); assert_eq!(admins.len(), 3); assert_eq!(admins[0].id, "bob_city"); assert_eq!(admins[1].id, "bob_state"); assert_eq!(admins[2].id, "bob_country"); } #[test] fn test_hierarchy_orphan() { let mut finder = AdminGeoFinder::default(); finder.insert(make_complex_admin( "bob_city", 40., Some(ZoneType::City), 1., Some("bob_state"), )); finder.insert(make_complex_admin( "bob_state", 40., Some(ZoneType::StateDistrict), 2., Some("bob_country"), )); finder.insert(make_complex_admin( "bob_country", 40., Some(ZoneType::Country), 3., None, )); // another_state also contains the point, but the geofinder look for only 1 admin by type (it needs only 1 state) // since bob_city has been tester first, it's hierarchy has been added automatically // so [46., 46.] will not be associated to another_state finder.insert(make_complex_admin( "another_state", 40., Some(ZoneType::StateDistrict), 2., Some("bob_country"), )); let admins = finder.get(&p(46., 46.).0); assert_eq!(admins.len(), 3); assert_eq!(admins[0].id, "bob_city"); assert_eq!(admins[1].id, "bob_state"); assert_eq!(admins[2].id, "bob_country"); } #[test] fn test_hierarchy_and_not_typed_zone() { let mut finder = AdminGeoFinder::default(); finder.insert(make_complex_admin( "bob_city", 40., Some(ZoneType::City), 1., Some("bob_state"), )); finder.insert(make_complex_admin( "bob_state", 40., Some(ZoneType::StateDistrict), 2., Some("bob_country"), )); finder.insert(make_complex_admin( "bob_country", 40., Some(ZoneType::Country), 3., None, )); // not_typed zone is outside the hierarchy, but since it contains the point and it has no type it is added finder.insert(make_complex_admin("no_typed_zone", 40., None, 2., None)); let admins = finder.get(&p(46., 46.).0); assert_eq!(admins.len(), 4); assert_eq!(admins[0].id, "no_typed_zone"); assert_eq!(admins[1].id, "bob_city"); assert_eq!(admins[2].id, "bob_state"); assert_eq!(admins[3].id, "bob_country"); } }
let mut geofinder = AdminGeoFinder::default(); for admin in admins { geofinder.insert(admin); } geofinder }
identifier_body
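The `insert`/`get` samples from admin_geofinder.rs above pad every `f32` bound with `down`/`up` so that the original `f64` coordinate is guaranteed to land inside the narrowed search rectangle. A minimal, self-contained sketch of just that bracketing property (the coordinate value is made up):

```rust
// down/up widen an f32 by one relative epsilon in each direction, so that
// an f64 narrowed to f32 is still bracketed by [down(x as f32), up(x as f32)].
fn down(f: f32) -> f32 {
    f - (f * f32::EPSILON).abs()
}

fn up(f: f32) -> f32 {
    f + (f * f32::EPSILON).abs()
}

fn main() {
    // Hypothetical coordinate, chosen only for illustration.
    let x: f64 = 2.300000123456789;
    let lo = down(x as f32) as f64;
    let hi = up(x as f32) as f64;
    assert!(lo <= x && x <= hi, "x must fall inside the widened f32 interval");
    println!("{} lies in [{}, {}]", x, lo, hi);
}
```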
log.rs
/* Copyright (C) 2017 Open Information Security Foundation * * You can copy, redistribute or modify this Program under the terms of * the GNU General Public License version 2 as published by the Free * Software Foundation. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * version 2 along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA * 02110-1301, USA. */ use std; use std::ffi::CString; use std::path::Path; use crate::core::*; #[derive(Debug)] #[repr(C)] pub enum Level { NotSet = -1, None = 0, Emergency, Alert, Critical, Error, Warning, Notice, Info, Perf, Config, Debug, } pub static mut LEVEL: i32 = Level::NotSet as i32; pub fn get_log_level() -> i32 { unsafe { LEVEL } } pub fn log_set_level(level: i32) { unsafe { LEVEL = level; } } #[no_mangle] pub extern "C" fn rs_log_set_level(level: i32) { log_set_level(level); } fn basename(filename: &str) -> &str { let path = Path::new(filename); for os_str in path.file_name() { for basename in os_str.to_str() { return basename; } } return filename; } pub fn sclog(level: Level, file: &str, line: u32, function: &str, code: i32, message: &str) { let filename = basename(file); sc_log_message(level, filename, line, function, code, message); } // This macro returns the function name. // // This macro has been borrowed from https://github.com/popzxc/stdext-rs, which // is released under the MIT license as there is currently no macro in Rust // to provide the function name. #[cfg(feature = "function-macro")] #[macro_export(local_inner_macros)] macro_rules!function { () => {{ // Okay, this is ugly, I get it. However, this is the best we can get on a stable rust. fn __f() {} fn type_name_of<T>(_: T) -> &'static str { std::any::type_name::<T>() } let name = type_name_of(__f); &name[..name.len() - 5] }} } // Rust versions less than 1.38 can not use the above macro, so keep the old // macro around for a while. 
#[cfg(not(feature = "function-macro"))] #[macro_export(local_inner_macros)] macro_rules!function { () => {{ "<rust>" }} } #[macro_export] macro_rules!do_log { ($level:expr, $file:expr, $line:expr, $function:expr, $code:expr, $($arg:tt)*) => { if $crate::log::get_log_level() >= $level as i32 { $crate::log::sclog($level, $file, $line, $function, $code, &(format!($($arg)*))); } } } #[macro_export] macro_rules!SCLogNotice { ($($arg:tt)*) => { $crate::do_log!($crate::log::Level::Notice, file!(), line!(), $crate::function!(), 0, $($arg)*); } } #[macro_export] macro_rules!SCLogInfo { ($($arg:tt)*) => { $crate::do_log!($crate::log::Level::Info, file!(), line!(), $crate::function!(), 0, $($arg)*); } } #[macro_export] macro_rules!SCLogPerf { ($($arg:tt)*) => { $crate::do_log!($crate::log::Level::Perf, file!(), line!(), $crate::function!(), 0, $($arg)*); } } #[macro_export] macro_rules!SCLogConfig { ($($arg:tt)*) => { $crate::do_log!($crate::log::Level::Config, file!(), line!(), $crate::function!(), 0, $($arg)*); } } #[macro_export] macro_rules!SCLogError { ($($arg:tt)*) => { $crate::do_log!($crate::log::Level::Error, file!(), line!(), $crate::function!(), 0, $($arg)*); } } // Debug mode: call C SCLogDebug #[cfg(feature = "debug")] #[macro_export] macro_rules!SCLogDebug { ($($arg:tt)*) => { do_log!($crate::log::Level::Debug, file!(), line!(), $crate::function!(), 0, $($arg)*); } } // Release mode: ignore arguments // Use a reference to avoid moving values. #[cfg(not(feature = "debug"))] #[macro_export] macro_rules!SCLogDebug { ($last:expr) => { let _ = &$last; let _ = $crate::log::Level::Debug; }; ($one:expr, $($arg:tt)*) => { let _ = &$one; SCLogDebug!($($arg)*); }; } /// SCLogMessage wrapper. If the Suricata C context is not registered /// a more basic log format will be used (for example, when running /// Rust unit tests). pub fn sc_log_message(level: Level, filename: &str, line: std::os::raw::c_uint, function: &str, code: std::os::raw::c_int, message: &str) -> std::os::raw::c_int { unsafe { if let Some(c) = SC { return (c.SCLogMessage)( level as i32,
line, to_safe_cstring(function).as_ptr(), code, to_safe_cstring(message).as_ptr()); } } // Fall back if the Suricata C context is not registered which is // the case when Rust unit tests are running. // // We don't log the time right now as I don't think it can be done // with Rust 1.7.0 without using an external crate. With Rust // 1.8.0 and newer we can unix UNIX_EPOCH.elapsed() to get the // unix time. println!("{}:{} <{:?}> -- {}", filename, line, level, message); return 0; } // Convert a &str into a CString by first stripping NUL bytes. fn to_safe_cstring(val: &str) -> CString { let mut safe = Vec::with_capacity(val.len()); for c in val.as_bytes() { if *c!= 0 { safe.push(*c); } } match CString::new(safe) { Ok(cstr) => cstr, _ => { CString::new("<failed to encode string>").unwrap() } } }
to_safe_cstring(filename).as_ptr(),
random_line_split
log.rs
/* Copyright (C) 2017 Open Information Security Foundation * * You can copy, redistribute or modify this Program under the terms of * the GNU General Public License version 2 as published by the Free * Software Foundation. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * version 2 along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA * 02110-1301, USA. */ use std; use std::ffi::CString; use std::path::Path; use crate::core::*; #[derive(Debug)] #[repr(C)] pub enum Level { NotSet = -1, None = 0, Emergency, Alert, Critical, Error, Warning, Notice, Info, Perf, Config, Debug, } pub static mut LEVEL: i32 = Level::NotSet as i32; pub fn
() -> i32 { unsafe { LEVEL } } pub fn log_set_level(level: i32) { unsafe { LEVEL = level; } } #[no_mangle] pub extern "C" fn rs_log_set_level(level: i32) { log_set_level(level); } fn basename(filename: &str) -> &str { let path = Path::new(filename); for os_str in path.file_name() { for basename in os_str.to_str() { return basename; } } return filename; } pub fn sclog(level: Level, file: &str, line: u32, function: &str, code: i32, message: &str) { let filename = basename(file); sc_log_message(level, filename, line, function, code, message); } // This macro returns the function name. // // This macro has been borrowed from https://github.com/popzxc/stdext-rs, which // is released under the MIT license as there is currently no macro in Rust // to provide the function name. #[cfg(feature = "function-macro")] #[macro_export(local_inner_macros)] macro_rules!function { () => {{ // Okay, this is ugly, I get it. However, this is the best we can get on a stable rust. fn __f() {} fn type_name_of<T>(_: T) -> &'static str { std::any::type_name::<T>() } let name = type_name_of(__f); &name[..name.len() - 5] }} } // Rust versions less than 1.38 can not use the above macro, so keep the old // macro around for a while. #[cfg(not(feature = "function-macro"))] #[macro_export(local_inner_macros)] macro_rules!function { () => {{ "<rust>" }} } #[macro_export] macro_rules!do_log { ($level:expr, $file:expr, $line:expr, $function:expr, $code:expr, $($arg:tt)*) => { if $crate::log::get_log_level() >= $level as i32 { $crate::log::sclog($level, $file, $line, $function, $code, &(format!($($arg)*))); } } } #[macro_export] macro_rules!SCLogNotice { ($($arg:tt)*) => { $crate::do_log!($crate::log::Level::Notice, file!(), line!(), $crate::function!(), 0, $($arg)*); } } #[macro_export] macro_rules!SCLogInfo { ($($arg:tt)*) => { $crate::do_log!($crate::log::Level::Info, file!(), line!(), $crate::function!(), 0, $($arg)*); } } #[macro_export] macro_rules!SCLogPerf { ($($arg:tt)*) => { $crate::do_log!($crate::log::Level::Perf, file!(), line!(), $crate::function!(), 0, $($arg)*); } } #[macro_export] macro_rules!SCLogConfig { ($($arg:tt)*) => { $crate::do_log!($crate::log::Level::Config, file!(), line!(), $crate::function!(), 0, $($arg)*); } } #[macro_export] macro_rules!SCLogError { ($($arg:tt)*) => { $crate::do_log!($crate::log::Level::Error, file!(), line!(), $crate::function!(), 0, $($arg)*); } } // Debug mode: call C SCLogDebug #[cfg(feature = "debug")] #[macro_export] macro_rules!SCLogDebug { ($($arg:tt)*) => { do_log!($crate::log::Level::Debug, file!(), line!(), $crate::function!(), 0, $($arg)*); } } // Release mode: ignore arguments // Use a reference to avoid moving values. #[cfg(not(feature = "debug"))] #[macro_export] macro_rules!SCLogDebug { ($last:expr) => { let _ = &$last; let _ = $crate::log::Level::Debug; }; ($one:expr, $($arg:tt)*) => { let _ = &$one; SCLogDebug!($($arg)*); }; } /// SCLogMessage wrapper. If the Suricata C context is not registered /// a more basic log format will be used (for example, when running /// Rust unit tests). pub fn sc_log_message(level: Level, filename: &str, line: std::os::raw::c_uint, function: &str, code: std::os::raw::c_int, message: &str) -> std::os::raw::c_int { unsafe { if let Some(c) = SC { return (c.SCLogMessage)( level as i32, to_safe_cstring(filename).as_ptr(), line, to_safe_cstring(function).as_ptr(), code, to_safe_cstring(message).as_ptr()); } } // Fall back if the Suricata C context is not registered which is // the case when Rust unit tests are running. 
// // We don't log the time right now as I don't think it can be done // with Rust 1.7.0 without using an external crate. With Rust // 1.8.0 and newer we can unix UNIX_EPOCH.elapsed() to get the // unix time. println!("{}:{} <{:?}> -- {}", filename, line, level, message); return 0; } // Convert a &str into a CString by first stripping NUL bytes. fn to_safe_cstring(val: &str) -> CString { let mut safe = Vec::with_capacity(val.len()); for c in val.as_bytes() { if *c!= 0 { safe.push(*c); } } match CString::new(safe) { Ok(cstr) => cstr, _ => { CString::new("<failed to encode string>").unwrap() } } }
get_log_level
identifier_name
log.rs
/* Copyright (C) 2017 Open Information Security Foundation * * You can copy, redistribute or modify this Program under the terms of * the GNU General Public License version 2 as published by the Free * Software Foundation. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * version 2 along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA * 02110-1301, USA. */ use std; use std::ffi::CString; use std::path::Path; use crate::core::*; #[derive(Debug)] #[repr(C)] pub enum Level { NotSet = -1, None = 0, Emergency, Alert, Critical, Error, Warning, Notice, Info, Perf, Config, Debug, } pub static mut LEVEL: i32 = Level::NotSet as i32; pub fn get_log_level() -> i32 { unsafe { LEVEL } } pub fn log_set_level(level: i32) { unsafe { LEVEL = level; } } #[no_mangle] pub extern "C" fn rs_log_set_level(level: i32) { log_set_level(level); } fn basename(filename: &str) -> &str { let path = Path::new(filename); for os_str in path.file_name() { for basename in os_str.to_str() { return basename; } } return filename; } pub fn sclog(level: Level, file: &str, line: u32, function: &str, code: i32, message: &str)
// This macro returns the function name. // // This macro has been borrowed from https://github.com/popzxc/stdext-rs, which // is released under the MIT license as there is currently no macro in Rust // to provide the function name. #[cfg(feature = "function-macro")] #[macro_export(local_inner_macros)] macro_rules!function { () => {{ // Okay, this is ugly, I get it. However, this is the best we can get on a stable rust. fn __f() {} fn type_name_of<T>(_: T) -> &'static str { std::any::type_name::<T>() } let name = type_name_of(__f); &name[..name.len() - 5] }} } // Rust versions less than 1.38 can not use the above macro, so keep the old // macro around for a while. #[cfg(not(feature = "function-macro"))] #[macro_export(local_inner_macros)] macro_rules!function { () => {{ "<rust>" }} } #[macro_export] macro_rules!do_log { ($level:expr, $file:expr, $line:expr, $function:expr, $code:expr, $($arg:tt)*) => { if $crate::log::get_log_level() >= $level as i32 { $crate::log::sclog($level, $file, $line, $function, $code, &(format!($($arg)*))); } } } #[macro_export] macro_rules!SCLogNotice { ($($arg:tt)*) => { $crate::do_log!($crate::log::Level::Notice, file!(), line!(), $crate::function!(), 0, $($arg)*); } } #[macro_export] macro_rules!SCLogInfo { ($($arg:tt)*) => { $crate::do_log!($crate::log::Level::Info, file!(), line!(), $crate::function!(), 0, $($arg)*); } } #[macro_export] macro_rules!SCLogPerf { ($($arg:tt)*) => { $crate::do_log!($crate::log::Level::Perf, file!(), line!(), $crate::function!(), 0, $($arg)*); } } #[macro_export] macro_rules!SCLogConfig { ($($arg:tt)*) => { $crate::do_log!($crate::log::Level::Config, file!(), line!(), $crate::function!(), 0, $($arg)*); } } #[macro_export] macro_rules!SCLogError { ($($arg:tt)*) => { $crate::do_log!($crate::log::Level::Error, file!(), line!(), $crate::function!(), 0, $($arg)*); } } // Debug mode: call C SCLogDebug #[cfg(feature = "debug")] #[macro_export] macro_rules!SCLogDebug { ($($arg:tt)*) => { do_log!($crate::log::Level::Debug, file!(), line!(), $crate::function!(), 0, $($arg)*); } } // Release mode: ignore arguments // Use a reference to avoid moving values. #[cfg(not(feature = "debug"))] #[macro_export] macro_rules!SCLogDebug { ($last:expr) => { let _ = &$last; let _ = $crate::log::Level::Debug; }; ($one:expr, $($arg:tt)*) => { let _ = &$one; SCLogDebug!($($arg)*); }; } /// SCLogMessage wrapper. If the Suricata C context is not registered /// a more basic log format will be used (for example, when running /// Rust unit tests). pub fn sc_log_message(level: Level, filename: &str, line: std::os::raw::c_uint, function: &str, code: std::os::raw::c_int, message: &str) -> std::os::raw::c_int { unsafe { if let Some(c) = SC { return (c.SCLogMessage)( level as i32, to_safe_cstring(filename).as_ptr(), line, to_safe_cstring(function).as_ptr(), code, to_safe_cstring(message).as_ptr()); } } // Fall back if the Suricata C context is not registered which is // the case when Rust unit tests are running. // // We don't log the time right now as I don't think it can be done // with Rust 1.7.0 without using an external crate. With Rust // 1.8.0 and newer we can unix UNIX_EPOCH.elapsed() to get the // unix time. println!("{}:{} <{:?}> -- {}", filename, line, level, message); return 0; } // Convert a &str into a CString by first stripping NUL bytes. 
fn to_safe_cstring(val: &str) -> CString { let mut safe = Vec::with_capacity(val.len()); for c in val.as_bytes() { if *c!= 0 { safe.push(*c); } } match CString::new(safe) { Ok(cstr) => cstr, _ => { CString::new("<failed to encode string>").unwrap() } } }
{ let filename = basename(file); sc_log_message(level, filename, line, function, code, message); }
identifier_body
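The log.rs samples above implement `basename` with nested `for` loops over the `Option`s returned by `Path::file_name` and `OsStr::to_str`. An equivalent combinator-based formulation — shown here only as a sketch, with the same fallback to the original input — would be:

```rust
use std::path::Path;

// Same behaviour as the basename() in the log.rs samples above: return the
// final path component when it exists and is valid UTF-8, else the input.
fn basename(filename: &str) -> &str {
    Path::new(filename)
        .file_name()
        .and_then(|name| name.to_str())
        .unwrap_or(filename)
}

fn main() {
    assert_eq!(basename("src/log.rs"), "log.rs");
    assert_eq!(basename("log.rs"), "log.rs");
    println!("basename behaves as expected");
}
```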
lib.rs
use std::f64::consts::PI; /// Calculates the volume of the n-sphere. pub fn sphere_volume(radius: f64, dim: u64) -> f64
#[test] fn volume_of_1_sphere_works() { for radius in (0..5000000).map(|n| n as f64 / 7000.) { let expected = PI * radius.powi(2); let actual = sphere_volume(radius, 2); assert!(equals(expected, actual, 0.0001, 5), "Expected: {}, Actual: {}", expected, actual); } } #[test] fn volume_of_2_sphere_works() { for radius in (0..5000000).map(|n| n as f64 / 7000.) { let expected = (4. / 3.) * PI * radius.powi(3); let actual = sphere_volume(radius, 3); assert!(equals(expected, actual, 0.0001, 5), "Expected: {}, Actual: {}", expected, actual); } } #[test] fn volume_of_3_sphere_works() { for radius in (0..5000000).map(|n| n as f64 / 7000.) { let expected = (1. / 2.) * PI.powi(2) * radius.powi(4); let actual = sphere_volume(radius, 4); assert!(equals(expected, actual, 0.0001, 5), "Expected: {}, Actual: {}", expected, actual); } } #[cfg(test)] fn equals(a: f64, b: f64, max_diff: f64, max_ulps_diff: i64) -> bool { let diff = f64::abs(a - b); if diff <= max_diff { return true; } if a.is_sign_positive() &&!b.is_sign_positive() { return false; } let a_i64 = unsafe{::std::mem::transmute::<_, i64>(a)}; let b_i64 = unsafe{::std::mem::transmute::<_, i64>(b)}; let ulps_diff = i64::abs(a_i64 - b_i64); return ulps_diff <= max_ulps_diff; } #[inline] fn fact(n: u64) -> u64 { //TODO: This could be made faster with better algorithm, but it's called with low values so it doesn't matter much. if n < 2 { 1 } else { n * fact(n - 1) } } #[test] fn factorial_works() { assert_eq!(1, fact(0)); let mut x = 1; for n in 1..21 { x *= n; assert_eq!(x, fact(n)); } }
{ let gamma = if dim % 2 == 0 { // This works because dim / 2 is a whole number. fact(dim / 2) as f64 } else { // This works because the function is gamma(1/2 + n) where n = dim / 2 + 1 let n = dim / 2 + 1; (fact(2 * n) as f64 / (4u64.pow(n as u32) * fact(n)) as f64) * PI.sqrt() }; (PI.powf(0.5 * dim as f64) * radius.powf(dim as f64)) / gamma }
identifier_body
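The lib.rs samples above implement the closed form V_n(r) = pi^(n/2) * r^n / Gamma(n/2 + 1), computing the half-integer gamma values as (2m)! / (4^m * m!) * sqrt(pi). As a quick spot check of that formula for the unit 3-ball (expected volume 4*pi/3), one could write:

```rust
use std::f64::consts::PI;

fn main() {
    // Gamma(5/2) via the identity (2m)!/(4^m * m!) * sqrt(pi) with m = 2:
    // 24 / 32 * sqrt(pi) = 0.75 * sqrt(pi).
    let gamma_5_2 = 0.75 * PI.sqrt();
    // V_3(1) = pi^(3/2) / Gamma(5/2), which collapses to 4*pi/3.
    let volume = PI.powf(1.5) / gamma_5_2;
    let expected = 4.0 * PI / 3.0;
    assert!((volume - expected).abs() < 1e-9);
    println!("unit 3-ball volume = {} (expected {})", volume, expected);
}
```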
lib.rs
use std::f64::consts::PI; /// Calculates the volume of the n-sphere. pub fn sphere_volume(radius: f64, dim: u64) -> f64 { let gamma = if dim % 2 == 0 { // This works because dim / 2 is a whole number. fact(dim / 2) as f64 } else { // This works because the function is gamma(1/2 + n) where n = dim / 2 + 1 let n = dim / 2 + 1; (fact(2 * n) as f64 / (4u64.pow(n as u32) * fact(n)) as f64) * PI.sqrt() }; (PI.powf(0.5 * dim as f64) * radius.powf(dim as f64)) / gamma } #[test] fn volume_of_1_sphere_works() { for radius in (0..5000000).map(|n| n as f64 / 7000.) { let expected = PI * radius.powi(2); let actual = sphere_volume(radius, 2); assert!(equals(expected, actual, 0.0001, 5), "Expected: {}, Actual: {}", expected, actual); } } #[test] fn volume_of_2_sphere_works() { for radius in (0..5000000).map(|n| n as f64 / 7000.) { let expected = (4. / 3.) * PI * radius.powi(3); let actual = sphere_volume(radius, 3); assert!(equals(expected, actual, 0.0001, 5), "Expected: {}, Actual: {}", expected, actual); } } #[test] fn volume_of_3_sphere_works() { for radius in (0..5000000).map(|n| n as f64 / 7000.) { let expected = (1. / 2.) * PI.powi(2) * radius.powi(4); let actual = sphere_volume(radius, 4); assert!(equals(expected, actual, 0.0001, 5), "Expected: {}, Actual: {}", expected, actual); } } #[cfg(test)] fn equals(a: f64, b: f64, max_diff: f64, max_ulps_diff: i64) -> bool { let diff = f64::abs(a - b); if diff <= max_diff { return true; } if a.is_sign_positive() &&!b.is_sign_positive()
let a_i64 = unsafe{::std::mem::transmute::<_, i64>(a)}; let b_i64 = unsafe{::std::mem::transmute::<_, i64>(b)}; let ulps_diff = i64::abs(a_i64 - b_i64); return ulps_diff <= max_ulps_diff; } #[inline] fn fact(n: u64) -> u64 { //TODO: This could be made faster with better algorithm, but it's called with low values so it doesn't matter much. if n < 2 { 1 } else { n * fact(n - 1) } } #[test] fn factorial_works() { assert_eq!(1, fact(0)); let mut x = 1; for n in 1..21 { x *= n; assert_eq!(x, fact(n)); } }
{ return false; }
conditional_block
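The conditional_block sample above belongs to an `equals` helper that, after an absolute-difference check, compares the two floats by the distance between their reinterpreted bit patterns, i.e. in ULPs. A minimal sketch of that idea using the safe `f64::to_bits` instead of `transmute` (the example values are arbitrary):

```rust
// For same-sign, finite f64 values, adjacent representable numbers have
// bit patterns that differ by 1, so the bit-pattern distance counts ULPs.
fn ulps_apart(a: f64, b: f64) -> i64 {
    (a.to_bits() as i64 - b.to_bits() as i64).abs()
}

fn main() {
    let a = 0.1_f64 + 0.2_f64; // 0.30000000000000004...
    let b = 0.3_f64;
    assert!(a != b);                  // exact comparison fails
    assert!(ulps_apart(a, b) <= 4);   // yet the values are only a few ULPs apart
    println!("0.1 + 0.2 vs 0.3: {} ULP(s) apart", ulps_apart(a, b));
}
```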
lib.rs
use std::f64::consts::PI; /// Calculates the volume of the n-sphere. pub fn sphere_volume(radius: f64, dim: u64) -> f64 { let gamma = if dim % 2 == 0 {
// This works because dim / 2 is a whole number. fact(dim / 2) as f64 } else { // This works because the function is gamma(1/2 + n) where n = dim / 2 + 1 let n = dim / 2 + 1; (fact(2 * n) as f64 / (4u64.pow(n as u32) * fact(n)) as f64) * PI.sqrt() }; (PI.powf(0.5 * dim as f64) * radius.powf(dim as f64)) / gamma } #[test] fn volume_of_1_sphere_works() { for radius in (0..5000000).map(|n| n as f64 / 7000.) { let expected = PI * radius.powi(2); let actual = sphere_volume(radius, 2); assert!(equals(expected, actual, 0.0001, 5), "Expected: {}, Actual: {}", expected, actual); } } #[test] fn volume_of_2_sphere_works() { for radius in (0..5000000).map(|n| n as f64 / 7000.) { let expected = (4. / 3.) * PI * radius.powi(3); let actual = sphere_volume(radius, 3); assert!(equals(expected, actual, 0.0001, 5), "Expected: {}, Actual: {}", expected, actual); } } #[test] fn volume_of_3_sphere_works() { for radius in (0..5000000).map(|n| n as f64 / 7000.) { let expected = (1. / 2.) * PI.powi(2) * radius.powi(4); let actual = sphere_volume(radius, 4); assert!(equals(expected, actual, 0.0001, 5), "Expected: {}, Actual: {}", expected, actual); } } #[cfg(test)] fn equals(a: f64, b: f64, max_diff: f64, max_ulps_diff: i64) -> bool { let diff = f64::abs(a - b); if diff <= max_diff { return true; } if a.is_sign_positive() &&!b.is_sign_positive() { return false; } let a_i64 = unsafe{::std::mem::transmute::<_, i64>(a)}; let b_i64 = unsafe{::std::mem::transmute::<_, i64>(b)}; let ulps_diff = i64::abs(a_i64 - b_i64); return ulps_diff <= max_ulps_diff; } #[inline] fn fact(n: u64) -> u64 { //TODO: This could be made faster with better algorithm, but it's called with low values so it doesn't matter much. if n < 2 { 1 } else { n * fact(n - 1) } } #[test] fn factorial_works() { assert_eq!(1, fact(0)); let mut x = 1; for n in 1..21 { x *= n; assert_eq!(x, fact(n)); } }
random_line_split
lib.rs
use std::f64::consts::PI; /// Calculates the volume of the n-sphere. pub fn sphere_volume(radius: f64, dim: u64) -> f64 { let gamma = if dim % 2 == 0 { // This works because dim / 2 is a whole number. fact(dim / 2) as f64 } else { // This works because the function is gamma(1/2 + n) where n = dim / 2 + 1 let n = dim / 2 + 1; (fact(2 * n) as f64 / (4u64.pow(n as u32) * fact(n)) as f64) * PI.sqrt() }; (PI.powf(0.5 * dim as f64) * radius.powf(dim as f64)) / gamma } #[test] fn volume_of_1_sphere_works() { for radius in (0..5000000).map(|n| n as f64 / 7000.) { let expected = PI * radius.powi(2); let actual = sphere_volume(radius, 2); assert!(equals(expected, actual, 0.0001, 5), "Expected: {}, Actual: {}", expected, actual); } } #[test] fn volume_of_2_sphere_works() { for radius in (0..5000000).map(|n| n as f64 / 7000.) { let expected = (4. / 3.) * PI * radius.powi(3); let actual = sphere_volume(radius, 3); assert!(equals(expected, actual, 0.0001, 5), "Expected: {}, Actual: {}", expected, actual); } } #[test] fn volume_of_3_sphere_works() { for radius in (0..5000000).map(|n| n as f64 / 7000.) { let expected = (1. / 2.) * PI.powi(2) * radius.powi(4); let actual = sphere_volume(radius, 4); assert!(equals(expected, actual, 0.0001, 5), "Expected: {}, Actual: {}", expected, actual); } } #[cfg(test)] fn
(a: f64, b: f64, max_diff: f64, max_ulps_diff: i64) -> bool { let diff = f64::abs(a - b); if diff <= max_diff { return true; } if a.is_sign_positive() &&!b.is_sign_positive() { return false; } let a_i64 = unsafe{::std::mem::transmute::<_, i64>(a)}; let b_i64 = unsafe{::std::mem::transmute::<_, i64>(b)}; let ulps_diff = i64::abs(a_i64 - b_i64); return ulps_diff <= max_ulps_diff; } #[inline] fn fact(n: u64) -> u64 { //TODO: This could be made faster with better algorithm, but it's called with low values so it doesn't matter much. if n < 2 { 1 } else { n * fact(n - 1) } } #[test] fn factorial_works() { assert_eq!(1, fact(0)); let mut x = 1; for n in 1..21 { x *= n; assert_eq!(x, fact(n)); } }
equals
identifier_name
def.rs
#[test] fn parse_macro_def_no_params() { super::test_parser( r#" <marker type="NODE_MACRO_DEF">macro test<marker type="NODE_MACRO_PARAM_LIST">()</marker> { }</marker> "#, ) } #[test] fn parse_macro_def() { super::test_parser( r#" <marker type="NODE_MACRO_DEF">macro test<marker type="NODE_MACRO_PARAM_LIST">( <marker type="NODE_MACRO_PARAM_LIST_ITEM">type t</marker> )</marker> { }</marker> "#, ) } #[test] #[ignore] fn parse_abstract_container_def() { super::test_parser( r#" <marker type="KW_ABSTRACT">abstract</marker> block test {} "#, ); } #[test] fn parse_abstract_container_with_extends_list() { super::test_parser( r#" abstract block test <marker type="NODE_EXTENDS_LIST">extends abc</marker> {} "#, ); } #[test] fn parse_var_def()
#[test] fn parse_var_with_initializer() { super::test_parser( r#" <marker type="NODE_VARIABLE_DEF">type_attribute a = <marker type="NODE_BINARY_EXPR">a | b</marker>;</marker> "#, ) }
{ super::test_parser( r#" <marker type="NODE_VARIABLE_DEF">type a;</marker> "#, ) }
identifier_body