Dataset schema (column types and value statistics):

| Column    | Type         | Statistics |
|-----------|--------------|------------|
| file_name | large_string | lengths 4 to 69 |
| prefix    | large_string | lengths 0 to 26.7k |
| suffix    | large_string | lengths 0 to 24.8k |
| middle    | large_string | lengths 0 to 2.12k |
| fim_type  | large_string | 4 classes (random_line_split, identifier_name, identifier_body, conditional_block) |
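Each row that follows holds a `prefix`, `middle`, and `suffix` cut from a single Rust source file, plus a `fim_type` label naming the strategy used to pick the masked span. As a rough illustration only, the sketch below shows how such a row could be stitched into a fill-in-the-middle training string; the `build_fim_prompt` helper and the sentinel tokens are assumptions made for this sketch, not something defined by the dataset.

```rust
// Hypothetical assembly of one dataset row into a FIM training string.
// The sentinel tokens are placeholders; real models define their own.
fn build_fim_prompt(prefix: &str, suffix: &str, middle: &str) -> String {
    // Prefix and suffix come first; the model is trained to emit the middle.
    format!("<fim_prefix>{prefix}<fim_suffix>{suffix}<fim_middle>{middle}")
}

fn main() {
    let prefix = "fn add(a: i32, b: i32) -> i32 {\n    ";
    let middle = "a + b";
    let suffix = "\n}\n";
    println!("{}", build_fim_prompt(prefix, suffix, middle));
}
```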
swap-overlapping.rs
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Issue #5041 - avoid overlapping memcpy when src and dest of a swap are the same use std::ptr; pub fn main() { let mut test = TestDescAndFn { desc: TestDesc { name: TestName::DynTestName("test".to_string()), should_fail: false }, testfn: TestFn::DynTestFn(proc() ()), }; do_swap(&mut test); } fn do_swap(test: &mut TestDescAndFn) { unsafe { ptr::swap(test, test); } } pub enum TestName { DynTestName(String) } pub enum TestFn { DynTestFn(proc():'static), DynBenchFn(proc(&mut int):'static) } pub struct TestDesc { name: TestName, should_fail: bool
pub struct TestDescAndFn { desc: TestDesc, testfn: TestFn, }
}
random_line_split
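The `random_line_split` rows, like the one above, look as though the source file was cut at line boundaries so that a short run of lines becomes the `middle`. A minimal sketch of that kind of three-way split follows; the cut points are passed in explicitly rather than sampled, and the function name and signature are illustrative assumptions rather than the dataset's actual generation code.

```rust
// Illustrative line-boundary split into (prefix, middle, suffix).
// `start..end` selects which lines become the middle span; callers must
// ensure start <= end <= the number of lines in `source`.
fn split_at_lines(source: &str, start: usize, end: usize) -> (String, String, String) {
    let lines: Vec<&str> = source.lines().collect();
    let prefix = lines[..start].join("\n");
    let middle = lines[start..end].join("\n");
    let suffix = lines[end..].join("\n");
    (prefix, middle, suffix)
}
```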
swap-overlapping.rs
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Issue #5041 - avoid overlapping memcpy when src and dest of a swap are the same use std::ptr; pub fn main() { let mut test = TestDescAndFn { desc: TestDesc { name: TestName::DynTestName("test".to_string()), should_fail: false }, testfn: TestFn::DynTestFn(proc() ()), }; do_swap(&mut test); } fn
(test: &mut TestDescAndFn) { unsafe { ptr::swap(test, test); } } pub enum TestName { DynTestName(String) } pub enum TestFn { DynTestFn(proc():'static), DynBenchFn(proc(&mut int):'static) } pub struct TestDesc { name: TestName, should_fail: bool } pub struct TestDescAndFn { desc: TestDesc, testfn: TestFn, }
do_swap
identifier_name
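In `identifier_name` rows such as the one ending above, the `middle` is a single identifier (here `do_swap`) and everything around it forms the prefix and suffix; the `identifier_body` and `conditional_block` rows appear to work the same way but mask a function body or a conditional block instead. A naive, purely illustrative way to carve out such a span by byte offset is sketched below; real tooling would more likely walk a parse tree, and the helper name is an assumption.

```rust
// Hypothetical extraction of an identifier span into (prefix, middle, suffix).
// Uses the first textual occurrence, which is only safe for unique names.
fn mask_identifier(source: &str, name: &str) -> Option<(String, String, String)> {
    let start = source.find(name)?;      // byte offset of the identifier
    let end = start + name.len();
    Some((
        source[..start].to_string(),     // prefix: everything before the name
        source[start..end].to_string(),  // middle: the identifier itself
        source[end..].to_string(),       // suffix: everything after the name
    ))
}
```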
unnecessary_operation.rs
// run-rustfix #![feature(box_syntax)] #![allow(clippy::deref_addrof, dead_code, unused, clippy::no_effect)] #![warn(clippy::unnecessary_operation)] struct Tuple(i32); struct
{ field: i32, } enum Enum { Tuple(i32), Struct { field: i32 }, } struct DropStruct { field: i32, } impl Drop for DropStruct { fn drop(&mut self) {} } struct DropTuple(i32); impl Drop for DropTuple { fn drop(&mut self) {} } enum DropEnum { Tuple(i32), Struct { field: i32 }, } impl Drop for DropEnum { fn drop(&mut self) {} } struct FooString { s: String, } fn get_number() -> i32 { 0 } fn get_usize() -> usize { 0 } fn get_struct() -> Struct { Struct { field: 0 } } fn get_drop_struct() -> DropStruct { DropStruct { field: 0 } } fn main() { Tuple(get_number()); Struct { field: get_number() }; Struct {..get_struct() }; Enum::Tuple(get_number()); Enum::Struct { field: get_number() }; 5 + get_number(); *&get_number(); &get_number(); (5, 6, get_number()); box get_number(); get_number()..; ..get_number(); 5..get_number(); [42, get_number()]; [42, 55][get_usize()]; (42, get_number()).1; [get_number(); 55]; [42; 55][get_usize()]; { get_number() }; FooString { s: String::from("blah"), }; // Do not warn DropTuple(get_number()); DropStruct { field: get_number() }; DropStruct { field: get_number() }; DropStruct {..get_drop_struct() }; DropEnum::Tuple(get_number()); DropEnum::Struct { field: get_number() }; }
Struct
identifier_name
unnecessary_operation.rs
// run-rustfix #![feature(box_syntax)] #![allow(clippy::deref_addrof, dead_code, unused, clippy::no_effect)] #![warn(clippy::unnecessary_operation)] struct Tuple(i32); struct Struct { field: i32, } enum Enum { Tuple(i32), Struct { field: i32 }, } struct DropStruct { field: i32, } impl Drop for DropStruct { fn drop(&mut self) {} } struct DropTuple(i32); impl Drop for DropTuple { fn drop(&mut self) {} } enum DropEnum { Tuple(i32), Struct { field: i32 }, } impl Drop for DropEnum { fn drop(&mut self) {} } struct FooString { s: String, } fn get_number() -> i32
fn get_usize() -> usize { 0 } fn get_struct() -> Struct { Struct { field: 0 } } fn get_drop_struct() -> DropStruct { DropStruct { field: 0 } } fn main() { Tuple(get_number()); Struct { field: get_number() }; Struct {..get_struct() }; Enum::Tuple(get_number()); Enum::Struct { field: get_number() }; 5 + get_number(); *&get_number(); &get_number(); (5, 6, get_number()); box get_number(); get_number()..; ..get_number(); 5..get_number(); [42, get_number()]; [42, 55][get_usize()]; (42, get_number()).1; [get_number(); 55]; [42; 55][get_usize()]; { get_number() }; FooString { s: String::from("blah"), }; // Do not warn DropTuple(get_number()); DropStruct { field: get_number() }; DropStruct { field: get_number() }; DropStruct {..get_drop_struct() }; DropEnum::Tuple(get_number()); DropEnum::Struct { field: get_number() }; }
{ 0 }
identifier_body
unnecessary_operation.rs
// run-rustfix #![feature(box_syntax)] #![allow(clippy::deref_addrof, dead_code, unused, clippy::no_effect)] #![warn(clippy::unnecessary_operation)] struct Tuple(i32); struct Struct { field: i32, } enum Enum { Tuple(i32), Struct { field: i32 }, } struct DropStruct { field: i32, } impl Drop for DropStruct { fn drop(&mut self) {} } struct DropTuple(i32); impl Drop for DropTuple { fn drop(&mut self) {} } enum DropEnum { Tuple(i32), Struct { field: i32 }, } impl Drop for DropEnum { fn drop(&mut self) {} } struct FooString { s: String, } fn get_number() -> i32 { 0 } fn get_usize() -> usize { 0 } fn get_struct() -> Struct { Struct { field: 0 } } fn get_drop_struct() -> DropStruct { DropStruct { field: 0 } } fn main() { Tuple(get_number()); Struct { field: get_number() }; Struct {..get_struct() }; Enum::Tuple(get_number()); Enum::Struct { field: get_number() }; 5 + get_number(); *&get_number(); &get_number(); (5, 6, get_number()); box get_number(); get_number()..;
5..get_number(); [42, get_number()]; [42, 55][get_usize()]; (42, get_number()).1; [get_number(); 55]; [42; 55][get_usize()]; { get_number() }; FooString { s: String::from("blah"), }; // Do not warn DropTuple(get_number()); DropStruct { field: get_number() }; DropStruct { field: get_number() }; DropStruct {..get_drop_struct() }; DropEnum::Tuple(get_number()); DropEnum::Struct { field: get_number() }; }
..get_number();
random_line_split
main.rs
use std::io; use std::fmt; use std::io::prelude::*; use std::str::FromStr; use std::collections::VecDeque; #[derive(Debug)] struct Display { data: VecDeque<VecDeque<bool>>, height: usize, width: usize, } #[derive(Debug)] enum Command { Rect(usize, usize), RotateRow(usize, usize), RotateCol(usize, usize), } impl Display { fn new(w: usize, h: usize) -> Display { let mut rows = VecDeque::new(); for _ in 0..h { let mut row = VecDeque::new(); for _ in 0..w { row.push_back(false); } rows.push_back(row) } Display { data: rows, height: h, width: w, } } fn command(&mut self, cmd: &str) { let cmd: Command = cmd.parse().unwrap(); use Command::*; match cmd { Rect(x, y) => { for y in 0..y { for x in 0..x { self.data[y][x] = true; } } } RotateRow(y, offset) => { for _ in 0..offset { let last = self.data[y].pop_back().unwrap(); self.data[y].push_front(last); } } RotateCol(x, offset) => { for _ in 0..offset { let mut new_column: VecDeque<bool> = (0..self.height).map(|y| self.data[y][x]).collect(); let last = new_column.pop_back().unwrap(); new_column.push_front(last); for (y, &pixel) in new_column.iter().enumerate() { self.data[y][x] = pixel; } } } } } fn lit_pixels(&self) -> usize { self.data.iter().flat_map(|row| row.iter().filter(|&pixel| *pixel)).count() } } impl fmt::Display for Display { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result
} impl FromStr for Command { // Slice patterns would make this so much better type Err = &'static str; fn from_str(input: &str) -> Result<Command, Self::Err> { let input = input.split_whitespace().collect::<Vec<_>>(); match input[0] { "rect" => { let xy = input[1].split('x').collect::<Vec<_>>(); let x = xy[0].parse().unwrap(); let y = xy[1].parse().unwrap(); Ok(Command::Rect(x, y)) } "rotate" => { let coord = input[2].split('=').last().unwrap().parse().unwrap(); let offset = input[4].parse().unwrap(); match input[1] { "row" => Ok(Command::RotateRow(coord, offset)), "column" => Ok(Command::RotateCol(coord, offset)), _ => Err("invalid command"), } } _ => Err("invalid command"), } } } fn main() { let mut display = Display::new(50, 6); let stdin = io::stdin(); let stdin = stdin.lock().lines(); for line in stdin { let line = line.unwrap(); display.command(&line); } println!("part1: {}", display.lit_pixels()); println!("part2:\n{}", display); }
{ let pixels = self.data .iter() .map(|row| row.iter().map(|&pixel| if pixel { '#' } else { '.' }).collect::<String>()) .collect::<Vec<_>>() .join("\n"); write!(f, "{}", pixels) }
identifier_body
main.rs
use std::io; use std::fmt; use std::io::prelude::*; use std::str::FromStr; use std::collections::VecDeque; #[derive(Debug)] struct Display { data: VecDeque<VecDeque<bool>>, height: usize, width: usize, } #[derive(Debug)] enum Command { Rect(usize, usize), RotateRow(usize, usize), RotateCol(usize, usize), } impl Display { fn new(w: usize, h: usize) -> Display { let mut rows = VecDeque::new(); for _ in 0..h { let mut row = VecDeque::new(); for _ in 0..w { row.push_back(false); } rows.push_back(row) } Display { data: rows, height: h, width: w, } } fn command(&mut self, cmd: &str) { let cmd: Command = cmd.parse().unwrap(); use Command::*; match cmd { Rect(x, y) => { for y in 0..y { for x in 0..x { self.data[y][x] = true; } } } RotateRow(y, offset) => { for _ in 0..offset { let last = self.data[y].pop_back().unwrap(); self.data[y].push_front(last); } } RotateCol(x, offset) => { for _ in 0..offset { let mut new_column: VecDeque<bool> = (0..self.height).map(|y| self.data[y][x]).collect(); let last = new_column.pop_back().unwrap(); new_column.push_front(last); for (y, &pixel) in new_column.iter().enumerate() { self.data[y][x] = pixel; } } } }
fn lit_pixels(&self) -> usize { self.data.iter().flat_map(|row| row.iter().filter(|&pixel| *pixel)).count() } } impl fmt::Display for Display { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let pixels = self.data .iter() .map(|row| row.iter().map(|&pixel| if pixel { '#' } else { '.' }).collect::<String>()) .collect::<Vec<_>>() .join("\n"); write!(f, "{}", pixels) } } impl FromStr for Command { // Slice patterns would make this so much better type Err = &'static str; fn from_str(input: &str) -> Result<Command, Self::Err> { let input = input.split_whitespace().collect::<Vec<_>>(); match input[0] { "rect" => { let xy = input[1].split('x').collect::<Vec<_>>(); let x = xy[0].parse().unwrap(); let y = xy[1].parse().unwrap(); Ok(Command::Rect(x, y)) } "rotate" => { let coord = input[2].split('=').last().unwrap().parse().unwrap(); let offset = input[4].parse().unwrap(); match input[1] { "row" => Ok(Command::RotateRow(coord, offset)), "column" => Ok(Command::RotateCol(coord, offset)), _ => Err("invalid command"), } } _ => Err("invalid command"), } } } fn main() { let mut display = Display::new(50, 6); let stdin = io::stdin(); let stdin = stdin.lock().lines(); for line in stdin { let line = line.unwrap(); display.command(&line); } println!("part1: {}", display.lit_pixels()); println!("part2:\n{}", display); }
}
random_line_split
main.rs
use std::io; use std::fmt; use std::io::prelude::*; use std::str::FromStr; use std::collections::VecDeque; #[derive(Debug)] struct Display { data: VecDeque<VecDeque<bool>>, height: usize, width: usize, } #[derive(Debug)] enum Command { Rect(usize, usize), RotateRow(usize, usize), RotateCol(usize, usize), } impl Display { fn new(w: usize, h: usize) -> Display { let mut rows = VecDeque::new(); for _ in 0..h { let mut row = VecDeque::new(); for _ in 0..w { row.push_back(false); } rows.push_back(row) } Display { data: rows, height: h, width: w, } } fn command(&mut self, cmd: &str) { let cmd: Command = cmd.parse().unwrap(); use Command::*; match cmd { Rect(x, y) => { for y in 0..y { for x in 0..x { self.data[y][x] = true; } } } RotateRow(y, offset) => { for _ in 0..offset { let last = self.data[y].pop_back().unwrap(); self.data[y].push_front(last); } } RotateCol(x, offset) => { for _ in 0..offset { let mut new_column: VecDeque<bool> = (0..self.height).map(|y| self.data[y][x]).collect(); let last = new_column.pop_back().unwrap(); new_column.push_front(last); for (y, &pixel) in new_column.iter().enumerate() { self.data[y][x] = pixel; } } } } } fn lit_pixels(&self) -> usize { self.data.iter().flat_map(|row| row.iter().filter(|&pixel| *pixel)).count() } } impl fmt::Display for Display { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let pixels = self.data .iter() .map(|row| row.iter().map(|&pixel| if pixel { '#' } else { '.' }).collect::<String>()) .collect::<Vec<_>>() .join("\n"); write!(f, "{}", pixels) } } impl FromStr for Command { // Slice patterns would make this so much better type Err = &'static str; fn from_str(input: &str) -> Result<Command, Self::Err> { let input = input.split_whitespace().collect::<Vec<_>>(); match input[0] { "rect" => { let xy = input[1].split('x').collect::<Vec<_>>(); let x = xy[0].parse().unwrap(); let y = xy[1].parse().unwrap(); Ok(Command::Rect(x, y)) } "rotate" => { let coord = input[2].split('=').last().unwrap().parse().unwrap(); let offset = input[4].parse().unwrap(); match input[1] { "row" => Ok(Command::RotateRow(coord, offset)), "column" => Ok(Command::RotateCol(coord, offset)), _ => Err("invalid command"), } } _ => Err("invalid command"), } } } fn
() { let mut display = Display::new(50, 6); let stdin = io::stdin(); let stdin = stdin.lock().lines(); for line in stdin { let line = line.unwrap(); display.command(&line); } println!("part1: {}", display.lit_pixels()); println!("part2:\n{}", display); }
main
identifier_name
attr.rs
//! Code generation for `#[graphql_union]` macro. use std::mem; use proc_macro2::{Span, TokenStream}; use quote::{quote, ToTokens as _}; use syn::{ext::IdentExt as _, parse_quote, spanned::Spanned as _}; use crate::{ common::{parse, scalar}, result::GraphQLScope, util::{path_eq_single, span_container::SpanContainer}, }; use super::{ all_variants_different, emerge_union_variants_from_attr, Attr, Definition, VariantAttr, VariantDefinition, }; /// [`GraphQLScope`] of errors for `#[graphql_union]` macro. const ERR: GraphQLScope = GraphQLScope::UnionAttr; /// Expands `#[graphql_union]` macro into generated code. pub fn expand(attr_args: TokenStream, body: TokenStream) -> syn::Result<TokenStream> { if let Ok(mut ast) = syn::parse2::<syn::ItemTrait>(body) { let trait_attrs = parse::attr::unite(("graphql_union", &attr_args), &ast.attrs); ast.attrs = parse::attr::strip("graphql_union", ast.attrs); return expand_on_trait(trait_attrs, ast); } Err(syn::Error::new( Span::call_site(), "#[graphql_union] attribute is applicable to trait definitions only", )) } /// Expands `#[graphql_union]` macro placed on a trait definition. fn expand_on_trait( attrs: Vec<syn::Attribute>, mut ast: syn::ItemTrait, ) -> syn::Result<TokenStream> { let attr = Attr::from_attrs("graphql_union", &attrs)?; let trait_span = ast.span(); let trait_ident = &ast.ident; let name = attr .name .clone() .map(SpanContainer::into_inner) .unwrap_or_else(|| trait_ident.unraw().to_string()); if!attr.is_internal && name.starts_with("__") { ERR.no_double_underscore( attr.name .as_ref() .map(SpanContainer::span_ident) .unwrap_or_else(|| trait_ident.span()), ); } let mut variants: Vec<_> = ast .items .iter_mut() .filter_map(|i| match i { syn::TraitItem::Method(m) => parse_variant_from_trait_method(m, trait_ident, &attr), _ => None, }) .collect(); proc_macro_error::abort_if_dirty(); emerge_union_variants_from_attr(&mut variants, attr.external_resolvers); if variants.is_empty() { ERR.emit_custom(trait_span, "expects at least one union variant"); } if!all_variants_different(&variants) { ERR.emit_custom( trait_span, "must have a different type for each union variant", ); } proc_macro_error::abort_if_dirty(); let context = attr .context .map(SpanContainer::into_inner) .or_else(|| variants.iter().find_map(|v| v.context.as_ref()).cloned()) .unwrap_or_else(|| parse_quote! { () }); let generated_code = Definition { name, ty: parse_quote! { #trait_ident }, is_trait_object: true, description: attr.description.map(SpanContainer::into_inner), context, scalar: scalar::Type::parse(attr.scalar.as_deref(), &ast.generics), generics: ast.generics.clone(), variants, }; Ok(quote! { #ast #generated_code }) } /// Parses given Rust trait `method` as [GraphQL union][1] variant. /// /// On failure returns [`None`] and internally fills up [`proc_macro_error`] /// with the corresponding errors. /// /// [1]: https://spec.graphql.org/June2018/#sec-Unions fn parse_variant_from_trait_method( method: &mut syn::TraitItemMethod, trait_ident: &syn::Ident, trait_attr: &Attr, ) -> Option<VariantDefinition>
#[graphql_union(on... =...)] on the trait itself", )) .emit() } if attr.ignore.is_some() { return None; } let method_span = method.sig.span(); let method_ident = &method.sig.ident; let ty = parse::downcaster::output_type(&method.sig.output) .map_err(|span| { ERR.emit_custom( span, "expects trait method return type to be `Option<&VariantType>` only", ) }) .ok()?; let method_context_ty = parse::downcaster::context_ty(&method.sig) .map_err(|span| { ERR.emit_custom( span, "expects trait method to accept `&self` only and, optionally, `&Context`", ) }) .ok()?; if let Some(is_async) = &method.sig.asyncness { ERR.emit_custom( is_async.span(), "async downcast to union variants is not supported", ); return None; } let resolver_code = { if let Some(other) = trait_attr.external_resolvers.get(&ty) { ERR.custom( method_span, format!( "trait method `{}` conflicts with the external resolver \ function `{}` declared on the trait to resolve the \ variant type `{}`", method_ident, other.to_token_stream(), ty.to_token_stream(), ), ) .note(String::from( "use `#[graphql(ignore)]` attribute to ignore this trait \ method for union variants resolution", )) .emit(); } if method_context_ty.is_some() { parse_quote! { #trait_ident::#method_ident(self, ::juniper::FromContext::from(context)) } } else { parse_quote! { #trait_ident::#method_ident(self) } } }; // Doing this may be quite an expensive, because resolving may contain some // heavy computation, so we're preforming it twice. Unfortunately, we have // no other options here, until the `juniper::GraphQLType` itself will allow // to do it in some cleverer way. let resolver_check = parse_quote! { ({ #resolver_code } as ::std::option::Option<&#ty>).is_some() }; Some(VariantDefinition { ty, resolver_code, resolver_check, context: method_context_ty, }) }
{ let method_attrs = method.attrs.clone(); // Remove repeated attributes from the method, to omit incorrect expansion. method.attrs = mem::take(&mut method.attrs) .into_iter() .filter(|attr| !path_eq_single(&attr.path, "graphql")) .collect(); let attr = VariantAttr::from_attrs("graphql", &method_attrs) .map_err(|e| proc_macro_error::emit_error!(e)) .ok()?; if let Some(rslvr) = attr.external_resolver { ERR.custom( rslvr.span_ident(), "cannot use #[graphql(with = ...)] attribute on a trait method", ) .note(String::from( "instead use #[graphql(ignore)] on the method with \
identifier_body
attr.rs
//! Code generation for `#[graphql_union]` macro. use std::mem; use proc_macro2::{Span, TokenStream}; use quote::{quote, ToTokens as _}; use syn::{ext::IdentExt as _, parse_quote, spanned::Spanned as _}; use crate::{ common::{parse, scalar}, result::GraphQLScope, util::{path_eq_single, span_container::SpanContainer}, }; use super::{ all_variants_different, emerge_union_variants_from_attr, Attr, Definition, VariantAttr, VariantDefinition, }; /// [`GraphQLScope`] of errors for `#[graphql_union]` macro. const ERR: GraphQLScope = GraphQLScope::UnionAttr; /// Expands `#[graphql_union]` macro into generated code. pub fn expand(attr_args: TokenStream, body: TokenStream) -> syn::Result<TokenStream> { if let Ok(mut ast) = syn::parse2::<syn::ItemTrait>(body) { let trait_attrs = parse::attr::unite(("graphql_union", &attr_args), &ast.attrs); ast.attrs = parse::attr::strip("graphql_union", ast.attrs); return expand_on_trait(trait_attrs, ast); } Err(syn::Error::new( Span::call_site(), "#[graphql_union] attribute is applicable to trait definitions only", )) } /// Expands `#[graphql_union]` macro placed on a trait definition. fn expand_on_trait( attrs: Vec<syn::Attribute>, mut ast: syn::ItemTrait, ) -> syn::Result<TokenStream> { let attr = Attr::from_attrs("graphql_union", &attrs)?; let trait_span = ast.span(); let trait_ident = &ast.ident; let name = attr .name .clone() .map(SpanContainer::into_inner) .unwrap_or_else(|| trait_ident.unraw().to_string()); if!attr.is_internal && name.starts_with("__")
let mut variants: Vec<_> = ast .items .iter_mut() .filter_map(|i| match i { syn::TraitItem::Method(m) => parse_variant_from_trait_method(m, trait_ident, &attr), _ => None, }) .collect(); proc_macro_error::abort_if_dirty(); emerge_union_variants_from_attr(&mut variants, attr.external_resolvers); if variants.is_empty() { ERR.emit_custom(trait_span, "expects at least one union variant"); } if!all_variants_different(&variants) { ERR.emit_custom( trait_span, "must have a different type for each union variant", ); } proc_macro_error::abort_if_dirty(); let context = attr .context .map(SpanContainer::into_inner) .or_else(|| variants.iter().find_map(|v| v.context.as_ref()).cloned()) .unwrap_or_else(|| parse_quote! { () }); let generated_code = Definition { name, ty: parse_quote! { #trait_ident }, is_trait_object: true, description: attr.description.map(SpanContainer::into_inner), context, scalar: scalar::Type::parse(attr.scalar.as_deref(), &ast.generics), generics: ast.generics.clone(), variants, }; Ok(quote! { #ast #generated_code }) } /// Parses given Rust trait `method` as [GraphQL union][1] variant. /// /// On failure returns [`None`] and internally fills up [`proc_macro_error`] /// with the corresponding errors. /// /// [1]: https://spec.graphql.org/June2018/#sec-Unions fn parse_variant_from_trait_method( method: &mut syn::TraitItemMethod, trait_ident: &syn::Ident, trait_attr: &Attr, ) -> Option<VariantDefinition> { let method_attrs = method.attrs.clone(); // Remove repeated attributes from the method, to omit incorrect expansion. method.attrs = mem::take(&mut method.attrs) .into_iter() .filter(|attr|!path_eq_single(&attr.path, "graphql")) .collect(); let attr = VariantAttr::from_attrs("graphql", &method_attrs) .map_err(|e| proc_macro_error::emit_error!(e)) .ok()?; if let Some(rslvr) = attr.external_resolver { ERR.custom( rslvr.span_ident(), "cannot use #[graphql(with =...)] attribute on a trait method", ) .note(String::from( "instead use #[graphql(ignore)] on the method with \ #[graphql_union(on... =...)] on the trait itself", )) .emit() } if attr.ignore.is_some() { return None; } let method_span = method.sig.span(); let method_ident = &method.sig.ident; let ty = parse::downcaster::output_type(&method.sig.output) .map_err(|span| { ERR.emit_custom( span, "expects trait method return type to be `Option<&VariantType>` only", ) }) .ok()?; let method_context_ty = parse::downcaster::context_ty(&method.sig) .map_err(|span| { ERR.emit_custom( span, "expects trait method to accept `&self` only and, optionally, `&Context`", ) }) .ok()?; if let Some(is_async) = &method.sig.asyncness { ERR.emit_custom( is_async.span(), "async downcast to union variants is not supported", ); return None; } let resolver_code = { if let Some(other) = trait_attr.external_resolvers.get(&ty) { ERR.custom( method_span, format!( "trait method `{}` conflicts with the external resolver \ function `{}` declared on the trait to resolve the \ variant type `{}`", method_ident, other.to_token_stream(), ty.to_token_stream(), ), ) .note(String::from( "use `#[graphql(ignore)]` attribute to ignore this trait \ method for union variants resolution", )) .emit(); } if method_context_ty.is_some() { parse_quote! { #trait_ident::#method_ident(self, ::juniper::FromContext::from(context)) } } else { parse_quote! { #trait_ident::#method_ident(self) } } }; // Doing this may be quite an expensive, because resolving may contain some // heavy computation, so we're preforming it twice. 
Unfortunately, we have // no other options here, until the `juniper::GraphQLType` itself will allow // to do it in some cleverer way. let resolver_check = parse_quote! { ({ #resolver_code } as ::std::option::Option<&#ty>).is_some() }; Some(VariantDefinition { ty, resolver_code, resolver_check, context: method_context_ty, }) }
{ ERR.no_double_underscore( attr.name .as_ref() .map(SpanContainer::span_ident) .unwrap_or_else(|| trait_ident.span()), ); }
conditional_block
attr.rs
//! Code generation for `#[graphql_union]` macro. use std::mem; use proc_macro2::{Span, TokenStream}; use quote::{quote, ToTokens as _}; use syn::{ext::IdentExt as _, parse_quote, spanned::Spanned as _}; use crate::{ common::{parse, scalar}, result::GraphQLScope, util::{path_eq_single, span_container::SpanContainer}, }; use super::{ all_variants_different, emerge_union_variants_from_attr, Attr, Definition, VariantAttr, VariantDefinition, }; /// [`GraphQLScope`] of errors for `#[graphql_union]` macro. const ERR: GraphQLScope = GraphQLScope::UnionAttr; /// Expands `#[graphql_union]` macro into generated code. pub fn expand(attr_args: TokenStream, body: TokenStream) -> syn::Result<TokenStream> { if let Ok(mut ast) = syn::parse2::<syn::ItemTrait>(body) { let trait_attrs = parse::attr::unite(("graphql_union", &attr_args), &ast.attrs); ast.attrs = parse::attr::strip("graphql_union", ast.attrs); return expand_on_trait(trait_attrs, ast); } Err(syn::Error::new( Span::call_site(), "#[graphql_union] attribute is applicable to trait definitions only", )) } /// Expands `#[graphql_union]` macro placed on a trait definition. fn
( attrs: Vec<syn::Attribute>, mut ast: syn::ItemTrait, ) -> syn::Result<TokenStream> { let attr = Attr::from_attrs("graphql_union", &attrs)?; let trait_span = ast.span(); let trait_ident = &ast.ident; let name = attr .name .clone() .map(SpanContainer::into_inner) .unwrap_or_else(|| trait_ident.unraw().to_string()); if!attr.is_internal && name.starts_with("__") { ERR.no_double_underscore( attr.name .as_ref() .map(SpanContainer::span_ident) .unwrap_or_else(|| trait_ident.span()), ); } let mut variants: Vec<_> = ast .items .iter_mut() .filter_map(|i| match i { syn::TraitItem::Method(m) => parse_variant_from_trait_method(m, trait_ident, &attr), _ => None, }) .collect(); proc_macro_error::abort_if_dirty(); emerge_union_variants_from_attr(&mut variants, attr.external_resolvers); if variants.is_empty() { ERR.emit_custom(trait_span, "expects at least one union variant"); } if!all_variants_different(&variants) { ERR.emit_custom( trait_span, "must have a different type for each union variant", ); } proc_macro_error::abort_if_dirty(); let context = attr .context .map(SpanContainer::into_inner) .or_else(|| variants.iter().find_map(|v| v.context.as_ref()).cloned()) .unwrap_or_else(|| parse_quote! { () }); let generated_code = Definition { name, ty: parse_quote! { #trait_ident }, is_trait_object: true, description: attr.description.map(SpanContainer::into_inner), context, scalar: scalar::Type::parse(attr.scalar.as_deref(), &ast.generics), generics: ast.generics.clone(), variants, }; Ok(quote! { #ast #generated_code }) } /// Parses given Rust trait `method` as [GraphQL union][1] variant. /// /// On failure returns [`None`] and internally fills up [`proc_macro_error`] /// with the corresponding errors. /// /// [1]: https://spec.graphql.org/June2018/#sec-Unions fn parse_variant_from_trait_method( method: &mut syn::TraitItemMethod, trait_ident: &syn::Ident, trait_attr: &Attr, ) -> Option<VariantDefinition> { let method_attrs = method.attrs.clone(); // Remove repeated attributes from the method, to omit incorrect expansion. method.attrs = mem::take(&mut method.attrs) .into_iter() .filter(|attr|!path_eq_single(&attr.path, "graphql")) .collect(); let attr = VariantAttr::from_attrs("graphql", &method_attrs) .map_err(|e| proc_macro_error::emit_error!(e)) .ok()?; if let Some(rslvr) = attr.external_resolver { ERR.custom( rslvr.span_ident(), "cannot use #[graphql(with =...)] attribute on a trait method", ) .note(String::from( "instead use #[graphql(ignore)] on the method with \ #[graphql_union(on... 
=...)] on the trait itself", )) .emit() } if attr.ignore.is_some() { return None; } let method_span = method.sig.span(); let method_ident = &method.sig.ident; let ty = parse::downcaster::output_type(&method.sig.output) .map_err(|span| { ERR.emit_custom( span, "expects trait method return type to be `Option<&VariantType>` only", ) }) .ok()?; let method_context_ty = parse::downcaster::context_ty(&method.sig) .map_err(|span| { ERR.emit_custom( span, "expects trait method to accept `&self` only and, optionally, `&Context`", ) }) .ok()?; if let Some(is_async) = &method.sig.asyncness { ERR.emit_custom( is_async.span(), "async downcast to union variants is not supported", ); return None; } let resolver_code = { if let Some(other) = trait_attr.external_resolvers.get(&ty) { ERR.custom( method_span, format!( "trait method `{}` conflicts with the external resolver \ function `{}` declared on the trait to resolve the \ variant type `{}`", method_ident, other.to_token_stream(), ty.to_token_stream(), ), ) .note(String::from( "use `#[graphql(ignore)]` attribute to ignore this trait \ method for union variants resolution", )) .emit(); } if method_context_ty.is_some() { parse_quote! { #trait_ident::#method_ident(self, ::juniper::FromContext::from(context)) } } else { parse_quote! { #trait_ident::#method_ident(self) } } }; // Doing this may be quite an expensive, because resolving may contain some // heavy computation, so we're preforming it twice. Unfortunately, we have // no other options here, until the `juniper::GraphQLType` itself will allow // to do it in some cleverer way. let resolver_check = parse_quote! { ({ #resolver_code } as ::std::option::Option<&#ty>).is_some() }; Some(VariantDefinition { ty, resolver_code, resolver_check, context: method_context_ty, }) }
expand_on_trait
identifier_name
attr.rs
//! Code generation for `#[graphql_union]` macro. use std::mem; use proc_macro2::{Span, TokenStream}; use quote::{quote, ToTokens as _}; use syn::{ext::IdentExt as _, parse_quote, spanned::Spanned as _}; use crate::{ common::{parse, scalar}, result::GraphQLScope, util::{path_eq_single, span_container::SpanContainer}, }; use super::{ all_variants_different, emerge_union_variants_from_attr, Attr, Definition, VariantAttr, VariantDefinition, }; /// [`GraphQLScope`] of errors for `#[graphql_union]` macro. const ERR: GraphQLScope = GraphQLScope::UnionAttr; /// Expands `#[graphql_union]` macro into generated code. pub fn expand(attr_args: TokenStream, body: TokenStream) -> syn::Result<TokenStream> { if let Ok(mut ast) = syn::parse2::<syn::ItemTrait>(body) { let trait_attrs = parse::attr::unite(("graphql_union", &attr_args), &ast.attrs); ast.attrs = parse::attr::strip("graphql_union", ast.attrs); return expand_on_trait(trait_attrs, ast); } Err(syn::Error::new( Span::call_site(), "#[graphql_union] attribute is applicable to trait definitions only", )) } /// Expands `#[graphql_union]` macro placed on a trait definition. fn expand_on_trait( attrs: Vec<syn::Attribute>, mut ast: syn::ItemTrait, ) -> syn::Result<TokenStream> { let attr = Attr::from_attrs("graphql_union", &attrs)?; let trait_span = ast.span(); let trait_ident = &ast.ident; let name = attr .name .clone() .map(SpanContainer::into_inner) .unwrap_or_else(|| trait_ident.unraw().to_string()); if!attr.is_internal && name.starts_with("__") { ERR.no_double_underscore( attr.name .as_ref() .map(SpanContainer::span_ident) .unwrap_or_else(|| trait_ident.span()), ); } let mut variants: Vec<_> = ast .items .iter_mut() .filter_map(|i| match i { syn::TraitItem::Method(m) => parse_variant_from_trait_method(m, trait_ident, &attr), _ => None, }) .collect(); proc_macro_error::abort_if_dirty(); emerge_union_variants_from_attr(&mut variants, attr.external_resolvers); if variants.is_empty() { ERR.emit_custom(trait_span, "expects at least one union variant"); } if!all_variants_different(&variants) { ERR.emit_custom( trait_span, "must have a different type for each union variant", ); } proc_macro_error::abort_if_dirty(); let context = attr .context .map(SpanContainer::into_inner) .or_else(|| variants.iter().find_map(|v| v.context.as_ref()).cloned()) .unwrap_or_else(|| parse_quote! { () }); let generated_code = Definition { name, ty: parse_quote! { #trait_ident }, is_trait_object: true, description: attr.description.map(SpanContainer::into_inner), context, scalar: scalar::Type::parse(attr.scalar.as_deref(), &ast.generics), generics: ast.generics.clone(), variants, }; Ok(quote! { #ast #generated_code }) } /// Parses given Rust trait `method` as [GraphQL union][1] variant. /// /// On failure returns [`None`] and internally fills up [`proc_macro_error`] /// with the corresponding errors. /// /// [1]: https://spec.graphql.org/June2018/#sec-Unions fn parse_variant_from_trait_method( method: &mut syn::TraitItemMethod, trait_ident: &syn::Ident, trait_attr: &Attr, ) -> Option<VariantDefinition> { let method_attrs = method.attrs.clone(); // Remove repeated attributes from the method, to omit incorrect expansion. 
method.attrs = mem::take(&mut method.attrs) .into_iter() .filter(|attr|!path_eq_single(&attr.path, "graphql")) .collect(); let attr = VariantAttr::from_attrs("graphql", &method_attrs) .map_err(|e| proc_macro_error::emit_error!(e)) .ok()?; if let Some(rslvr) = attr.external_resolver { ERR.custom( rslvr.span_ident(), "cannot use #[graphql(with =...)] attribute on a trait method", ) .note(String::from( "instead use #[graphql(ignore)] on the method with \ #[graphql_union(on... =...)] on the trait itself", )) .emit() } if attr.ignore.is_some() { return None; } let method_span = method.sig.span(); let method_ident = &method.sig.ident; let ty = parse::downcaster::output_type(&method.sig.output) .map_err(|span| { ERR.emit_custom( span, "expects trait method return type to be `Option<&VariantType>` only", ) }) .ok()?; let method_context_ty = parse::downcaster::context_ty(&method.sig) .map_err(|span| { ERR.emit_custom( span, "expects trait method to accept `&self` only and, optionally, `&Context`", ) }) .ok()?; if let Some(is_async) = &method.sig.asyncness { ERR.emit_custom( is_async.span(),
let resolver_code = { if let Some(other) = trait_attr.external_resolvers.get(&ty) { ERR.custom( method_span, format!( "trait method `{}` conflicts with the external resolver \ function `{}` declared on the trait to resolve the \ variant type `{}`", method_ident, other.to_token_stream(), ty.to_token_stream(), ), ) .note(String::from( "use `#[graphql(ignore)]` attribute to ignore this trait \ method for union variants resolution", )) .emit(); } if method_context_ty.is_some() { parse_quote! { #trait_ident::#method_ident(self, ::juniper::FromContext::from(context)) } } else { parse_quote! { #trait_ident::#method_ident(self) } } }; // Doing this may be quite an expensive, because resolving may contain some // heavy computation, so we're preforming it twice. Unfortunately, we have // no other options here, until the `juniper::GraphQLType` itself will allow // to do it in some cleverer way. let resolver_check = parse_quote! { ({ #resolver_code } as ::std::option::Option<&#ty>).is_some() }; Some(VariantDefinition { ty, resolver_code, resolver_check, context: method_context_ty, }) }
"async downcast to union variants is not supported", ); return None; }
random_line_split
issue-80706.rs
// check-pass // edition:2018 type BoxFuture<T> = std::pin::Pin<Box<dyn std::future::Future<Output=T>>>; fn main() { f(); } async fn f() { run("dependency").await; } struct InMemoryStorage; struct User<'dep> { dep: &'dep str, } impl<'a> StorageRequest<InMemoryStorage> for SaveUser<'a> { fn execute(&self) -> BoxFuture<Result<(), String>> { todo!() } } trait Storage { type Error; } impl Storage for InMemoryStorage { type Error = String; } trait StorageRequestReturnType { type Output; } trait StorageRequest<S: Storage>: StorageRequestReturnType { fn execute( &self, ) -> BoxFuture<Result<<Self as StorageRequestReturnType>::Output, <S as Storage>::Error>>; } struct SaveUser<'a> { name: &'a str, } impl<'a> StorageRequestReturnType for SaveUser<'a> { type Output = (); } impl<'dep> User<'dep> { async fn save<S>(self) where S: Storage, for<'a> SaveUser<'a>: StorageRequest<S>,
} async fn run<S>(dep: &str) where S: Storage, for<'a> SaveUser<'a>: StorageRequest<S>, { User { dep }.save().await; }
{ SaveUser { name: "Joe" } .execute() .await; }
identifier_body
issue-80706.rs
// check-pass // edition:2018 type BoxFuture<T> = std::pin::Pin<Box<dyn std::future::Future<Output=T>>>; fn main() { f(); } async fn f() { run("dependency").await; } struct
; struct User<'dep> { dep: &'dep str, } impl<'a> StorageRequest<InMemoryStorage> for SaveUser<'a> { fn execute(&self) -> BoxFuture<Result<(), String>> { todo!() } } trait Storage { type Error; } impl Storage for InMemoryStorage { type Error = String; } trait StorageRequestReturnType { type Output; } trait StorageRequest<S: Storage>: StorageRequestReturnType { fn execute( &self, ) -> BoxFuture<Result<<Self as StorageRequestReturnType>::Output, <S as Storage>::Error>>; } struct SaveUser<'a> { name: &'a str, } impl<'a> StorageRequestReturnType for SaveUser<'a> { type Output = (); } impl<'dep> User<'dep> { async fn save<S>(self) where S: Storage, for<'a> SaveUser<'a>: StorageRequest<S>, { SaveUser { name: "Joe" } .execute() .await; } } async fn run<S>(dep: &str) where S: Storage, for<'a> SaveUser<'a>: StorageRequest<S>, { User { dep }.save().await; }
InMemoryStorage
identifier_name
issue-80706.rs
// check-pass // edition:2018 type BoxFuture<T> = std::pin::Pin<Box<dyn std::future::Future<Output=T>>>; fn main() { f(); } async fn f() { run("dependency").await; } struct InMemoryStorage; struct User<'dep> { dep: &'dep str, } impl<'a> StorageRequest<InMemoryStorage> for SaveUser<'a> { fn execute(&self) -> BoxFuture<Result<(), String>> { todo!() } } trait Storage { type Error; } impl Storage for InMemoryStorage { type Error = String; } trait StorageRequestReturnType { type Output; } trait StorageRequest<S: Storage>: StorageRequestReturnType { fn execute( &self, ) -> BoxFuture<Result<<Self as StorageRequestReturnType>::Output, <S as Storage>::Error>>; } struct SaveUser<'a> { name: &'a str, } impl<'a> StorageRequestReturnType for SaveUser<'a> { type Output = ();
where S: Storage, for<'a> SaveUser<'a>: StorageRequest<S>, { SaveUser { name: "Joe" } .execute() .await; } } async fn run<S>(dep: &str) where S: Storage, for<'a> SaveUser<'a>: StorageRequest<S>, { User { dep }.save().await; }
} impl<'dep> User<'dep> { async fn save<S>(self)
random_line_split
next_back.rs
#![feature(core)] extern crate core; #[cfg(test)] mod tests { use core::result::IntoIter; // #[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Debug, Hash)] // #[must_use] // #[stable(feature = "rust1", since = "1.0.0")] // pub enum Result<T, E> { // /// Contains the success value // #[stable(feature = "rust1", since = "1.0.0")] // Ok(T), // // /// Contains the error value
// } // impl<T, E> IntoIterator for Result<T, E> { // type Item = T; // type IntoIter = IntoIter<T>; // // /// Returns a consuming iterator over the possibly contained value. // /// // /// # Examples // /// // /// ``` // /// let x: Result<u32, &str> = Ok(5); // /// let v: Vec<u32> = x.into_iter().collect(); // /// assert_eq!(v, [5]); // /// // /// let x: Result<u32, &str> = Err("nothing!"); // /// let v: Vec<u32> = x.into_iter().collect(); // /// assert_eq!(v, []); // /// ``` // #[inline] // fn into_iter(self) -> IntoIter<T> { // IntoIter { inner: self.ok() } // } // } // pub struct IntoIter<T> { inner: Option<T> } // impl<T> DoubleEndedIterator for IntoIter<T> { // #[inline] // fn next_back(&mut self) -> Option<T> { self.inner.take() } // } type T = u32; type E = &'static str; #[test] fn next_back_test1() { let x: Result<T, E> = Ok::<T, E>(5); let mut into_iter: IntoIter<T> = x.into_iter(); let result: Option<T> = into_iter.next_back(); match result { Some(v) => assert_eq!(v, 5), None => assert!(false) } } #[test] fn next_back_test2() { let x: Result<T, E> = Err::<T, E>("nothing!"); let mut into_iter: IntoIter<T> = x.into_iter(); let result: Option<T> = into_iter.next_back(); assert_eq!(result, None::<T>); } }
// #[stable(feature = "rust1", since = "1.0.0")] // Err(E)
random_line_split
next_back.rs
#![feature(core)] extern crate core; #[cfg(test)] mod tests { use core::result::IntoIter; // #[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Debug, Hash)] // #[must_use] // #[stable(feature = "rust1", since = "1.0.0")] // pub enum Result<T, E> { // /// Contains the success value // #[stable(feature = "rust1", since = "1.0.0")] // Ok(T), // // /// Contains the error value // #[stable(feature = "rust1", since = "1.0.0")] // Err(E) // } // impl<T, E> IntoIterator for Result<T, E> { // type Item = T; // type IntoIter = IntoIter<T>; // // /// Returns a consuming iterator over the possibly contained value. // /// // /// # Examples // /// // /// ``` // /// let x: Result<u32, &str> = Ok(5); // /// let v: Vec<u32> = x.into_iter().collect(); // /// assert_eq!(v, [5]); // /// // /// let x: Result<u32, &str> = Err("nothing!"); // /// let v: Vec<u32> = x.into_iter().collect(); // /// assert_eq!(v, []); // /// ``` // #[inline] // fn into_iter(self) -> IntoIter<T> { // IntoIter { inner: self.ok() } // } // } // pub struct IntoIter<T> { inner: Option<T> } // impl<T> DoubleEndedIterator for IntoIter<T> { // #[inline] // fn next_back(&mut self) -> Option<T> { self.inner.take() } // } type T = u32; type E = &'static str; #[test] fn
() { let x: Result<T, E> = Ok::<T, E>(5); let mut into_iter: IntoIter<T> = x.into_iter(); let result: Option<T> = into_iter.next_back(); match result { Some(v) => assert_eq!(v, 5), None => assert!(false) } } #[test] fn next_back_test2() { let x: Result<T, E> = Err::<T, E>("nothing!"); let mut into_iter: IntoIter<T> = x.into_iter(); let result: Option<T> = into_iter.next_back(); assert_eq!(result, None::<T>); } }
next_back_test1
identifier_name
next_back.rs
#![feature(core)] extern crate core; #[cfg(test)] mod tests { use core::result::IntoIter; // #[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Debug, Hash)] // #[must_use] // #[stable(feature = "rust1", since = "1.0.0")] // pub enum Result<T, E> { // /// Contains the success value // #[stable(feature = "rust1", since = "1.0.0")] // Ok(T), // // /// Contains the error value // #[stable(feature = "rust1", since = "1.0.0")] // Err(E) // } // impl<T, E> IntoIterator for Result<T, E> { // type Item = T; // type IntoIter = IntoIter<T>; // // /// Returns a consuming iterator over the possibly contained value. // /// // /// # Examples // /// // /// ``` // /// let x: Result<u32, &str> = Ok(5); // /// let v: Vec<u32> = x.into_iter().collect(); // /// assert_eq!(v, [5]); // /// // /// let x: Result<u32, &str> = Err("nothing!"); // /// let v: Vec<u32> = x.into_iter().collect(); // /// assert_eq!(v, []); // /// ``` // #[inline] // fn into_iter(self) -> IntoIter<T> { // IntoIter { inner: self.ok() } // } // } // pub struct IntoIter<T> { inner: Option<T> } // impl<T> DoubleEndedIterator for IntoIter<T> { // #[inline] // fn next_back(&mut self) -> Option<T> { self.inner.take() } // } type T = u32; type E = &'static str; #[test] fn next_back_test1()
#[test] fn next_back_test2() { let x: Result<T, E> = Err::<T, E>("nothing!"); let mut into_iter: IntoIter<T> = x.into_iter(); let result: Option<T> = into_iter.next_back(); assert_eq!(result, None::<T>); } }
{ let x: Result<T, E> = Ok::<T, E>(5); let mut into_iter: IntoIter<T> = x.into_iter(); let result: Option<T> = into_iter.next_back(); match result { Some(v) => assert_eq!(v, 5), None => assert!(false) } }
identifier_body
size-and-align.rs
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. enum clam<T> { a(T, int), b, } fn uhoh<T>(v: Vec<clam<T>> ) { match v[1] { clam::a::<T>(ref _t, ref u) => { println!("incorrect"); println!("{}", u); panic!(); } clam::b::<T> => { println!("correct"); } } } pub fn
() { let v: Vec<clam<int>> = vec!(clam::b::<int>, clam::b::<int>, clam::a::<int>(42, 17)); uhoh::<int>(v); }
main
identifier_name
size-and-align.rs
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. enum clam<T> { a(T, int), b, } fn uhoh<T>(v: Vec<clam<T>> ) { match v[1] { clam::a::<T>(ref _t, ref u) => { println!("incorrect"); println!("{}", u);
clam::b::<T> => { println!("correct"); } } } pub fn main() { let v: Vec<clam<int>> = vec!(clam::b::<int>, clam::b::<int>, clam::a::<int>(42, 17)); uhoh::<int>(v); }
panic!(); }
random_line_split
size-and-align.rs
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. enum clam<T> { a(T, int), b, } fn uhoh<T>(v: Vec<clam<T>> )
} pub fn main() { let v: Vec<clam<int>> = vec!(clam::b::<int>, clam::b::<int>, clam::a::<int>(42, 17)); uhoh::<int>(v); }
{ match v[1] { clam::a::<T>(ref _t, ref u) => { println!("incorrect"); println!("{}", u); panic!(); } clam::b::<T> => { println!("correct"); } }
identifier_body
dns_configuration.rs
// Copyright 2015 MaidSafe.net limited. // // This SAFE Network Software is licensed to you under (1) the MaidSafe.net Commercial License, // version 1.0 or later, or (2) The General Public License (GPL), version 3, depending on which // licence you accepted on initial access to the Software (the "Licences"). // // By contributing code to the SAFE Network Software, or to this project generally, you agree to be // bound by the terms of the MaidSafe Contributor Agreement, version 1.0. This, along with the // Licenses can be found in the root directory of this project at LICENSE, COPYING and CONTRIBUTOR. // // Unless required by applicable law or agreed to in writing, the SAFE Network Software distributed // under the GPL Licence is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. // // Please review the Licences for the specific language governing permissions and limitations // relating to use of the SAFE Network Software. use maidsafe_utilities::serialisation::{serialise, deserialise}; const DNS_CONFIG_DIR_NAME: &'static str = "DnsReservedDirectory"; const DNS_CONFIG_FILE_NAME: &'static str = "DnsConfigurationFile"; #[derive(Clone, Debug, Eq, PartialEq, RustcEncodable, RustcDecodable)] pub struct DnsConfiguation { pub long_name : String, pub encryption_keypair: (::sodiumoxide::crypto::box_::PublicKey, ::sodiumoxide::crypto::box_::SecretKey), } pub fn initialise_dns_configuaration(client: ::std::sync::Arc<::std::sync::Mutex<::safe_core::client::Client>>) -> Result<(), ::errors::DnsError> { let dir_helper = ::safe_nfs::helper::directory_helper::DirectoryHelper::new(client.clone()); let dir_listing = try!(dir_helper.get_configuration_directory_listing(DNS_CONFIG_DIR_NAME.to_string())); let file_helper = ::safe_nfs::helper::file_helper::FileHelper::new(client.clone()); match file_helper.create(DNS_CONFIG_FILE_NAME.to_string(), vec![], dir_listing) { Ok(writer) => { let _ = try!(writer.close()); Ok(()) }, Err(::safe_nfs::errors::NfsError::FileAlreadyExistsWithSameName) => Ok(()), Err(error) => Err(::errors::DnsError::from(error)), } } pub fn get_dns_configuaration_data(client: ::std::sync::Arc<::std::sync::Mutex<::safe_core::client::Client>>) -> Result<Vec<DnsConfiguation>, ::errors::DnsError> { let dir_helper = ::safe_nfs::helper::directory_helper::DirectoryHelper::new(client.clone()); let dir_listing = try!(dir_helper.get_configuration_directory_listing(DNS_CONFIG_DIR_NAME.to_string())); let file = try!(dir_listing.get_files().iter().find(|file| file.get_name() == DNS_CONFIG_FILE_NAME).ok_or(::errors::DnsError::DnsConfigFileNotFoundOrCorrupted)); let file_helper = ::safe_nfs::helper::file_helper::FileHelper::new(client.clone()); debug!("Reading dns configuration data from file..."); let mut reader = file_helper.read(file); let size = reader.size(); if size!= 0 { Ok(try!(deserialise(&try!(reader.read(0, size))))) } else { Ok(vec![]) } } pub fn write_dns_configuaration_data(client: ::std::sync::Arc<::std::sync::Mutex<::safe_core::client::Client>>, config: &Vec<DnsConfiguation>) -> Result<(), ::errors::DnsError> { let dir_helper = ::safe_nfs::helper::directory_helper::DirectoryHelper::new(client.clone()); let dir_listing = try!(dir_helper.get_configuration_directory_listing(DNS_CONFIG_DIR_NAME.to_string())); let file = try!(dir_listing.get_files().iter().find(|file| file.get_name() == DNS_CONFIG_FILE_NAME).ok_or(::errors::DnsError::DnsConfigFileNotFoundOrCorrupted)).clone(); let file_helper = 
::safe_nfs::helper::file_helper::FileHelper::new(client.clone()); let mut writer = try!(file_helper.update_content(file, ::safe_nfs::helper::writer::Mode::Overwrite, dir_listing)); debug!("Writing dns configuration data..."); writer.write(&try!(serialise(&config)), 0); let _ = try!(writer.close()); Ok(()) } #[cfg(test)] mod test { use super::*; #[test] fn read_write_dns_configuration_file()
unwrap_result!(write_dns_configuaration_data(client.clone(), &config_vec)); // Get the Stored Configurations config_vec = unwrap_result!(get_dns_configuaration_data(client.clone())); assert_eq!(config_vec.len(), 1); assert_eq!(config_vec[0], config_0); // Modify the content keypair = ::sodiumoxide::crypto::box_::gen_keypair(); let config_1 = DnsConfiguation { long_name : long_name, encryption_keypair: (keypair.0, keypair.1), }; config_vec[0] = config_1.clone(); unwrap_result!(write_dns_configuaration_data(client.clone(), &config_vec)); // Get the Stored Configurations config_vec = unwrap_result!(get_dns_configuaration_data(client.clone())); assert_eq!(config_vec.len(), 1); assert!(config_vec[0]!= config_0); assert_eq!(config_vec[0], config_1); // Delete Record config_vec.clear(); unwrap_result!(write_dns_configuaration_data(client.clone(), &config_vec)); // Get the Stored Configurations config_vec = unwrap_result!(get_dns_configuaration_data(client.clone())); assert_eq!(config_vec.len(), 0); } }
{ let client = ::std::sync::Arc::new(::std::sync::Mutex::new(unwrap_result!(::safe_core::utility::test_utils::get_client()))); // Initialise Dns Configuration File unwrap_result!(initialise_dns_configuaration(client.clone())); // Get the Stored Configurations let mut config_vec = unwrap_result!(get_dns_configuaration_data(client.clone())); assert_eq!(config_vec.len(), 0); let long_name = unwrap_result!(::safe_core::utility::generate_random_string(10)); // Put in the 1st record let mut keypair = ::sodiumoxide::crypto::box_::gen_keypair(); let config_0 = DnsConfiguation { long_name : long_name.clone(), encryption_keypair: (keypair.0, keypair.1), }; config_vec.push(config_0.clone());
identifier_body
dns_configuration.rs
// Copyright 2015 MaidSafe.net limited. // // This SAFE Network Software is licensed to you under (1) the MaidSafe.net Commercial License, // version 1.0 or later, or (2) The General Public License (GPL), version 3, depending on which // licence you accepted on initial access to the Software (the "Licences"). // // By contributing code to the SAFE Network Software, or to this project generally, you agree to be // bound by the terms of the MaidSafe Contributor Agreement, version 1.0. This, along with the // Licenses can be found in the root directory of this project at LICENSE, COPYING and CONTRIBUTOR. // // Unless required by applicable law or agreed to in writing, the SAFE Network Software distributed // under the GPL Licence is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. // // Please review the Licences for the specific language governing permissions and limitations // relating to use of the SAFE Network Software. use maidsafe_utilities::serialisation::{serialise, deserialise}; const DNS_CONFIG_DIR_NAME: &'static str = "DnsReservedDirectory"; const DNS_CONFIG_FILE_NAME: &'static str = "DnsConfigurationFile"; #[derive(Clone, Debug, Eq, PartialEq, RustcEncodable, RustcDecodable)] pub struct
{ pub long_name : String, pub encryption_keypair: (::sodiumoxide::crypto::box_::PublicKey, ::sodiumoxide::crypto::box_::SecretKey), } pub fn initialise_dns_configuaration(client: ::std::sync::Arc<::std::sync::Mutex<::safe_core::client::Client>>) -> Result<(), ::errors::DnsError> { let dir_helper = ::safe_nfs::helper::directory_helper::DirectoryHelper::new(client.clone()); let dir_listing = try!(dir_helper.get_configuration_directory_listing(DNS_CONFIG_DIR_NAME.to_string())); let file_helper = ::safe_nfs::helper::file_helper::FileHelper::new(client.clone()); match file_helper.create(DNS_CONFIG_FILE_NAME.to_string(), vec![], dir_listing) { Ok(writer) => { let _ = try!(writer.close()); Ok(()) }, Err(::safe_nfs::errors::NfsError::FileAlreadyExistsWithSameName) => Ok(()), Err(error) => Err(::errors::DnsError::from(error)), } } pub fn get_dns_configuaration_data(client: ::std::sync::Arc<::std::sync::Mutex<::safe_core::client::Client>>) -> Result<Vec<DnsConfiguation>, ::errors::DnsError> { let dir_helper = ::safe_nfs::helper::directory_helper::DirectoryHelper::new(client.clone()); let dir_listing = try!(dir_helper.get_configuration_directory_listing(DNS_CONFIG_DIR_NAME.to_string())); let file = try!(dir_listing.get_files().iter().find(|file| file.get_name() == DNS_CONFIG_FILE_NAME).ok_or(::errors::DnsError::DnsConfigFileNotFoundOrCorrupted)); let file_helper = ::safe_nfs::helper::file_helper::FileHelper::new(client.clone()); debug!("Reading dns configuration data from file..."); let mut reader = file_helper.read(file); let size = reader.size(); if size!= 0 { Ok(try!(deserialise(&try!(reader.read(0, size))))) } else { Ok(vec![]) } } pub fn write_dns_configuaration_data(client: ::std::sync::Arc<::std::sync::Mutex<::safe_core::client::Client>>, config: &Vec<DnsConfiguation>) -> Result<(), ::errors::DnsError> { let dir_helper = ::safe_nfs::helper::directory_helper::DirectoryHelper::new(client.clone()); let dir_listing = try!(dir_helper.get_configuration_directory_listing(DNS_CONFIG_DIR_NAME.to_string())); let file = try!(dir_listing.get_files().iter().find(|file| file.get_name() == DNS_CONFIG_FILE_NAME).ok_or(::errors::DnsError::DnsConfigFileNotFoundOrCorrupted)).clone(); let file_helper = ::safe_nfs::helper::file_helper::FileHelper::new(client.clone()); let mut writer = try!(file_helper.update_content(file, ::safe_nfs::helper::writer::Mode::Overwrite, dir_listing)); debug!("Writing dns configuration data..."); writer.write(&try!(serialise(&config)), 0); let _ = try!(writer.close()); Ok(()) } #[cfg(test)] mod test { use super::*; #[test] fn read_write_dns_configuration_file() { let client = ::std::sync::Arc::new(::std::sync::Mutex::new(unwrap_result!(::safe_core::utility::test_utils::get_client()))); // Initialise Dns Configuration File unwrap_result!(initialise_dns_configuaration(client.clone())); // Get the Stored Configurations let mut config_vec = unwrap_result!(get_dns_configuaration_data(client.clone())); assert_eq!(config_vec.len(), 0); let long_name = unwrap_result!(::safe_core::utility::generate_random_string(10)); // Put in the 1st record let mut keypair = ::sodiumoxide::crypto::box_::gen_keypair(); let config_0 = DnsConfiguation { long_name : long_name.clone(), encryption_keypair: (keypair.0, keypair.1), }; config_vec.push(config_0.clone()); unwrap_result!(write_dns_configuaration_data(client.clone(), &config_vec)); // Get the Stored Configurations config_vec = unwrap_result!(get_dns_configuaration_data(client.clone())); assert_eq!(config_vec.len(), 1); assert_eq!(config_vec[0], config_0); 
// Modify the content keypair = ::sodiumoxide::crypto::box_::gen_keypair(); let config_1 = DnsConfiguation { long_name : long_name, encryption_keypair: (keypair.0, keypair.1), }; config_vec[0] = config_1.clone(); unwrap_result!(write_dns_configuaration_data(client.clone(), &config_vec)); // Get the Stored Configurations config_vec = unwrap_result!(get_dns_configuaration_data(client.clone())); assert_eq!(config_vec.len(), 1); assert!(config_vec[0]!= config_0); assert_eq!(config_vec[0], config_1); // Delete Record config_vec.clear(); unwrap_result!(write_dns_configuaration_data(client.clone(), &config_vec)); // Get the Stored Configurations config_vec = unwrap_result!(get_dns_configuaration_data(client.clone())); assert_eq!(config_vec.len(), 0); } }
DnsConfiguation
identifier_name
dns_configuration.rs
// Copyright 2015 MaidSafe.net limited. // // This SAFE Network Software is licensed to you under (1) the MaidSafe.net Commercial License, // version 1.0 or later, or (2) The General Public License (GPL), version 3, depending on which // licence you accepted on initial access to the Software (the "Licences"). // // By contributing code to the SAFE Network Software, or to this project generally, you agree to be // bound by the terms of the MaidSafe Contributor Agreement, version 1.0. This, along with the // Licenses can be found in the root directory of this project at LICENSE, COPYING and CONTRIBUTOR. // // Unless required by applicable law or agreed to in writing, the SAFE Network Software distributed // under the GPL Licence is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. // // Please review the Licences for the specific language governing permissions and limitations // relating to use of the SAFE Network Software. use maidsafe_utilities::serialisation::{serialise, deserialise}; const DNS_CONFIG_DIR_NAME: &'static str = "DnsReservedDirectory"; const DNS_CONFIG_FILE_NAME: &'static str = "DnsConfigurationFile"; #[derive(Clone, Debug, Eq, PartialEq, RustcEncodable, RustcDecodable)] pub struct DnsConfiguation { pub long_name : String, pub encryption_keypair: (::sodiumoxide::crypto::box_::PublicKey, ::sodiumoxide::crypto::box_::SecretKey), } pub fn initialise_dns_configuaration(client: ::std::sync::Arc<::std::sync::Mutex<::safe_core::client::Client>>) -> Result<(), ::errors::DnsError> { let dir_helper = ::safe_nfs::helper::directory_helper::DirectoryHelper::new(client.clone()); let dir_listing = try!(dir_helper.get_configuration_directory_listing(DNS_CONFIG_DIR_NAME.to_string())); let file_helper = ::safe_nfs::helper::file_helper::FileHelper::new(client.clone()); match file_helper.create(DNS_CONFIG_FILE_NAME.to_string(), vec![], dir_listing) { Ok(writer) => { let _ = try!(writer.close()); Ok(()) }, Err(::safe_nfs::errors::NfsError::FileAlreadyExistsWithSameName) => Ok(()), Err(error) => Err(::errors::DnsError::from(error)), } } pub fn get_dns_configuaration_data(client: ::std::sync::Arc<::std::sync::Mutex<::safe_core::client::Client>>) -> Result<Vec<DnsConfiguation>, ::errors::DnsError> { let dir_helper = ::safe_nfs::helper::directory_helper::DirectoryHelper::new(client.clone()); let dir_listing = try!(dir_helper.get_configuration_directory_listing(DNS_CONFIG_DIR_NAME.to_string())); let file = try!(dir_listing.get_files().iter().find(|file| file.get_name() == DNS_CONFIG_FILE_NAME).ok_or(::errors::DnsError::DnsConfigFileNotFoundOrCorrupted)); let file_helper = ::safe_nfs::helper::file_helper::FileHelper::new(client.clone()); debug!("Reading dns configuration data from file..."); let mut reader = file_helper.read(file); let size = reader.size(); if size!= 0 { Ok(try!(deserialise(&try!(reader.read(0, size))))) } else
} pub fn write_dns_configuaration_data(client: ::std::sync::Arc<::std::sync::Mutex<::safe_core::client::Client>>, config: &Vec<DnsConfiguation>) -> Result<(), ::errors::DnsError> { let dir_helper = ::safe_nfs::helper::directory_helper::DirectoryHelper::new(client.clone()); let dir_listing = try!(dir_helper.get_configuration_directory_listing(DNS_CONFIG_DIR_NAME.to_string())); let file = try!(dir_listing.get_files().iter().find(|file| file.get_name() == DNS_CONFIG_FILE_NAME).ok_or(::errors::DnsError::DnsConfigFileNotFoundOrCorrupted)).clone(); let file_helper = ::safe_nfs::helper::file_helper::FileHelper::new(client.clone()); let mut writer = try!(file_helper.update_content(file, ::safe_nfs::helper::writer::Mode::Overwrite, dir_listing)); debug!("Writing dns configuration data..."); writer.write(&try!(serialise(&config)), 0); let _ = try!(writer.close()); Ok(()) } #[cfg(test)] mod test { use super::*; #[test] fn read_write_dns_configuration_file() { let client = ::std::sync::Arc::new(::std::sync::Mutex::new(unwrap_result!(::safe_core::utility::test_utils::get_client()))); // Initialise Dns Configuration File unwrap_result!(initialise_dns_configuaration(client.clone())); // Get the Stored Configurations let mut config_vec = unwrap_result!(get_dns_configuaration_data(client.clone())); assert_eq!(config_vec.len(), 0); let long_name = unwrap_result!(::safe_core::utility::generate_random_string(10)); // Put in the 1st record let mut keypair = ::sodiumoxide::crypto::box_::gen_keypair(); let config_0 = DnsConfiguation { long_name : long_name.clone(), encryption_keypair: (keypair.0, keypair.1), }; config_vec.push(config_0.clone()); unwrap_result!(write_dns_configuaration_data(client.clone(), &config_vec)); // Get the Stored Configurations config_vec = unwrap_result!(get_dns_configuaration_data(client.clone())); assert_eq!(config_vec.len(), 1); assert_eq!(config_vec[0], config_0); // Modify the content keypair = ::sodiumoxide::crypto::box_::gen_keypair(); let config_1 = DnsConfiguation { long_name : long_name, encryption_keypair: (keypair.0, keypair.1), }; config_vec[0] = config_1.clone(); unwrap_result!(write_dns_configuaration_data(client.clone(), &config_vec)); // Get the Stored Configurations config_vec = unwrap_result!(get_dns_configuaration_data(client.clone())); assert_eq!(config_vec.len(), 1); assert!(config_vec[0]!= config_0); assert_eq!(config_vec[0], config_1); // Delete Record config_vec.clear(); unwrap_result!(write_dns_configuaration_data(client.clone(), &config_vec)); // Get the Stored Configurations config_vec = unwrap_result!(get_dns_configuaration_data(client.clone())); assert_eq!(config_vec.len(), 0); } }
{ Ok(vec![]) }
conditional_block
dns_configuration.rs
// Copyright 2015 MaidSafe.net limited. // // This SAFE Network Software is licensed to you under (1) the MaidSafe.net Commercial License, // version 1.0 or later, or (2) The General Public License (GPL), version 3, depending on which // licence you accepted on initial access to the Software (the "Licences"). // // By contributing code to the SAFE Network Software, or to this project generally, you agree to be // bound by the terms of the MaidSafe Contributor Agreement, version 1.0. This, along with the // Licenses can be found in the root directory of this project at LICENSE, COPYING and CONTRIBUTOR. // // Unless required by applicable law or agreed to in writing, the SAFE Network Software distributed // under the GPL Licence is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. // // Please review the Licences for the specific language governing permissions and limitations // relating to use of the SAFE Network Software. use maidsafe_utilities::serialisation::{serialise, deserialise}; const DNS_CONFIG_DIR_NAME: &'static str = "DnsReservedDirectory"; const DNS_CONFIG_FILE_NAME: &'static str = "DnsConfigurationFile"; #[derive(Clone, Debug, Eq, PartialEq, RustcEncodable, RustcDecodable)] pub struct DnsConfiguation { pub long_name : String, pub encryption_keypair: (::sodiumoxide::crypto::box_::PublicKey, ::sodiumoxide::crypto::box_::SecretKey), } pub fn initialise_dns_configuaration(client: ::std::sync::Arc<::std::sync::Mutex<::safe_core::client::Client>>) -> Result<(), ::errors::DnsError> { let dir_helper = ::safe_nfs::helper::directory_helper::DirectoryHelper::new(client.clone()); let dir_listing = try!(dir_helper.get_configuration_directory_listing(DNS_CONFIG_DIR_NAME.to_string())); let file_helper = ::safe_nfs::helper::file_helper::FileHelper::new(client.clone()); match file_helper.create(DNS_CONFIG_FILE_NAME.to_string(), vec![], dir_listing) { Ok(writer) => { let _ = try!(writer.close()); Ok(()) }, Err(::safe_nfs::errors::NfsError::FileAlreadyExistsWithSameName) => Ok(()), Err(error) => Err(::errors::DnsError::from(error)), } } pub fn get_dns_configuaration_data(client: ::std::sync::Arc<::std::sync::Mutex<::safe_core::client::Client>>) -> Result<Vec<DnsConfiguation>, ::errors::DnsError> { let dir_helper = ::safe_nfs::helper::directory_helper::DirectoryHelper::new(client.clone()); let dir_listing = try!(dir_helper.get_configuration_directory_listing(DNS_CONFIG_DIR_NAME.to_string())); let file = try!(dir_listing.get_files().iter().find(|file| file.get_name() == DNS_CONFIG_FILE_NAME).ok_or(::errors::DnsError::DnsConfigFileNotFoundOrCorrupted)); let file_helper = ::safe_nfs::helper::file_helper::FileHelper::new(client.clone()); debug!("Reading dns configuration data from file..."); let mut reader = file_helper.read(file); let size = reader.size(); if size!= 0 { Ok(try!(deserialise(&try!(reader.read(0, size))))) } else { Ok(vec![]) } } pub fn write_dns_configuaration_data(client: ::std::sync::Arc<::std::sync::Mutex<::safe_core::client::Client>>, config: &Vec<DnsConfiguation>) -> Result<(), ::errors::DnsError> { let dir_helper = ::safe_nfs::helper::directory_helper::DirectoryHelper::new(client.clone()); let dir_listing = try!(dir_helper.get_configuration_directory_listing(DNS_CONFIG_DIR_NAME.to_string())); let file = try!(dir_listing.get_files().iter().find(|file| file.get_name() == DNS_CONFIG_FILE_NAME).ok_or(::errors::DnsError::DnsConfigFileNotFoundOrCorrupted)).clone(); let file_helper = 
::safe_nfs::helper::file_helper::FileHelper::new(client.clone()); let mut writer = try!(file_helper.update_content(file, ::safe_nfs::helper::writer::Mode::Overwrite, dir_listing)); debug!("Writing dns configuration data..."); writer.write(&try!(serialise(&config)), 0); let _ = try!(writer.close()); Ok(()) } #[cfg(test)] mod test { use super::*; #[test] fn read_write_dns_configuration_file() { let client = ::std::sync::Arc::new(::std::sync::Mutex::new(unwrap_result!(::safe_core::utility::test_utils::get_client()))); // Initialise Dns Configuration File unwrap_result!(initialise_dns_configuaration(client.clone())); // Get the Stored Configurations let mut config_vec = unwrap_result!(get_dns_configuaration_data(client.clone())); assert_eq!(config_vec.len(), 0); let long_name = unwrap_result!(::safe_core::utility::generate_random_string(10)); // Put in the 1st record let mut keypair = ::sodiumoxide::crypto::box_::gen_keypair(); let config_0 = DnsConfiguation { long_name : long_name.clone(), encryption_keypair: (keypair.0, keypair.1), }; config_vec.push(config_0.clone()); unwrap_result!(write_dns_configuaration_data(client.clone(), &config_vec)); // Get the Stored Configurations config_vec = unwrap_result!(get_dns_configuaration_data(client.clone())); assert_eq!(config_vec.len(), 1); assert_eq!(config_vec[0], config_0); // Modify the content keypair = ::sodiumoxide::crypto::box_::gen_keypair(); let config_1 = DnsConfiguation { long_name : long_name, encryption_keypair: (keypair.0, keypair.1), }; config_vec[0] = config_1.clone(); unwrap_result!(write_dns_configuaration_data(client.clone(), &config_vec)); // Get the Stored Configurations config_vec = unwrap_result!(get_dns_configuaration_data(client.clone())); assert_eq!(config_vec.len(), 1); assert!(config_vec[0]!= config_0); assert_eq!(config_vec[0], config_1); // Delete Record config_vec.clear(); unwrap_result!(write_dns_configuaration_data(client.clone(), &config_vec));
}
// Get the Stored Configurations config_vec = unwrap_result!(get_dns_configuaration_data(client.clone())); assert_eq!(config_vec.len(), 0); }
random_line_split
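Editor's note: the initialise_dns_configuaration records above all hinge on one pattern — creating the configuration file is idempotent, with FileAlreadyExistsWithSameName mapped to success. The sketch below illustrates the same pattern using only std::fs instead of the safe_nfs API; the function name and path handling are placeholders, not part of safe_dns or safe_nfs.

use std::fs::OpenOptions;
use std::io::{self, ErrorKind};
use std::path::Path;

// Create the configuration file only if it does not already exist; an
// AlreadyExists error is swallowed so repeated initialisation is a no-op,
// mirroring the FileAlreadyExistsWithSameName arm in the code above.
fn initialise_config_file(path: &Path) -> io::Result<()> {
    match OpenOptions::new().write(true).create_new(true).open(path) {
        Ok(_file) => Ok(()),
        Err(e) if e.kind() == ErrorKind::AlreadyExists => Ok(()),
        Err(e) => Err(e),
    }
}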
io.rs
// Copyright 2020 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use crate::sections::SectionOrderError; use thiserror::Error; pub use wasmbin_derive::Wasmbin; #[derive(Error, Debug)] pub enum DecodeErrorKind { #[error("{0}")] Io(#[from] std::io::Error), #[error("{0}")] Leb128(#[from] leb128::read::Error), #[error("{0}")] Utf8(#[from] std::string::FromUtf8Error), #[error("Could not recognise discriminant 0x{discriminant:X} for type {ty}")] UnsupportedDiscriminant { ty: &'static str, discriminant: i128, }, #[error("Invalid module magic signature [{actual:02X?}]")] InvalidMagic { actual: [u8; 8] }, #[error("Unrecognized data")] UnrecognizedData, #[error("{0}")] SectionOutOfOrder(#[from] SectionOrderError), } #[derive(Debug)] pub(crate) enum PathItem { Name(&'static str), Index(usize), Variant(&'static str), } #[derive(Error, Debug)] pub struct DecodeError { path: Vec<PathItem>, #[source] pub kind: DecodeErrorKind, } impl DecodeError { pub(crate) fn in_path(mut self, item: PathItem) -> Self { self.path.push(item); self } } impl<E: Into<DecodeErrorKind>> From<E> for DecodeError { fn from(err: E) -> DecodeError { DecodeError { path: vec![], kind: err.into(), } } } impl std::fmt::Display for DecodeError { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { f.write_str("(root)")?; for item in self.path.iter().rev() { match *item { PathItem::Name(name) => write!(f, ".{}", name), PathItem::Index(index) => write!(f, "[{}]", index), PathItem::Variant(variant) => write!(f, ":<{}>", variant), }?; } write!(f, ": {}", self.kind) } } impl From<std::num::TryFromIntError> for DecodeErrorKind { fn from(_err: std::num::TryFromIntError) -> Self { DecodeErrorKind::Leb128(leb128::read::Error::Overflow) } } impl From<std::convert::Infallible> for DecodeErrorKind { fn from(err: std::convert::Infallible) -> Self { match err {} }
pub trait Encode { fn encode(&self, w: &mut impl std::io::Write) -> std::io::Result<()>; } pub trait Decode: Sized { fn decode(r: &mut impl std::io::Read) -> Result<Self, DecodeError>; } macro_rules! encode_decode_as { ($ty:ty, { $($lhs:tt <=> $rhs:tt,)* } $(, |$other:pat| $other_handler:expr)?) => { impl crate::io::Encode for $ty { #[allow(unused_parens)] fn encode(&self, w: &mut impl std::io::Write) -> std::io::Result<()> { match *self { $($lhs => $rhs,)* }.encode(w) } } impl crate::io::Decode for $ty { #[allow(unused_parens)] fn decode(r: &mut impl std::io::Read) -> Result<Self, crate::io::DecodeError> { Ok(match crate::io::Decode::decode(r)? { $($rhs => $lhs,)* $($other => return $other_handler)? }) } } }; } pub trait DecodeWithDiscriminant: Decode { const NAME: &'static str; type Discriminant: Decode + Copy + Into<i128>; fn maybe_decode_with_discriminant( discriminant: Self::Discriminant, r: &mut impl std::io::Read, ) -> Result<Option<Self>, DecodeError>; fn decode_with_discriminant( discriminant: Self::Discriminant, r: &mut impl std::io::Read, ) -> Result<Self, DecodeError> { Self::maybe_decode_with_discriminant(discriminant, r)?.ok_or_else(|| { DecodeErrorKind::UnsupportedDiscriminant { ty: Self::NAME, discriminant: discriminant.into(), } .into() }) } fn decode_without_discriminant(r: &mut impl std::io::Read) -> Result<Self, DecodeError> { Self::decode_with_discriminant(Self::Discriminant::decode(r)?, r) } }
}
random_line_split
io.rs
// Copyright 2020 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use crate::sections::SectionOrderError; use thiserror::Error; pub use wasmbin_derive::Wasmbin; #[derive(Error, Debug)] pub enum DecodeErrorKind { #[error("{0}")] Io(#[from] std::io::Error), #[error("{0}")] Leb128(#[from] leb128::read::Error), #[error("{0}")] Utf8(#[from] std::string::FromUtf8Error), #[error("Could not recognise discriminant 0x{discriminant:X} for type {ty}")] UnsupportedDiscriminant { ty: &'static str, discriminant: i128, }, #[error("Invalid module magic signature [{actual:02X?}]")] InvalidMagic { actual: [u8; 8] }, #[error("Unrecognized data")] UnrecognizedData, #[error("{0}")] SectionOutOfOrder(#[from] SectionOrderError), } #[derive(Debug)] pub(crate) enum PathItem { Name(&'static str), Index(usize), Variant(&'static str), } #[derive(Error, Debug)] pub struct DecodeError { path: Vec<PathItem>, #[source] pub kind: DecodeErrorKind, } impl DecodeError { pub(crate) fn in_path(mut self, item: PathItem) -> Self { self.path.push(item); self } } impl<E: Into<DecodeErrorKind>> From<E> for DecodeError { fn from(err: E) -> DecodeError
} impl std::fmt::Display for DecodeError { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { f.write_str("(root)")?; for item in self.path.iter().rev() { match *item { PathItem::Name(name) => write!(f, ".{}", name), PathItem::Index(index) => write!(f, "[{}]", index), PathItem::Variant(variant) => write!(f, ":<{}>", variant), }?; } write!(f, ": {}", self.kind) } } impl From<std::num::TryFromIntError> for DecodeErrorKind { fn from(_err: std::num::TryFromIntError) -> Self { DecodeErrorKind::Leb128(leb128::read::Error::Overflow) } } impl From<std::convert::Infallible> for DecodeErrorKind { fn from(err: std::convert::Infallible) -> Self { match err {} } } pub trait Encode { fn encode(&self, w: &mut impl std::io::Write) -> std::io::Result<()>; } pub trait Decode: Sized { fn decode(r: &mut impl std::io::Read) -> Result<Self, DecodeError>; } macro_rules! encode_decode_as { ($ty:ty, { $($lhs:tt <=> $rhs:tt,)* } $(, |$other:pat| $other_handler:expr)?) => { impl crate::io::Encode for $ty { #[allow(unused_parens)] fn encode(&self, w: &mut impl std::io::Write) -> std::io::Result<()> { match *self { $($lhs => $rhs,)* }.encode(w) } } impl crate::io::Decode for $ty { #[allow(unused_parens)] fn decode(r: &mut impl std::io::Read) -> Result<Self, crate::io::DecodeError> { Ok(match crate::io::Decode::decode(r)? { $($rhs => $lhs,)* $($other => return $other_handler)? }) } } }; } pub trait DecodeWithDiscriminant: Decode { const NAME: &'static str; type Discriminant: Decode + Copy + Into<i128>; fn maybe_decode_with_discriminant( discriminant: Self::Discriminant, r: &mut impl std::io::Read, ) -> Result<Option<Self>, DecodeError>; fn decode_with_discriminant( discriminant: Self::Discriminant, r: &mut impl std::io::Read, ) -> Result<Self, DecodeError> { Self::maybe_decode_with_discriminant(discriminant, r)?.ok_or_else(|| { DecodeErrorKind::UnsupportedDiscriminant { ty: Self::NAME, discriminant: discriminant.into(), } .into() }) } fn decode_without_discriminant(r: &mut impl std::io::Read) -> Result<Self, DecodeError> { Self::decode_with_discriminant(Self::Discriminant::decode(r)?, r) } }
{ DecodeError { path: vec![], kind: err.into(), } }
identifier_body
io.rs
// Copyright 2020 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use crate::sections::SectionOrderError; use thiserror::Error; pub use wasmbin_derive::Wasmbin; #[derive(Error, Debug)] pub enum DecodeErrorKind { #[error("{0}")] Io(#[from] std::io::Error), #[error("{0}")] Leb128(#[from] leb128::read::Error), #[error("{0}")] Utf8(#[from] std::string::FromUtf8Error), #[error("Could not recognise discriminant 0x{discriminant:X} for type {ty}")] UnsupportedDiscriminant { ty: &'static str, discriminant: i128, }, #[error("Invalid module magic signature [{actual:02X?}]")] InvalidMagic { actual: [u8; 8] }, #[error("Unrecognized data")] UnrecognizedData, #[error("{0}")] SectionOutOfOrder(#[from] SectionOrderError), } #[derive(Debug)] pub(crate) enum PathItem { Name(&'static str), Index(usize), Variant(&'static str), } #[derive(Error, Debug)] pub struct DecodeError { path: Vec<PathItem>, #[source] pub kind: DecodeErrorKind, } impl DecodeError { pub(crate) fn in_path(mut self, item: PathItem) -> Self { self.path.push(item); self } } impl<E: Into<DecodeErrorKind>> From<E> for DecodeError { fn from(err: E) -> DecodeError { DecodeError { path: vec![], kind: err.into(), } } } impl std::fmt::Display for DecodeError { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { f.write_str("(root)")?; for item in self.path.iter().rev() { match *item { PathItem::Name(name) => write!(f, ".{}", name), PathItem::Index(index) => write!(f, "[{}]", index), PathItem::Variant(variant) => write!(f, ":<{}>", variant), }?; } write!(f, ": {}", self.kind) } } impl From<std::num::TryFromIntError> for DecodeErrorKind { fn
(_err: std::num::TryFromIntError) -> Self { DecodeErrorKind::Leb128(leb128::read::Error::Overflow) } } impl From<std::convert::Infallible> for DecodeErrorKind { fn from(err: std::convert::Infallible) -> Self { match err {} } } pub trait Encode { fn encode(&self, w: &mut impl std::io::Write) -> std::io::Result<()>; } pub trait Decode: Sized { fn decode(r: &mut impl std::io::Read) -> Result<Self, DecodeError>; } macro_rules! encode_decode_as { ($ty:ty, { $($lhs:tt <=> $rhs:tt,)* } $(, |$other:pat| $other_handler:expr)?) => { impl crate::io::Encode for $ty { #[allow(unused_parens)] fn encode(&self, w: &mut impl std::io::Write) -> std::io::Result<()> { match *self { $($lhs => $rhs,)* }.encode(w) } } impl crate::io::Decode for $ty { #[allow(unused_parens)] fn decode(r: &mut impl std::io::Read) -> Result<Self, crate::io::DecodeError> { Ok(match crate::io::Decode::decode(r)? { $($rhs => $lhs,)* $($other => return $other_handler)? }) } } }; } pub trait DecodeWithDiscriminant: Decode { const NAME: &'static str; type Discriminant: Decode + Copy + Into<i128>; fn maybe_decode_with_discriminant( discriminant: Self::Discriminant, r: &mut impl std::io::Read, ) -> Result<Option<Self>, DecodeError>; fn decode_with_discriminant( discriminant: Self::Discriminant, r: &mut impl std::io::Read, ) -> Result<Self, DecodeError> { Self::maybe_decode_with_discriminant(discriminant, r)?.ok_or_else(|| { DecodeErrorKind::UnsupportedDiscriminant { ty: Self::NAME, discriminant: discriminant.into(), } .into() }) } fn decode_without_discriminant(r: &mut impl std::io::Read) -> Result<Self, DecodeError> { Self::decode_with_discriminant(Self::Discriminant::decode(r)?, r) } }
from
identifier_name
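Editor's note: the io.rs records above define the Encode/Decode traits plus a DecodeError whose blanket From impl lets `?` lift any error convertible into DecodeErrorKind. A minimal sketch of how a type would plug into those traits is shown below; Flag is an invented example type, not something from the crate, and the code assumes the Encode, Decode and DecodeError items defined above are in scope.

// Hypothetical single-byte value implementing the traits from io.rs above.
struct Flag(u8);

impl Encode for Flag {
    fn encode(&self, w: &mut impl std::io::Write) -> std::io::Result<()> {
        w.write_all(&[self.0])
    }
}

impl Decode for Flag {
    fn decode(r: &mut impl std::io::Read) -> Result<Self, DecodeError> {
        let mut buf = [0u8; 1];
        // std::io::Error converts into DecodeError through DecodeErrorKind's
        // #[from] impl and the blanket From<E> impl above, so `?` just works.
        r.read_exact(&mut buf)?;
        Ok(Flag(buf[0]))
    }
}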
domtokenlist.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use dom::attr::{Attr, AttrHelpers}; use dom::bindings::codegen::Bindings::DOMTokenListBinding; use dom::bindings::codegen::Bindings::DOMTokenListBinding::DOMTokenListMethods; use dom::bindings::error::{Fallible, InvalidCharacter, Syntax}; use dom::bindings::global::Window; use dom::bindings::js::{JS, JSRef, Temporary, OptionalRootable}; use dom::bindings::utils::{Reflector, Reflectable, reflect_dom_object}; use dom::element::{Element, AttributeHandlers}; use dom::node::window_from_node; use servo_util::atom::Atom; use servo_util::namespace::Null; use servo_util::str::{DOMString, HTML_SPACE_CHARACTERS}; #[deriving(Encodable)] #[must_root] pub struct DOMTokenList { reflector_: Reflector, element: JS<Element>, local_name: &'static str, } impl DOMTokenList { pub fn new_inherited(element: JSRef<Element>, local_name: &'static str) -> DOMTokenList { DOMTokenList { reflector_: Reflector::new(), element: JS::from_rooted(element), local_name: local_name, } } pub fn new(element: JSRef<Element>, local_name: &'static str) -> Temporary<DOMTokenList> { let window = window_from_node(element).root(); reflect_dom_object(box DOMTokenList::new_inherited(element, local_name), &Window(*window), DOMTokenListBinding::Wrap) } } impl Reflectable for DOMTokenList { fn reflector<'a>(&'a self) -> &'a Reflector { &self.reflector_ } } trait PrivateDOMTokenListHelpers { fn attribute(self) -> Option<Temporary<Attr>>; fn check_token_exceptions<'a>(self, token: &'a str) -> Fallible<&'a str>; } impl<'a> PrivateDOMTokenListHelpers for JSRef<'a, DOMTokenList> { fn attribute(self) -> Option<Temporary<Attr>> { let element = self.element.root(); element.deref().get_attribute(Null, self.local_name) } fn check_token_exceptions<'a>(self, token: &'a str) -> Fallible<&'a str> { match token { "" => Err(Syntax), token if token.find(HTML_SPACE_CHARACTERS).is_some() => Err(InvalidCharacter), token => Ok(token) } } } // http://dom.spec.whatwg.org/#domtokenlist impl<'a> DOMTokenListMethods for JSRef<'a, DOMTokenList> { // http://dom.spec.whatwg.org/#dom-domtokenlist-length fn
(self) -> u32 { self.attribute().root().map(|attr| { attr.value().tokens().map(|tokens| tokens.len()).unwrap_or(0) }).unwrap_or(0) as u32 } // http://dom.spec.whatwg.org/#dom-domtokenlist-item fn Item(self, index: u32) -> Option<DOMString> { self.attribute().root().and_then(|attr| attr.value().tokens().and_then(|mut tokens| { tokens.idx(index as uint).map(|token| token.as_slice().to_string()) })) } fn IndexedGetter(self, index: u32, found: &mut bool) -> Option<DOMString> { let item = self.Item(index); *found = item.is_some(); item } // http://dom.spec.whatwg.org/#dom-domtokenlist-contains fn Contains(self, token: DOMString) -> Fallible<bool> { self.check_token_exceptions(token.as_slice()).map(|slice| { self.attribute().root().and_then(|attr| attr.value().tokens().map(|mut tokens| { let atom = Atom::from_slice(slice); tokens.any(|token| *token == atom) })).unwrap_or(false) }) } }
Length
identifier_name
domtokenlist.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use dom::attr::{Attr, AttrHelpers}; use dom::bindings::codegen::Bindings::DOMTokenListBinding; use dom::bindings::codegen::Bindings::DOMTokenListBinding::DOMTokenListMethods; use dom::bindings::error::{Fallible, InvalidCharacter, Syntax}; use dom::bindings::global::Window; use dom::bindings::js::{JS, JSRef, Temporary, OptionalRootable}; use dom::bindings::utils::{Reflector, Reflectable, reflect_dom_object}; use dom::element::{Element, AttributeHandlers}; use dom::node::window_from_node; use servo_util::atom::Atom; use servo_util::namespace::Null; use servo_util::str::{DOMString, HTML_SPACE_CHARACTERS}; #[deriving(Encodable)] #[must_root] pub struct DOMTokenList { reflector_: Reflector, element: JS<Element>, local_name: &'static str, } impl DOMTokenList { pub fn new_inherited(element: JSRef<Element>, local_name: &'static str) -> DOMTokenList { DOMTokenList { reflector_: Reflector::new(), element: JS::from_rooted(element), local_name: local_name, } } pub fn new(element: JSRef<Element>, local_name: &'static str) -> Temporary<DOMTokenList> { let window = window_from_node(element).root(); reflect_dom_object(box DOMTokenList::new_inherited(element, local_name), &Window(*window), DOMTokenListBinding::Wrap) } } impl Reflectable for DOMTokenList { fn reflector<'a>(&'a self) -> &'a Reflector { &self.reflector_ } } trait PrivateDOMTokenListHelpers { fn attribute(self) -> Option<Temporary<Attr>>; fn check_token_exceptions<'a>(self, token: &'a str) -> Fallible<&'a str>; } impl<'a> PrivateDOMTokenListHelpers for JSRef<'a, DOMTokenList> { fn attribute(self) -> Option<Temporary<Attr>> { let element = self.element.root(); element.deref().get_attribute(Null, self.local_name) } fn check_token_exceptions<'a>(self, token: &'a str) -> Fallible<&'a str> { match token { "" => Err(Syntax), token if token.find(HTML_SPACE_CHARACTERS).is_some() => Err(InvalidCharacter), token => Ok(token) } } }
impl<'a> DOMTokenListMethods for JSRef<'a, DOMTokenList> { // http://dom.spec.whatwg.org/#dom-domtokenlist-length fn Length(self) -> u32 { self.attribute().root().map(|attr| { attr.value().tokens().map(|tokens| tokens.len()).unwrap_or(0) }).unwrap_or(0) as u32 } // http://dom.spec.whatwg.org/#dom-domtokenlist-item fn Item(self, index: u32) -> Option<DOMString> { self.attribute().root().and_then(|attr| attr.value().tokens().and_then(|mut tokens| { tokens.idx(index as uint).map(|token| token.as_slice().to_string()) })) } fn IndexedGetter(self, index: u32, found: &mut bool) -> Option<DOMString> { let item = self.Item(index); *found = item.is_some(); item } // http://dom.spec.whatwg.org/#dom-domtokenlist-contains fn Contains(self, token: DOMString) -> Fallible<bool> { self.check_token_exceptions(token.as_slice()).map(|slice| { self.attribute().root().and_then(|attr| attr.value().tokens().map(|mut tokens| { let atom = Atom::from_slice(slice); tokens.any(|token| *token == atom) })).unwrap_or(false) }) } }
// http://dom.spec.whatwg.org/#domtokenlist
random_line_split
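Editor's note: the Contains method in the domtokenlist.rs records rejects empty tokens (SyntaxError) and tokens containing HTML whitespace (InvalidCharacterError) before matching against the space-separated attribute value. A std-only approximation of those semantics, independent of Servo's DOM types and with made-up names, might look like this:

fn token_list_contains(attr_value: &str, token: &str) -> Result<bool, &'static str> {
    if token.is_empty() {
        return Err("SyntaxError: empty token");
    }
    // HTML space characters are space, tab, LF, FF and CR, which is exactly
    // the set char::is_ascii_whitespace covers.
    if token.chars().any(|c| c.is_ascii_whitespace()) {
        return Err("InvalidCharacterError: token contains whitespace");
    }
    Ok(attr_value.split_ascii_whitespace().any(|t| t == token))
}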
crate-method-reexport-grrrrrrr.rs
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![feature(managed_boxes)] // This is a regression test that the metadata for the // name_pool::methods impl in the other crate is reachable from this // crate. // aux-build:crate-method-reexport-grrrrrrr2.rs extern crate crate_method_reexport_grrrrrrr2; use std::gc::GC; pub fn main() { use crate_method_reexport_grrrrrrr2::rust::add; use crate_method_reexport_grrrrrrr2::rust::cx;
let x = box(GC) (); x.cx(); let y = (); y.add("hi".to_string()); }
random_line_split
crate-method-reexport-grrrrrrr.rs
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![feature(managed_boxes)] // This is a regression test that the metadata for the // name_pool::methods impl in the other crate is reachable from this // crate. // aux-build:crate-method-reexport-grrrrrrr2.rs extern crate crate_method_reexport_grrrrrrr2; use std::gc::GC; pub fn main()
{ use crate_method_reexport_grrrrrrr2::rust::add; use crate_method_reexport_grrrrrrr2::rust::cx; let x = box(GC) (); x.cx(); let y = (); y.add("hi".to_string()); }
identifier_body
crate-method-reexport-grrrrrrr.rs
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![feature(managed_boxes)] // This is a regression test that the metadata for the // name_pool::methods impl in the other crate is reachable from this // crate. // aux-build:crate-method-reexport-grrrrrrr2.rs extern crate crate_method_reexport_grrrrrrr2; use std::gc::GC; pub fn
() { use crate_method_reexport_grrrrrrr2::rust::add; use crate_method_reexport_grrrrrrr2::rust::cx; let x = box(GC) (); x.cx(); let y = (); y.add("hi".to_string()); }
main
identifier_name
parse.rs
//! Parse byte iterators to float. #![doc(hidden)] #[cfg(feature = "compact")] use crate::bellerophon::bellerophon; use crate::extended_float::{extended_to_float, ExtendedFloat}; #[cfg(not(feature = "compact"))] use crate::lemire::lemire; use crate::num::Float; use crate::number::Number; use crate::slow::slow; /// Try to parse the significant digits quickly. /// /// This attempts a very quick parse, to deal with common cases. /// /// * `integer` - Slice containing the integer digits. /// * `fraction` - Slice containing the fraction digits. #[inline] fn parse_number_fast<'a, Iter1, Iter2>( integer: Iter1, fraction: Iter2, exponent: i32, ) -> Option<Number> where Iter1: Iterator<Item = &'a u8>, Iter2: Iterator<Item = &'a u8>, { let mut num = Number::default(); let mut integer_count: usize = 0; let mut fraction_count: usize = 0; for &c in integer { integer_count += 1; let digit = c - b'0'; num.mantissa = num.mantissa.wrapping_mul(10).wrapping_add(digit as u64); } for &c in fraction { fraction_count += 1; let digit = c - b'0'; num.mantissa = num.mantissa.wrapping_mul(10).wrapping_add(digit as u64); } if integer_count + fraction_count <= 19 { // Can't overflow, since must be <= 19. num.exponent = exponent.saturating_sub(fraction_count as i32); Some(num) } else { None } } /// Parse the significant digits of the float and adjust the exponent. /// /// * `integer` - Slice containing the integer digits. /// * `fraction` - Slice containing the fraction digits. #[inline] fn parse_number<'a, Iter1, Iter2>(mut integer: Iter1, mut fraction: Iter2, exponent: i32) -> Number where Iter1: Iterator<Item = &'a u8> + Clone, Iter2: Iterator<Item = &'a u8> + Clone, { // NOTE: for performance, we do this in 2 passes: if let Some(num) = parse_number_fast(integer.clone(), fraction.clone(), exponent) { return num; } // Can only add 19 digits. let mut num = Number::default(); let mut count = 0; while let Some(&c) = integer.next() { count += 1; if count == 20 { // Only the integer digits affect the exponent. num.many_digits = true; num.exponent = exponent.saturating_add(into_i32(1 + integer.count())); return num; } else { let digit = c - b'0'; num.mantissa = num.mantissa * 10 + digit as u64; } } // Skip leading fraction zeros. // This is required otherwise we might have a 0 mantissa and many digits. let mut fraction_count: usize = 0; if count == 0 { for &c in &mut fraction { fraction_count += 1; if c!= b'0' { count += 1; let digit = c - b'0'; num.mantissa = num.mantissa * 10 + digit as u64; break; } } } for c in fraction { fraction_count += 1; count += 1; if count == 20 { num.many_digits = true; // This can't wrap, since we have at most 20 digits. // We've adjusted the exponent too high by `fraction_count - 1`. // Note: -1 is due to incrementing this loop iteration, which we // didn't use. num.exponent = exponent.saturating_sub(fraction_count as i32 - 1); return num; } else { let digit = c - b'0'; num.mantissa = num.mantissa * 10 + digit as u64; } } // No truncated digits: easy. // Cannot overflow: <= 20 digits. num.exponent = exponent.saturating_sub(fraction_count as i32); num } /// Parse float from extracted float components. /// /// * `integer` - Cloneable, forward iterator over integer digits. /// * `fraction` - Cloneable, forward iterator over integer digits. /// * `exponent` - Parsed, 32-bit exponent. /// /// # Preconditions /// 1. The integer should not have leading zeros. /// 2. The fraction should not have trailing zeros. /// 3. 
All bytes in `integer` and `fraction` should be valid digits, /// in the range [`b'0', b'9']. /// /// # Panics /// /// Although passing garbage input will not cause memory safety issues, /// it is very likely to cause a panic with a large number of digits, or /// in debug mode. The big-integer arithmetic without the `alloc` feature /// assumes a maximum, fixed-width input, which assumes at maximum a /// value of `10^(769 + 342)`, or ~4000 bits of storage. Passing in /// nonsensical digits may require up to ~6000 bits of storage, which will /// panic when attempting to add it to the big integer. It is therefore /// up to the caller to validate this input. /// /// We cannot efficiently remove trailing zeros while only accepting a /// forward iterator. pub fn parse_float<'a, F, Iter1, Iter2>(integer: Iter1, fraction: Iter2, exponent: i32) -> F where F: Float, Iter1: Iterator<Item = &'a u8> + Clone, Iter2: Iterator<Item = &'a u8> + Clone, { // Parse the mantissa and attempt the fast and moderate-path algorithms. let num = parse_number(integer.clone(), fraction.clone(), exponent); // Try the fast-path algorithm. if let Some(value) = num.try_fast_path()
// Now try the moderate path algorithm. let mut fp = moderate_path::<F>(&num); if fp.exp < 0 { // Undo the invalid extended float biasing. fp.exp -= F::INVALID_FP; fp = slow::<F, _, _>(num, fp, integer, fraction); } // Unable to correctly round the float using the fast or moderate algorithms. // Fallback to a slower, but always correct algorithm. If we have // lossy, we can't be here. extended_to_float::<F>(fp) } /// Wrapper for different moderate-path algorithms. /// A return exponent of `-1` indicates an invalid value. #[inline] pub fn moderate_path<F: Float>(num: &Number) -> ExtendedFloat { #[cfg(not(feature = "compact"))] return lemire::<F>(num); #[cfg(feature = "compact")] return bellerophon::<F>(num); } /// Convert usize into i32 without overflow. /// /// This is needed to ensure when adjusting the exponent relative to /// the mantissa we do not overflow for comically-long exponents. #[inline] fn into_i32(value: usize) -> i32 { if value > i32::max_value() as usize { i32::max_value() } else { value as i32 } } // Add digit to mantissa. #[inline] pub fn add_digit(value: u64, digit: u8) -> Option<u64> { value.checked_mul(10)?.checked_add(digit as u64) }
{ return value; }
conditional_block
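Editor's note: the parse.rs records describe a three-tier strategy — a fast path via num.try_fast_path(), a moderate path (lemire or bellerophon), and a slow fallback. The fast path is the classic Clinger shortcut: if the decimal mantissa fits in f64's 53-bit significand and the power of ten is itself exactly representable, a single multiply or divide is already correctly rounded. The sketch below only illustrates that idea with invented names; it is not the crate's actual implementation.

// Powers of ten up to 1e22 are exactly representable in an f64.
const POW10: [f64; 23] = [
    1e0, 1e1, 1e2, 1e3, 1e4, 1e5, 1e6, 1e7, 1e8, 1e9, 1e10, 1e11,
    1e12, 1e13, 1e14, 1e15, 1e16, 1e17, 1e18, 1e19, 1e20, 1e21, 1e22,
];

// Returns Some(value) only when a single rounding step is provably exact:
// the mantissa must fit in the 53-bit significand and the exponent must stay
// within the exactly representable powers of ten.
fn clinger_fast_path(mantissa: u64, exp10: i32) -> Option<f64> {
    if mantissa > (1u64 << 53) || exp10 < -22 || exp10 > 22 {
        return None;
    }
    let m = mantissa as f64;
    Some(if exp10 >= 0 {
        m * POW10[exp10 as usize]
    } else {
        m / POW10[(-exp10) as usize]
    })
}

For example, clinger_fast_path(12345, -2) yields the same correctly rounded f64 as parsing "123.45"; inputs outside the window return None and would fall through to the slower paths.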
parse.rs
//! Parse byte iterators to float. #![doc(hidden)] #[cfg(feature = "compact")] use crate::bellerophon::bellerophon; use crate::extended_float::{extended_to_float, ExtendedFloat}; #[cfg(not(feature = "compact"))] use crate::lemire::lemire; use crate::num::Float; use crate::number::Number; use crate::slow::slow; /// Try to parse the significant digits quickly. /// /// This attempts a very quick parse, to deal with common cases. /// /// * `integer` - Slice containing the integer digits. /// * `fraction` - Slice containing the fraction digits. #[inline] fn parse_number_fast<'a, Iter1, Iter2>( integer: Iter1, fraction: Iter2, exponent: i32, ) -> Option<Number> where Iter1: Iterator<Item = &'a u8>, Iter2: Iterator<Item = &'a u8>, { let mut num = Number::default(); let mut integer_count: usize = 0; let mut fraction_count: usize = 0; for &c in integer { integer_count += 1; let digit = c - b'0'; num.mantissa = num.mantissa.wrapping_mul(10).wrapping_add(digit as u64); } for &c in fraction { fraction_count += 1; let digit = c - b'0'; num.mantissa = num.mantissa.wrapping_mul(10).wrapping_add(digit as u64); } if integer_count + fraction_count <= 19 { // Can't overflow, since must be <= 19. num.exponent = exponent.saturating_sub(fraction_count as i32); Some(num) } else { None } } /// Parse the significant digits of the float and adjust the exponent. /// /// * `integer` - Slice containing the integer digits. /// * `fraction` - Slice containing the fraction digits. #[inline] fn parse_number<'a, Iter1, Iter2>(mut integer: Iter1, mut fraction: Iter2, exponent: i32) -> Number where Iter1: Iterator<Item = &'a u8> + Clone, Iter2: Iterator<Item = &'a u8> + Clone, { // NOTE: for performance, we do this in 2 passes: if let Some(num) = parse_number_fast(integer.clone(), fraction.clone(), exponent) { return num; } // Can only add 19 digits. let mut num = Number::default(); let mut count = 0; while let Some(&c) = integer.next() { count += 1; if count == 20 { // Only the integer digits affect the exponent. num.many_digits = true; num.exponent = exponent.saturating_add(into_i32(1 + integer.count())); return num; } else { let digit = c - b'0'; num.mantissa = num.mantissa * 10 + digit as u64; } } // Skip leading fraction zeros. // This is required otherwise we might have a 0 mantissa and many digits. let mut fraction_count: usize = 0; if count == 0 { for &c in &mut fraction { fraction_count += 1; if c!= b'0' {
let digit = c - b'0'; num.mantissa = num.mantissa * 10 + digit as u64; break; } } } for c in fraction { fraction_count += 1; count += 1; if count == 20 { num.many_digits = true; // This can't wrap, since we have at most 20 digits. // We've adjusted the exponent too high by `fraction_count - 1`. // Note: -1 is due to incrementing this loop iteration, which we // didn't use. num.exponent = exponent.saturating_sub(fraction_count as i32 - 1); return num; } else { let digit = c - b'0'; num.mantissa = num.mantissa * 10 + digit as u64; } } // No truncated digits: easy. // Cannot overflow: <= 20 digits. num.exponent = exponent.saturating_sub(fraction_count as i32); num } /// Parse float from extracted float components. /// /// * `integer` - Cloneable, forward iterator over integer digits. /// * `fraction` - Cloneable, forward iterator over integer digits. /// * `exponent` - Parsed, 32-bit exponent. /// /// # Preconditions /// 1. The integer should not have leading zeros. /// 2. The fraction should not have trailing zeros. /// 3. All bytes in `integer` and `fraction` should be valid digits, /// in the range [`b'0', b'9']. /// /// # Panics /// /// Although passing garbage input will not cause memory safety issues, /// it is very likely to cause a panic with a large number of digits, or /// in debug mode. The big-integer arithmetic without the `alloc` feature /// assumes a maximum, fixed-width input, which assumes at maximum a /// value of `10^(769 + 342)`, or ~4000 bits of storage. Passing in /// nonsensical digits may require up to ~6000 bits of storage, which will /// panic when attempting to add it to the big integer. It is therefore /// up to the caller to validate this input. /// /// We cannot efficiently remove trailing zeros while only accepting a /// forward iterator. pub fn parse_float<'a, F, Iter1, Iter2>(integer: Iter1, fraction: Iter2, exponent: i32) -> F where F: Float, Iter1: Iterator<Item = &'a u8> + Clone, Iter2: Iterator<Item = &'a u8> + Clone, { // Parse the mantissa and attempt the fast and moderate-path algorithms. let num = parse_number(integer.clone(), fraction.clone(), exponent); // Try the fast-path algorithm. if let Some(value) = num.try_fast_path() { return value; } // Now try the moderate path algorithm. let mut fp = moderate_path::<F>(&num); if fp.exp < 0 { // Undo the invalid extended float biasing. fp.exp -= F::INVALID_FP; fp = slow::<F, _, _>(num, fp, integer, fraction); } // Unable to correctly round the float using the fast or moderate algorithms. // Fallback to a slower, but always correct algorithm. If we have // lossy, we can't be here. extended_to_float::<F>(fp) } /// Wrapper for different moderate-path algorithms. /// A return exponent of `-1` indicates an invalid value. #[inline] pub fn moderate_path<F: Float>(num: &Number) -> ExtendedFloat { #[cfg(not(feature = "compact"))] return lemire::<F>(num); #[cfg(feature = "compact")] return bellerophon::<F>(num); } /// Convert usize into i32 without overflow. /// /// This is needed to ensure when adjusting the exponent relative to /// the mantissa we do not overflow for comically-long exponents. #[inline] fn into_i32(value: usize) -> i32 { if value > i32::max_value() as usize { i32::max_value() } else { value as i32 } } // Add digit to mantissa. #[inline] pub fn add_digit(value: u64, digit: u8) -> Option<u64> { value.checked_mul(10)?.checked_add(digit as u64) }
count += 1;
random_line_split
parse.rs
//! Parse byte iterators to float. #![doc(hidden)] #[cfg(feature = "compact")] use crate::bellerophon::bellerophon; use crate::extended_float::{extended_to_float, ExtendedFloat}; #[cfg(not(feature = "compact"))] use crate::lemire::lemire; use crate::num::Float; use crate::number::Number; use crate::slow::slow; /// Try to parse the significant digits quickly. /// /// This attempts a very quick parse, to deal with common cases. /// /// * `integer` - Slice containing the integer digits. /// * `fraction` - Slice containing the fraction digits. #[inline] fn parse_number_fast<'a, Iter1, Iter2>( integer: Iter1, fraction: Iter2, exponent: i32, ) -> Option<Number> where Iter1: Iterator<Item = &'a u8>, Iter2: Iterator<Item = &'a u8>, { let mut num = Number::default(); let mut integer_count: usize = 0; let mut fraction_count: usize = 0; for &c in integer { integer_count += 1; let digit = c - b'0'; num.mantissa = num.mantissa.wrapping_mul(10).wrapping_add(digit as u64); } for &c in fraction { fraction_count += 1; let digit = c - b'0'; num.mantissa = num.mantissa.wrapping_mul(10).wrapping_add(digit as u64); } if integer_count + fraction_count <= 19 { // Can't overflow, since must be <= 19. num.exponent = exponent.saturating_sub(fraction_count as i32); Some(num) } else { None } } /// Parse the significant digits of the float and adjust the exponent. /// /// * `integer` - Slice containing the integer digits. /// * `fraction` - Slice containing the fraction digits. #[inline] fn parse_number<'a, Iter1, Iter2>(mut integer: Iter1, mut fraction: Iter2, exponent: i32) -> Number where Iter1: Iterator<Item = &'a u8> + Clone, Iter2: Iterator<Item = &'a u8> + Clone, { // NOTE: for performance, we do this in 2 passes: if let Some(num) = parse_number_fast(integer.clone(), fraction.clone(), exponent) { return num; } // Can only add 19 digits. let mut num = Number::default(); let mut count = 0; while let Some(&c) = integer.next() { count += 1; if count == 20 { // Only the integer digits affect the exponent. num.many_digits = true; num.exponent = exponent.saturating_add(into_i32(1 + integer.count())); return num; } else { let digit = c - b'0'; num.mantissa = num.mantissa * 10 + digit as u64; } } // Skip leading fraction zeros. // This is required otherwise we might have a 0 mantissa and many digits. let mut fraction_count: usize = 0; if count == 0 { for &c in &mut fraction { fraction_count += 1; if c!= b'0' { count += 1; let digit = c - b'0'; num.mantissa = num.mantissa * 10 + digit as u64; break; } } } for c in fraction { fraction_count += 1; count += 1; if count == 20 { num.many_digits = true; // This can't wrap, since we have at most 20 digits. // We've adjusted the exponent too high by `fraction_count - 1`. // Note: -1 is due to incrementing this loop iteration, which we // didn't use. num.exponent = exponent.saturating_sub(fraction_count as i32 - 1); return num; } else { let digit = c - b'0'; num.mantissa = num.mantissa * 10 + digit as u64; } } // No truncated digits: easy. // Cannot overflow: <= 20 digits. num.exponent = exponent.saturating_sub(fraction_count as i32); num } /// Parse float from extracted float components. /// /// * `integer` - Cloneable, forward iterator over integer digits. /// * `fraction` - Cloneable, forward iterator over integer digits. /// * `exponent` - Parsed, 32-bit exponent. /// /// # Preconditions /// 1. The integer should not have leading zeros. /// 2. The fraction should not have trailing zeros. /// 3. 
All bytes in `integer` and `fraction` should be valid digits, /// in the range [`b'0', b'9']. /// /// # Panics /// /// Although passing garbage input will not cause memory safety issues, /// it is very likely to cause a panic with a large number of digits, or /// in debug mode. The big-integer arithmetic without the `alloc` feature /// assumes a maximum, fixed-width input, which assumes at maximum a /// value of `10^(769 + 342)`, or ~4000 bits of storage. Passing in /// nonsensical digits may require up to ~6000 bits of storage, which will /// panic when attempting to add it to the big integer. It is therefore /// up to the caller to validate this input. /// /// We cannot efficiently remove trailing zeros while only accepting a /// forward iterator. pub fn parse_float<'a, F, Iter1, Iter2>(integer: Iter1, fraction: Iter2, exponent: i32) -> F where F: Float, Iter1: Iterator<Item = &'a u8> + Clone, Iter2: Iterator<Item = &'a u8> + Clone, { // Parse the mantissa and attempt the fast and moderate-path algorithms. let num = parse_number(integer.clone(), fraction.clone(), exponent); // Try the fast-path algorithm. if let Some(value) = num.try_fast_path() { return value; } // Now try the moderate path algorithm. let mut fp = moderate_path::<F>(&num); if fp.exp < 0 { // Undo the invalid extended float biasing. fp.exp -= F::INVALID_FP; fp = slow::<F, _, _>(num, fp, integer, fraction); } // Unable to correctly round the float using the fast or moderate algorithms. // Fallback to a slower, but always correct algorithm. If we have // lossy, we can't be here. extended_to_float::<F>(fp) } /// Wrapper for different moderate-path algorithms. /// A return exponent of `-1` indicates an invalid value. #[inline] pub fn moderate_path<F: Float>(num: &Number) -> ExtendedFloat { #[cfg(not(feature = "compact"))] return lemire::<F>(num); #[cfg(feature = "compact")] return bellerophon::<F>(num); } /// Convert usize into i32 without overflow. /// /// This is needed to ensure when adjusting the exponent relative to /// the mantissa we do not overflow for comically-long exponents. #[inline] fn
(value: usize) -> i32 { if value > i32::max_value() as usize { i32::max_value() } else { value as i32 } } // Add digit to mantissa. #[inline] pub fn add_digit(value: u64, digit: u8) -> Option<u64> { value.checked_mul(10)?.checked_add(digit as u64) }
into_i32
identifier_name
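Editor's note: into_i32 and the saturating_add/saturating_sub calls in parse_number exist for the same reason — a pathological input can carry far more digits than i32 can count, so the exponent adjustment must clamp rather than overflow. A tiny, purely illustrative demonstration:

fn main() {
    let exponent = i32::MIN;
    let truncated_fraction_digits = 25_i32;
    // A plain `-` here would overflow and panic in a debug build; saturating_sub
    // pins the result at i32::MIN, far below any exponent a finite f64 can use,
    // so the number still parses as zero/subnormal instead of misbehaving.
    assert_eq!(exponent.saturating_sub(truncated_fraction_digits), i32::MIN);
}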
parse.rs
//! Parse byte iterators to float. #![doc(hidden)] #[cfg(feature = "compact")] use crate::bellerophon::bellerophon; use crate::extended_float::{extended_to_float, ExtendedFloat}; #[cfg(not(feature = "compact"))] use crate::lemire::lemire; use crate::num::Float; use crate::number::Number; use crate::slow::slow; /// Try to parse the significant digits quickly. /// /// This attempts a very quick parse, to deal with common cases. /// /// * `integer` - Slice containing the integer digits. /// * `fraction` - Slice containing the fraction digits. #[inline] fn parse_number_fast<'a, Iter1, Iter2>( integer: Iter1, fraction: Iter2, exponent: i32, ) -> Option<Number> where Iter1: Iterator<Item = &'a u8>, Iter2: Iterator<Item = &'a u8>, { let mut num = Number::default(); let mut integer_count: usize = 0; let mut fraction_count: usize = 0; for &c in integer { integer_count += 1; let digit = c - b'0'; num.mantissa = num.mantissa.wrapping_mul(10).wrapping_add(digit as u64); } for &c in fraction { fraction_count += 1; let digit = c - b'0'; num.mantissa = num.mantissa.wrapping_mul(10).wrapping_add(digit as u64); } if integer_count + fraction_count <= 19 { // Can't overflow, since must be <= 19. num.exponent = exponent.saturating_sub(fraction_count as i32); Some(num) } else { None } } /// Parse the significant digits of the float and adjust the exponent. /// /// * `integer` - Slice containing the integer digits. /// * `fraction` - Slice containing the fraction digits. #[inline] fn parse_number<'a, Iter1, Iter2>(mut integer: Iter1, mut fraction: Iter2, exponent: i32) -> Number where Iter1: Iterator<Item = &'a u8> + Clone, Iter2: Iterator<Item = &'a u8> + Clone, { // NOTE: for performance, we do this in 2 passes: if let Some(num) = parse_number_fast(integer.clone(), fraction.clone(), exponent) { return num; } // Can only add 19 digits. let mut num = Number::default(); let mut count = 0; while let Some(&c) = integer.next() { count += 1; if count == 20 { // Only the integer digits affect the exponent. num.many_digits = true; num.exponent = exponent.saturating_add(into_i32(1 + integer.count())); return num; } else { let digit = c - b'0'; num.mantissa = num.mantissa * 10 + digit as u64; } } // Skip leading fraction zeros. // This is required otherwise we might have a 0 mantissa and many digits. let mut fraction_count: usize = 0; if count == 0 { for &c in &mut fraction { fraction_count += 1; if c!= b'0' { count += 1; let digit = c - b'0'; num.mantissa = num.mantissa * 10 + digit as u64; break; } } } for c in fraction { fraction_count += 1; count += 1; if count == 20 { num.many_digits = true; // This can't wrap, since we have at most 20 digits. // We've adjusted the exponent too high by `fraction_count - 1`. // Note: -1 is due to incrementing this loop iteration, which we // didn't use. num.exponent = exponent.saturating_sub(fraction_count as i32 - 1); return num; } else { let digit = c - b'0'; num.mantissa = num.mantissa * 10 + digit as u64; } } // No truncated digits: easy. // Cannot overflow: <= 20 digits. num.exponent = exponent.saturating_sub(fraction_count as i32); num } /// Parse float from extracted float components. /// /// * `integer` - Cloneable, forward iterator over integer digits. /// * `fraction` - Cloneable, forward iterator over integer digits. /// * `exponent` - Parsed, 32-bit exponent. /// /// # Preconditions /// 1. The integer should not have leading zeros. /// 2. The fraction should not have trailing zeros. /// 3. 
All bytes in `integer` and `fraction` should be valid digits, /// in the range [`b'0', b'9']. /// /// # Panics /// /// Although passing garbage input will not cause memory safety issues, /// it is very likely to cause a panic with a large number of digits, or /// in debug mode. The big-integer arithmetic without the `alloc` feature /// assumes a maximum, fixed-width input, which assumes at maximum a /// value of `10^(769 + 342)`, or ~4000 bits of storage. Passing in /// nonsensical digits may require up to ~6000 bits of storage, which will /// panic when attempting to add it to the big integer. It is therefore /// up to the caller to validate this input. /// /// We cannot efficiently remove trailing zeros while only accepting a /// forward iterator. pub fn parse_float<'a, F, Iter1, Iter2>(integer: Iter1, fraction: Iter2, exponent: i32) -> F where F: Float, Iter1: Iterator<Item = &'a u8> + Clone, Iter2: Iterator<Item = &'a u8> + Clone, { // Parse the mantissa and attempt the fast and moderate-path algorithms. let num = parse_number(integer.clone(), fraction.clone(), exponent); // Try the fast-path algorithm. if let Some(value) = num.try_fast_path() { return value; } // Now try the moderate path algorithm. let mut fp = moderate_path::<F>(&num); if fp.exp < 0 { // Undo the invalid extended float biasing. fp.exp -= F::INVALID_FP; fp = slow::<F, _, _>(num, fp, integer, fraction); } // Unable to correctly round the float using the fast or moderate algorithms. // Fallback to a slower, but always correct algorithm. If we have // lossy, we can't be here. extended_to_float::<F>(fp) } /// Wrapper for different moderate-path algorithms. /// A return exponent of `-1` indicates an invalid value. #[inline] pub fn moderate_path<F: Float>(num: &Number) -> ExtendedFloat
/// Convert usize into i32 without overflow. /// /// This is needed to ensure when adjusting the exponent relative to /// the mantissa we do not overflow for comically-long exponents. #[inline] fn into_i32(value: usize) -> i32 { if value > i32::max_value() as usize { i32::max_value() } else { value as i32 } } // Add digit to mantissa. #[inline] pub fn add_digit(value: u64, digit: u8) -> Option<u64> { value.checked_mul(10)?.checked_add(digit as u64) }
{ #[cfg(not(feature = "compact"))] return lemire::<F>(num); #[cfg(feature = "compact")] return bellerophon::<F>(num); }
identifier_body
snippets.rs
use ast::with_error_checking_parse; use core::{Match, MatchType, Session}; use typeinf::get_function_declaration; use syntex_syntax::ast::ImplItemKind; pub fn snippet_for_match(m: &Match, session: &Session) -> String
struct MethodInfo { name: String, args: Vec<String> } impl MethodInfo { ///Parses method declaration as string and returns relevant data fn from_source_str(source: &str) -> Option<MethodInfo> { let trim: &[_] = &['\n', '\r', '{','']; let decorated = format!("{} {{}}()", source.trim_right_matches(trim)); with_error_checking_parse(decorated, |p| { use std::result::Result::{Ok, Err}; match p.parse_impl_item() { Ok(method) => { match method.node { ImplItemKind::Method(ref msig, _) => { let decl = &msig.decl; Some(MethodInfo { // ident.as_str calls Ident.name.as_str name: method.ident.name.as_str().to_string(), args: decl.inputs .iter() .map(|arg| { let codemap = &p.sess.codemap(); match codemap.span_to_snippet(arg.pat.span) { Ok(name) => name, _ => "".into() } }) .collect() }) }, _ => { debug!("Unable to parse method declaration. |{}|", source); None } } }, Err(_) => { debug!("Unable to parse method declaration. |{}|", source); None } } }) } ///Returns completion snippets usable by some editors fn snippet(&self) -> String { format!("{}({})", self.name, &self.args .iter() .filter(|&s| *s!= "self") .enumerate() .fold(String::new(), |cur, (i, ref s)| { let arg = format!("${{{}:{}}}", i + 1, s); let delim = if i > 0 { ", " } else { "" }; cur + delim + &arg })) } } #[test] fn method_info_test() { let info = MethodInfo::from_source_str("pub fn new() -> Vec<T>").unwrap(); assert_eq!(info.name, "new"); assert_eq!(info.args.len(), 0); assert_eq!(info.snippet(), "new()"); let info = MethodInfo::from_source_str("pub fn reserve(&mut self, additional: uint)").unwrap(); assert_eq!(info.name, "reserve"); assert_eq!(info.args.len(), 2); assert_eq!(info.args[0], "self"); assert_eq!(info.snippet(), "reserve(${1:additional})"); }
{ match m.mtype { MatchType::Function => { let method = get_function_declaration(&m, session); if let Some(m) = MethodInfo::from_source_str(&method) { m.snippet() } else { "".into() } } _ => m.matchstr.clone() } }
identifier_body
snippets.rs
use ast::with_error_checking_parse; use core::{Match, MatchType, Session}; use typeinf::get_function_declaration; use syntex_syntax::ast::ImplItemKind; pub fn snippet_for_match(m: &Match, session: &Session) -> String { match m.mtype { MatchType::Function => { let method = get_function_declaration(&m, session); if let Some(m) = MethodInfo::from_source_str(&method) { m.snippet() } else { "".into() } }
struct MethodInfo { name: String, args: Vec<String> } impl MethodInfo { ///Parses method declaration as string and returns relevant data fn from_source_str(source: &str) -> Option<MethodInfo> { let trim: &[_] = &['\n', '\r', '{','']; let decorated = format!("{} {{}}()", source.trim_right_matches(trim)); with_error_checking_parse(decorated, |p| { use std::result::Result::{Ok, Err}; match p.parse_impl_item() { Ok(method) => { match method.node { ImplItemKind::Method(ref msig, _) => { let decl = &msig.decl; Some(MethodInfo { // ident.as_str calls Ident.name.as_str name: method.ident.name.as_str().to_string(), args: decl.inputs .iter() .map(|arg| { let codemap = &p.sess.codemap(); match codemap.span_to_snippet(arg.pat.span) { Ok(name) => name, _ => "".into() } }) .collect() }) }, _ => { debug!("Unable to parse method declaration. |{}|", source); None } } }, Err(_) => { debug!("Unable to parse method declaration. |{}|", source); None } } }) } ///Returns completion snippets usable by some editors fn snippet(&self) -> String { format!("{}({})", self.name, &self.args .iter() .filter(|&s| *s!= "self") .enumerate() .fold(String::new(), |cur, (i, ref s)| { let arg = format!("${{{}:{}}}", i + 1, s); let delim = if i > 0 { ", " } else { "" }; cur + delim + &arg })) } } #[test] fn method_info_test() { let info = MethodInfo::from_source_str("pub fn new() -> Vec<T>").unwrap(); assert_eq!(info.name, "new"); assert_eq!(info.args.len(), 0); assert_eq!(info.snippet(), "new()"); let info = MethodInfo::from_source_str("pub fn reserve(&mut self, additional: uint)").unwrap(); assert_eq!(info.name, "reserve"); assert_eq!(info.args.len(), 2); assert_eq!(info.args[0], "self"); assert_eq!(info.snippet(), "reserve(${1:additional})"); }
_ => m.matchstr.clone() } }
random_line_split
snippets.rs
use ast::with_error_checking_parse; use core::{Match, MatchType, Session}; use typeinf::get_function_declaration; use syntex_syntax::ast::ImplItemKind; pub fn snippet_for_match(m: &Match, session: &Session) -> String { match m.mtype { MatchType::Function => { let method = get_function_declaration(&m, session); if let Some(m) = MethodInfo::from_source_str(&method)
else { "".into() } } _ => m.matchstr.clone() } } struct MethodInfo { name: String, args: Vec<String> } impl MethodInfo { ///Parses method declaration as string and returns relevant data fn from_source_str(source: &str) -> Option<MethodInfo> { let trim: &[_] = &['\n', '\r', '{','']; let decorated = format!("{} {{}}()", source.trim_right_matches(trim)); with_error_checking_parse(decorated, |p| { use std::result::Result::{Ok, Err}; match p.parse_impl_item() { Ok(method) => { match method.node { ImplItemKind::Method(ref msig, _) => { let decl = &msig.decl; Some(MethodInfo { // ident.as_str calls Ident.name.as_str name: method.ident.name.as_str().to_string(), args: decl.inputs .iter() .map(|arg| { let codemap = &p.sess.codemap(); match codemap.span_to_snippet(arg.pat.span) { Ok(name) => name, _ => "".into() } }) .collect() }) }, _ => { debug!("Unable to parse method declaration. |{}|", source); None } } }, Err(_) => { debug!("Unable to parse method declaration. |{}|", source); None } } }) } ///Returns completion snippets usable by some editors fn snippet(&self) -> String { format!("{}({})", self.name, &self.args .iter() .filter(|&s| *s!= "self") .enumerate() .fold(String::new(), |cur, (i, ref s)| { let arg = format!("${{{}:{}}}", i + 1, s); let delim = if i > 0 { ", " } else { "" }; cur + delim + &arg })) } } #[test] fn method_info_test() { let info = MethodInfo::from_source_str("pub fn new() -> Vec<T>").unwrap(); assert_eq!(info.name, "new"); assert_eq!(info.args.len(), 0); assert_eq!(info.snippet(), "new()"); let info = MethodInfo::from_source_str("pub fn reserve(&mut self, additional: uint)").unwrap(); assert_eq!(info.name, "reserve"); assert_eq!(info.args.len(), 2); assert_eq!(info.args[0], "self"); assert_eq!(info.snippet(), "reserve(${1:additional})"); }
{ m.snippet() }
conditional_block
snippets.rs
use ast::with_error_checking_parse; use core::{Match, MatchType, Session}; use typeinf::get_function_declaration; use syntex_syntax::ast::ImplItemKind; pub fn
(m: &Match, session: &Session) -> String { match m.mtype { MatchType::Function => { let method = get_function_declaration(&m, session); if let Some(m) = MethodInfo::from_source_str(&method) { m.snippet() } else { "".into() } } _ => m.matchstr.clone() } } struct MethodInfo { name: String, args: Vec<String> } impl MethodInfo { ///Parses method declaration as string and returns relevant data fn from_source_str(source: &str) -> Option<MethodInfo> { let trim: &[_] = &['\n', '\r', '{','']; let decorated = format!("{} {{}}()", source.trim_right_matches(trim)); with_error_checking_parse(decorated, |p| { use std::result::Result::{Ok, Err}; match p.parse_impl_item() { Ok(method) => { match method.node { ImplItemKind::Method(ref msig, _) => { let decl = &msig.decl; Some(MethodInfo { // ident.as_str calls Ident.name.as_str name: method.ident.name.as_str().to_string(), args: decl.inputs .iter() .map(|arg| { let codemap = &p.sess.codemap(); match codemap.span_to_snippet(arg.pat.span) { Ok(name) => name, _ => "".into() } }) .collect() }) }, _ => { debug!("Unable to parse method declaration. |{}|", source); None } } }, Err(_) => { debug!("Unable to parse method declaration. |{}|", source); None } } }) } ///Returns completion snippets usable by some editors fn snippet(&self) -> String { format!("{}({})", self.name, &self.args .iter() .filter(|&s| *s!= "self") .enumerate() .fold(String::new(), |cur, (i, ref s)| { let arg = format!("${{{}:{}}}", i + 1, s); let delim = if i > 0 { ", " } else { "" }; cur + delim + &arg })) } } #[test] fn method_info_test() { let info = MethodInfo::from_source_str("pub fn new() -> Vec<T>").unwrap(); assert_eq!(info.name, "new"); assert_eq!(info.args.len(), 0); assert_eq!(info.snippet(), "new()"); let info = MethodInfo::from_source_str("pub fn reserve(&mut self, additional: uint)").unwrap(); assert_eq!(info.name, "reserve"); assert_eq!(info.args.len(), 2); assert_eq!(info.args[0], "self"); assert_eq!(info.snippet(), "reserve(${1:additional})"); }
snippet_for_match
identifier_name
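The snippets.rs samples above build editor completion snippets by filtering out `self`, enumerating the remaining arguments, and folding them into `${n:arg}` placeholders. A minimal self-contained sketch of that fold (a free function standing in for `MethodInfo::snippet`), which reproduces the outputs checked in the record's own tests:

```rust
// Builds "name(${1:a}, ${2:b})" style completion snippets, skipping `self`.
fn snippet(name: &str, args: &[&str]) -> String {
    let body = args
        .iter()
        .filter(|&&a| a != "self")
        .enumerate()
        .fold(String::new(), |cur, (i, a)| {
            let placeholder = format!("${{{}:{}}}", i + 1, a);
            let delim = if i > 0 { ", " } else { "" };
            cur + delim + &placeholder
        });
    format!("{}({})", name, body)
}

fn main() {
    // Same expectations as the method_info_test in the records above.
    assert_eq!(snippet("new", &[]), "new()");
    assert_eq!(snippet("reserve", &["self", "additional"]), "reserve(${1:additional})");
}
```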
borrowck-preserve-expl-deref.rs
// ignore-pretty
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// exec-env:RUST_POISON_ON_FREE=1

#![feature(managed_boxes)]

fn borrow(x: &int, f: |x: &int|) {
    let before = *x;
    f(x);
    let after = *x;
    assert_eq!(before, after);
}

struct
{ f: ~int }

pub fn main() {
    let mut x = @F {f: ~3};
    borrow((*x).f, |b_x| {
        assert_eq!(*b_x, 3);
        assert_eq!(&(*x.f) as *int, &(*b_x) as *int);
        x = @F {f: ~4};
        println!("&*b_x = {:p}", &(*b_x));
        assert_eq!(*b_x, 3);
        assert!(&(*x.f) as *int!= &(*b_x) as *int);
    })
}
F
identifier_name
borrowck-preserve-expl-deref.rs
// ignore-pretty
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// exec-env:RUST_POISON_ON_FREE=1

#![feature(managed_boxes)]

fn borrow(x: &int, f: |x: &int|)
struct F { f: ~int }

pub fn main() {
    let mut x = @F {f: ~3};
    borrow((*x).f, |b_x| {
        assert_eq!(*b_x, 3);
        assert_eq!(&(*x.f) as *int, &(*b_x) as *int);
        x = @F {f: ~4};
        println!("&*b_x = {:p}", &(*b_x));
        assert_eq!(*b_x, 3);
        assert!(&(*x.f) as *int!= &(*b_x) as *int);
    })
}
{
    let before = *x;
    f(x);
    let after = *x;
    assert_eq!(before, after);
}
identifier_body
borrowck-preserve-expl-deref.rs
// ignore-pretty
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// exec-env:RUST_POISON_ON_FREE=1

#![feature(managed_boxes)]

fn borrow(x: &int, f: |x: &int|) {
    let before = *x;
}

struct F { f: ~int }

pub fn main() {
    let mut x = @F {f: ~3};
    borrow((*x).f, |b_x| {
        assert_eq!(*b_x, 3);
        assert_eq!(&(*x.f) as *int, &(*b_x) as *int);
        x = @F {f: ~4};
        println!("&*b_x = {:p}", &(*b_x));
        assert_eq!(*b_x, 3);
        assert!(&(*x.f) as *int!= &(*b_x) as *int);
    })
}
f(x); let after = *x; assert_eq!(before, after);
random_line_split
issue-14853.rs
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use std::fmt::Show;

trait Something {
    fn yay<T: Show>(_: Option<Self>, thing: &[T]) -> String {
    }
}

struct X { data: u32 }

impl Something for X {
    fn yay<T: Str>(_:Option<X>, thing: &[T]) -> String {
    //~^ ERROR in method `yay`, type parameter 0 requires bound `core::str::Str`, which is not required
        format!("{:s}", thing[0])
    }
}

fn main() {
    let arr = &["one", "two", "three"];
    println!("{}", Something::yay(None::<X>, arr));
}
random_line_split
issue-14853.rs
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use std::fmt::Show;

trait Something {
    fn yay<T: Show>(_: Option<Self>, thing: &[T]) -> String
}

struct X { data: u32 }

impl Something for X {
    fn yay<T: Str>(_:Option<X>, thing: &[T]) -> String {
    //~^ ERROR in method `yay`, type parameter 0 requires bound `core::str::Str`, which is not required
        format!("{:s}", thing[0])
    }
}

fn main() {
    let arr = &["one", "two", "three"];
    println!("{}", Something::yay(None::<X>, arr));
}
{ }
identifier_body
issue-14853.rs
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use std::fmt::Show;

trait Something {
    fn
<T: Show>(_: Option<Self>, thing: &[T]) -> String {
    }
}

struct X { data: u32 }

impl Something for X {
    fn yay<T: Str>(_:Option<X>, thing: &[T]) -> String {
    //~^ ERROR in method `yay`, type parameter 0 requires bound `core::str::Str`, which is not required
        format!("{:s}", thing[0])
    }
}

fn main() {
    let arr = &["one", "two", "three"];
    println!("{}", Something::yay(None::<X>, arr));
}
yay
identifier_name
lib.rs
// Copyright 2015, 2016 Parity Technologies (UK) Ltd. // This file is part of Parity. // Parity is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // Parity is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // You should have received a copy of the GNU General Public License // along with Parity. If not, see <http://www.gnu.org/licenses/>. #![warn(missing_docs)] #![cfg_attr(feature="benches", feature(test))] #![cfg_attr(feature="dev", feature(plugin))] #![cfg_attr(feature="dev", plugin(clippy))] // Clippy settings // Most of the time much more readable #![cfg_attr(feature="dev", allow(needless_range_loop))] // Shorter than if-else #![cfg_attr(feature="dev", allow(match_bool))] // Keeps consistency (all lines with `.clone()`). #![cfg_attr(feature="dev", allow(clone_on_copy))] // Complains on Box<E> when implementing From<Box<E>> #![cfg_attr(feature="dev", allow(boxed_local))] // Complains about nested modules with same name as parent #![cfg_attr(feature="dev", allow(module_inception))] // TODO [todr] a lot of warnings to be fixed #![cfg_attr(feature="dev", allow(assign_op_pattern))] //! Ethcore library //! //! ### Rust version: //! - nightly //! //! ### Supported platforms: //! - OSX //! - Linux //! //! ### Building: //! //! - Ubuntu 14.04 and later: //! //! ```bash //! //! # install multirust //! curl -sf https://raw.githubusercontent.com/brson/multirust/master/blastoff.sh | sh -s -- --yes //! //! # export rust LIBRARY_PATH
//! # download and build parity //! git clone https://github.com/ethcore/parity //! cd parity //! multirust override beta //! cargo build --release //! ``` //! //! - OSX: //! //! ```bash //! # install rocksdb && multirust //! brew update //! brew install multirust //! //! # export rust LIBRARY_PATH //! export LIBRARY_PATH=/usr/local/lib //! //! # download and build parity //! git clone https://github.com/ethcore/parity //! cd parity //! multirust override beta //! cargo build --release //! ``` extern crate ethcore_io as io; extern crate rustc_serialize; extern crate crypto; extern crate time; extern crate env_logger; extern crate num_cpus; extern crate crossbeam; extern crate ethjson; extern crate bloomchain; extern crate hyper; extern crate ethash; extern crate ethkey; extern crate semver; extern crate ethcore_ipc_nano as nanoipc; extern crate ethcore_devtools as devtools; extern crate rand; extern crate bit_set; extern crate rlp; extern crate ethcore_bloom_journal as bloom_journal; extern crate byteorder; extern crate transient_hashmap; extern crate linked_hash_map; #[macro_use] extern crate log; #[macro_use] extern crate ethcore_util as util; #[macro_use] extern crate lazy_static; #[macro_use] extern crate heapsize; #[macro_use] extern crate ethcore_ipc as ipc; extern crate lru_cache; #[cfg(feature = "jit" )] extern crate evmjit; extern crate ethabi; pub extern crate ethstore; pub mod account_provider; pub mod engines; pub mod block; pub mod client; pub mod error; pub mod ethereum; pub mod header; pub mod service; pub mod trace; pub mod spec; pub mod views; pub mod pod_state; pub mod migrations; pub mod miner; pub mod snapshot; pub mod action_params; pub mod db; pub mod verification; #[macro_use] pub mod evm; mod cache_manager; mod blooms; mod basic_types; mod env_info; mod pod_account; mod state; mod state_db; mod account_db; mod builtin; mod executive; mod externalities; mod blockchain; mod types; mod factory; #[cfg(test)] mod tests; #[cfg(test)] #[cfg(feature="json-tests")] mod json_tests; pub use types::*; pub use executive::contract_address;
//! export LIBRARY_PATH=/usr/local/lib //!
random_line_split
classes-simple-cross-crate.rs
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// xfail-fast
// aux-build:cci_class.rs
extern mod cci_class;
use cci_class::kitties::cat;

pub fn main() {
    let nyan : cat = cat(52u, 99);
    let kitty = cat(1000u, 2);
    assert_eq!(nyan.how_hungry, 99);
    assert_eq!(kitty.how_hungry, 2);
}
random_line_split
classes-simple-cross-crate.rs
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// xfail-fast
// aux-build:cci_class.rs
extern mod cci_class;
use cci_class::kitties::cat;

pub fn main()
{
    let nyan : cat = cat(52u, 99);
    let kitty = cat(1000u, 2);
    assert_eq!(nyan.how_hungry, 99);
    assert_eq!(kitty.how_hungry, 2);
}
identifier_body
classes-simple-cross-crate.rs
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// xfail-fast
// aux-build:cci_class.rs
extern mod cci_class;
use cci_class::kitties::cat;

pub fn
() {
    let nyan : cat = cat(52u, 99);
    let kitty = cat(1000u, 2);
    assert_eq!(nyan.how_hungry, 99);
    assert_eq!(kitty.how_hungry, 2);
}
main
identifier_name
lib.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! Calculate [specified][specified] and [computed values][computed] from a //! tree of DOM nodes and a set of stylesheets. //! //! [computed]: https://drafts.csswg.org/css-cascade/#computed //! [specified]: https://drafts.csswg.org/css-cascade/#specified //! //! In particular, this crate contains the definitions of supported properties, //! the code to parse them into specified values and calculate the computed //! values based on the specified values, as well as the code to serialize both //! specified and computed values. //! //! The main entry point is [`recalc_style_at`][recalc_style_at]. //! //! [recalc_style_at]: traversal/fn.recalc_style_at.html //! //! Major dependencies are the [cssparser][cssparser] and [selectors][selectors] //! crates. //! //! [cssparser]:../cssparser/index.html //! [selectors]:../selectors/index.html #![deny(warnings)] #![deny(missing_docs)] // FIXME(bholley): We need to blanket-allow unsafe code in order to make the // gecko atom!() macro work. When Rust 1.14 is released [1], we can uncomment // the commented-out attributes in regen_atoms.py and go back to denying unsafe // code by default. // // [1] https://github.com/rust-lang/rust/issues/15701#issuecomment-251900615 //#![deny(unsafe_code)] #![allow(unused_unsafe)] #![recursion_limit = "500"] // For define_css_keyword_enum! in -moz-appearance extern crate app_units; extern crate arrayvec; extern crate atomic_refcell; extern crate bit_vec; #[macro_use] extern crate bitflags; #[allow(unused_extern_crates)] extern crate byteorder; #[cfg(feature = "gecko")] #[macro_use] #[no_link] extern crate cfg_if; #[macro_use] extern crate cssparser; extern crate euclid; extern crate fnv; #[cfg(feature = "gecko")] #[macro_use] pub mod gecko_string_cache; extern crate hashglobe; #[cfg(feature = "servo")] extern crate heapsize; #[cfg(feature = "servo")] #[macro_use] extern crate heapsize_derive; extern crate itertools; extern crate itoa; #[cfg(feature = "servo")] #[macro_use] extern crate html5ever; #[macro_use] extern crate lazy_static; #[macro_use] extern crate log; #[allow(unused_extern_crates)] #[macro_use] extern crate matches; #[cfg(feature = "gecko")] #[macro_use] pub extern crate nsstring_vendor as nsstring; #[cfg(feature = "gecko")] extern crate num_cpus;
extern crate pdqsort; extern crate precomputed_hash; extern crate rayon; extern crate selectors; #[cfg(feature = "servo")] #[macro_use] extern crate serde; pub extern crate servo_arc; #[cfg(feature = "servo")] #[macro_use] extern crate servo_atoms; #[cfg(feature = "servo")] extern crate servo_config; #[cfg(feature = "servo")] extern crate servo_url; extern crate smallvec; #[macro_use] extern crate style_derive; #[macro_use] extern crate style_traits; extern crate time; extern crate unicode_bidi; #[allow(unused_extern_crates)] extern crate unicode_segmentation; #[macro_use] mod macros; #[cfg(feature = "servo")] pub mod animation; pub mod applicable_declarations; #[allow(missing_docs)] // TODO. #[cfg(feature = "servo")] pub mod attr; pub mod bezier; pub mod bloom; pub mod cache; pub mod context; pub mod counter_style; pub mod custom_properties; pub mod data; pub mod dom; pub mod driver; pub mod element_state; #[cfg(feature = "servo")] mod encoding_support; pub mod error_reporting; pub mod font_face; pub mod font_metrics; #[cfg(feature = "gecko")] #[allow(unsafe_code)] pub mod gecko; #[cfg(feature = "gecko")] #[allow(unsafe_code)] pub mod gecko_bindings; pub mod hash; pub mod invalidation; #[allow(missing_docs)] // TODO. pub mod logical_geometry; pub mod matching; pub mod media_queries; pub mod parallel; pub mod parser; pub mod rule_tree; pub mod scoped_tls; pub mod selector_map; pub mod selector_parser; pub mod shared_lock; pub mod sharing; pub mod style_resolver; pub mod stylist; #[cfg(feature = "servo")] #[allow(unsafe_code)] pub mod servo; pub mod str; pub mod style_adjuster; pub mod stylesheet_set; pub mod stylesheets; pub mod thread_state; pub mod timer; pub mod traversal; pub mod traversal_flags; #[macro_use] #[allow(non_camel_case_types)] pub mod values; use std::fmt; use style_traits::ToCss; #[cfg(feature = "gecko")] pub use gecko_string_cache as string_cache; #[cfg(feature = "gecko")] pub use gecko_string_cache::Atom; #[cfg(feature = "gecko")] pub use gecko_string_cache::Namespace; #[cfg(feature = "gecko")] pub use gecko_string_cache::Atom as Prefix; #[cfg(feature = "gecko")] pub use gecko_string_cache::Atom as LocalName; #[cfg(feature = "servo")] pub use servo_atoms::Atom; #[cfg(feature = "servo")] pub use html5ever::Prefix; #[cfg(feature = "servo")] pub use html5ever::LocalName; #[cfg(feature = "servo")] pub use html5ever::Namespace; /// The CSS properties supported by the style system. /// Generated from the properties.mako.rs template by build.rs #[macro_use] #[allow(unsafe_code)] #[deny(missing_docs)] pub mod properties { include!(concat!(env!("OUT_DIR"), "/properties.rs")); } #[cfg(feature = "gecko")] #[allow(unsafe_code, missing_docs)] pub mod gecko_properties { include!(concat!(env!("OUT_DIR"), "/gecko_properties.rs")); } macro_rules! reexport_computed_values { ( $( { $name: ident, $boxed: expr } )+ ) => { /// Types for [computed values][computed]. /// /// [computed]: https://drafts.csswg.org/css-cascade/#computed pub mod computed_values { $( pub use properties::longhands::$name::computed_value as $name; )+ // Don't use a side-specific name needlessly: pub use properties::longhands::border_top_style::computed_value as border_style; } } } longhand_properties_idents!(reexport_computed_values); /// Serializes as CSS a comma-separated list of any `T` that supports being /// serialized as CSS. 
pub fn serialize_comma_separated_list<W, T>(dest: &mut W, list: &[T]) -> fmt::Result where W: fmt::Write, T: ToCss, { if list.is_empty() { return Ok(()); } list[0].to_css(dest)?; for item in list.iter().skip(1) { dest.write_str(", ")?; item.to_css(dest)?; } Ok(()) } #[cfg(feature = "gecko")] use gecko_string_cache::WeakAtom; #[cfg(feature = "servo")] use servo_atoms::Atom as WeakAtom; /// Extension methods for selectors::attr::CaseSensitivity pub trait CaseSensitivityExt { /// Return whether two atoms compare equal according to this case sensitivity. fn eq_atom(self, a: &WeakAtom, b: &WeakAtom) -> bool; } impl CaseSensitivityExt for selectors::attr::CaseSensitivity { fn eq_atom(self, a: &WeakAtom, b: &WeakAtom) -> bool { match self { selectors::attr::CaseSensitivity::CaseSensitive => a == b, selectors::attr::CaseSensitivity::AsciiCaseInsensitive => a.eq_ignore_ascii_case(b), } } }
extern crate num_integer; extern crate num_traits; extern crate ordered_float; extern crate owning_ref; extern crate parking_lot;
random_line_split
lib.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! Calculate [specified][specified] and [computed values][computed] from a //! tree of DOM nodes and a set of stylesheets. //! //! [computed]: https://drafts.csswg.org/css-cascade/#computed //! [specified]: https://drafts.csswg.org/css-cascade/#specified //! //! In particular, this crate contains the definitions of supported properties, //! the code to parse them into specified values and calculate the computed //! values based on the specified values, as well as the code to serialize both //! specified and computed values. //! //! The main entry point is [`recalc_style_at`][recalc_style_at]. //! //! [recalc_style_at]: traversal/fn.recalc_style_at.html //! //! Major dependencies are the [cssparser][cssparser] and [selectors][selectors] //! crates. //! //! [cssparser]:../cssparser/index.html //! [selectors]:../selectors/index.html #![deny(warnings)] #![deny(missing_docs)] // FIXME(bholley): We need to blanket-allow unsafe code in order to make the // gecko atom!() macro work. When Rust 1.14 is released [1], we can uncomment // the commented-out attributes in regen_atoms.py and go back to denying unsafe // code by default. // // [1] https://github.com/rust-lang/rust/issues/15701#issuecomment-251900615 //#![deny(unsafe_code)] #![allow(unused_unsafe)] #![recursion_limit = "500"] // For define_css_keyword_enum! in -moz-appearance extern crate app_units; extern crate arrayvec; extern crate atomic_refcell; extern crate bit_vec; #[macro_use] extern crate bitflags; #[allow(unused_extern_crates)] extern crate byteorder; #[cfg(feature = "gecko")] #[macro_use] #[no_link] extern crate cfg_if; #[macro_use] extern crate cssparser; extern crate euclid; extern crate fnv; #[cfg(feature = "gecko")] #[macro_use] pub mod gecko_string_cache; extern crate hashglobe; #[cfg(feature = "servo")] extern crate heapsize; #[cfg(feature = "servo")] #[macro_use] extern crate heapsize_derive; extern crate itertools; extern crate itoa; #[cfg(feature = "servo")] #[macro_use] extern crate html5ever; #[macro_use] extern crate lazy_static; #[macro_use] extern crate log; #[allow(unused_extern_crates)] #[macro_use] extern crate matches; #[cfg(feature = "gecko")] #[macro_use] pub extern crate nsstring_vendor as nsstring; #[cfg(feature = "gecko")] extern crate num_cpus; extern crate num_integer; extern crate num_traits; extern crate ordered_float; extern crate owning_ref; extern crate parking_lot; extern crate pdqsort; extern crate precomputed_hash; extern crate rayon; extern crate selectors; #[cfg(feature = "servo")] #[macro_use] extern crate serde; pub extern crate servo_arc; #[cfg(feature = "servo")] #[macro_use] extern crate servo_atoms; #[cfg(feature = "servo")] extern crate servo_config; #[cfg(feature = "servo")] extern crate servo_url; extern crate smallvec; #[macro_use] extern crate style_derive; #[macro_use] extern crate style_traits; extern crate time; extern crate unicode_bidi; #[allow(unused_extern_crates)] extern crate unicode_segmentation; #[macro_use] mod macros; #[cfg(feature = "servo")] pub mod animation; pub mod applicable_declarations; #[allow(missing_docs)] // TODO. 
#[cfg(feature = "servo")] pub mod attr; pub mod bezier; pub mod bloom; pub mod cache; pub mod context; pub mod counter_style; pub mod custom_properties; pub mod data; pub mod dom; pub mod driver; pub mod element_state; #[cfg(feature = "servo")] mod encoding_support; pub mod error_reporting; pub mod font_face; pub mod font_metrics; #[cfg(feature = "gecko")] #[allow(unsafe_code)] pub mod gecko; #[cfg(feature = "gecko")] #[allow(unsafe_code)] pub mod gecko_bindings; pub mod hash; pub mod invalidation; #[allow(missing_docs)] // TODO. pub mod logical_geometry; pub mod matching; pub mod media_queries; pub mod parallel; pub mod parser; pub mod rule_tree; pub mod scoped_tls; pub mod selector_map; pub mod selector_parser; pub mod shared_lock; pub mod sharing; pub mod style_resolver; pub mod stylist; #[cfg(feature = "servo")] #[allow(unsafe_code)] pub mod servo; pub mod str; pub mod style_adjuster; pub mod stylesheet_set; pub mod stylesheets; pub mod thread_state; pub mod timer; pub mod traversal; pub mod traversal_flags; #[macro_use] #[allow(non_camel_case_types)] pub mod values; use std::fmt; use style_traits::ToCss; #[cfg(feature = "gecko")] pub use gecko_string_cache as string_cache; #[cfg(feature = "gecko")] pub use gecko_string_cache::Atom; #[cfg(feature = "gecko")] pub use gecko_string_cache::Namespace; #[cfg(feature = "gecko")] pub use gecko_string_cache::Atom as Prefix; #[cfg(feature = "gecko")] pub use gecko_string_cache::Atom as LocalName; #[cfg(feature = "servo")] pub use servo_atoms::Atom; #[cfg(feature = "servo")] pub use html5ever::Prefix; #[cfg(feature = "servo")] pub use html5ever::LocalName; #[cfg(feature = "servo")] pub use html5ever::Namespace; /// The CSS properties supported by the style system. /// Generated from the properties.mako.rs template by build.rs #[macro_use] #[allow(unsafe_code)] #[deny(missing_docs)] pub mod properties { include!(concat!(env!("OUT_DIR"), "/properties.rs")); } #[cfg(feature = "gecko")] #[allow(unsafe_code, missing_docs)] pub mod gecko_properties { include!(concat!(env!("OUT_DIR"), "/gecko_properties.rs")); } macro_rules! reexport_computed_values { ( $( { $name: ident, $boxed: expr } )+ ) => { /// Types for [computed values][computed]. /// /// [computed]: https://drafts.csswg.org/css-cascade/#computed pub mod computed_values { $( pub use properties::longhands::$name::computed_value as $name; )+ // Don't use a side-specific name needlessly: pub use properties::longhands::border_top_style::computed_value as border_style; } } } longhand_properties_idents!(reexport_computed_values); /// Serializes as CSS a comma-separated list of any `T` that supports being /// serialized as CSS. pub fn serialize_comma_separated_list<W, T>(dest: &mut W, list: &[T]) -> fmt::Result where W: fmt::Write, T: ToCss,
#[cfg(feature = "gecko")] use gecko_string_cache::WeakAtom; #[cfg(feature = "servo")] use servo_atoms::Atom as WeakAtom; /// Extension methods for selectors::attr::CaseSensitivity pub trait CaseSensitivityExt { /// Return whether two atoms compare equal according to this case sensitivity. fn eq_atom(self, a: &WeakAtom, b: &WeakAtom) -> bool; } impl CaseSensitivityExt for selectors::attr::CaseSensitivity { fn eq_atom(self, a: &WeakAtom, b: &WeakAtom) -> bool { match self { selectors::attr::CaseSensitivity::CaseSensitive => a == b, selectors::attr::CaseSensitivity::AsciiCaseInsensitive => a.eq_ignore_ascii_case(b), } } }
{ if list.is_empty() { return Ok(()); } list[0].to_css(dest)?; for item in list.iter().skip(1) { dest.write_str(", ")?; item.to_css(dest)?; } Ok(()) }
identifier_body
lib.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! Calculate [specified][specified] and [computed values][computed] from a //! tree of DOM nodes and a set of stylesheets. //! //! [computed]: https://drafts.csswg.org/css-cascade/#computed //! [specified]: https://drafts.csswg.org/css-cascade/#specified //! //! In particular, this crate contains the definitions of supported properties, //! the code to parse them into specified values and calculate the computed //! values based on the specified values, as well as the code to serialize both //! specified and computed values. //! //! The main entry point is [`recalc_style_at`][recalc_style_at]. //! //! [recalc_style_at]: traversal/fn.recalc_style_at.html //! //! Major dependencies are the [cssparser][cssparser] and [selectors][selectors] //! crates. //! //! [cssparser]:../cssparser/index.html //! [selectors]:../selectors/index.html #![deny(warnings)] #![deny(missing_docs)] // FIXME(bholley): We need to blanket-allow unsafe code in order to make the // gecko atom!() macro work. When Rust 1.14 is released [1], we can uncomment // the commented-out attributes in regen_atoms.py and go back to denying unsafe // code by default. // // [1] https://github.com/rust-lang/rust/issues/15701#issuecomment-251900615 //#![deny(unsafe_code)] #![allow(unused_unsafe)] #![recursion_limit = "500"] // For define_css_keyword_enum! in -moz-appearance extern crate app_units; extern crate arrayvec; extern crate atomic_refcell; extern crate bit_vec; #[macro_use] extern crate bitflags; #[allow(unused_extern_crates)] extern crate byteorder; #[cfg(feature = "gecko")] #[macro_use] #[no_link] extern crate cfg_if; #[macro_use] extern crate cssparser; extern crate euclid; extern crate fnv; #[cfg(feature = "gecko")] #[macro_use] pub mod gecko_string_cache; extern crate hashglobe; #[cfg(feature = "servo")] extern crate heapsize; #[cfg(feature = "servo")] #[macro_use] extern crate heapsize_derive; extern crate itertools; extern crate itoa; #[cfg(feature = "servo")] #[macro_use] extern crate html5ever; #[macro_use] extern crate lazy_static; #[macro_use] extern crate log; #[allow(unused_extern_crates)] #[macro_use] extern crate matches; #[cfg(feature = "gecko")] #[macro_use] pub extern crate nsstring_vendor as nsstring; #[cfg(feature = "gecko")] extern crate num_cpus; extern crate num_integer; extern crate num_traits; extern crate ordered_float; extern crate owning_ref; extern crate parking_lot; extern crate pdqsort; extern crate precomputed_hash; extern crate rayon; extern crate selectors; #[cfg(feature = "servo")] #[macro_use] extern crate serde; pub extern crate servo_arc; #[cfg(feature = "servo")] #[macro_use] extern crate servo_atoms; #[cfg(feature = "servo")] extern crate servo_config; #[cfg(feature = "servo")] extern crate servo_url; extern crate smallvec; #[macro_use] extern crate style_derive; #[macro_use] extern crate style_traits; extern crate time; extern crate unicode_bidi; #[allow(unused_extern_crates)] extern crate unicode_segmentation; #[macro_use] mod macros; #[cfg(feature = "servo")] pub mod animation; pub mod applicable_declarations; #[allow(missing_docs)] // TODO. 
#[cfg(feature = "servo")] pub mod attr; pub mod bezier; pub mod bloom; pub mod cache; pub mod context; pub mod counter_style; pub mod custom_properties; pub mod data; pub mod dom; pub mod driver; pub mod element_state; #[cfg(feature = "servo")] mod encoding_support; pub mod error_reporting; pub mod font_face; pub mod font_metrics; #[cfg(feature = "gecko")] #[allow(unsafe_code)] pub mod gecko; #[cfg(feature = "gecko")] #[allow(unsafe_code)] pub mod gecko_bindings; pub mod hash; pub mod invalidation; #[allow(missing_docs)] // TODO. pub mod logical_geometry; pub mod matching; pub mod media_queries; pub mod parallel; pub mod parser; pub mod rule_tree; pub mod scoped_tls; pub mod selector_map; pub mod selector_parser; pub mod shared_lock; pub mod sharing; pub mod style_resolver; pub mod stylist; #[cfg(feature = "servo")] #[allow(unsafe_code)] pub mod servo; pub mod str; pub mod style_adjuster; pub mod stylesheet_set; pub mod stylesheets; pub mod thread_state; pub mod timer; pub mod traversal; pub mod traversal_flags; #[macro_use] #[allow(non_camel_case_types)] pub mod values; use std::fmt; use style_traits::ToCss; #[cfg(feature = "gecko")] pub use gecko_string_cache as string_cache; #[cfg(feature = "gecko")] pub use gecko_string_cache::Atom; #[cfg(feature = "gecko")] pub use gecko_string_cache::Namespace; #[cfg(feature = "gecko")] pub use gecko_string_cache::Atom as Prefix; #[cfg(feature = "gecko")] pub use gecko_string_cache::Atom as LocalName; #[cfg(feature = "servo")] pub use servo_atoms::Atom; #[cfg(feature = "servo")] pub use html5ever::Prefix; #[cfg(feature = "servo")] pub use html5ever::LocalName; #[cfg(feature = "servo")] pub use html5ever::Namespace; /// The CSS properties supported by the style system. /// Generated from the properties.mako.rs template by build.rs #[macro_use] #[allow(unsafe_code)] #[deny(missing_docs)] pub mod properties { include!(concat!(env!("OUT_DIR"), "/properties.rs")); } #[cfg(feature = "gecko")] #[allow(unsafe_code, missing_docs)] pub mod gecko_properties { include!(concat!(env!("OUT_DIR"), "/gecko_properties.rs")); } macro_rules! reexport_computed_values { ( $( { $name: ident, $boxed: expr } )+ ) => { /// Types for [computed values][computed]. /// /// [computed]: https://drafts.csswg.org/css-cascade/#computed pub mod computed_values { $( pub use properties::longhands::$name::computed_value as $name; )+ // Don't use a side-specific name needlessly: pub use properties::longhands::border_top_style::computed_value as border_style; } } } longhand_properties_idents!(reexport_computed_values); /// Serializes as CSS a comma-separated list of any `T` that supports being /// serialized as CSS. pub fn
<W, T>(dest: &mut W, list: &[T]) -> fmt::Result where W: fmt::Write, T: ToCss, { if list.is_empty() { return Ok(()); } list[0].to_css(dest)?; for item in list.iter().skip(1) { dest.write_str(", ")?; item.to_css(dest)?; } Ok(()) } #[cfg(feature = "gecko")] use gecko_string_cache::WeakAtom; #[cfg(feature = "servo")] use servo_atoms::Atom as WeakAtom; /// Extension methods for selectors::attr::CaseSensitivity pub trait CaseSensitivityExt { /// Return whether two atoms compare equal according to this case sensitivity. fn eq_atom(self, a: &WeakAtom, b: &WeakAtom) -> bool; } impl CaseSensitivityExt for selectors::attr::CaseSensitivity { fn eq_atom(self, a: &WeakAtom, b: &WeakAtom) -> bool { match self { selectors::attr::CaseSensitivity::CaseSensitive => a == b, selectors::attr::CaseSensitivity::AsciiCaseInsensitive => a.eq_ignore_ascii_case(b), } } }
serialize_comma_separated_list
identifier_name
mod.rs
//! Types and traits to build and send responses. //! //! The return type of a Rocket handler can be any type that implements the //! [Responder](trait.Responder.html) trait. Among other things, this module //! contains several such types. //! //! # Composing //! //! Many of the built-in `Responder` types _chain_ responses: they take in //! another `Responder` and add, remove, or change information in the response. //! In other words, many `Responder` types are built to compose well. As a //! result, you'll often have types of the form `A<B<C>>` consisting of three //! `Responder`s `A`, `B`, and `C`. This is normal and encouraged as the type //! names typically illustrate the intended response. //! //! # Contrib //! //! The [contrib crate](/rocket_contrib) contains several useful `Responder`s //! including [Template](/rocket_contrib/struct.Template.html) and //! [JSON](/rocket_contrib/struct.JSON.html). mod responder; mod redirect;
mod response; mod failure; pub mod content; pub mod status; pub use self::response::{Response, ResponseBuilder, Body, DEFAULT_CHUNK_SIZE}; pub use self::responder::Responder; pub use self::redirect::Redirect; pub use self::flash::Flash; pub use self::named_file::NamedFile; pub use self::stream::Stream; pub use self::failure::Failure; #[doc(inline)] pub use self::content::Content; /// Type alias for the `Result` of a `Responder::respond` call. pub type Result<'r> = ::std::result::Result<self::Response<'r>, ::http::Status>;
mod flash; mod named_file; mod stream;
random_line_split
str-growth.rs
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// option. This file may not be copied, modified, or distributed // except according to those terms. pub fn main() { let mut s = ~"a"; s.push_char('b'); assert_eq!(s[0], 'a' as u8); assert_eq!(s[1], 'b' as u8); s.push_char('c'); s.push_char('d'); assert_eq!(s[0], 'a' as u8); assert_eq!(s[1], 'b' as u8); assert_eq!(s[2], 'c' as u8); assert_eq!(s[3], 'd' as u8); }
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
random_line_split
str-growth.rs
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. pub fn
() { let mut s = ~"a"; s.push_char('b'); assert_eq!(s[0], 'a' as u8); assert_eq!(s[1], 'b' as u8); s.push_char('c'); s.push_char('d'); assert_eq!(s[0], 'a' as u8); assert_eq!(s[1], 'b' as u8); assert_eq!(s[2], 'c' as u8); assert_eq!(s[3], 'd' as u8); }
main
identifier_name
str-growth.rs
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. pub fn main()
{ let mut s = ~"a"; s.push_char('b'); assert_eq!(s[0], 'a' as u8); assert_eq!(s[1], 'b' as u8); s.push_char('c'); s.push_char('d'); assert_eq!(s[0], 'a' as u8); assert_eq!(s[1], 'b' as u8); assert_eq!(s[2], 'c' as u8); assert_eq!(s[3], 'd' as u8); }
identifier_body
review.rs
use refs; use git2::{ Oid, Repository, Note, Commit }; use super::{ Error, Result, Request, Requests, CIStatuses, Analyses, Comments, Event, Events }; pub struct Review<'r> { git: &'r Repository, id: Oid, request: Request, requests: Vec<Request>, } impl<'r> Review<'r> { pub fn for_commit(git: &'r Repository, id: Oid) -> Result<Review<'r>> { git.find_note(refs::REVIEWS, id) .map_err(From::from) .and_then(|note| Review::from_note(git, id, note)) } pub fn from_note(git: &'r Repository, id: Oid, note: Note<'r>) -> Result<Review<'r>> { Request::all_from_note(id, note) .and_then(|mut requests| if requests.is_empty()
else { requests.sort_by(|a, b| a.timestamp().cmp(&b.timestamp())); Ok((requests.pop().unwrap(), requests)) }) .map(|(request, requests)| Review::from_requests(git, id, request, requests)) } pub fn id(&self) -> Oid { self.id } pub fn commit(&self) -> Result<Commit> { self.git.find_commit(self.id).map_err(From::from) } pub fn request(&self) -> &Request { &self.request } pub fn all_requests(&self) -> Requests { Requests::new(&self.request, &self.requests) } pub fn all_ci_statuses(&self) -> CIStatuses { CIStatuses::all_for_commit(&self.git, self.id()) } pub fn latest_ci_statuses(&self) -> CIStatuses { CIStatuses::latest_for_commit(&self.git, self.id()) } pub fn comments(&self) -> Comments { Comments::for_commit(&self.git, self.id()) } pub fn analyses(&self) -> Analyses { Analyses::for_commit(&self.git, self.id()) } pub fn events(&self) -> Events { let mut vec = Vec::new(); vec.extend(self.all_ci_statuses().map(|status| Box::new(status) as Box<Event>)); vec.extend(self.comments().map(|comment| Box::new(comment) as Box<Event>)); vec.extend(self.analyses().map(|analysis| Box::new(analysis) as Box<Event>)); vec.sort_by(|a, b| a.timestamp().cmp(&b.timestamp())); vec.insert(0, Box::new(self.request().clone()) as Box<Event>); Events::new(vec) } fn from_requests(git: &'r Repository, id: Oid, request: Request, requests: Vec<Request>) -> Review<'r> { Review { git: git, id: id, request: request, requests: requests, } } }
{ Err(Error::NotFound) }
conditional_block
review.rs
use refs; use git2::{ Oid, Repository, Note, Commit }; use super::{ Error, Result, Request, Requests, CIStatuses, Analyses, Comments, Event, Events }; pub struct Review<'r> { git: &'r Repository, id: Oid, request: Request, requests: Vec<Request>, } impl<'r> Review<'r> { pub fn for_commit(git: &'r Repository, id: Oid) -> Result<Review<'r>> { git.find_note(refs::REVIEWS, id) .map_err(From::from) .and_then(|note| Review::from_note(git, id, note)) } pub fn from_note(git: &'r Repository, id: Oid, note: Note<'r>) -> Result<Review<'r>> { Request::all_from_note(id, note) .and_then(|mut requests| if requests.is_empty() { Err(Error::NotFound) } else { requests.sort_by(|a, b| a.timestamp().cmp(&b.timestamp())); Ok((requests.pop().unwrap(), requests)) }) .map(|(request, requests)| Review::from_requests(git, id, request, requests)) } pub fn id(&self) -> Oid { self.id } pub fn commit(&self) -> Result<Commit> { self.git.find_commit(self.id).map_err(From::from) } pub fn request(&self) -> &Request { &self.request } pub fn all_requests(&self) -> Requests { Requests::new(&self.request, &self.requests) } pub fn all_ci_statuses(&self) -> CIStatuses { CIStatuses::all_for_commit(&self.git, self.id()) } pub fn latest_ci_statuses(&self) -> CIStatuses { CIStatuses::latest_for_commit(&self.git, self.id()) } pub fn comments(&self) -> Comments { Comments::for_commit(&self.git, self.id()) } pub fn analyses(&self) -> Analyses { Analyses::for_commit(&self.git, self.id()) } pub fn events(&self) -> Events { let mut vec = Vec::new(); vec.extend(self.all_ci_statuses().map(|status| Box::new(status) as Box<Event>)); vec.extend(self.comments().map(|comment| Box::new(comment) as Box<Event>)); vec.extend(self.analyses().map(|analysis| Box::new(analysis) as Box<Event>)); vec.sort_by(|a, b| a.timestamp().cmp(&b.timestamp())); vec.insert(0, Box::new(self.request().clone()) as Box<Event>); Events::new(vec) } fn from_requests(git: &'r Repository, id: Oid, request: Request, requests: Vec<Request>) -> Review<'r> { Review { git: git, id: id, request: request,
requests: requests, } } }
random_line_split
review.rs
use refs; use git2::{ Oid, Repository, Note, Commit }; use super::{ Error, Result, Request, Requests, CIStatuses, Analyses, Comments, Event, Events }; pub struct Review<'r> { git: &'r Repository, id: Oid, request: Request, requests: Vec<Request>, } impl<'r> Review<'r> { pub fn for_commit(git: &'r Repository, id: Oid) -> Result<Review<'r>> { git.find_note(refs::REVIEWS, id) .map_err(From::from) .and_then(|note| Review::from_note(git, id, note)) } pub fn
(git: &'r Repository, id: Oid, note: Note<'r>) -> Result<Review<'r>> { Request::all_from_note(id, note) .and_then(|mut requests| if requests.is_empty() { Err(Error::NotFound) } else { requests.sort_by(|a, b| a.timestamp().cmp(&b.timestamp())); Ok((requests.pop().unwrap(), requests)) }) .map(|(request, requests)| Review::from_requests(git, id, request, requests)) } pub fn id(&self) -> Oid { self.id } pub fn commit(&self) -> Result<Commit> { self.git.find_commit(self.id).map_err(From::from) } pub fn request(&self) -> &Request { &self.request } pub fn all_requests(&self) -> Requests { Requests::new(&self.request, &self.requests) } pub fn all_ci_statuses(&self) -> CIStatuses { CIStatuses::all_for_commit(&self.git, self.id()) } pub fn latest_ci_statuses(&self) -> CIStatuses { CIStatuses::latest_for_commit(&self.git, self.id()) } pub fn comments(&self) -> Comments { Comments::for_commit(&self.git, self.id()) } pub fn analyses(&self) -> Analyses { Analyses::for_commit(&self.git, self.id()) } pub fn events(&self) -> Events { let mut vec = Vec::new(); vec.extend(self.all_ci_statuses().map(|status| Box::new(status) as Box<Event>)); vec.extend(self.comments().map(|comment| Box::new(comment) as Box<Event>)); vec.extend(self.analyses().map(|analysis| Box::new(analysis) as Box<Event>)); vec.sort_by(|a, b| a.timestamp().cmp(&b.timestamp())); vec.insert(0, Box::new(self.request().clone()) as Box<Event>); Events::new(vec) } fn from_requests(git: &'r Repository, id: Oid, request: Request, requests: Vec<Request>) -> Review<'r> { Review { git: git, id: id, request: request, requests: requests, } } }
from_note
identifier_name
review.rs
use refs; use git2::{ Oid, Repository, Note, Commit }; use super::{ Error, Result, Request, Requests, CIStatuses, Analyses, Comments, Event, Events }; pub struct Review<'r> { git: &'r Repository, id: Oid, request: Request, requests: Vec<Request>, } impl<'r> Review<'r> { pub fn for_commit(git: &'r Repository, id: Oid) -> Result<Review<'r>> { git.find_note(refs::REVIEWS, id) .map_err(From::from) .and_then(|note| Review::from_note(git, id, note)) } pub fn from_note(git: &'r Repository, id: Oid, note: Note<'r>) -> Result<Review<'r>> { Request::all_from_note(id, note) .and_then(|mut requests| if requests.is_empty() { Err(Error::NotFound) } else { requests.sort_by(|a, b| a.timestamp().cmp(&b.timestamp())); Ok((requests.pop().unwrap(), requests)) }) .map(|(request, requests)| Review::from_requests(git, id, request, requests)) } pub fn id(&self) -> Oid { self.id } pub fn commit(&self) -> Result<Commit> { self.git.find_commit(self.id).map_err(From::from) } pub fn request(&self) -> &Request { &self.request } pub fn all_requests(&self) -> Requests { Requests::new(&self.request, &self.requests) } pub fn all_ci_statuses(&self) -> CIStatuses { CIStatuses::all_for_commit(&self.git, self.id()) } pub fn latest_ci_statuses(&self) -> CIStatuses { CIStatuses::latest_for_commit(&self.git, self.id()) } pub fn comments(&self) -> Comments { Comments::for_commit(&self.git, self.id()) } pub fn analyses(&self) -> Analyses { Analyses::for_commit(&self.git, self.id()) } pub fn events(&self) -> Events
fn from_requests(git: &'r Repository, id: Oid, request: Request, requests: Vec<Request>) -> Review<'r> { Review { git: git, id: id, request: request, requests: requests, } } }
{ let mut vec = Vec::new(); vec.extend(self.all_ci_statuses().map(|status| Box::new(status) as Box<Event>)); vec.extend(self.comments().map(|comment| Box::new(comment) as Box<Event>)); vec.extend(self.analyses().map(|analysis| Box::new(analysis) as Box<Event>)); vec.sort_by(|a, b| a.timestamp().cmp(&b.timestamp())); vec.insert(0, Box::new(self.request().clone()) as Box<Event>); Events::new(vec) }
identifier_body
sha1.rs
use std::clone::Clone; use digest::{Digest, DigestError, DigestResult, DigestSuccess}; const SHA1_HASH_SIZE: usize = 20; const BLOCK_SIZE: usize = 64; // Initial hash value. const INIT_STATE: [u32; SHA1_HASH_SIZE / 4] = [ 0x67452301u32, 0xefcdab89u32, 0x98badcfeu32, 0x10325476u32, 0xc3d2e1f0u32 ]; /** * This structure will hold context information for the SHA-1 * hashing operation */ #[derive(Copy)] pub struct Sha1 { /// Message Digest intermediate_hash: [u32; SHA1_HASH_SIZE / 4], /// Message length in bits length_low: u32, /// Message length in bits length_high: u32, /// Index into message block array message_block_index: i16, /// 512-bit message blocks message_block: [u8; BLOCK_SIZE], /// Is the digest computed? computed: bool, /// Is the message digest corrupted? corrupted: DigestResult } impl Clone for Sha1 { fn clone(&self) -> Self { *self } } impl Sha1 { pub fn new() -> Sha1 { let mut new_sha1 = Sha1 { intermediate_hash: [0u32; SHA1_HASH_SIZE / 4], length_low: 0, length_high: 0, message_block_index: 0, message_block: [0u8; BLOCK_SIZE], computed: false, corrupted: Ok(DigestSuccess) }; new_sha1.reset(); new_sha1 } /** * This function will process the next 512 bits of the message * stored in the Message_Block array. * * Many of the variable names in this code, especially the * single character names, were used because those were the * names used in the publication. */ fn sha1_process_block(&mut self) { // Temporary word value let mut temp: u32; let mut w: [u32; 80] = [0; 80]; for i in 0..16 { w[i] = (self.message_block[i * 4] as u32) << 24; w[i] |= (self.message_block[i * 4 + 1] as u32) << 16; w[i] |= (self.message_block[i * 4 + 2] as u32) << 8; w[i] |= self.message_block[i * 4 + 3] as u32; } for i in 16..80 { w[i] = (w[i - 3] ^ w[i - 8] ^ w[i - 14] ^ w[i - 16]).rotate_left(1); } let mut a: u32 = self.intermediate_hash[0]; let mut b: u32 = self.intermediate_hash[1]; let mut c: u32 = self.intermediate_hash[2]; let mut d: u32 = self.intermediate_hash[3]; let mut e: u32 = self.intermediate_hash[4]; for i in 0..80 { let (k, f) = match i { 0...19 => (0x5A827999, ch!(b, c, d)), 20...39 => (0x6ED9EBA1, parity!(b, c, d)), 40...59 => (0x8F1BBCDC, maj!(b, c, d)), 60...79 => (0xCA62C1D6, parity!(b, c, d)), _ => unreachable!() }; temp = a.rotate_left(5).wrapping_add(f).wrapping_add(e).wrapping_add(k).wrapping_add(w[i]); e = d; d = c; c = b.rotate_left(30); b = a; a = temp; } self.intermediate_hash[0] = self.intermediate_hash[0].wrapping_add(a); self.intermediate_hash[1] = self.intermediate_hash[1].wrapping_add(b); self.intermediate_hash[2] = self.intermediate_hash[2].wrapping_add(c); self.intermediate_hash[3] = self.intermediate_hash[3].wrapping_add(d); self.intermediate_hash[4] = self.intermediate_hash[4].wrapping_add(e); self.message_block_index = 0; } fn sha1_result(&mut self, out: &mut [u8]) { assert!(self.corrupted.is_ok()); if!self.computed { self.sha1_pad_message(); self.message_block = [0; BLOCK_SIZE]; self.length_high = 0; self.length_low = 0; self.computed = true; } for i in 0..SHA1_HASH_SIZE { out[i] = (self.intermediate_hash[i >> 2] >> 8 * (3 - (i & 0x03))) as u8; } } fn sha1_pad_message(&mut self) { /* * Check to see if the current message block is too small to hold * the initial padding bits and length. If so, we will pad the * block, process it, and then continue padding into a second * block. 
*/ if self.message_block_index > 55 { self.message_block[self.message_block_index as usize] = 0x80; self.message_block_index += 1; while self.message_block_index < 64 { self.message_block[self.message_block_index as usize] = 0; self.message_block_index += 1; } self.sha1_process_block(); while self.message_block_index < 56 { self.message_block[self.message_block_index as usize] = 0; self.message_block_index += 1; } } else { self.message_block[self.message_block_index as usize] = 0x80; self.message_block_index += 1; while self.message_block_index < 56 { self.message_block[self.message_block_index as usize] = 0; self.message_block_index += 1; } } self.message_block[56] = (self.length_high >> 24) as u8; self.message_block[57] = (self.length_high >> 16) as u8; self.message_block[58] = (self.length_high >> 8) as u8; self.message_block[59] = (self.length_high) as u8; self.message_block[60] = (self.length_low >> 24) as u8; self.message_block[61] = (self.length_low >> 16) as u8; self.message_block[62] = (self.length_low >> 8) as u8; self.message_block[63] = (self.length_low) as u8; self.sha1_process_block(); } } impl Digest for Sha1 { fn reset(&mut self) { self.intermediate_hash = INIT_STATE; self.length_low = 0; self.length_high = 0; self.message_block_index = 0; self.computed = false; self.corrupted = Ok(DigestSuccess); } fn input(&mut self, input: &[u8]) { assert!(!self.computed); assert!(self.corrupted.is_ok()); for i in 0..input.len() { self.message_block[self.message_block_index as usize] = input[i]; self.message_block_index += 1; self.length_low = self.length_low.wrapping_add(8); if self.length_low == 0 { self.length_high = self.length_high.wrapping_add(1); if self.length_high == 0 { self.corrupted = Err(DigestError::InputTooLongError); } } if self.message_block_index == 64 { self.sha1_process_block(); } } } fn result(&mut self, out: &mut [u8]) { self.sha1_result(out); } fn output_bits(&self) -> usize
fn block_size(&self) -> usize { BLOCK_SIZE } } #[cfg(test)] mod tests { use super::*; #[derive(Clone)] struct Test { input: &'static str, output: Vec<u8>, output_str: &'static str, } #[test] fn test_sha1() { let tests = vec![ Test { input: "abc", output: vec![ 0xA9u8, 0x99u8, 0x3Eu8, 0x36u8, 0x47u8, 0x06u8, 0x81u8, 0x6Au8, 0xBAu8, 0x3Eu8, 0x25u8, 0x71u8, 0x78u8, 0x50u8, 0xC2u8, 0x6Cu8, 0x9Cu8, 0xD0u8, 0xD8u8, 0x9Du8, ], output_str: "a9993e364706816aba3e25717850c26c9cd0d89d" }, Test { input: "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", output: vec![ 0x84u8, 0x98u8, 0x3Eu8, 0x44u8, 0x1Cu8, 0x3Bu8, 0xD2u8, 0x6Eu8, 0xBAu8, 0xAEu8, 0x4Au8, 0xA1u8, 0xF9u8, 0x51u8, 0x29u8, 0xE5u8, 0xE5u8, 0x46u8, 0x70u8, 0xF1u8, ], output_str: "84983e441c3bd26ebaae4aa1f95129e5e54670f1" }, // Examples from wikipedia Test { input: "The quick brown fox jumps over the lazy dog", output: vec![ 0x2fu8, 0xd4u8, 0xe1u8, 0xc6u8, 0x7au8, 0x2du8, 0x28u8, 0xfcu8, 0xedu8, 0x84u8, 0x9eu8, 0xe1u8, 0xbbu8, 0x76u8, 0xe7u8, 0x39u8, 0x1bu8, 0x93u8, 0xebu8, 0x12u8, ], output_str: "2fd4e1c67a2d28fced849ee1bb76e7391b93eb12", }, Test { input: "The quick brown fox jumps over the lazy cog", output: vec![ 0xdeu8, 0x9fu8, 0x2cu8, 0x7fu8, 0xd2u8, 0x5eu8, 0x1bu8, 0x3au8, 0xfau8, 0xd3u8, 0xe8u8, 0x5au8, 0x0bu8, 0xd1u8, 0x7du8, 0x9bu8, 0x10u8, 0x0du8, 0xb4u8, 0xb3u8, ], output_str: "de9f2c7fd25e1b3afad3e85a0bd17d9b100db4b3", }, ]; let mut out = [0u8; 20]; let mut sh = Box::new(Sha1::new()); for t in tests.iter() { (*sh).input_str(t.input); sh.result(&mut out); assert!(t.output[..] == out[..]); let out_str = (*sh).result_str(); assert_eq!(out_str.len(), 40); assert!(&out_str[..] == t.output_str); sh.reset(); } // Test that it works when accepting the message in pieces for t in tests.iter() { let len = t.input.len(); let mut left = len; while left > 0 { let take = (left + 1) / 2; (*sh).input_str(&t.input[len - left..take + len - left]); left = left - take; } sh.result(&mut out); assert!(t.output[..] == out[..]); let out_str = (*sh).result_str(); assert_eq!(out_str.len(), 40); assert!(&out_str[..] == t.output_str); sh.reset(); } } }
{ 160 }
identifier_body
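
The sha1.rs row above drives its compression loop with ch!, parity! and maj! macros that are defined elsewhere in the crate and do not appear in these rows. The round constants next to them match the standard SHA-1 logical functions from FIPS 180, so a plausible reading is the sketch below; the exact macro bodies in the original crate are an assumption.

// Plausible definitions of the round functions used in sha1_process_block
// above (the Ch, Parity and Maj functions of FIPS 180); the real crate may
// define them differently.
macro_rules! ch {
    ($x:expr, $y:expr, $z:expr) => (($x & $y) | (!$x & $z));
}

macro_rules! parity {
    ($x:expr, $y:expr, $z:expr) => ($x ^ $y ^ $z);
}

macro_rules! maj {
    ($x:expr, $y:expr, $z:expr) => (($x & $y) | ($x & $z) | ($y & $z));
}

#[test]
fn round_functions_match_their_xor_forms() {
    let (x, y, z) = (0xdead_beefu32, 0x0123_4567u32, 0x89ab_cdefu32);
    assert_eq!(ch!(x, y, z), (x & y) ^ (!x & z));
    assert_eq!(maj!(x, y, z), (x & y) ^ (x & z) ^ (y & z));
    assert_eq!(parity!(x, y, z), x ^ y ^ z);
}
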
sha1.rs
use std::clone::Clone; use digest::{Digest, DigestError, DigestResult, DigestSuccess}; const SHA1_HASH_SIZE: usize = 20; const BLOCK_SIZE: usize = 64; // Initial hash value. const INIT_STATE: [u32; SHA1_HASH_SIZE / 4] = [ 0x67452301u32, 0xefcdab89u32, 0x98badcfeu32, 0x10325476u32, 0xc3d2e1f0u32 ]; /** * This structure will hold context information for the SHA-1 * hashing operation */ #[derive(Copy)] pub struct Sha1 { /// Message Digest intermediate_hash: [u32; SHA1_HASH_SIZE / 4], /// Message length in bits length_low: u32, /// Message length in bits length_high: u32, /// Index into message block array message_block_index: i16, /// 512-bit message blocks message_block: [u8; BLOCK_SIZE], /// Is the digest computed? computed: bool, /// Is the message digest corrupted? corrupted: DigestResult } impl Clone for Sha1 { fn clone(&self) -> Self { *self } } impl Sha1 { pub fn new() -> Sha1 { let mut new_sha1 = Sha1 { intermediate_hash: [0u32; SHA1_HASH_SIZE / 4], length_low: 0, length_high: 0, message_block_index: 0, message_block: [0u8; BLOCK_SIZE], computed: false, corrupted: Ok(DigestSuccess) }; new_sha1.reset(); new_sha1 } /** * This function will process the next 512 bits of the message * stored in the Message_Block array. * * Many of the variable names in this code, especially the * single character names, were used because those were the * names used in the publication. */ fn sha1_process_block(&mut self) { // Temporary word value let mut temp: u32; let mut w: [u32; 80] = [0; 80]; for i in 0..16 { w[i] = (self.message_block[i * 4] as u32) << 24; w[i] |= (self.message_block[i * 4 + 1] as u32) << 16; w[i] |= (self.message_block[i * 4 + 2] as u32) << 8; w[i] |= self.message_block[i * 4 + 3] as u32; } for i in 16..80 { w[i] = (w[i - 3] ^ w[i - 8] ^ w[i - 14] ^ w[i - 16]).rotate_left(1); } let mut a: u32 = self.intermediate_hash[0]; let mut b: u32 = self.intermediate_hash[1]; let mut c: u32 = self.intermediate_hash[2]; let mut d: u32 = self.intermediate_hash[3]; let mut e: u32 = self.intermediate_hash[4]; for i in 0..80 { let (k, f) = match i { 0...19 => (0x5A827999, ch!(b, c, d)), 20...39 => (0x6ED9EBA1, parity!(b, c, d)), 40...59 => (0x8F1BBCDC, maj!(b, c, d)), 60...79 => (0xCA62C1D6, parity!(b, c, d)), _ => unreachable!() }; temp = a.rotate_left(5).wrapping_add(f).wrapping_add(e).wrapping_add(k).wrapping_add(w[i]); e = d; d = c; c = b.rotate_left(30); b = a; a = temp; } self.intermediate_hash[0] = self.intermediate_hash[0].wrapping_add(a); self.intermediate_hash[1] = self.intermediate_hash[1].wrapping_add(b); self.intermediate_hash[2] = self.intermediate_hash[2].wrapping_add(c); self.intermediate_hash[3] = self.intermediate_hash[3].wrapping_add(d); self.intermediate_hash[4] = self.intermediate_hash[4].wrapping_add(e); self.message_block_index = 0; } fn sha1_result(&mut self, out: &mut [u8]) { assert!(self.corrupted.is_ok()); if!self.computed { self.sha1_pad_message(); self.message_block = [0; BLOCK_SIZE]; self.length_high = 0; self.length_low = 0; self.computed = true; } for i in 0..SHA1_HASH_SIZE { out[i] = (self.intermediate_hash[i >> 2] >> 8 * (3 - (i & 0x03))) as u8; } } fn sha1_pad_message(&mut self) { /* * Check to see if the current message block is too small to hold * the initial padding bits and length. If so, we will pad the * block, process it, and then continue padding into a second * block. 
*/ if self.message_block_index > 55 { self.message_block[self.message_block_index as usize] = 0x80; self.message_block_index += 1; while self.message_block_index < 64 { self.message_block[self.message_block_index as usize] = 0; self.message_block_index += 1; } self.sha1_process_block(); while self.message_block_index < 56 { self.message_block[self.message_block_index as usize] = 0; self.message_block_index += 1; } } else { self.message_block[self.message_block_index as usize] = 0x80; self.message_block_index += 1; while self.message_block_index < 56 { self.message_block[self.message_block_index as usize] = 0; self.message_block_index += 1; } } self.message_block[56] = (self.length_high >> 24) as u8; self.message_block[57] = (self.length_high >> 16) as u8; self.message_block[58] = (self.length_high >> 8) as u8; self.message_block[59] = (self.length_high) as u8; self.message_block[60] = (self.length_low >> 24) as u8; self.message_block[61] = (self.length_low >> 16) as u8; self.message_block[62] = (self.length_low >> 8) as u8; self.message_block[63] = (self.length_low) as u8; self.sha1_process_block(); } } impl Digest for Sha1 { fn reset(&mut self) { self.intermediate_hash = INIT_STATE; self.length_low = 0; self.length_high = 0; self.message_block_index = 0; self.computed = false; self.corrupted = Ok(DigestSuccess); } fn input(&mut self, input: &[u8]) { assert!(!self.computed); assert!(self.corrupted.is_ok()); for i in 0..input.len() { self.message_block[self.message_block_index as usize] = input[i]; self.message_block_index += 1; self.length_low = self.length_low.wrapping_add(8); if self.length_low == 0
if self.message_block_index == 64 { self.sha1_process_block(); } } } fn result(&mut self, out: &mut [u8]) { self.sha1_result(out); } fn output_bits(&self) -> usize { 160 } fn block_size(&self) -> usize { BLOCK_SIZE } } #[cfg(test)] mod tests { use super::*; #[derive(Clone)] struct Test { input: &'static str, output: Vec<u8>, output_str: &'static str, } #[test] fn test_sha1() { let tests = vec![ Test { input: "abc", output: vec![ 0xA9u8, 0x99u8, 0x3Eu8, 0x36u8, 0x47u8, 0x06u8, 0x81u8, 0x6Au8, 0xBAu8, 0x3Eu8, 0x25u8, 0x71u8, 0x78u8, 0x50u8, 0xC2u8, 0x6Cu8, 0x9Cu8, 0xD0u8, 0xD8u8, 0x9Du8, ], output_str: "a9993e364706816aba3e25717850c26c9cd0d89d" }, Test { input: "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", output: vec![ 0x84u8, 0x98u8, 0x3Eu8, 0x44u8, 0x1Cu8, 0x3Bu8, 0xD2u8, 0x6Eu8, 0xBAu8, 0xAEu8, 0x4Au8, 0xA1u8, 0xF9u8, 0x51u8, 0x29u8, 0xE5u8, 0xE5u8, 0x46u8, 0x70u8, 0xF1u8, ], output_str: "84983e441c3bd26ebaae4aa1f95129e5e54670f1" }, // Examples from wikipedia Test { input: "The quick brown fox jumps over the lazy dog", output: vec![ 0x2fu8, 0xd4u8, 0xe1u8, 0xc6u8, 0x7au8, 0x2du8, 0x28u8, 0xfcu8, 0xedu8, 0x84u8, 0x9eu8, 0xe1u8, 0xbbu8, 0x76u8, 0xe7u8, 0x39u8, 0x1bu8, 0x93u8, 0xebu8, 0x12u8, ], output_str: "2fd4e1c67a2d28fced849ee1bb76e7391b93eb12", }, Test { input: "The quick brown fox jumps over the lazy cog", output: vec![ 0xdeu8, 0x9fu8, 0x2cu8, 0x7fu8, 0xd2u8, 0x5eu8, 0x1bu8, 0x3au8, 0xfau8, 0xd3u8, 0xe8u8, 0x5au8, 0x0bu8, 0xd1u8, 0x7du8, 0x9bu8, 0x10u8, 0x0du8, 0xb4u8, 0xb3u8, ], output_str: "de9f2c7fd25e1b3afad3e85a0bd17d9b100db4b3", }, ]; let mut out = [0u8; 20]; let mut sh = Box::new(Sha1::new()); for t in tests.iter() { (*sh).input_str(t.input); sh.result(&mut out); assert!(t.output[..] == out[..]); let out_str = (*sh).result_str(); assert_eq!(out_str.len(), 40); assert!(&out_str[..] == t.output_str); sh.reset(); } // Test that it works when accepting the message in pieces for t in tests.iter() { let len = t.input.len(); let mut left = len; while left > 0 { let take = (left + 1) / 2; (*sh).input_str(&t.input[len - left..take + len - left]); left = left - take; } sh.result(&mut out); assert!(t.output[..] == out[..]); let out_str = (*sh).result_str(); assert_eq!(out_str.len(), 40); assert!(&out_str[..] == t.output_str); sh.reset(); } } }
{ self.length_high = self.length_high.wrapping_add(1); if self.length_high == 0 { self.corrupted = Err(DigestError::InputTooLongError); } }
conditional_block
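
Both sha1.rs rows so far import Digest, DigestError, DigestResult and DigestSuccess from a digest module that is not included here, and their tests call input_str and result_str, which impl Digest for Sha1 never defines, so those are presumably provided methods on the trait. A hedged reconstruction follows, with only the method names and the Ok(DigestSuccess) / Err(DigestError::InputTooLongError) usage taken from the rows; everything else is guesswork.

// Sketch of the digest module assumed by sha1.rs above.
#[derive(Debug, Clone, Copy)]
pub struct DigestSuccess;

#[derive(Debug, Clone, Copy)]
pub enum DigestError {
    InputTooLongError,
}

pub type DigestResult = Result<DigestSuccess, DigestError>;

pub trait Digest {
    fn reset(&mut self);
    fn input(&mut self, input: &[u8]);
    fn result(&mut self, out: &mut [u8]);
    fn output_bits(&self) -> usize;
    fn block_size(&self) -> usize;

    /// Convenience wrapper used by the tests: feed a &str as raw bytes.
    fn input_str(&mut self, input: &str) {
        self.input(input.as_bytes());
    }

    /// Convenience wrapper used by the tests: hex-encode the digest.
    fn result_str(&mut self) -> String {
        let mut buf = vec![0u8; self.output_bits() / 8];
        self.result(&mut buf);
        buf.iter().map(|b| format!("{:02x}", b)).collect()
    }
}
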
sha1.rs
use std::clone::Clone; use digest::{Digest, DigestError, DigestResult, DigestSuccess}; const SHA1_HASH_SIZE: usize = 20; const BLOCK_SIZE: usize = 64; // Initial hash value. const INIT_STATE: [u32; SHA1_HASH_SIZE / 4] = [ 0x67452301u32, 0xefcdab89u32, 0x98badcfeu32, 0x10325476u32, 0xc3d2e1f0u32 ]; /** * This structure will hold context information for the SHA-1 * hashing operation */ #[derive(Copy)] pub struct Sha1 { /// Message Digest intermediate_hash: [u32; SHA1_HASH_SIZE / 4], /// Message length in bits length_low: u32, /// Message length in bits length_high: u32, /// Index into message block array message_block_index: i16, /// 512-bit message blocks message_block: [u8; BLOCK_SIZE], /// Is the digest computed? computed: bool, /// Is the message digest corrupted? corrupted: DigestResult } impl Clone for Sha1 { fn
(&self) -> Self { *self } } impl Sha1 { pub fn new() -> Sha1 { let mut new_sha1 = Sha1 { intermediate_hash: [0u32; SHA1_HASH_SIZE / 4], length_low: 0, length_high: 0, message_block_index: 0, message_block: [0u8; BLOCK_SIZE], computed: false, corrupted: Ok(DigestSuccess) }; new_sha1.reset(); new_sha1 } /** * This function will process the next 512 bits of the message * stored in the Message_Block array. * * Many of the variable names in this code, especially the * single character names, were used because those were the * names used in the publication. */ fn sha1_process_block(&mut self) { // Temporary word value let mut temp: u32; let mut w: [u32; 80] = [0; 80]; for i in 0..16 { w[i] = (self.message_block[i * 4] as u32) << 24; w[i] |= (self.message_block[i * 4 + 1] as u32) << 16; w[i] |= (self.message_block[i * 4 + 2] as u32) << 8; w[i] |= self.message_block[i * 4 + 3] as u32; } for i in 16..80 { w[i] = (w[i - 3] ^ w[i - 8] ^ w[i - 14] ^ w[i - 16]).rotate_left(1); } let mut a: u32 = self.intermediate_hash[0]; let mut b: u32 = self.intermediate_hash[1]; let mut c: u32 = self.intermediate_hash[2]; let mut d: u32 = self.intermediate_hash[3]; let mut e: u32 = self.intermediate_hash[4]; for i in 0..80 { let (k, f) = match i { 0...19 => (0x5A827999, ch!(b, c, d)), 20...39 => (0x6ED9EBA1, parity!(b, c, d)), 40...59 => (0x8F1BBCDC, maj!(b, c, d)), 60...79 => (0xCA62C1D6, parity!(b, c, d)), _ => unreachable!() }; temp = a.rotate_left(5).wrapping_add(f).wrapping_add(e).wrapping_add(k).wrapping_add(w[i]); e = d; d = c; c = b.rotate_left(30); b = a; a = temp; } self.intermediate_hash[0] = self.intermediate_hash[0].wrapping_add(a); self.intermediate_hash[1] = self.intermediate_hash[1].wrapping_add(b); self.intermediate_hash[2] = self.intermediate_hash[2].wrapping_add(c); self.intermediate_hash[3] = self.intermediate_hash[3].wrapping_add(d); self.intermediate_hash[4] = self.intermediate_hash[4].wrapping_add(e); self.message_block_index = 0; } fn sha1_result(&mut self, out: &mut [u8]) { assert!(self.corrupted.is_ok()); if!self.computed { self.sha1_pad_message(); self.message_block = [0; BLOCK_SIZE]; self.length_high = 0; self.length_low = 0; self.computed = true; } for i in 0..SHA1_HASH_SIZE { out[i] = (self.intermediate_hash[i >> 2] >> 8 * (3 - (i & 0x03))) as u8; } } fn sha1_pad_message(&mut self) { /* * Check to see if the current message block is too small to hold * the initial padding bits and length. If so, we will pad the * block, process it, and then continue padding into a second * block. 
*/ if self.message_block_index > 55 { self.message_block[self.message_block_index as usize] = 0x80; self.message_block_index += 1; while self.message_block_index < 64 { self.message_block[self.message_block_index as usize] = 0; self.message_block_index += 1; } self.sha1_process_block(); while self.message_block_index < 56 { self.message_block[self.message_block_index as usize] = 0; self.message_block_index += 1; } } else { self.message_block[self.message_block_index as usize] = 0x80; self.message_block_index += 1; while self.message_block_index < 56 { self.message_block[self.message_block_index as usize] = 0; self.message_block_index += 1; } } self.message_block[56] = (self.length_high >> 24) as u8; self.message_block[57] = (self.length_high >> 16) as u8; self.message_block[58] = (self.length_high >> 8) as u8; self.message_block[59] = (self.length_high) as u8; self.message_block[60] = (self.length_low >> 24) as u8; self.message_block[61] = (self.length_low >> 16) as u8; self.message_block[62] = (self.length_low >> 8) as u8; self.message_block[63] = (self.length_low) as u8; self.sha1_process_block(); } } impl Digest for Sha1 { fn reset(&mut self) { self.intermediate_hash = INIT_STATE; self.length_low = 0; self.length_high = 0; self.message_block_index = 0; self.computed = false; self.corrupted = Ok(DigestSuccess); } fn input(&mut self, input: &[u8]) { assert!(!self.computed); assert!(self.corrupted.is_ok()); for i in 0..input.len() { self.message_block[self.message_block_index as usize] = input[i]; self.message_block_index += 1; self.length_low = self.length_low.wrapping_add(8); if self.length_low == 0 { self.length_high = self.length_high.wrapping_add(1); if self.length_high == 0 { self.corrupted = Err(DigestError::InputTooLongError); } } if self.message_block_index == 64 { self.sha1_process_block(); } } } fn result(&mut self, out: &mut [u8]) { self.sha1_result(out); } fn output_bits(&self) -> usize { 160 } fn block_size(&self) -> usize { BLOCK_SIZE } } #[cfg(test)] mod tests { use super::*; #[derive(Clone)] struct Test { input: &'static str, output: Vec<u8>, output_str: &'static str, } #[test] fn test_sha1() { let tests = vec![ Test { input: "abc", output: vec![ 0xA9u8, 0x99u8, 0x3Eu8, 0x36u8, 0x47u8, 0x06u8, 0x81u8, 0x6Au8, 0xBAu8, 0x3Eu8, 0x25u8, 0x71u8, 0x78u8, 0x50u8, 0xC2u8, 0x6Cu8, 0x9Cu8, 0xD0u8, 0xD8u8, 0x9Du8, ], output_str: "a9993e364706816aba3e25717850c26c9cd0d89d" }, Test { input: "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", output: vec![ 0x84u8, 0x98u8, 0x3Eu8, 0x44u8, 0x1Cu8, 0x3Bu8, 0xD2u8, 0x6Eu8, 0xBAu8, 0xAEu8, 0x4Au8, 0xA1u8, 0xF9u8, 0x51u8, 0x29u8, 0xE5u8, 0xE5u8, 0x46u8, 0x70u8, 0xF1u8, ], output_str: "84983e441c3bd26ebaae4aa1f95129e5e54670f1" }, // Examples from wikipedia Test { input: "The quick brown fox jumps over the lazy dog", output: vec![ 0x2fu8, 0xd4u8, 0xe1u8, 0xc6u8, 0x7au8, 0x2du8, 0x28u8, 0xfcu8, 0xedu8, 0x84u8, 0x9eu8, 0xe1u8, 0xbbu8, 0x76u8, 0xe7u8, 0x39u8, 0x1bu8, 0x93u8, 0xebu8, 0x12u8, ], output_str: "2fd4e1c67a2d28fced849ee1bb76e7391b93eb12", }, Test { input: "The quick brown fox jumps over the lazy cog", output: vec![ 0xdeu8, 0x9fu8, 0x2cu8, 0x7fu8, 0xd2u8, 0x5eu8, 0x1bu8, 0x3au8, 0xfau8, 0xd3u8, 0xe8u8, 0x5au8, 0x0bu8, 0xd1u8, 0x7du8, 0x9bu8, 0x10u8, 0x0du8, 0xb4u8, 0xb3u8, ], output_str: "de9f2c7fd25e1b3afad3e85a0bd17d9b100db4b3", }, ]; let mut out = [0u8; 20]; let mut sh = Box::new(Sha1::new()); for t in tests.iter() { (*sh).input_str(t.input); sh.result(&mut out); assert!(t.output[..] 
== out[..]); let out_str = (*sh).result_str(); assert_eq!(out_str.len(), 40); assert!(&out_str[..] == t.output_str); sh.reset(); } // Test that it works when accepting the message in pieces for t in tests.iter() { let len = t.input.len(); let mut left = len; while left > 0 { let take = (left + 1) / 2; (*sh).input_str(&t.input[len - left..take + len - left]); left = left - take; } sh.result(&mut out); assert!(t.output[..] == out[..]); let out_str = (*sh).result_str(); assert_eq!(out_str.len(), 40); assert!(&out_str[..] == t.output_str); sh.reset(); } } }
clone
identifier_name
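
Across the sha1.rs rows the public surface is Sha1::new plus the Digest methods, and the tests supply the "abc" vector, so end-to-end use looks like the sketch below; the module paths are an assumption, since only file names are given in this dataset.

// Assumed layout: sha1.rs and digest.rs are siblings in the same crate.
use digest::Digest;
use sha1::Sha1;

fn main() {
    let mut h = Sha1::new();
    h.input_str("abc");

    // 20-byte (160-bit) digest, matching output_bits() above.
    let mut out = [0u8; 20];
    h.result(&mut out);

    // result() can be called again once the digest is computed; the tests in
    // the rows rely on this to also fetch the hex form.
    assert_eq!(h.result_str(), "a9993e364706816aba3e25717850c26c9cd0d89d");

    // reset() is required before hashing fresh input with the same instance,
    // because input() asserts that the digest has not been computed yet.
    h.reset();
}
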
sha1.rs
use std::clone::Clone; use digest::{Digest, DigestError, DigestResult, DigestSuccess}; const SHA1_HASH_SIZE: usize = 20; const BLOCK_SIZE: usize = 64; // Initial hash value. const INIT_STATE: [u32; SHA1_HASH_SIZE / 4] = [ 0x67452301u32, 0xefcdab89u32, 0x98badcfeu32, 0x10325476u32, 0xc3d2e1f0u32 ]; /** * This structure will hold context information for the SHA-1 * hashing operation */ #[derive(Copy)] pub struct Sha1 { /// Message Digest intermediate_hash: [u32; SHA1_HASH_SIZE / 4], /// Message length in bits length_low: u32, /// Message length in bits length_high: u32, /// Index into message block array message_block_index: i16, /// 512-bit message blocks message_block: [u8; BLOCK_SIZE], /// Is the digest computed? computed: bool, /// Is the message digest corrupted? corrupted: DigestResult } impl Clone for Sha1 { fn clone(&self) -> Self { *self } } impl Sha1 { pub fn new() -> Sha1 { let mut new_sha1 = Sha1 { intermediate_hash: [0u32; SHA1_HASH_SIZE / 4], length_low: 0, length_high: 0, message_block_index: 0, message_block: [0u8; BLOCK_SIZE], computed: false,
new_sha1 } /** * This function will process the next 512 bits of the message * stored in the Message_Block array. * * Many of the variable names in this code, especially the * single character names, were used because those were the * names used in the publication. */ fn sha1_process_block(&mut self) { // Temporary word value let mut temp: u32; let mut w: [u32; 80] = [0; 80]; for i in 0..16 { w[i] = (self.message_block[i * 4] as u32) << 24; w[i] |= (self.message_block[i * 4 + 1] as u32) << 16; w[i] |= (self.message_block[i * 4 + 2] as u32) << 8; w[i] |= self.message_block[i * 4 + 3] as u32; } for i in 16..80 { w[i] = (w[i - 3] ^ w[i - 8] ^ w[i - 14] ^ w[i - 16]).rotate_left(1); } let mut a: u32 = self.intermediate_hash[0]; let mut b: u32 = self.intermediate_hash[1]; let mut c: u32 = self.intermediate_hash[2]; let mut d: u32 = self.intermediate_hash[3]; let mut e: u32 = self.intermediate_hash[4]; for i in 0..80 { let (k, f) = match i { 0...19 => (0x5A827999, ch!(b, c, d)), 20...39 => (0x6ED9EBA1, parity!(b, c, d)), 40...59 => (0x8F1BBCDC, maj!(b, c, d)), 60...79 => (0xCA62C1D6, parity!(b, c, d)), _ => unreachable!() }; temp = a.rotate_left(5).wrapping_add(f).wrapping_add(e).wrapping_add(k).wrapping_add(w[i]); e = d; d = c; c = b.rotate_left(30); b = a; a = temp; } self.intermediate_hash[0] = self.intermediate_hash[0].wrapping_add(a); self.intermediate_hash[1] = self.intermediate_hash[1].wrapping_add(b); self.intermediate_hash[2] = self.intermediate_hash[2].wrapping_add(c); self.intermediate_hash[3] = self.intermediate_hash[3].wrapping_add(d); self.intermediate_hash[4] = self.intermediate_hash[4].wrapping_add(e); self.message_block_index = 0; } fn sha1_result(&mut self, out: &mut [u8]) { assert!(self.corrupted.is_ok()); if!self.computed { self.sha1_pad_message(); self.message_block = [0; BLOCK_SIZE]; self.length_high = 0; self.length_low = 0; self.computed = true; } for i in 0..SHA1_HASH_SIZE { out[i] = (self.intermediate_hash[i >> 2] >> 8 * (3 - (i & 0x03))) as u8; } } fn sha1_pad_message(&mut self) { /* * Check to see if the current message block is too small to hold * the initial padding bits and length. If so, we will pad the * block, process it, and then continue padding into a second * block. 
*/ if self.message_block_index > 55 { self.message_block[self.message_block_index as usize] = 0x80; self.message_block_index += 1; while self.message_block_index < 64 { self.message_block[self.message_block_index as usize] = 0; self.message_block_index += 1; } self.sha1_process_block(); while self.message_block_index < 56 { self.message_block[self.message_block_index as usize] = 0; self.message_block_index += 1; } } else { self.message_block[self.message_block_index as usize] = 0x80; self.message_block_index += 1; while self.message_block_index < 56 { self.message_block[self.message_block_index as usize] = 0; self.message_block_index += 1; } } self.message_block[56] = (self.length_high >> 24) as u8; self.message_block[57] = (self.length_high >> 16) as u8; self.message_block[58] = (self.length_high >> 8) as u8; self.message_block[59] = (self.length_high) as u8; self.message_block[60] = (self.length_low >> 24) as u8; self.message_block[61] = (self.length_low >> 16) as u8; self.message_block[62] = (self.length_low >> 8) as u8; self.message_block[63] = (self.length_low) as u8; self.sha1_process_block(); } } impl Digest for Sha1 { fn reset(&mut self) { self.intermediate_hash = INIT_STATE; self.length_low = 0; self.length_high = 0; self.message_block_index = 0; self.computed = false; self.corrupted = Ok(DigestSuccess); } fn input(&mut self, input: &[u8]) { assert!(!self.computed); assert!(self.corrupted.is_ok()); for i in 0..input.len() { self.message_block[self.message_block_index as usize] = input[i]; self.message_block_index += 1; self.length_low = self.length_low.wrapping_add(8); if self.length_low == 0 { self.length_high = self.length_high.wrapping_add(1); if self.length_high == 0 { self.corrupted = Err(DigestError::InputTooLongError); } } if self.message_block_index == 64 { self.sha1_process_block(); } } } fn result(&mut self, out: &mut [u8]) { self.sha1_result(out); } fn output_bits(&self) -> usize { 160 } fn block_size(&self) -> usize { BLOCK_SIZE } } #[cfg(test)] mod tests { use super::*; #[derive(Clone)] struct Test { input: &'static str, output: Vec<u8>, output_str: &'static str, } #[test] fn test_sha1() { let tests = vec![ Test { input: "abc", output: vec![ 0xA9u8, 0x99u8, 0x3Eu8, 0x36u8, 0x47u8, 0x06u8, 0x81u8, 0x6Au8, 0xBAu8, 0x3Eu8, 0x25u8, 0x71u8, 0x78u8, 0x50u8, 0xC2u8, 0x6Cu8, 0x9Cu8, 0xD0u8, 0xD8u8, 0x9Du8, ], output_str: "a9993e364706816aba3e25717850c26c9cd0d89d" }, Test { input: "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", output: vec![ 0x84u8, 0x98u8, 0x3Eu8, 0x44u8, 0x1Cu8, 0x3Bu8, 0xD2u8, 0x6Eu8, 0xBAu8, 0xAEu8, 0x4Au8, 0xA1u8, 0xF9u8, 0x51u8, 0x29u8, 0xE5u8, 0xE5u8, 0x46u8, 0x70u8, 0xF1u8, ], output_str: "84983e441c3bd26ebaae4aa1f95129e5e54670f1" }, // Examples from wikipedia Test { input: "The quick brown fox jumps over the lazy dog", output: vec![ 0x2fu8, 0xd4u8, 0xe1u8, 0xc6u8, 0x7au8, 0x2du8, 0x28u8, 0xfcu8, 0xedu8, 0x84u8, 0x9eu8, 0xe1u8, 0xbbu8, 0x76u8, 0xe7u8, 0x39u8, 0x1bu8, 0x93u8, 0xebu8, 0x12u8, ], output_str: "2fd4e1c67a2d28fced849ee1bb76e7391b93eb12", }, Test { input: "The quick brown fox jumps over the lazy cog", output: vec![ 0xdeu8, 0x9fu8, 0x2cu8, 0x7fu8, 0xd2u8, 0x5eu8, 0x1bu8, 0x3au8, 0xfau8, 0xd3u8, 0xe8u8, 0x5au8, 0x0bu8, 0xd1u8, 0x7du8, 0x9bu8, 0x10u8, 0x0du8, 0xb4u8, 0xb3u8, ], output_str: "de9f2c7fd25e1b3afad3e85a0bd17d9b100db4b3", }, ]; let mut out = [0u8; 20]; let mut sh = Box::new(Sha1::new()); for t in tests.iter() { (*sh).input_str(t.input); sh.result(&mut out); assert!(t.output[..] 
== out[..]); let out_str = (*sh).result_str(); assert_eq!(out_str.len(), 40); assert!(&out_str[..] == t.output_str); sh.reset(); } // Test that it works when accepting the message in pieces for t in tests.iter() { let len = t.input.len(); let mut left = len; while left > 0 { let take = (left + 1) / 2; (*sh).input_str(&t.input[len - left..take + len - left]); left = left - take; } sh.result(&mut out); assert!(t.output[..] == out[..]); let out_str = (*sh).result_str(); assert_eq!(out_str.len(), 40); assert!(&out_str[..] == t.output_str); sh.reset(); } } }
corrupted: Ok(DigestSuccess) }; new_sha1.reset();
random_line_split
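
The four sha1.rs rows above are the same file split once per fim_type: identifier_body, conditional_block, identifier_name and random_line_split. Nothing in this file says how a row becomes a training example; one common convention is sentinel-token infilling, sketched here with StarCoder-style markers, which are purely an assumption and not part of this dataset.

// Assembles one prefix/suffix/middle row into a fill-in-the-middle sample.
// The <fim_*> and <|endoftext|> strings are an assumed convention.
fn to_fim_sample(prefix: &str, suffix: &str, middle: &str) -> (String, String) {
    let prompt = format!("<fim_prefix>{}<fim_suffix>{}<fim_middle>", prefix, suffix);
    let target = format!("{}<|endoftext|>", middle);
    (prompt, target)
}

fn main() {
    // Tiny stand-in for the identifier_body row above, whose middle is "{ 160 }".
    let (prompt, target) = to_fim_sample(
        "fn output_bits(&self) -> usize ",
        " fn block_size(&self) -> usize { BLOCK_SIZE }",
        "{ 160 }",
    );
    println!("{}", prompt);
    println!("{}", target);
}
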
cargo_expand.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use crate::bindgen::config::Profile; use std::env; use std::error; use std::fmt; use std::io; use std::path::{Path, PathBuf}; use std::process::Command; use std::str::{from_utf8, Utf8Error}; extern crate tempfile; use self::tempfile::Builder; #[derive(Debug)] /// Possible errors that can occur during `rustc -Zunpretty=expanded`. pub enum Error { /// Error during creation of temporary directory Io(io::Error), /// Output of `cargo metadata` was not valid utf8 Utf8(Utf8Error), /// Error during execution of `cargo rustc -Zunpretty=expanded` Compile(String), } impl From<io::Error> for Error { fn
(err: io::Error) -> Self { Error::Io(err) } } impl From<Utf8Error> for Error { fn from(err: Utf8Error) -> Self { Error::Utf8(err) } } impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { Error::Io(ref err) => err.fmt(f), Error::Utf8(ref err) => err.fmt(f), Error::Compile(ref err) => write!(f, "{}", err), } } } impl error::Error for Error { fn source(&self) -> Option<&(dyn error::Error +'static)> { match self { Error::Io(ref err) => Some(err), Error::Utf8(ref err) => Some(err), Error::Compile(..) => None, } } } /// Use rustc to expand and pretty print the crate into a single file, /// removing any macros in the process. #[allow(clippy::too_many_arguments)] pub fn expand( manifest_path: &Path, crate_name: &str, version: Option<&str>, use_tempdir: bool, expand_all_features: bool, expand_default_features: bool, expand_features: &Option<Vec<String>>, profile: Profile, ) -> Result<String, Error> { let cargo = env::var("CARGO").unwrap_or_else(|_| String::from("cargo")); let mut cmd = Command::new(cargo); let mut _temp_dir = None; // drop guard if use_tempdir { _temp_dir = Some(Builder::new().prefix("cbindgen-expand").tempdir()?); cmd.env("CARGO_TARGET_DIR", _temp_dir.unwrap().path()); } else if let Ok(ref path) = env::var("CARGO_EXPAND_TARGET_DIR") { cmd.env("CARGO_TARGET_DIR", path); } else if let Ok(ref path) = env::var("OUT_DIR") { // When cbindgen was started programatically from a build.rs file, Cargo is running and // locking the default target directory. In this case we need to use another directory, // else we would end up in a deadlock. If Cargo is running `OUT_DIR` will be set, so we // can use a directory relative to that. cmd.env("CARGO_TARGET_DIR", PathBuf::from(path).join("expanded")); } // Set this variable so that we don't call it recursively if we expand a crate that is using // cbindgen cmd.env("_CBINDGEN_IS_RUNNING", "1"); cmd.arg("rustc"); cmd.arg("--lib"); // When build with the release profile we can't choose the `check` profile. if profile!= Profile::Release { cmd.arg("--profile=check"); } cmd.arg("--manifest-path"); cmd.arg(manifest_path); if let Some(features) = expand_features { cmd.arg("--features"); let mut features_str = String::new(); for (index, feature) in features.iter().enumerate() { if index!= 0 { features_str.push(' '); } features_str.push_str(feature); } cmd.arg(features_str); } if expand_all_features { cmd.arg("--all-features"); } if!expand_default_features { cmd.arg("--no-default-features"); } match profile { Profile::Debug => {} Profile::Release => { cmd.arg("--release"); } } cmd.arg("-p"); let mut package = crate_name.to_owned(); if let Some(version) = version { package.push(':'); package.push_str(version); } cmd.arg(&package); cmd.arg("--verbose"); cmd.arg("--"); cmd.arg("-Zunpretty=expanded"); info!("Command: {:?}", cmd); let output = cmd.output()?; let src = from_utf8(&output.stdout)?.to_owned(); let error = from_utf8(&output.stderr)?.to_owned(); if src.is_empty() { Err(Error::Compile(error)) } else { Ok(src) } }
from
identifier_name
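
The cargo_expand.rs row above defines an Error enum with Io, Utf8 and Compile variants plus Display and Error impls, so a caller can report each failure kind separately; a minimal sketch, assuming the module is reachable as cargo_expand:

// Caller-side handling of the three variants defined in the row above.
fn report(err: &cargo_expand::Error) {
    match err {
        cargo_expand::Error::Io(e) => eprintln!("could not set up expansion: {}", e),
        cargo_expand::Error::Utf8(e) => eprintln!("cargo emitted non-UTF-8 output: {}", e),
        // Compile carries cargo's stderr verbatim, per the end of expand().
        cargo_expand::Error::Compile(stderr) => eprintln!("expansion failed:\n{}", stderr),
    }
}
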
cargo_expand.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use crate::bindgen::config::Profile; use std::env; use std::error; use std::fmt; use std::io; use std::path::{Path, PathBuf}; use std::process::Command; use std::str::{from_utf8, Utf8Error}; extern crate tempfile; use self::tempfile::Builder; #[derive(Debug)] /// Possible errors that can occur during `rustc -Zunpretty=expanded`. pub enum Error { /// Error during creation of temporary directory Io(io::Error), /// Output of `cargo metadata` was not valid utf8 Utf8(Utf8Error), /// Error during execution of `cargo rustc -Zunpretty=expanded` Compile(String), } impl From<io::Error> for Error { fn from(err: io::Error) -> Self { Error::Io(err) } } impl From<Utf8Error> for Error { fn from(err: Utf8Error) -> Self { Error::Utf8(err) } } impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { Error::Io(ref err) => err.fmt(f), Error::Utf8(ref err) => err.fmt(f), Error::Compile(ref err) => write!(f, "{}", err), } } } impl error::Error for Error { fn source(&self) -> Option<&(dyn error::Error +'static)> { match self { Error::Io(ref err) => Some(err), Error::Utf8(ref err) => Some(err), Error::Compile(..) => None, } } } /// Use rustc to expand and pretty print the crate into a single file, /// removing any macros in the process. #[allow(clippy::too_many_arguments)] pub fn expand( manifest_path: &Path, crate_name: &str, version: Option<&str>, use_tempdir: bool, expand_all_features: bool, expand_default_features: bool, expand_features: &Option<Vec<String>>, profile: Profile, ) -> Result<String, Error>
cmd.env("_CBINDGEN_IS_RUNNING", "1"); cmd.arg("rustc"); cmd.arg("--lib"); // When build with the release profile we can't choose the `check` profile. if profile!= Profile::Release { cmd.arg("--profile=check"); } cmd.arg("--manifest-path"); cmd.arg(manifest_path); if let Some(features) = expand_features { cmd.arg("--features"); let mut features_str = String::new(); for (index, feature) in features.iter().enumerate() { if index!= 0 { features_str.push(' '); } features_str.push_str(feature); } cmd.arg(features_str); } if expand_all_features { cmd.arg("--all-features"); } if!expand_default_features { cmd.arg("--no-default-features"); } match profile { Profile::Debug => {} Profile::Release => { cmd.arg("--release"); } } cmd.arg("-p"); let mut package = crate_name.to_owned(); if let Some(version) = version { package.push(':'); package.push_str(version); } cmd.arg(&package); cmd.arg("--verbose"); cmd.arg("--"); cmd.arg("-Zunpretty=expanded"); info!("Command: {:?}", cmd); let output = cmd.output()?; let src = from_utf8(&output.stdout)?.to_owned(); let error = from_utf8(&output.stderr)?.to_owned(); if src.is_empty() { Err(Error::Compile(error)) } else { Ok(src) } }
{ let cargo = env::var("CARGO").unwrap_or_else(|_| String::from("cargo")); let mut cmd = Command::new(cargo); let mut _temp_dir = None; // drop guard if use_tempdir { _temp_dir = Some(Builder::new().prefix("cbindgen-expand").tempdir()?); cmd.env("CARGO_TARGET_DIR", _temp_dir.unwrap().path()); } else if let Ok(ref path) = env::var("CARGO_EXPAND_TARGET_DIR") { cmd.env("CARGO_TARGET_DIR", path); } else if let Ok(ref path) = env::var("OUT_DIR") { // When cbindgen was started programatically from a build.rs file, Cargo is running and // locking the default target directory. In this case we need to use another directory, // else we would end up in a deadlock. If Cargo is running `OUT_DIR` will be set, so we // can use a directory relative to that. cmd.env("CARGO_TARGET_DIR", PathBuf::from(path).join("expanded")); } // Set this variable so that we don't call it recursively if we expand a crate that is using // cbindgen
identifier_body
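
The identifier_body row above is the setup half of expand, and the cargo_expand.rs rows show its full signature, so a call site looks roughly like this; the paths, crate name and version are placeholders, and the module path is assumed.

use std::path::Path;

use crate::bindgen::cargo::cargo_expand;   // assumed module path
use crate::bindgen::config::Profile;       // same import as the file above

fn expand_my_crate() -> Result<String, cargo_expand::Error> {
    // Placeholder arguments; only their order and types come from the
    // signature shown in the rows.
    cargo_expand::expand(
        Path::new("some/crate/Cargo.toml"), // manifest_path
        "some-crate",                       // crate_name
        None,                               // version
        true,                               // use_tempdir
        false,                              // expand_all_features
        true,                               // expand_default_features
        &None,                              // expand_features
        Profile::Debug,                     // profile
    )
}
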
cargo_expand.rs
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use crate::bindgen::config::Profile; use std::env; use std::error; use std::fmt; use std::io; use std::path::{Path, PathBuf}; use std::process::Command; use std::str::{from_utf8, Utf8Error}; extern crate tempfile; use self::tempfile::Builder; #[derive(Debug)] /// Possible errors that can occur during `rustc -Zunpretty=expanded`. pub enum Error { /// Error during creation of temporary directory Io(io::Error), /// Output of `cargo metadata` was not valid utf8 Utf8(Utf8Error), /// Error during execution of `cargo rustc -Zunpretty=expanded` Compile(String), } impl From<io::Error> for Error { fn from(err: io::Error) -> Self { Error::Io(err) } } impl From<Utf8Error> for Error { fn from(err: Utf8Error) -> Self { Error::Utf8(err) } } impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { Error::Io(ref err) => err.fmt(f), Error::Utf8(ref err) => err.fmt(f), Error::Compile(ref err) => write!(f, "{}", err), } } } impl error::Error for Error { fn source(&self) -> Option<&(dyn error::Error +'static)> { match self { Error::Io(ref err) => Some(err), Error::Utf8(ref err) => Some(err), Error::Compile(..) => None, } } } /// Use rustc to expand and pretty print the crate into a single file, /// removing any macros in the process. #[allow(clippy::too_many_arguments)] pub fn expand( manifest_path: &Path, crate_name: &str, version: Option<&str>, use_tempdir: bool, expand_all_features: bool, expand_default_features: bool, expand_features: &Option<Vec<String>>, profile: Profile, ) -> Result<String, Error> { let cargo = env::var("CARGO").unwrap_or_else(|_| String::from("cargo")); let mut cmd = Command::new(cargo); let mut _temp_dir = None; // drop guard if use_tempdir { _temp_dir = Some(Builder::new().prefix("cbindgen-expand").tempdir()?); cmd.env("CARGO_TARGET_DIR", _temp_dir.unwrap().path()); } else if let Ok(ref path) = env::var("CARGO_EXPAND_TARGET_DIR") { cmd.env("CARGO_TARGET_DIR", path); } else if let Ok(ref path) = env::var("OUT_DIR") { // When cbindgen was started programatically from a build.rs file, Cargo is running and // locking the default target directory. In this case we need to use another directory, // else we would end up in a deadlock. If Cargo is running `OUT_DIR` will be set, so we // can use a directory relative to that. cmd.env("CARGO_TARGET_DIR", PathBuf::from(path).join("expanded")); } // Set this variable so that we don't call it recursively if we expand a crate that is using // cbindgen cmd.env("_CBINDGEN_IS_RUNNING", "1"); cmd.arg("rustc"); cmd.arg("--lib"); // When build with the release profile we can't choose the `check` profile. 
if profile!= Profile::Release { cmd.arg("--profile=check"); } cmd.arg("--manifest-path"); cmd.arg(manifest_path); if let Some(features) = expand_features { cmd.arg("--features"); let mut features_str = String::new(); for (index, feature) in features.iter().enumerate() { if index!= 0 { features_str.push(' '); } features_str.push_str(feature); } cmd.arg(features_str); } if expand_all_features { cmd.arg("--all-features"); } if!expand_default_features { cmd.arg("--no-default-features"); } match profile { Profile::Debug => {} Profile::Release => { cmd.arg("--release"); } } cmd.arg("-p"); let mut package = crate_name.to_owned(); if let Some(version) = version { package.push(':'); package.push_str(version); } cmd.arg(&package); cmd.arg("--verbose"); cmd.arg("--"); cmd.arg("-Zunpretty=expanded"); info!("Command: {:?}", cmd); let output = cmd.output()?; let src = from_utf8(&output.stdout)?.to_owned(); let error = from_utf8(&output.stderr)?.to_owned(); if src.is_empty() {
} else { Ok(src) } }
Err(Error::Compile(error))
random_line_split
proxy.rs
#![deny(warnings)] extern crate hyper; extern crate pretty_env_logger; use hyper::{Client, Server}; use hyper::service::service_fn; use hyper::rt::{self, Future}; use std::net::SocketAddr; fn
() { pretty_env_logger::init(); let in_addr = ([127, 0, 0, 1], 3001).into(); let out_addr: SocketAddr = ([127, 0, 0, 1], 3000).into(); let client_main = Client::new(); let out_addr_clone = out_addr.clone(); // new_service is run for each connection, creating a'service' // to handle requests for that specific connection. let new_service = move || { let client = client_main.clone(); // This is the `Service` that will handle the connection. // `service_fn_ok` is a helper to convert a function that // returns a Response into a `Service`. service_fn(move |mut req| { let uri_string = format!("http://{}/{}", out_addr_clone, req.uri().path_and_query().map(|x| x.as_str()).unwrap_or("")); let uri = uri_string.parse().unwrap(); *req.uri_mut() = uri; client.request(req) }) }; let server = Server::bind(&in_addr) .serve(new_service) .map_err(|e| eprintln!("server error: {}", e)); println!("Listening on http://{}", in_addr); println!("Proxying on http://{}", out_addr); rt::run(server); }
main
identifier_name
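
The proxy.rs row above forwards every request on 127.0.0.1:3001 to 127.0.0.1:3000, so trying it needs something listening on port 3000. A minimal backend in the same hyper 0.12-era style is sketched below (the service_fn_ok helper mentioned in the proxy's comments wraps an infallible Request -> Response function); the hyper version is an assumption inferred from the APIs used. With both running, curl http://127.0.0.1:3001/ should return the backend's reply through the proxy.

extern crate hyper;

use hyper::{Body, Response, Server};
use hyper::rt::{self, Future};
use hyper::service::service_fn_ok;

fn main() {
    let addr = ([127, 0, 0, 1], 3000).into();

    // service_fn_ok is the infallible cousin of the service_fn used by the
    // proxy: no Future is needed because the response is built synchronously.
    let new_service = || service_fn_ok(|_req| Response::new(Body::from("hello from 3000")));

    let server = Server::bind(&addr)
        .serve(new_service)
        .map_err(|e| eprintln!("server error: {}", e));

    rt::run(server);
}
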
proxy.rs
#![deny(warnings)] extern crate hyper; extern crate pretty_env_logger; use hyper::{Client, Server}; use hyper::service::service_fn; use hyper::rt::{self, Future}; use std::net::SocketAddr; fn main()
let uri = uri_string.parse().unwrap(); *req.uri_mut() = uri; client.request(req) }) }; let server = Server::bind(&in_addr) .serve(new_service) .map_err(|e| eprintln!("server error: {}", e)); println!("Listening on http://{}", in_addr); println!("Proxying on http://{}", out_addr); rt::run(server); }
{ pretty_env_logger::init(); let in_addr = ([127, 0, 0, 1], 3001).into(); let out_addr: SocketAddr = ([127, 0, 0, 1], 3000).into(); let client_main = Client::new(); let out_addr_clone = out_addr.clone(); // new_service is run for each connection, creating a 'service' // to handle requests for that specific connection. let new_service = move || { let client = client_main.clone(); // This is the `Service` that will handle the connection. // `service_fn_ok` is a helper to convert a function that // returns a Response into a `Service`. service_fn(move |mut req| { let uri_string = format!("http://{}/{}", out_addr_clone, req.uri().path_and_query().map(|x| x.as_str()).unwrap_or(""));
identifier_body
proxy.rs
#![deny(warnings)] extern crate hyper; extern crate pretty_env_logger; use hyper::{Client, Server}; use hyper::service::service_fn; use hyper::rt::{self, Future}; use std::net::SocketAddr; fn main() { pretty_env_logger::init(); let in_addr = ([127, 0, 0, 1], 3001).into(); let out_addr: SocketAddr = ([127, 0, 0, 1], 3000).into(); let client_main = Client::new(); let out_addr_clone = out_addr.clone(); // new_service is run for each connection, creating a'service' // to handle requests for that specific connection. let new_service = move || { let client = client_main.clone(); // This is the `Service` that will handle the connection. // `service_fn_ok` is a helper to convert a function that // returns a Response into a `Service`. service_fn(move |mut req| { let uri_string = format!("http://{}/{}",
*req.uri_mut() = uri; client.request(req) }) }; let server = Server::bind(&in_addr) .serve(new_service) .map_err(|e| eprintln!("server error: {}", e)); println!("Listening on http://{}", in_addr); println!("Proxying on http://{}", out_addr); rt::run(server); }
out_addr_clone, req.uri().path_and_query().map(|x| x.as_str()).unwrap_or("")); let uri = uri_string.parse().unwrap();
random_line_split
allocator-override.rs
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // no-prefer-dynamic // aux-build:allocator-dummy.rs extern crate allocator_dummy; fn
() { unsafe { let before = allocator_dummy::HITS; let b = Box::new(3); assert_eq!(allocator_dummy::HITS - before, 1); drop(b); assert_eq!(allocator_dummy::HITS - before, 2); } }
main
identifier_name
allocator-override.rs
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // no-prefer-dynamic // aux-build:allocator-dummy.rs extern crate allocator_dummy; fn main()
{ unsafe { let before = allocator_dummy::HITS; let b = Box::new(3); assert_eq!(allocator_dummy::HITS - before, 1); drop(b); assert_eq!(allocator_dummy::HITS - before, 2); } }
identifier_body
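
The allocator-override.rs rows rely on an aux crate, allocator-dummy, whose HITS counter is bumped once per allocation and once per deallocation; that crate is not part of these rows, and it targets the pre-1.0 allocator attribute. A present-day analogue with the same observable behaviour (the counter moves by 1 after Box::new and by 2 after the drop) is sketched below; it is an illustration, not the aux crate used by the test.

use std::alloc::{GlobalAlloc, Layout, System};
use std::sync::atomic::{AtomicUsize, Ordering};

static HITS: AtomicUsize = AtomicUsize::new(0);

struct Counting;

unsafe impl GlobalAlloc for Counting {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        HITS.fetch_add(1, Ordering::SeqCst); // one hit per allocation
        System.alloc(layout)
    }
    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        HITS.fetch_add(1, Ordering::SeqCst); // one hit per deallocation
        System.dealloc(ptr, layout)
    }
}

#[global_allocator]
static A: Counting = Counting;

fn main() {
    let before = HITS.load(Ordering::SeqCst);
    let b = Box::new(3);
    assert_eq!(HITS.load(Ordering::SeqCst) - before, 1);
    drop(b);
    assert_eq!(HITS.load(Ordering::SeqCst) - before, 2);
}
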
type_.rs
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #[allow(non_uppercase_pattern_statics)]; use lib::llvm::{llvm, TypeRef, Bool, False, True, TypeKind}; use lib::llvm::{Float, Double, X86_FP80, PPC_FP128, FP128}; use middle::ty; use middle::trans::context::CrateContext; use middle::trans::base; use syntax::ast; use syntax::abi::{Architecture, X86, X86_64, Arm, Mips}; use std::c_str::ToCStr; use std::vec; use std::cast; use std::libc::{c_uint}; #[deriving(Clone, Eq)] pub struct Type { priv rf: TypeRef } macro_rules! ty ( ($e:expr) => ( Type::from_ref(unsafe { $e })) ) /** * Wrapper for LLVM TypeRef */ impl Type { #[inline(always)] pub fn from_ref(r: TypeRef) -> Type { Type { rf: r } } #[inline(always)] // So it doesn't kill --opt-level=0 builds of the compiler pub fn to_ref(&self) -> TypeRef { self.rf } pub fn void() -> Type { ty!(llvm::LLVMVoidTypeInContext(base::task_llcx())) } pub fn nil() -> Type { Type::empty_struct() } pub fn metadata() -> Type { ty!(llvm::LLVMMetadataTypeInContext(base::task_llcx())) } pub fn i1() -> Type { ty!(llvm::LLVMInt1TypeInContext(base::task_llcx())) } pub fn i8() -> Type { ty!(llvm::LLVMInt8TypeInContext(base::task_llcx())) } pub fn i16() -> Type { ty!(llvm::LLVMInt16TypeInContext(base::task_llcx())) } pub fn i32() -> Type { ty!(llvm::LLVMInt32TypeInContext(base::task_llcx())) } pub fn i64() -> Type { ty!(llvm::LLVMInt64TypeInContext(base::task_llcx())) } pub fn f32() -> Type { ty!(llvm::LLVMFloatTypeInContext(base::task_llcx())) } pub fn f64() -> Type { ty!(llvm::LLVMDoubleTypeInContext(base::task_llcx())) } pub fn bool() -> Type { Type::i8() } pub fn char() -> Type { Type::i32() } pub fn i8p() -> Type { Type::i8().ptr_to() } pub fn int(arch: Architecture) -> Type { match arch { X86 | Arm | Mips => Type::i32(), X86_64 => Type::i64() } } pub fn float(_: Architecture) -> Type { // All architectures currently just use doubles as the default // float size Type::f64() } pub fn int_from_ty(ctx: &CrateContext, t: ast::IntTy) -> Type { match t { ast::TyI => ctx.int_type, ast::TyI8 => Type::i8(), ast::TyI16 => Type::i16(), ast::TyI32 => Type::i32(), ast::TyI64 => Type::i64() } } pub fn uint_from_ty(ctx: &CrateContext, t: ast::UintTy) -> Type { match t { ast::TyU => ctx.int_type, ast::TyU8 => Type::i8(), ast::TyU16 => Type::i16(), ast::TyU32 => Type::i32(), ast::TyU64 => Type::i64() } } pub fn float_from_ty(t: ast::FloatTy) -> Type { match t { ast::TyF32 => Type::f32(), ast::TyF64 => Type::f64() } } pub fn size_t(arch: Architecture) -> Type { Type::int(arch) } pub fn func(args: &[Type], ret: &Type) -> Type { let vec : &[TypeRef] = unsafe { cast::transmute(args) }; ty!(llvm::LLVMFunctionType(ret.to_ref(), vec.as_ptr(), args.len() as c_uint, False)) } pub fn variadic_func(args: &[Type], ret: &Type) -> Type { let vec : &[TypeRef] = unsafe { cast::transmute(args) }; ty!(llvm::LLVMFunctionType(ret.to_ref(), vec.as_ptr(), args.len() as c_uint, True)) } pub fn func_pair(cx: &CrateContext, fn_ty: &Type) -> Type { Type::struct_([fn_ty.ptr_to(), Type::opaque_cbox_ptr(cx)], false) } pub fn ptr(ty: Type) -> Type { ty!(llvm::LLVMPointerType(ty.to_ref(), 0 as c_uint)) } pub 
fn struct_(els: &[Type], packed: bool) -> Type { let els : &[TypeRef] = unsafe { cast::transmute(els) }; ty!(llvm::LLVMStructTypeInContext(base::task_llcx(), els.as_ptr(), els.len() as c_uint, packed as Bool)) } pub fn named_struct(name: &str) -> Type { let ctx = base::task_llcx(); ty!(name.with_c_str(|s| llvm::LLVMStructCreateNamed(ctx, s))) } pub fn empty_struct() -> Type { Type::struct_([], false) } pub fn vtable() -> Type { Type::array(&Type::i8().ptr_to(), 1) } pub fn generic_glue_fn(cx: &CrateContext) -> Type { match cx.tn.find_type("glue_fn") { Some(ty) => return ty, None => () } let ty = Type::glue_fn(Type::i8p()); cx.tn.associate_type("glue_fn", &ty); return ty; } pub fn glue_fn(t: Type) -> Type { Type::func([ Type::nil().ptr_to(), t ], &Type::void()) } pub fn tydesc(arch: Architecture) -> Type { let mut tydesc = Type::named_struct("tydesc"); let glue_fn_ty = Type::glue_fn(Type::i8p()).ptr_to(); let int_ty = Type::int(arch); // Must mirror: // // std::unstable::intrinsics::TyDesc // type_desc in rt let elems = [int_ty, // size int_ty, // align glue_fn_ty, // take glue_fn_ty, // drop glue_fn_ty, // visit int_ty, // borrow_offset Type::struct_([Type::i8p(), Type::int(arch)], false)]; // name tydesc.set_struct_body(elems, false); return tydesc; } pub fn array(ty: &Type, len: u64) -> Type { ty!(llvm::LLVMArrayType(ty.to_ref(), len as c_uint)) } pub fn vector(ty: &Type, len: u64) -> Type { ty!(llvm::LLVMVectorType(ty.to_ref(), len as c_uint)) } pub fn vec(arch: Architecture, ty: &Type) -> Type { Type::struct_( [ Type::int(arch), Type::int(arch), Type::array(ty, 0) ], false) } pub fn opaque_vec(arch: Architecture) -> Type { Type::vec(arch, &Type::i8()) } #[inline] pub fn box_header_fields(ctx: &CrateContext) -> ~[Type] { ~[ ctx.int_type, ctx.tydesc_type.ptr_to(), Type::i8().ptr_to(), Type::i8().ptr_to() ] } pub fn box_header(ctx: &CrateContext) -> Type { Type::struct_(Type::box_header_fields(ctx), false) } pub fn smart_ptr(ctx: &CrateContext, ty: &Type) -> Type { Type::struct_(Type::box_header_fields(ctx) + &[*ty], false) } pub fn opaque() -> Type { Type::i8() } pub fn opaque_box(ctx: &CrateContext) -> Type { Type::smart_ptr(ctx, &Type::opaque()) } pub fn unique(ctx: &CrateContext, ty: &Type) -> Type { Type::smart_ptr(ctx, ty) } pub fn opaque_cbox_ptr(cx: &CrateContext) -> Type { Type::opaque_box(cx).ptr_to() } pub fn opaque_trait(ctx: &CrateContext, store: ty::TraitStore) -> Type { let tydesc_ptr = ctx.tydesc_type.ptr_to(); let box_ty = match store { ty::BoxTraitStore => Type::opaque_box(ctx), ty::UniqTraitStore => Type::unique(ctx, &Type::i8()), ty::RegionTraitStore(..) 
=> Type::i8() }; Type::struct_([tydesc_ptr, box_ty.ptr_to()], false) } pub fn kind(&self) -> TypeKind { unsafe { llvm::LLVMGetTypeKind(self.to_ref()) } } pub fn set_struct_body(&mut self, els: &[Type], packed: bool) { unsafe { let vec : &[TypeRef] = cast::transmute(els); llvm::LLVMStructSetBody(self.to_ref(), vec.as_ptr(), els.len() as c_uint, packed as Bool) } } pub fn ptr_to(&self) -> Type { ty!(llvm::LLVMPointerType(self.to_ref(), 0)) } pub fn get_field(&self, idx: uint) -> Type { unsafe { let num_fields = llvm::LLVMCountStructElementTypes(self.to_ref()) as uint; let mut elems = vec::from_elem(num_fields, 0 as TypeRef); llvm::LLVMGetStructElementTypes(self.to_ref(), elems.as_mut_ptr()); Type::from_ref(elems[idx]) } } pub fn is_packed(&self) -> bool { unsafe { llvm::LLVMIsPackedStruct(self.to_ref()) == True } } pub fn element_type(&self) -> Type { unsafe { Type::from_ref(llvm::LLVMGetElementType(self.to_ref())) } } pub fn array_length(&self) -> uint { unsafe { llvm::LLVMGetArrayLength(self.to_ref()) as uint } } pub fn field_types(&self) -> ~[Type] { unsafe { let n_elts = llvm::LLVMCountStructElementTypes(self.to_ref()) as uint; if n_elts == 0 { return ~[]; } let mut elts = vec::from_elem(n_elts, 0 as TypeRef); llvm::LLVMGetStructElementTypes(self.to_ref(), &mut elts[0]); cast::transmute(elts) } } pub fn return_type(&self) -> Type { ty!(llvm::LLVMGetReturnType(self.to_ref())) } pub fn
(&self) -> ~[Type] { unsafe { let n_args = llvm::LLVMCountParamTypes(self.to_ref()) as uint; let args = vec::from_elem(n_args, 0 as TypeRef); llvm::LLVMGetParamTypes(self.to_ref(), args.as_ptr()); cast::transmute(args) } } pub fn float_width(&self) -> uint { match self.kind() { Float => 32, Double => 64, X86_FP80 => 80, FP128 | PPC_FP128 => 128, _ => fail!("llvm_float_width called on a non-float type") } } }
func_params
identifier_name
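
The type_.rs row above wraps raw LLVM TypeRefs in a small Copy value with constructors for integers, pointers, structs, arrays and function types. Below is a short sketch of how those constructors compose, written in the same pre-1.0 dialect as the file; it only makes sense inside rustc's trans with a live LLVM context (base::task_llcx()), so it is illustrative rather than runnable on its own.

// Illustrative only: assumes an active LLVM context and the imports at the
// top of type_.rs (in particular Architecture's X86_64 variant).
fn example_types() {
    // i32 (i8*, i32): a function taking a byte pointer and an int.
    let fn_ty = Type::func([Type::i8p(), Type::i32()], &Type::i32());
    let fn_ptr = fn_ty.ptr_to();

    // The { int, int, [0 x T] } layout that Type::vec builds for vectors.
    let vec_ty = Type::vec(X86_64, &Type::i8());

    // Struct fields can be read back through the wrapper.
    let pair = Type::struct_([Type::i32(), fn_ptr], false);
    assert!(pair.field_types().len() == 2);
    let _ = (fn_ty.return_type(), vec_ty, pair.get_field(0));
}
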
type_.rs
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #[allow(non_uppercase_pattern_statics)]; use lib::llvm::{llvm, TypeRef, Bool, False, True, TypeKind}; use lib::llvm::{Float, Double, X86_FP80, PPC_FP128, FP128}; use middle::ty; use middle::trans::context::CrateContext; use middle::trans::base; use syntax::ast; use syntax::abi::{Architecture, X86, X86_64, Arm, Mips}; use std::c_str::ToCStr; use std::vec; use std::cast; use std::libc::{c_uint}; #[deriving(Clone, Eq)] pub struct Type { priv rf: TypeRef } macro_rules! ty ( ($e:expr) => ( Type::from_ref(unsafe { $e })) ) /** * Wrapper for LLVM TypeRef */ impl Type { #[inline(always)] pub fn from_ref(r: TypeRef) -> Type { Type { rf: r } } #[inline(always)] // So it doesn't kill --opt-level=0 builds of the compiler pub fn to_ref(&self) -> TypeRef { self.rf } pub fn void() -> Type { ty!(llvm::LLVMVoidTypeInContext(base::task_llcx())) } pub fn nil() -> Type { Type::empty_struct() } pub fn metadata() -> Type { ty!(llvm::LLVMMetadataTypeInContext(base::task_llcx())) } pub fn i1() -> Type { ty!(llvm::LLVMInt1TypeInContext(base::task_llcx())) } pub fn i8() -> Type { ty!(llvm::LLVMInt8TypeInContext(base::task_llcx())) } pub fn i16() -> Type { ty!(llvm::LLVMInt16TypeInContext(base::task_llcx())) } pub fn i32() -> Type { ty!(llvm::LLVMInt32TypeInContext(base::task_llcx())) } pub fn i64() -> Type { ty!(llvm::LLVMInt64TypeInContext(base::task_llcx())) } pub fn f32() -> Type { ty!(llvm::LLVMFloatTypeInContext(base::task_llcx())) } pub fn f64() -> Type { ty!(llvm::LLVMDoubleTypeInContext(base::task_llcx())) } pub fn bool() -> Type { Type::i8() } pub fn char() -> Type { Type::i32() } pub fn i8p() -> Type { Type::i8().ptr_to() } pub fn int(arch: Architecture) -> Type { match arch { X86 | Arm | Mips => Type::i32(), X86_64 => Type::i64() } } pub fn float(_: Architecture) -> Type { // All architectures currently just use doubles as the default // float size Type::f64() } pub fn int_from_ty(ctx: &CrateContext, t: ast::IntTy) -> Type { match t { ast::TyI => ctx.int_type, ast::TyI8 => Type::i8(), ast::TyI16 => Type::i16(), ast::TyI32 => Type::i32(), ast::TyI64 => Type::i64() } } pub fn uint_from_ty(ctx: &CrateContext, t: ast::UintTy) -> Type
pub fn float_from_ty(t: ast::FloatTy) -> Type { match t { ast::TyF32 => Type::f32(), ast::TyF64 => Type::f64() } } pub fn size_t(arch: Architecture) -> Type { Type::int(arch) } pub fn func(args: &[Type], ret: &Type) -> Type { let vec : &[TypeRef] = unsafe { cast::transmute(args) }; ty!(llvm::LLVMFunctionType(ret.to_ref(), vec.as_ptr(), args.len() as c_uint, False)) } pub fn variadic_func(args: &[Type], ret: &Type) -> Type { let vec : &[TypeRef] = unsafe { cast::transmute(args) }; ty!(llvm::LLVMFunctionType(ret.to_ref(), vec.as_ptr(), args.len() as c_uint, True)) } pub fn func_pair(cx: &CrateContext, fn_ty: &Type) -> Type { Type::struct_([fn_ty.ptr_to(), Type::opaque_cbox_ptr(cx)], false) } pub fn ptr(ty: Type) -> Type { ty!(llvm::LLVMPointerType(ty.to_ref(), 0 as c_uint)) } pub fn struct_(els: &[Type], packed: bool) -> Type { let els : &[TypeRef] = unsafe { cast::transmute(els) }; ty!(llvm::LLVMStructTypeInContext(base::task_llcx(), els.as_ptr(), els.len() as c_uint, packed as Bool)) } pub fn named_struct(name: &str) -> Type { let ctx = base::task_llcx(); ty!(name.with_c_str(|s| llvm::LLVMStructCreateNamed(ctx, s))) } pub fn empty_struct() -> Type { Type::struct_([], false) } pub fn vtable() -> Type { Type::array(&Type::i8().ptr_to(), 1) } pub fn generic_glue_fn(cx: &CrateContext) -> Type { match cx.tn.find_type("glue_fn") { Some(ty) => return ty, None => () } let ty = Type::glue_fn(Type::i8p()); cx.tn.associate_type("glue_fn", &ty); return ty; } pub fn glue_fn(t: Type) -> Type { Type::func([ Type::nil().ptr_to(), t ], &Type::void()) } pub fn tydesc(arch: Architecture) -> Type { let mut tydesc = Type::named_struct("tydesc"); let glue_fn_ty = Type::glue_fn(Type::i8p()).ptr_to(); let int_ty = Type::int(arch); // Must mirror: // // std::unstable::intrinsics::TyDesc // type_desc in rt let elems = [int_ty, // size int_ty, // align glue_fn_ty, // take glue_fn_ty, // drop glue_fn_ty, // visit int_ty, // borrow_offset Type::struct_([Type::i8p(), Type::int(arch)], false)]; // name tydesc.set_struct_body(elems, false); return tydesc; } pub fn array(ty: &Type, len: u64) -> Type { ty!(llvm::LLVMArrayType(ty.to_ref(), len as c_uint)) } pub fn vector(ty: &Type, len: u64) -> Type { ty!(llvm::LLVMVectorType(ty.to_ref(), len as c_uint)) } pub fn vec(arch: Architecture, ty: &Type) -> Type { Type::struct_( [ Type::int(arch), Type::int(arch), Type::array(ty, 0) ], false) } pub fn opaque_vec(arch: Architecture) -> Type { Type::vec(arch, &Type::i8()) } #[inline] pub fn box_header_fields(ctx: &CrateContext) -> ~[Type] { ~[ ctx.int_type, ctx.tydesc_type.ptr_to(), Type::i8().ptr_to(), Type::i8().ptr_to() ] } pub fn box_header(ctx: &CrateContext) -> Type { Type::struct_(Type::box_header_fields(ctx), false) } pub fn smart_ptr(ctx: &CrateContext, ty: &Type) -> Type { Type::struct_(Type::box_header_fields(ctx) + &[*ty], false) } pub fn opaque() -> Type { Type::i8() } pub fn opaque_box(ctx: &CrateContext) -> Type { Type::smart_ptr(ctx, &Type::opaque()) } pub fn unique(ctx: &CrateContext, ty: &Type) -> Type { Type::smart_ptr(ctx, ty) } pub fn opaque_cbox_ptr(cx: &CrateContext) -> Type { Type::opaque_box(cx).ptr_to() } pub fn opaque_trait(ctx: &CrateContext, store: ty::TraitStore) -> Type { let tydesc_ptr = ctx.tydesc_type.ptr_to(); let box_ty = match store { ty::BoxTraitStore => Type::opaque_box(ctx), ty::UniqTraitStore => Type::unique(ctx, &Type::i8()), ty::RegionTraitStore(..) 
=> Type::i8() }; Type::struct_([tydesc_ptr, box_ty.ptr_to()], false) } pub fn kind(&self) -> TypeKind { unsafe { llvm::LLVMGetTypeKind(self.to_ref()) } } pub fn set_struct_body(&mut self, els: &[Type], packed: bool) { unsafe { let vec : &[TypeRef] = cast::transmute(els); llvm::LLVMStructSetBody(self.to_ref(), vec.as_ptr(), els.len() as c_uint, packed as Bool) } } pub fn ptr_to(&self) -> Type { ty!(llvm::LLVMPointerType(self.to_ref(), 0)) } pub fn get_field(&self, idx: uint) -> Type { unsafe { let num_fields = llvm::LLVMCountStructElementTypes(self.to_ref()) as uint; let mut elems = vec::from_elem(num_fields, 0 as TypeRef); llvm::LLVMGetStructElementTypes(self.to_ref(), elems.as_mut_ptr()); Type::from_ref(elems[idx]) } } pub fn is_packed(&self) -> bool { unsafe { llvm::LLVMIsPackedStruct(self.to_ref()) == True } } pub fn element_type(&self) -> Type { unsafe { Type::from_ref(llvm::LLVMGetElementType(self.to_ref())) } } pub fn array_length(&self) -> uint { unsafe { llvm::LLVMGetArrayLength(self.to_ref()) as uint } } pub fn field_types(&self) -> ~[Type] { unsafe { let n_elts = llvm::LLVMCountStructElementTypes(self.to_ref()) as uint; if n_elts == 0 { return ~[]; } let mut elts = vec::from_elem(n_elts, 0 as TypeRef); llvm::LLVMGetStructElementTypes(self.to_ref(), &mut elts[0]); cast::transmute(elts) } } pub fn return_type(&self) -> Type { ty!(llvm::LLVMGetReturnType(self.to_ref())) } pub fn func_params(&self) -> ~[Type] { unsafe { let n_args = llvm::LLVMCountParamTypes(self.to_ref()) as uint; let args = vec::from_elem(n_args, 0 as TypeRef); llvm::LLVMGetParamTypes(self.to_ref(), args.as_ptr()); cast::transmute(args) } } pub fn float_width(&self) -> uint { match self.kind() { Float => 32, Double => 64, X86_FP80 => 80, FP128 | PPC_FP128 => 128, _ => fail!("llvm_float_width called on a non-float type") } } }
{ match t { ast::TyU => ctx.int_type, ast::TyU8 => Type::i8(), ast::TyU16 => Type::i16(), ast::TyU32 => Type::i32(), ast::TyU64 => Type::i64() } }
identifier_body
type_.rs
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #[allow(non_uppercase_pattern_statics)]; use lib::llvm::{llvm, TypeRef, Bool, False, True, TypeKind}; use lib::llvm::{Float, Double, X86_FP80, PPC_FP128, FP128}; use middle::ty; use middle::trans::context::CrateContext; use middle::trans::base; use syntax::ast; use syntax::abi::{Architecture, X86, X86_64, Arm, Mips}; use std::c_str::ToCStr; use std::vec; use std::cast; use std::libc::{c_uint}; #[deriving(Clone, Eq)] pub struct Type { priv rf: TypeRef } macro_rules! ty ( ($e:expr) => ( Type::from_ref(unsafe { $e })) ) /** * Wrapper for LLVM TypeRef */ impl Type { #[inline(always)] pub fn from_ref(r: TypeRef) -> Type { Type { rf: r } } #[inline(always)] // So it doesn't kill --opt-level=0 builds of the compiler pub fn to_ref(&self) -> TypeRef { self.rf } pub fn void() -> Type { ty!(llvm::LLVMVoidTypeInContext(base::task_llcx())) } pub fn nil() -> Type { Type::empty_struct() } pub fn metadata() -> Type {
pub fn i1() -> Type { ty!(llvm::LLVMInt1TypeInContext(base::task_llcx())) } pub fn i8() -> Type { ty!(llvm::LLVMInt8TypeInContext(base::task_llcx())) } pub fn i16() -> Type { ty!(llvm::LLVMInt16TypeInContext(base::task_llcx())) } pub fn i32() -> Type { ty!(llvm::LLVMInt32TypeInContext(base::task_llcx())) } pub fn i64() -> Type { ty!(llvm::LLVMInt64TypeInContext(base::task_llcx())) } pub fn f32() -> Type { ty!(llvm::LLVMFloatTypeInContext(base::task_llcx())) } pub fn f64() -> Type { ty!(llvm::LLVMDoubleTypeInContext(base::task_llcx())) } pub fn bool() -> Type { Type::i8() } pub fn char() -> Type { Type::i32() } pub fn i8p() -> Type { Type::i8().ptr_to() } pub fn int(arch: Architecture) -> Type { match arch { X86 | Arm | Mips => Type::i32(), X86_64 => Type::i64() } } pub fn float(_: Architecture) -> Type { // All architectures currently just use doubles as the default // float size Type::f64() } pub fn int_from_ty(ctx: &CrateContext, t: ast::IntTy) -> Type { match t { ast::TyI => ctx.int_type, ast::TyI8 => Type::i8(), ast::TyI16 => Type::i16(), ast::TyI32 => Type::i32(), ast::TyI64 => Type::i64() } } pub fn uint_from_ty(ctx: &CrateContext, t: ast::UintTy) -> Type { match t { ast::TyU => ctx.int_type, ast::TyU8 => Type::i8(), ast::TyU16 => Type::i16(), ast::TyU32 => Type::i32(), ast::TyU64 => Type::i64() } } pub fn float_from_ty(t: ast::FloatTy) -> Type { match t { ast::TyF32 => Type::f32(), ast::TyF64 => Type::f64() } } pub fn size_t(arch: Architecture) -> Type { Type::int(arch) } pub fn func(args: &[Type], ret: &Type) -> Type { let vec : &[TypeRef] = unsafe { cast::transmute(args) }; ty!(llvm::LLVMFunctionType(ret.to_ref(), vec.as_ptr(), args.len() as c_uint, False)) } pub fn variadic_func(args: &[Type], ret: &Type) -> Type { let vec : &[TypeRef] = unsafe { cast::transmute(args) }; ty!(llvm::LLVMFunctionType(ret.to_ref(), vec.as_ptr(), args.len() as c_uint, True)) } pub fn func_pair(cx: &CrateContext, fn_ty: &Type) -> Type { Type::struct_([fn_ty.ptr_to(), Type::opaque_cbox_ptr(cx)], false) } pub fn ptr(ty: Type) -> Type { ty!(llvm::LLVMPointerType(ty.to_ref(), 0 as c_uint)) } pub fn struct_(els: &[Type], packed: bool) -> Type { let els : &[TypeRef] = unsafe { cast::transmute(els) }; ty!(llvm::LLVMStructTypeInContext(base::task_llcx(), els.as_ptr(), els.len() as c_uint, packed as Bool)) } pub fn named_struct(name: &str) -> Type { let ctx = base::task_llcx(); ty!(name.with_c_str(|s| llvm::LLVMStructCreateNamed(ctx, s))) } pub fn empty_struct() -> Type { Type::struct_([], false) } pub fn vtable() -> Type { Type::array(&Type::i8().ptr_to(), 1) } pub fn generic_glue_fn(cx: &CrateContext) -> Type { match cx.tn.find_type("glue_fn") { Some(ty) => return ty, None => () } let ty = Type::glue_fn(Type::i8p()); cx.tn.associate_type("glue_fn", &ty); return ty; } pub fn glue_fn(t: Type) -> Type { Type::func([ Type::nil().ptr_to(), t ], &Type::void()) } pub fn tydesc(arch: Architecture) -> Type { let mut tydesc = Type::named_struct("tydesc"); let glue_fn_ty = Type::glue_fn(Type::i8p()).ptr_to(); let int_ty = Type::int(arch); // Must mirror: // // std::unstable::intrinsics::TyDesc // type_desc in rt let elems = [int_ty, // size int_ty, // align glue_fn_ty, // take glue_fn_ty, // drop glue_fn_ty, // visit int_ty, // borrow_offset Type::struct_([Type::i8p(), Type::int(arch)], false)]; // name tydesc.set_struct_body(elems, false); return tydesc; } pub fn array(ty: &Type, len: u64) -> Type { ty!(llvm::LLVMArrayType(ty.to_ref(), len as c_uint)) } pub fn vector(ty: &Type, len: u64) -> Type { 
ty!(llvm::LLVMVectorType(ty.to_ref(), len as c_uint)) } pub fn vec(arch: Architecture, ty: &Type) -> Type { Type::struct_( [ Type::int(arch), Type::int(arch), Type::array(ty, 0) ], false) } pub fn opaque_vec(arch: Architecture) -> Type { Type::vec(arch, &Type::i8()) } #[inline] pub fn box_header_fields(ctx: &CrateContext) -> ~[Type] { ~[ ctx.int_type, ctx.tydesc_type.ptr_to(), Type::i8().ptr_to(), Type::i8().ptr_to() ] } pub fn box_header(ctx: &CrateContext) -> Type { Type::struct_(Type::box_header_fields(ctx), false) } pub fn smart_ptr(ctx: &CrateContext, ty: &Type) -> Type { Type::struct_(Type::box_header_fields(ctx) + &[*ty], false) } pub fn opaque() -> Type { Type::i8() } pub fn opaque_box(ctx: &CrateContext) -> Type { Type::smart_ptr(ctx, &Type::opaque()) } pub fn unique(ctx: &CrateContext, ty: &Type) -> Type { Type::smart_ptr(ctx, ty) } pub fn opaque_cbox_ptr(cx: &CrateContext) -> Type { Type::opaque_box(cx).ptr_to() } pub fn opaque_trait(ctx: &CrateContext, store: ty::TraitStore) -> Type { let tydesc_ptr = ctx.tydesc_type.ptr_to(); let box_ty = match store { ty::BoxTraitStore => Type::opaque_box(ctx), ty::UniqTraitStore => Type::unique(ctx, &Type::i8()), ty::RegionTraitStore(..) => Type::i8() }; Type::struct_([tydesc_ptr, box_ty.ptr_to()], false) } pub fn kind(&self) -> TypeKind { unsafe { llvm::LLVMGetTypeKind(self.to_ref()) } } pub fn set_struct_body(&mut self, els: &[Type], packed: bool) { unsafe { let vec : &[TypeRef] = cast::transmute(els); llvm::LLVMStructSetBody(self.to_ref(), vec.as_ptr(), els.len() as c_uint, packed as Bool) } } pub fn ptr_to(&self) -> Type { ty!(llvm::LLVMPointerType(self.to_ref(), 0)) } pub fn get_field(&self, idx: uint) -> Type { unsafe { let num_fields = llvm::LLVMCountStructElementTypes(self.to_ref()) as uint; let mut elems = vec::from_elem(num_fields, 0 as TypeRef); llvm::LLVMGetStructElementTypes(self.to_ref(), elems.as_mut_ptr()); Type::from_ref(elems[idx]) } } pub fn is_packed(&self) -> bool { unsafe { llvm::LLVMIsPackedStruct(self.to_ref()) == True } } pub fn element_type(&self) -> Type { unsafe { Type::from_ref(llvm::LLVMGetElementType(self.to_ref())) } } pub fn array_length(&self) -> uint { unsafe { llvm::LLVMGetArrayLength(self.to_ref()) as uint } } pub fn field_types(&self) -> ~[Type] { unsafe { let n_elts = llvm::LLVMCountStructElementTypes(self.to_ref()) as uint; if n_elts == 0 { return ~[]; } let mut elts = vec::from_elem(n_elts, 0 as TypeRef); llvm::LLVMGetStructElementTypes(self.to_ref(), &mut elts[0]); cast::transmute(elts) } } pub fn return_type(&self) -> Type { ty!(llvm::LLVMGetReturnType(self.to_ref())) } pub fn func_params(&self) -> ~[Type] { unsafe { let n_args = llvm::LLVMCountParamTypes(self.to_ref()) as uint; let args = vec::from_elem(n_args, 0 as TypeRef); llvm::LLVMGetParamTypes(self.to_ref(), args.as_ptr()); cast::transmute(args) } } pub fn float_width(&self) -> uint { match self.kind() { Float => 32, Double => 64, X86_FP80 => 80, FP128 | PPC_FP128 => 128, _ => fail!("llvm_float_width called on a non-float type") } } }
ty!(llvm::LLVMMetadataTypeInContext(base::task_llcx())) }
random_line_split
main.rs
#![cfg_attr(feature="clippy", feature(plugin))] #![cfg_attr(feature="clippy", plugin(clippy))] #![cfg_attr(feature="clippy", deny(clippy_pedantic))] #![cfg_attr(feature="clippy", allow(missing_docs_in_private_items))] #![deny(missing_debug_implementations, missing_copy_implementations, trivial_casts, trivial_numeric_casts, unsafe_code, unused_import_braces, unused_qualifications)] #[macro_use] extern crate slog; extern crate slog_bunyan; extern crate slog_term; #[macro_use] extern crate clap; extern crate regex; extern crate notify; #[macro_use] extern crate serde_derive; extern crate bincode; extern crate byteorder; extern crate filetime; extern crate time; extern crate tempdir; extern crate semver; use clap::{App, ArgMatches}; use slog::Drain; use std::fs::{self, OpenOptions}; use std::path::Path; use std::sync::Mutex; mod master; mod slave; mod helpers; mod processors; mod structs; const VERSION: &str = env!("CARGO_PKG_VERSION"); fn main() { #[cfg_attr(feature="clippy", allow(indexing_slicing))] let yaml = load_yaml!("cli.yml"); let m = App::from_yaml(yaml).version(VERSION).get_matches(); if let Some(sub_m) = m.subcommand_matches("run") { run_master(sub_m); } else if let Some(sub_m) = m.subcommand_matches("slave") { run_slave(sub_m); } } fn run_master(m: &ArgMatches) { #[cfg_attr(feature="clippy", allow(option_unwrap_used))] // Unwrap is safe - required by clap let base_dir = get_base_dir(m.value_of("base_dir").unwrap()); #[cfg_attr(feature="clippy", allow(option_unwrap_used))] let remote_dir = m.value_of("remote_dir").unwrap(); // Unwrap is safe - required by clap let remote_port = m.value_of("port"); let verbose_mode = m.is_present("verbose"); let mut ignore_strings = get_ignore_strings(m); let log = setup_log(&base_dir, verbose_mode, true); info!(log, "Starting BindRS"); master::run( &log, &base_dir, remote_dir, remote_port, &mut ignore_strings, verbose_mode, ) } fn run_slave(m: &ArgMatches) { #[cfg_attr(feature="clippy", allow(option_unwrap_used))] // Unwrap is safe - required by clap let base_dir = get_base_dir(m.value_of("base_dir").unwrap()); let mut ignore_strings = get_ignore_strings(m); let verbose_mode = m.is_present("verbose"); let log = setup_log(&base_dir, verbose_mode, false); info!(log, "Starting BindRS"); slave::run(&log, &base_dir, &mut ignore_strings) } fn
(m: &ArgMatches) -> Vec<String> {
    match m.values_of("ignore") {
        Some(i) => i.into_iter().map(|str| str.to_owned()).collect(),
        None => vec![],
    }
}

fn get_base_dir(base_dir: &str) -> String {
    helpers::resolve_path(base_dir).unwrap_or_else(|| {
        helpers::print_error_and_exit("failed to find base directory");
        "".to_owned()
    })
}

fn setup_log(base_dir: &str, verbose_mode: bool, master_mode: bool) -> slog::Logger {
    let mut path_buf = Path::new(base_dir).to_path_buf();
    path_buf.push(".bindrs");

    match fs::create_dir_all(path_buf.as_path()) {
        Ok(_) => (),
        Err(_) => helpers::print_error_and_exit("Failed to create .bindrs directory!"),
    }

    path_buf.push("bindrs");
    path_buf.set_extension("log");

    let wrapped_file = OpenOptions::new()
        .create(true)
        .write(true)
        .truncate(true)
        .open(path_buf.as_path());

    if let Ok(file) = wrapped_file {
        let level = if verbose_mode {
            slog::Level::Debug
        } else {
            slog::Level::Info
        };

        let file_decorator = slog_term::PlainSyncDecorator::new(file);
        let file_drain = slog_term::FullFormat::new(file_decorator).build();
        let file_drain = slog::LevelFilter::new(file_drain, level);

        if master_mode {
            let term_decorator = slog_term::TermDecorator::new().build();
            let term_drain =
                Mutex::new(slog_term::CompactFormat::new(term_decorator).build()).fuse();
            let term_drain = slog::LevelFilter::new(term_drain, level);
            let drain = slog::Duplicate::new(file_drain, term_drain);
            slog::Logger::root(drain.fuse(), o!("version" => VERSION, "mode" => "master"))
        } else {
            slog::Logger::root(
                file_drain.fuse(),
                o!("version" => VERSION, "mode" => "slave"),
            )
        }
    } else {
        helpers::print_error_and_exit("Failed to create log file.");
        panic!(); // For compilation
    }
}
get_ignore_strings
identifier_name
main.rs
#![cfg_attr(feature="clippy", feature(plugin))] #![cfg_attr(feature="clippy", plugin(clippy))] #![cfg_attr(feature="clippy", deny(clippy_pedantic))] #![cfg_attr(feature="clippy", allow(missing_docs_in_private_items))] #![deny(missing_debug_implementations, missing_copy_implementations, trivial_casts, trivial_numeric_casts, unsafe_code, unused_import_braces, unused_qualifications)] #[macro_use] extern crate slog; extern crate slog_bunyan; extern crate slog_term; #[macro_use] extern crate clap; extern crate regex; extern crate notify; #[macro_use] extern crate serde_derive; extern crate bincode; extern crate byteorder; extern crate filetime; extern crate time; extern crate tempdir; extern crate semver; use clap::{App, ArgMatches}; use slog::Drain; use std::fs::{self, OpenOptions}; use std::path::Path; use std::sync::Mutex; mod master; mod slave; mod helpers; mod processors; mod structs; const VERSION: &str = env!("CARGO_PKG_VERSION"); fn main() { #[cfg_attr(feature="clippy", allow(indexing_slicing))] let yaml = load_yaml!("cli.yml"); let m = App::from_yaml(yaml).version(VERSION).get_matches(); if let Some(sub_m) = m.subcommand_matches("run") { run_master(sub_m); } else if let Some(sub_m) = m.subcommand_matches("slave") { run_slave(sub_m); } } fn run_master(m: &ArgMatches) { #[cfg_attr(feature="clippy", allow(option_unwrap_used))] // Unwrap is safe - required by clap let base_dir = get_base_dir(m.value_of("base_dir").unwrap()); #[cfg_attr(feature="clippy", allow(option_unwrap_used))] let remote_dir = m.value_of("remote_dir").unwrap(); // Unwrap is safe - required by clap let remote_port = m.value_of("port"); let verbose_mode = m.is_present("verbose"); let mut ignore_strings = get_ignore_strings(m); let log = setup_log(&base_dir, verbose_mode, true); info!(log, "Starting BindRS"); master::run( &log, &base_dir, remote_dir, remote_port, &mut ignore_strings, verbose_mode, ) } fn run_slave(m: &ArgMatches) { #[cfg_attr(feature="clippy", allow(option_unwrap_used))] // Unwrap is safe - required by clap let base_dir = get_base_dir(m.value_of("base_dir").unwrap()); let mut ignore_strings = get_ignore_strings(m); let verbose_mode = m.is_present("verbose"); let log = setup_log(&base_dir, verbose_mode, false); info!(log, "Starting BindRS"); slave::run(&log, &base_dir, &mut ignore_strings) } fn get_ignore_strings(m: &ArgMatches) -> Vec<String> { match m.values_of("ignore") { Some(i) => i.into_iter().map(|str| str.to_owned()).collect(), None => vec![], } } fn get_base_dir(base_dir: &str) -> String { helpers::resolve_path(base_dir).unwrap_or_else(|| { helpers::print_error_and_exit("failed to find base directory"); "".to_owned() }) } fn setup_log(base_dir: &str, verbose_mode: bool, master_mode: bool) -> slog::Logger { let mut path_buf = Path::new(base_dir).to_path_buf(); path_buf.push(".bindrs"); match fs::create_dir_all(path_buf.as_path()) { Ok(_) => (), Err(_) => helpers::print_error_and_exit("Failed to create.bindrs directory!"), } path_buf.push("bindrs"); path_buf.set_extension("log"); let wrapped_file = OpenOptions::new() .create(true) .write(true) .truncate(true)
.open(path_buf.as_path()); if let Ok(file) = wrapped_file { let level = if verbose_mode { slog::Level::Debug } else { slog::Level::Info }; let file_decorator = slog_term::PlainSyncDecorator::new(file); let file_drain = slog_term::FullFormat::new(file_decorator).build(); let file_drain = slog::LevelFilter::new(file_drain, level); if master_mode { let term_decorator = slog_term::TermDecorator::new().build(); let term_drain = Mutex::new(slog_term::CompactFormat::new(term_decorator).build()).fuse(); let term_drain = slog::LevelFilter::new(term_drain, level); let drain = slog::Duplicate::new(file_drain, term_drain); slog::Logger::root(drain.fuse(), o!("version" => VERSION, "mode" => "master")) } else { slog::Logger::root( file_drain.fuse(), o!("version" => VERSION, "mode" => "slave"), ) } } else { helpers::print_error_and_exit("Failed to create log file."); panic!(); // For compilation } }
random_line_split
main.rs
#![cfg_attr(feature="clippy", feature(plugin))] #![cfg_attr(feature="clippy", plugin(clippy))] #![cfg_attr(feature="clippy", deny(clippy_pedantic))] #![cfg_attr(feature="clippy", allow(missing_docs_in_private_items))] #![deny(missing_debug_implementations, missing_copy_implementations, trivial_casts, trivial_numeric_casts, unsafe_code, unused_import_braces, unused_qualifications)] #[macro_use] extern crate slog; extern crate slog_bunyan; extern crate slog_term; #[macro_use] extern crate clap; extern crate regex; extern crate notify; #[macro_use] extern crate serde_derive; extern crate bincode; extern crate byteorder; extern crate filetime; extern crate time; extern crate tempdir; extern crate semver; use clap::{App, ArgMatches}; use slog::Drain; use std::fs::{self, OpenOptions}; use std::path::Path; use std::sync::Mutex; mod master; mod slave; mod helpers; mod processors; mod structs; const VERSION: &str = env!("CARGO_PKG_VERSION"); fn main()
fn run_master(m: &ArgMatches) { #[cfg_attr(feature="clippy", allow(option_unwrap_used))] // Unwrap is safe - required by clap let base_dir = get_base_dir(m.value_of("base_dir").unwrap()); #[cfg_attr(feature="clippy", allow(option_unwrap_used))] let remote_dir = m.value_of("remote_dir").unwrap(); // Unwrap is safe - required by clap let remote_port = m.value_of("port"); let verbose_mode = m.is_present("verbose"); let mut ignore_strings = get_ignore_strings(m); let log = setup_log(&base_dir, verbose_mode, true); info!(log, "Starting BindRS"); master::run( &log, &base_dir, remote_dir, remote_port, &mut ignore_strings, verbose_mode, ) } fn run_slave(m: &ArgMatches) { #[cfg_attr(feature="clippy", allow(option_unwrap_used))] // Unwrap is safe - required by clap let base_dir = get_base_dir(m.value_of("base_dir").unwrap()); let mut ignore_strings = get_ignore_strings(m); let verbose_mode = m.is_present("verbose"); let log = setup_log(&base_dir, verbose_mode, false); info!(log, "Starting BindRS"); slave::run(&log, &base_dir, &mut ignore_strings) } fn get_ignore_strings(m: &ArgMatches) -> Vec<String> { match m.values_of("ignore") { Some(i) => i.into_iter().map(|str| str.to_owned()).collect(), None => vec![], } } fn get_base_dir(base_dir: &str) -> String { helpers::resolve_path(base_dir).unwrap_or_else(|| { helpers::print_error_and_exit("failed to find base directory"); "".to_owned() }) } fn setup_log(base_dir: &str, verbose_mode: bool, master_mode: bool) -> slog::Logger { let mut path_buf = Path::new(base_dir).to_path_buf(); path_buf.push(".bindrs"); match fs::create_dir_all(path_buf.as_path()) { Ok(_) => (), Err(_) => helpers::print_error_and_exit("Failed to create.bindrs directory!"), } path_buf.push("bindrs"); path_buf.set_extension("log"); let wrapped_file = OpenOptions::new() .create(true) .write(true) .truncate(true) .open(path_buf.as_path()); if let Ok(file) = wrapped_file { let level = if verbose_mode { slog::Level::Debug } else { slog::Level::Info }; let file_decorator = slog_term::PlainSyncDecorator::new(file); let file_drain = slog_term::FullFormat::new(file_decorator).build(); let file_drain = slog::LevelFilter::new(file_drain, level); if master_mode { let term_decorator = slog_term::TermDecorator::new().build(); let term_drain = Mutex::new(slog_term::CompactFormat::new(term_decorator).build()).fuse(); let term_drain = slog::LevelFilter::new(term_drain, level); let drain = slog::Duplicate::new(file_drain, term_drain); slog::Logger::root(drain.fuse(), o!("version" => VERSION, "mode" => "master")) } else { slog::Logger::root( file_drain.fuse(), o!("version" => VERSION, "mode" => "slave"), ) } } else { helpers::print_error_and_exit("Failed to create log file."); panic!(); // For compilation } }
{ #[cfg_attr(feature="clippy", allow(indexing_slicing))] let yaml = load_yaml!("cli.yml"); let m = App::from_yaml(yaml).version(VERSION).get_matches(); if let Some(sub_m) = m.subcommand_matches("run") { run_master(sub_m); } else if let Some(sub_m) = m.subcommand_matches("slave") { run_slave(sub_m); } }
identifier_body
ord.rs
use crate::sql_types::{self, is_nullable, SqlType}; /// Marker trait for types which can be used with `MAX` and `MIN` pub trait SqlOrd: SqlType {} impl SqlOrd for sql_types::SmallInt {} impl SqlOrd for sql_types::Integer {} impl SqlOrd for sql_types::BigInt {} impl SqlOrd for sql_types::Float {} impl SqlOrd for sql_types::Double {} impl SqlOrd for sql_types::Text {} impl SqlOrd for sql_types::Date {} impl SqlOrd for sql_types::Interval {} impl SqlOrd for sql_types::Time {} impl SqlOrd for sql_types::Timestamp {}
impl<T> SqlOrd for sql_types::Nullable<T> where T: SqlOrd + SqlType<IsNull = is_nullable::NotNull> {} #[cfg(feature = "postgres")] impl SqlOrd for sql_types::Timestamptz {} #[cfg(feature = "postgres")] impl<T: SqlOrd> SqlOrd for sql_types::Array<T> {} #[cfg(feature = "mysql")] impl SqlOrd for sql_types::Datetime {} #[cfg(feature = "mysql")] impl SqlOrd for sql_types::Unsigned<sql_types::SmallInt> {} #[cfg(feature = "mysql")] impl SqlOrd for sql_types::Unsigned<sql_types::Integer> {} #[cfg(feature = "mysql")] impl SqlOrd for sql_types::Unsigned<sql_types::BigInt> {}
random_line_split
project-fn-ret-invariant.rs
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![feature(unboxed_closures)] #![feature(rustc_attrs)] // Test for projection cache. We should be able to project distinct // lifetimes from `foo` as we reinstantiate it multiple times, but not // if we do it just once. In this variant, the region `'a` is used in // an invariant position, which affects the results. // revisions: ok oneuse transmute krisskross #![allow(dead_code, unused_variables)] use std::marker::PhantomData; struct Type<'a> { // Invariant data: PhantomData<fn(&'a u32) -> &'a u32> } fn foo<'a>() -> Type<'a> { loop { } } fn bar<T>(t: T, x: T::Output) -> T::Output where T: FnOnce<()>
#[cfg(ok)] // two instantiations: OK fn baz<'a,'b>(x: Type<'a>, y: Type<'b>) -> (Type<'a>, Type<'b>) { let a = bar(foo, x); let b = bar(foo, y); (a, b) } #[cfg(oneuse)] // one instantiation: BAD fn baz<'a,'b>(x: Type<'a>, y: Type<'b>) -> (Type<'a>, Type<'b>) { let f = foo; // <-- No consistent type can be inferred for `f` here. let a = bar(f, x); let b = bar(f, y); //[oneuse]~ ERROR 49:19: 49:20: lifetime mismatch [E0623] (a, b) } #[cfg(transmute)] // one instantiations: BAD fn baz<'a,'b>(x: Type<'a>) -> Type<'static> { // Cannot instantiate `foo` with any lifetime other than `'a`, // since it is provided as input. bar(foo, x) //[transmute]~ ERROR E0495 } #[cfg(krisskross)] // two instantiations, mixing and matching: BAD fn transmute<'a,'b>(x: Type<'a>, y: Type<'b>) -> (Type<'a>, Type<'b>) { let a = bar(foo, y); //[krisskross]~ ERROR E0623 let b = bar(foo, x); (a, b) //[krisskross]~ ERROR E0623 } #[rustc_error] fn main() { } //[ok]~^ ERROR compilation successful
{ t() }
identifier_body
project-fn-ret-invariant.rs
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. //
// except according to those terms. #![feature(unboxed_closures)] #![feature(rustc_attrs)] // Test for projection cache. We should be able to project distinct // lifetimes from `foo` as we reinstantiate it multiple times, but not // if we do it just once. In this variant, the region `'a` is used in // an invariant position, which affects the results. // revisions: ok oneuse transmute krisskross #![allow(dead_code, unused_variables)] use std::marker::PhantomData; struct Type<'a> { // Invariant data: PhantomData<fn(&'a u32) -> &'a u32> } fn foo<'a>() -> Type<'a> { loop { } } fn bar<T>(t: T, x: T::Output) -> T::Output where T: FnOnce<()> { t() } #[cfg(ok)] // two instantiations: OK fn baz<'a,'b>(x: Type<'a>, y: Type<'b>) -> (Type<'a>, Type<'b>) { let a = bar(foo, x); let b = bar(foo, y); (a, b) } #[cfg(oneuse)] // one instantiation: BAD fn baz<'a,'b>(x: Type<'a>, y: Type<'b>) -> (Type<'a>, Type<'b>) { let f = foo; // <-- No consistent type can be inferred for `f` here. let a = bar(f, x); let b = bar(f, y); //[oneuse]~ ERROR 49:19: 49:20: lifetime mismatch [E0623] (a, b) } #[cfg(transmute)] // one instantiations: BAD fn baz<'a,'b>(x: Type<'a>) -> Type<'static> { // Cannot instantiate `foo` with any lifetime other than `'a`, // since it is provided as input. bar(foo, x) //[transmute]~ ERROR E0495 } #[cfg(krisskross)] // two instantiations, mixing and matching: BAD fn transmute<'a,'b>(x: Type<'a>, y: Type<'b>) -> (Type<'a>, Type<'b>) { let a = bar(foo, y); //[krisskross]~ ERROR E0623 let b = bar(foo, x); (a, b) //[krisskross]~ ERROR E0623 } #[rustc_error] fn main() { } //[ok]~^ ERROR compilation successful
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed
random_line_split
project-fn-ret-invariant.rs
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![feature(unboxed_closures)] #![feature(rustc_attrs)] // Test for projection cache. We should be able to project distinct // lifetimes from `foo` as we reinstantiate it multiple times, but not // if we do it just once. In this variant, the region `'a` is used in // an invariant position, which affects the results. // revisions: ok oneuse transmute krisskross #![allow(dead_code, unused_variables)] use std::marker::PhantomData; struct
<'a> { // Invariant data: PhantomData<fn(&'a u32) -> &'a u32> } fn foo<'a>() -> Type<'a> { loop { } } fn bar<T>(t: T, x: T::Output) -> T::Output where T: FnOnce<()> { t() } #[cfg(ok)] // two instantiations: OK fn baz<'a,'b>(x: Type<'a>, y: Type<'b>) -> (Type<'a>, Type<'b>) { let a = bar(foo, x); let b = bar(foo, y); (a, b) } #[cfg(oneuse)] // one instantiation: BAD fn baz<'a,'b>(x: Type<'a>, y: Type<'b>) -> (Type<'a>, Type<'b>) { let f = foo; // <-- No consistent type can be inferred for `f` here. let a = bar(f, x); let b = bar(f, y); //[oneuse]~ ERROR 49:19: 49:20: lifetime mismatch [E0623] (a, b) } #[cfg(transmute)] // one instantiations: BAD fn baz<'a,'b>(x: Type<'a>) -> Type<'static> { // Cannot instantiate `foo` with any lifetime other than `'a`, // since it is provided as input. bar(foo, x) //[transmute]~ ERROR E0495 } #[cfg(krisskross)] // two instantiations, mixing and matching: BAD fn transmute<'a,'b>(x: Type<'a>, y: Type<'b>) -> (Type<'a>, Type<'b>) { let a = bar(foo, y); //[krisskross]~ ERROR E0623 let b = bar(foo, x); (a, b) //[krisskross]~ ERROR E0623 } #[rustc_error] fn main() { } //[ok]~^ ERROR compilation successful
Type
identifier_name
client.rs
extern crate getopts; extern crate serde_json; extern crate udt; extern crate punchtunnel; use std::env; use std::fs::File; use std::iter::FromIterator; use std::net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr, ToSocketAddrs, UdpSocket}; use std::process::exit; use std::thread; use std::time::Duration; use getopts::Options; use udt::{SocketFamily, SocketType, UdtOpts, UdtSocket}; use punchtunnel::key::Key; use punchtunnel::relay_client::{RelayClient, ClientConfiguration}; use punchtunnel::relay_protocol::ConnectRequest; #[derive(Clone)] enum ConnectionSource { StdIo } fn serve(connection_source: ConnectionSource, conf: ClientConfiguration) -> Result<(), String> { udt::init(); let server_socket = UdtSocket::new(SocketFamily::AFInet, SocketType::Stream) .map_err(|e| format!("Could not create UDT socket: {}", e.err_msg))?; server_socket .setsockopt(UdtOpts::UDT_RENDEZVOUS, true) .map_err(|e| { format!("Could not set UDT socket to rendezvous mode: {}", e.err_msg) })?; let local_addr = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)), 0); server_socket.bind(local_addr).map_err(|e| { format!( "Could not bind UDT socket to '{}': {}", local_addr, e.err_msg ) })?; let local_port = server_socket.getsockname().map_err(|e| format!("Could not determine the local port of the UDT socket: {}", e.err_msg))?.port(); let relay_client = RelayClient::new(&conf.relay_hostname)?; match relay_client.connect(ConnectRequest { connect_key: conf.connect_key, port: local_port, })?.server { Some((remote_addr, secret)) => { eprintln!("Connecting to {}", remote_addr); server_socket.connect(remote_addr).map_err(|e| { format!( "Could not connect UDT socket to remote address '{}': {}", remote_addr, e.err_msg ) })?; eprintln!("Sending"); server_socket.send(format!("HEHEI").as_bytes()).map_err(|e| format!("Could not send data: {}", e.err_msg))?; server_socket.close(); eprintln!("Done"); Ok(()) }, None => { Err(format!("No server address received from the relay server")) } } } fn usage(program: &str, opts: Options) -> String { let brief = format!( "Usage: {} [OPTION]... CONF_FILENAME stdio", program ); format!("{}", opts.usage(&brief)) } fn run() -> Result<(), String> { let args: Vec<String> = env::args().collect(); let program = args[0].clone(); let mut opts = Options::new(); opts.optflag("h", "help", "print this help menu"); let matches = match opts.parse(&args[1..]) { Ok(x) => x, Err(e) => { eprintln!("{}", usage(&program, opts)); return Err(format!("Could not parse command line: {}", e)); } }; if matches.opt_present("h") { println!("{}", usage(&program, opts)); return Ok(()); } if matches.free.len()!= 2 { eprintln!("{}", usage(&program, opts)); return Err("Could not parse command line: wrong number of arguments".to_string()); } let conf_filename = matches.free[0].clone(); let conf_file = File::open(&conf_filename).map_err(|x| { format!( "Could not open configuration file '{}': {}", conf_filename, x ) })?; let conf = serde_json::from_reader(conf_file).map_err(|x| { format!( "Could not read configuration file '{}': {}", conf_filename, x ) })?; match matches.free[1].as_ref() { "stdio" => { serve(ConnectionSource::StdIo, conf)?; } _ => { eprintln!("{}", usage(&program, opts)); return Err(format!( "Could not parse command line: unknown connection source type '{}'", matches.free[1] )); } } Ok(()) } fn main() { match run() { Ok(()) => {} Err(x) => { eprintln!("Fatal error: {}", x); exit(1); } }
}
random_line_split
client.rs
extern crate getopts; extern crate serde_json; extern crate udt; extern crate punchtunnel; use std::env; use std::fs::File; use std::iter::FromIterator; use std::net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr, ToSocketAddrs, UdpSocket}; use std::process::exit; use std::thread; use std::time::Duration; use getopts::Options; use udt::{SocketFamily, SocketType, UdtOpts, UdtSocket}; use punchtunnel::key::Key; use punchtunnel::relay_client::{RelayClient, ClientConfiguration}; use punchtunnel::relay_protocol::ConnectRequest; #[derive(Clone)] enum ConnectionSource { StdIo } fn serve(connection_source: ConnectionSource, conf: ClientConfiguration) -> Result<(), String> { udt::init(); let server_socket = UdtSocket::new(SocketFamily::AFInet, SocketType::Stream) .map_err(|e| format!("Could not create UDT socket: {}", e.err_msg))?; server_socket .setsockopt(UdtOpts::UDT_RENDEZVOUS, true) .map_err(|e| { format!("Could not set UDT socket to rendezvous mode: {}", e.err_msg) })?; let local_addr = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)), 0); server_socket.bind(local_addr).map_err(|e| { format!( "Could not bind UDT socket to '{}': {}", local_addr, e.err_msg ) })?; let local_port = server_socket.getsockname().map_err(|e| format!("Could not determine the local port of the UDT socket: {}", e.err_msg))?.port(); let relay_client = RelayClient::new(&conf.relay_hostname)?; match relay_client.connect(ConnectRequest { connect_key: conf.connect_key, port: local_port, })?.server { Some((remote_addr, secret)) => { eprintln!("Connecting to {}", remote_addr); server_socket.connect(remote_addr).map_err(|e| { format!( "Could not connect UDT socket to remote address '{}': {}", remote_addr, e.err_msg ) })?; eprintln!("Sending"); server_socket.send(format!("HEHEI").as_bytes()).map_err(|e| format!("Could not send data: {}", e.err_msg))?; server_socket.close(); eprintln!("Done"); Ok(()) }, None => { Err(format!("No server address received from the relay server")) } } } fn usage(program: &str, opts: Options) -> String { let brief = format!( "Usage: {} [OPTION]... CONF_FILENAME stdio", program ); format!("{}", opts.usage(&brief)) } fn
() -> Result<(), String> {
    let args: Vec<String> = env::args().collect();
    let program = args[0].clone();

    let mut opts = Options::new();
    opts.optflag("h", "help", "print this help menu");
    let matches = match opts.parse(&args[1..]) {
        Ok(x) => x,
        Err(e) => {
            eprintln!("{}", usage(&program, opts));
            return Err(format!("Could not parse command line: {}", e));
        }
    };
    if matches.opt_present("h") {
        println!("{}", usage(&program, opts));
        return Ok(());
    }
    if matches.free.len() != 2 {
        eprintln!("{}", usage(&program, opts));
        return Err("Could not parse command line: wrong number of arguments".to_string());
    }
    let conf_filename = matches.free[0].clone();
    let conf_file = File::open(&conf_filename).map_err(|x| {
        format!(
            "Could not open configuration file '{}': {}",
            conf_filename, x
        )
    })?;
    let conf = serde_json::from_reader(conf_file).map_err(|x| {
        format!(
            "Could not read configuration file '{}': {}",
            conf_filename, x
        )
    })?;
    match matches.free[1].as_ref() {
        "stdio" => {
            serve(ConnectionSource::StdIo, conf)?;
        }
        _ => {
            eprintln!("{}", usage(&program, opts));
            return Err(format!(
                "Could not parse command line: unknown connection source type '{}'",
                matches.free[1]
            ));
        }
    }

    Ok(())
}

fn main() {
    match run() {
        Ok(()) => {}
        Err(x) => {
            eprintln!("Fatal error: {}", x);
            exit(1);
        }
    }
}
run
identifier_name
trait-cast.rs
// xfail-test FIXME #5882 // Weird borrow check bug // Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Test cyclic detector when using trait instances. struct Tree(@mut TreeR); struct TreeR { left: Option<Tree>, right: Option<Tree>, val: ~to_str } trait to_str { fn to_str_(&self) -> ~str; } impl<T:to_str> to_str for Option<T> { fn to_str_(&self) -> ~str { match *self { None => { ~"none" } Some(ref t) => { ~"some(" + t.to_str_() + ~")" } } } } impl to_str for int { fn to_str_(&self) -> ~str { self.to_str() } } impl to_str for Tree { fn to_str_(&self) -> ~str { let (l, r) = (self.left, self.right); let val = &self.val; fmt!("[%s, %s, %s]", val.to_str_(), l.to_str_(), r.to_str_()) } } fn foo<T:to_str>(x: T) -> ~str { x.to_str_() } pub fn main() {
let t1 = Tree(@mut TreeR{left: None, right: None, val: ~1 as ~to_str }); let t2 = Tree(@mut TreeR{left: Some(t1), right: Some(t1), val: ~2 as ~to_str }); let expected = ~"[2, some([1, none, none]), some([1, none, none])]"; assert!(t2.to_str_() == expected); assert!(foo(t2) == expected); t1.left = Some(t2); // create cycle }
random_line_split
trait-cast.rs
// xfail-test FIXME #5882 // Weird borrow check bug // Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Test cyclic detector when using trait instances. struct Tree(@mut TreeR); struct TreeR { left: Option<Tree>, right: Option<Tree>, val: ~to_str } trait to_str { fn to_str_(&self) -> ~str; } impl<T:to_str> to_str for Option<T> { fn to_str_(&self) -> ~str { match *self { None => { ~"none" } Some(ref t) => { ~"some(" + t.to_str_() + ~")" } } } } impl to_str for int { fn to_str_(&self) -> ~str { self.to_str() } } impl to_str for Tree { fn
(&self) -> ~str { let (l, r) = (self.left, self.right); let val = &self.val; fmt!("[%s, %s, %s]", val.to_str_(), l.to_str_(), r.to_str_()) } } fn foo<T:to_str>(x: T) -> ~str { x.to_str_() } pub fn main() { let t1 = Tree(@mut TreeR{left: None, right: None, val: ~1 as ~to_str }); let t2 = Tree(@mut TreeR{left: Some(t1), right: Some(t1), val: ~2 as ~to_str }); let expected = ~"[2, some([1, none, none]), some([1, none, none])]"; assert!(t2.to_str_() == expected); assert!(foo(t2) == expected); t1.left = Some(t2); // create cycle }
to_str_
identifier_name
trait-cast.rs
// xfail-test FIXME #5882 // Weird borrow check bug // Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Test cyclic detector when using trait instances. struct Tree(@mut TreeR); struct TreeR { left: Option<Tree>, right: Option<Tree>, val: ~to_str } trait to_str { fn to_str_(&self) -> ~str; } impl<T:to_str> to_str for Option<T> { fn to_str_(&self) -> ~str { match *self { None => { ~"none" } Some(ref t) => { ~"some(" + t.to_str_() + ~")" } } } } impl to_str for int { fn to_str_(&self) -> ~str { self.to_str() } } impl to_str for Tree { fn to_str_(&self) -> ~str { let (l, r) = (self.left, self.right); let val = &self.val; fmt!("[%s, %s, %s]", val.to_str_(), l.to_str_(), r.to_str_()) } } fn foo<T:to_str>(x: T) -> ~str { x.to_str_() } pub fn main()
{ let t1 = Tree(@mut TreeR{left: None, right: None, val: ~1 as ~to_str }); let t2 = Tree(@mut TreeR{left: Some(t1), right: Some(t1), val: ~2 as ~to_str }); let expected = ~"[2, some([1, none, none]), some([1, none, none])]"; assert!(t2.to_str_() == expected); assert!(foo(t2) == expected); t1.left = Some(t2); // create cycle }
identifier_body
structural_match.rs
use crate::infer::{InferCtxt, TyCtxtInferExt}; use crate::traits::ObligationCause; use crate::traits::{self, TraitEngine}; use rustc_data_structures::fx::FxHashSet; use rustc_hir as hir; use rustc_hir::lang_items::LangItem; use rustc_middle::ty::query::Providers; use rustc_middle::ty::{self, AdtDef, Ty, TyCtxt, TypeFoldable, TypeVisitor}; use rustc_span::Span; use std::ops::ControlFlow; #[derive(Debug)] pub enum NonStructuralMatchTy<'tcx> { Adt(&'tcx AdtDef), Param, Dynamic, Foreign, Opaque, Generator, Projection, } /// This method traverses the structure of `ty`, trying to find an /// instance of an ADT (i.e. struct or enum) that doesn't implement /// the structural-match traits, or a generic type parameter /// (which cannot be determined to be structural-match). /// /// The "structure of a type" includes all components that would be /// considered when doing a pattern match on a constant of that /// type. /// /// * This means this method descends into fields of structs/enums, /// and also descends into the inner type `T` of `&T` and `&mut T` /// /// * The traversal doesn't dereference unsafe pointers (`*const T`, /// `*mut T`), and it does not visit the type arguments of an /// instantiated generic like `PhantomData<T>`. /// /// The reason we do this search is Rust currently require all ADTs /// reachable from a constant's type to implement the /// structural-match traits, which essentially say that /// the implementation of `PartialEq::eq` behaves *equivalently* to a /// comparison against the unfolded structure. /// /// For more background on why Rust has this requirement, and issues /// that arose when the requirement was not enforced completely, see /// Rust RFC 1445, rust-lang/rust#61188, and rust-lang/rust#62307. pub fn search_for_structural_match_violation<'tcx>( _id: hir::HirId, span: Span, tcx: TyCtxt<'tcx>, ty: Ty<'tcx>, ) -> Option<NonStructuralMatchTy<'tcx>> { // FIXME: we should instead pass in an `infcx` from the outside. tcx.infer_ctxt().enter(|infcx| { ty.visit_with(&mut Search { infcx, span, seen: FxHashSet::default() }).break_value() }) } /// This method returns true if and only if `adt_ty` itself has been marked as /// eligible for structural-match: namely, if it implements both /// `StructuralPartialEq` and `StructuralEq` (which are respectively injected by /// `#[derive(PartialEq)]` and `#[derive(Eq)]`). /// /// Note that this does *not* recursively check if the substructure of `adt_ty` /// implements the traits. fn type_marked_structural( infcx: &InferCtxt<'_, 'tcx>, adt_ty: Ty<'tcx>, cause: ObligationCause<'tcx>, ) -> bool { let mut fulfillment_cx = traits::FulfillmentContext::new(); // require `#[derive(PartialEq)]` let structural_peq_def_id = infcx.tcx.require_lang_item(LangItem::StructuralPeq, Some(cause.span)); fulfillment_cx.register_bound( infcx, ty::ParamEnv::empty(), adt_ty, structural_peq_def_id, cause.clone(), ); // for now, require `#[derive(Eq)]`. (Doing so is a hack to work around // the type `for<'a> fn(&'a ())` failing to implement `Eq` itself.) let structural_teq_def_id = infcx.tcx.require_lang_item(LangItem::StructuralTeq, Some(cause.span)); fulfillment_cx.register_bound( infcx, ty::ParamEnv::empty(), adt_ty, structural_teq_def_id, cause, ); // We deliberately skip *reporting* fulfillment errors (via // `report_fulfillment_errors`), for two reasons: // // 1. The error messages would mention `std::marker::StructuralPartialEq` // (a trait which is solely meant as an implementation detail // for now), and // // 2. 
We are sometimes doing future-incompatibility lints for // now, so we do not want unconditional errors here. fulfillment_cx.select_all_or_error(infcx).is_ok() } /// This implements the traversal over the structure of a given type to try to /// find instances of ADTs (specifically structs or enums) that do not implement /// the structural-match traits (`StructuralPartialEq` and `StructuralEq`). struct Search<'a, 'tcx> { span: Span, infcx: InferCtxt<'a, 'tcx>, /// Tracks ADTs previously encountered during search, so that /// we will not recur on them again. seen: FxHashSet<hir::def_id::DefId>, } impl Search<'a, 'tcx> { fn tcx(&self) -> TyCtxt<'tcx> { self.infcx.tcx } fn type_marked_structural(&self, adt_ty: Ty<'tcx>) -> bool { adt_ty.is_structural_eq_shallow(self.tcx()) } } impl<'a, 'tcx> TypeVisitor<'tcx> for Search<'a, 'tcx> { type BreakTy = NonStructuralMatchTy<'tcx>; fn tcx_for_anon_const_substs(&self) -> Option<TyCtxt<'tcx>> { Some(self.tcx()) } fn
(&mut self, ty: Ty<'tcx>) -> ControlFlow<Self::BreakTy> { debug!("Search visiting ty: {:?}", ty); let (adt_def, substs) = match *ty.kind() { ty::Adt(adt_def, substs) => (adt_def, substs), ty::Param(_) => { return ControlFlow::Break(NonStructuralMatchTy::Param); } ty::Dynamic(..) => { return ControlFlow::Break(NonStructuralMatchTy::Dynamic); } ty::Foreign(_) => { return ControlFlow::Break(NonStructuralMatchTy::Foreign); } ty::Opaque(..) => { return ControlFlow::Break(NonStructuralMatchTy::Opaque); } ty::Projection(..) => { return ControlFlow::Break(NonStructuralMatchTy::Projection); } ty::Generator(..) | ty::GeneratorWitness(..) => { return ControlFlow::Break(NonStructuralMatchTy::Generator); } ty::RawPtr(..) => { // structural-match ignores substructure of // `*const _`/`*mut _`, so skip `super_visit_with`. // // For example, if you have: // ``` // struct NonStructural; // #[derive(PartialEq, Eq)] // struct T(*const NonStructural); // const C: T = T(std::ptr::null()); // ``` // // Even though `NonStructural` does not implement `PartialEq`, // structural equality on `T` does not recur into the raw // pointer. Therefore, one can still use `C` in a pattern. return ControlFlow::CONTINUE; } ty::FnDef(..) | ty::FnPtr(..) => { // Types of formals and return in `fn(_) -> _` are also irrelevant; // so we do not recur into them via `super_visit_with` return ControlFlow::CONTINUE; } ty::Array(_, n) if { n.try_eval_usize(self.tcx(), ty::ParamEnv::reveal_all()) == Some(0) } => { // rust-lang/rust#62336: ignore type of contents // for empty array. return ControlFlow::CONTINUE; } ty::Bool | ty::Char | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Str | ty::Never => { // These primitive types are always structural match. // // `Never` is kind of special here, but as it is not inhabitable, this should be fine. return ControlFlow::CONTINUE; } ty::Array(..) | ty::Slice(_) | ty::Ref(..) | ty::Tuple(..) => { // First check all contained types and then tell the caller to continue searching. return ty.super_visit_with(self); } ty::Closure(..) | ty::Infer(_) | ty::Placeholder(_) | ty::Bound(..) => { bug!("unexpected type during structural-match checking: {:?}", ty); } ty::Error(_) => { self.tcx().sess.delay_span_bug(self.span, "ty::Error in structural-match check"); // We still want to check other types after encountering an error, // as this may still emit relevant errors. return ControlFlow::CONTINUE; } }; if!self.seen.insert(adt_def.did) { debug!("Search already seen adt_def: {:?}", adt_def); return ControlFlow::CONTINUE; } if!self.type_marked_structural(ty) { debug!("Search found ty: {:?}", ty); return ControlFlow::Break(NonStructuralMatchTy::Adt(&adt_def)); } // structural-match does not care about the // instantiation of the generics in an ADT (it // instead looks directly at its fields outside // this match), so we skip super_visit_with. // // (Must not recur on substs for `PhantomData<T>` cf // rust-lang/rust#55028 and rust-lang/rust#55837; but also // want to skip substs when only uses of generic are // behind unsafe pointers `*const T`/`*mut T`.) // even though we skip super_visit_with, we must recur on // fields of ADT. 
let tcx = self.tcx(); adt_def.all_fields().map(|field| field.ty(tcx, substs)).try_for_each(|field_ty| { let ty = self.tcx().normalize_erasing_regions(ty::ParamEnv::empty(), field_ty); debug!("structural-match ADT: field_ty={:?}, ty={:?}", field_ty, ty); ty.visit_with(self) }) } } pub fn provide(providers: &mut Providers) { providers.has_structural_eq_impls = |tcx, ty| { tcx.infer_ctxt().enter(|infcx| { let cause = ObligationCause::dummy(); type_marked_structural(&infcx, ty, cause) }) }; }
visit_ty
identifier_name
structural_match.rs
use crate::infer::{InferCtxt, TyCtxtInferExt}; use crate::traits::ObligationCause; use crate::traits::{self, TraitEngine}; use rustc_data_structures::fx::FxHashSet; use rustc_hir as hir; use rustc_hir::lang_items::LangItem; use rustc_middle::ty::query::Providers; use rustc_middle::ty::{self, AdtDef, Ty, TyCtxt, TypeFoldable, TypeVisitor}; use rustc_span::Span; use std::ops::ControlFlow; #[derive(Debug)] pub enum NonStructuralMatchTy<'tcx> { Adt(&'tcx AdtDef), Param, Dynamic, Foreign, Opaque, Generator, Projection, } /// This method traverses the structure of `ty`, trying to find an /// instance of an ADT (i.e. struct or enum) that doesn't implement /// the structural-match traits, or a generic type parameter /// (which cannot be determined to be structural-match). /// /// The "structure of a type" includes all components that would be /// considered when doing a pattern match on a constant of that /// type. /// /// * This means this method descends into fields of structs/enums, /// and also descends into the inner type `T` of `&T` and `&mut T` /// /// * The traversal doesn't dereference unsafe pointers (`*const T`, /// `*mut T`), and it does not visit the type arguments of an /// instantiated generic like `PhantomData<T>`. /// /// The reason we do this search is Rust currently require all ADTs /// reachable from a constant's type to implement the /// structural-match traits, which essentially say that /// the implementation of `PartialEq::eq` behaves *equivalently* to a /// comparison against the unfolded structure. /// /// For more background on why Rust has this requirement, and issues /// that arose when the requirement was not enforced completely, see
_id: hir::HirId, span: Span, tcx: TyCtxt<'tcx>, ty: Ty<'tcx>, ) -> Option<NonStructuralMatchTy<'tcx>> { // FIXME: we should instead pass in an `infcx` from the outside. tcx.infer_ctxt().enter(|infcx| { ty.visit_with(&mut Search { infcx, span, seen: FxHashSet::default() }).break_value() }) } /// This method returns true if and only if `adt_ty` itself has been marked as /// eligible for structural-match: namely, if it implements both /// `StructuralPartialEq` and `StructuralEq` (which are respectively injected by /// `#[derive(PartialEq)]` and `#[derive(Eq)]`). /// /// Note that this does *not* recursively check if the substructure of `adt_ty` /// implements the traits. fn type_marked_structural( infcx: &InferCtxt<'_, 'tcx>, adt_ty: Ty<'tcx>, cause: ObligationCause<'tcx>, ) -> bool { let mut fulfillment_cx = traits::FulfillmentContext::new(); // require `#[derive(PartialEq)]` let structural_peq_def_id = infcx.tcx.require_lang_item(LangItem::StructuralPeq, Some(cause.span)); fulfillment_cx.register_bound( infcx, ty::ParamEnv::empty(), adt_ty, structural_peq_def_id, cause.clone(), ); // for now, require `#[derive(Eq)]`. (Doing so is a hack to work around // the type `for<'a> fn(&'a ())` failing to implement `Eq` itself.) let structural_teq_def_id = infcx.tcx.require_lang_item(LangItem::StructuralTeq, Some(cause.span)); fulfillment_cx.register_bound( infcx, ty::ParamEnv::empty(), adt_ty, structural_teq_def_id, cause, ); // We deliberately skip *reporting* fulfillment errors (via // `report_fulfillment_errors`), for two reasons: // // 1. The error messages would mention `std::marker::StructuralPartialEq` // (a trait which is solely meant as an implementation detail // for now), and // // 2. We are sometimes doing future-incompatibility lints for // now, so we do not want unconditional errors here. fulfillment_cx.select_all_or_error(infcx).is_ok() } /// This implements the traversal over the structure of a given type to try to /// find instances of ADTs (specifically structs or enums) that do not implement /// the structural-match traits (`StructuralPartialEq` and `StructuralEq`). struct Search<'a, 'tcx> { span: Span, infcx: InferCtxt<'a, 'tcx>, /// Tracks ADTs previously encountered during search, so that /// we will not recur on them again. seen: FxHashSet<hir::def_id::DefId>, } impl Search<'a, 'tcx> { fn tcx(&self) -> TyCtxt<'tcx> { self.infcx.tcx } fn type_marked_structural(&self, adt_ty: Ty<'tcx>) -> bool { adt_ty.is_structural_eq_shallow(self.tcx()) } } impl<'a, 'tcx> TypeVisitor<'tcx> for Search<'a, 'tcx> { type BreakTy = NonStructuralMatchTy<'tcx>; fn tcx_for_anon_const_substs(&self) -> Option<TyCtxt<'tcx>> { Some(self.tcx()) } fn visit_ty(&mut self, ty: Ty<'tcx>) -> ControlFlow<Self::BreakTy> { debug!("Search visiting ty: {:?}", ty); let (adt_def, substs) = match *ty.kind() { ty::Adt(adt_def, substs) => (adt_def, substs), ty::Param(_) => { return ControlFlow::Break(NonStructuralMatchTy::Param); } ty::Dynamic(..) => { return ControlFlow::Break(NonStructuralMatchTy::Dynamic); } ty::Foreign(_) => { return ControlFlow::Break(NonStructuralMatchTy::Foreign); } ty::Opaque(..) => { return ControlFlow::Break(NonStructuralMatchTy::Opaque); } ty::Projection(..) => { return ControlFlow::Break(NonStructuralMatchTy::Projection); } ty::Generator(..) | ty::GeneratorWitness(..) => { return ControlFlow::Break(NonStructuralMatchTy::Generator); } ty::RawPtr(..) => { // structural-match ignores substructure of // `*const _`/`*mut _`, so skip `super_visit_with`. 
// // For example, if you have: // ``` // struct NonStructural; // #[derive(PartialEq, Eq)] // struct T(*const NonStructural); // const C: T = T(std::ptr::null()); // ``` // // Even though `NonStructural` does not implement `PartialEq`, // structural equality on `T` does not recur into the raw // pointer. Therefore, one can still use `C` in a pattern. return ControlFlow::CONTINUE; } ty::FnDef(..) | ty::FnPtr(..) => { // Types of formals and return in `fn(_) -> _` are also irrelevant; // so we do not recur into them via `super_visit_with` return ControlFlow::CONTINUE; } ty::Array(_, n) if { n.try_eval_usize(self.tcx(), ty::ParamEnv::reveal_all()) == Some(0) } => { // rust-lang/rust#62336: ignore type of contents // for empty array. return ControlFlow::CONTINUE; } ty::Bool | ty::Char | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Str | ty::Never => { // These primitive types are always structural match. // // `Never` is kind of special here, but as it is not inhabitable, this should be fine. return ControlFlow::CONTINUE; } ty::Array(..) | ty::Slice(_) | ty::Ref(..) | ty::Tuple(..) => { // First check all contained types and then tell the caller to continue searching. return ty.super_visit_with(self); } ty::Closure(..) | ty::Infer(_) | ty::Placeholder(_) | ty::Bound(..) => { bug!("unexpected type during structural-match checking: {:?}", ty); } ty::Error(_) => { self.tcx().sess.delay_span_bug(self.span, "ty::Error in structural-match check"); // We still want to check other types after encountering an error, // as this may still emit relevant errors. return ControlFlow::CONTINUE; } }; if!self.seen.insert(adt_def.did) { debug!("Search already seen adt_def: {:?}", adt_def); return ControlFlow::CONTINUE; } if!self.type_marked_structural(ty) { debug!("Search found ty: {:?}", ty); return ControlFlow::Break(NonStructuralMatchTy::Adt(&adt_def)); } // structural-match does not care about the // instantiation of the generics in an ADT (it // instead looks directly at its fields outside // this match), so we skip super_visit_with. // // (Must not recur on substs for `PhantomData<T>` cf // rust-lang/rust#55028 and rust-lang/rust#55837; but also // want to skip substs when only uses of generic are // behind unsafe pointers `*const T`/`*mut T`.) // even though we skip super_visit_with, we must recur on // fields of ADT. let tcx = self.tcx(); adt_def.all_fields().map(|field| field.ty(tcx, substs)).try_for_each(|field_ty| { let ty = self.tcx().normalize_erasing_regions(ty::ParamEnv::empty(), field_ty); debug!("structural-match ADT: field_ty={:?}, ty={:?}", field_ty, ty); ty.visit_with(self) }) } } pub fn provide(providers: &mut Providers) { providers.has_structural_eq_impls = |tcx, ty| { tcx.infer_ctxt().enter(|infcx| { let cause = ObligationCause::dummy(); type_marked_structural(&infcx, ty, cause) }) }; }
/// Rust RFC 1445, rust-lang/rust#61188, and rust-lang/rust#62307. pub fn search_for_structural_match_violation<'tcx>(
random_line_split
structural_match.rs
use crate::infer::{InferCtxt, TyCtxtInferExt}; use crate::traits::ObligationCause; use crate::traits::{self, TraitEngine}; use rustc_data_structures::fx::FxHashSet; use rustc_hir as hir; use rustc_hir::lang_items::LangItem; use rustc_middle::ty::query::Providers; use rustc_middle::ty::{self, AdtDef, Ty, TyCtxt, TypeFoldable, TypeVisitor}; use rustc_span::Span; use std::ops::ControlFlow; #[derive(Debug)] pub enum NonStructuralMatchTy<'tcx> { Adt(&'tcx AdtDef), Param, Dynamic, Foreign, Opaque, Generator, Projection, } /// This method traverses the structure of `ty`, trying to find an /// instance of an ADT (i.e. struct or enum) that doesn't implement /// the structural-match traits, or a generic type parameter /// (which cannot be determined to be structural-match). /// /// The "structure of a type" includes all components that would be /// considered when doing a pattern match on a constant of that /// type. /// /// * This means this method descends into fields of structs/enums, /// and also descends into the inner type `T` of `&T` and `&mut T` /// /// * The traversal doesn't dereference unsafe pointers (`*const T`, /// `*mut T`), and it does not visit the type arguments of an /// instantiated generic like `PhantomData<T>`. /// /// The reason we do this search is Rust currently require all ADTs /// reachable from a constant's type to implement the /// structural-match traits, which essentially say that /// the implementation of `PartialEq::eq` behaves *equivalently* to a /// comparison against the unfolded structure. /// /// For more background on why Rust has this requirement, and issues /// that arose when the requirement was not enforced completely, see /// Rust RFC 1445, rust-lang/rust#61188, and rust-lang/rust#62307. pub fn search_for_structural_match_violation<'tcx>( _id: hir::HirId, span: Span, tcx: TyCtxt<'tcx>, ty: Ty<'tcx>, ) -> Option<NonStructuralMatchTy<'tcx>> { // FIXME: we should instead pass in an `infcx` from the outside. tcx.infer_ctxt().enter(|infcx| { ty.visit_with(&mut Search { infcx, span, seen: FxHashSet::default() }).break_value() }) } /// This method returns true if and only if `adt_ty` itself has been marked as /// eligible for structural-match: namely, if it implements both /// `StructuralPartialEq` and `StructuralEq` (which are respectively injected by /// `#[derive(PartialEq)]` and `#[derive(Eq)]`). /// /// Note that this does *not* recursively check if the substructure of `adt_ty` /// implements the traits. fn type_marked_structural( infcx: &InferCtxt<'_, 'tcx>, adt_ty: Ty<'tcx>, cause: ObligationCause<'tcx>, ) -> bool { let mut fulfillment_cx = traits::FulfillmentContext::new(); // require `#[derive(PartialEq)]` let structural_peq_def_id = infcx.tcx.require_lang_item(LangItem::StructuralPeq, Some(cause.span)); fulfillment_cx.register_bound( infcx, ty::ParamEnv::empty(), adt_ty, structural_peq_def_id, cause.clone(), ); // for now, require `#[derive(Eq)]`. (Doing so is a hack to work around // the type `for<'a> fn(&'a ())` failing to implement `Eq` itself.) let structural_teq_def_id = infcx.tcx.require_lang_item(LangItem::StructuralTeq, Some(cause.span)); fulfillment_cx.register_bound( infcx, ty::ParamEnv::empty(), adt_ty, structural_teq_def_id, cause, ); // We deliberately skip *reporting* fulfillment errors (via // `report_fulfillment_errors`), for two reasons: // // 1. The error messages would mention `std::marker::StructuralPartialEq` // (a trait which is solely meant as an implementation detail // for now), and // // 2. 
We are sometimes doing future-incompatibility lints for // now, so we do not want unconditional errors here. fulfillment_cx.select_all_or_error(infcx).is_ok() } /// This implements the traversal over the structure of a given type to try to /// find instances of ADTs (specifically structs or enums) that do not implement /// the structural-match traits (`StructuralPartialEq` and `StructuralEq`). struct Search<'a, 'tcx> { span: Span, infcx: InferCtxt<'a, 'tcx>, /// Tracks ADTs previously encountered during search, so that /// we will not recur on them again. seen: FxHashSet<hir::def_id::DefId>, } impl Search<'a, 'tcx> { fn tcx(&self) -> TyCtxt<'tcx> { self.infcx.tcx } fn type_marked_structural(&self, adt_ty: Ty<'tcx>) -> bool { adt_ty.is_structural_eq_shallow(self.tcx()) } } impl<'a, 'tcx> TypeVisitor<'tcx> for Search<'a, 'tcx> { type BreakTy = NonStructuralMatchTy<'tcx>; fn tcx_for_anon_const_substs(&self) -> Option<TyCtxt<'tcx>> { Some(self.tcx()) } fn visit_ty(&mut self, ty: Ty<'tcx>) -> ControlFlow<Self::BreakTy> { debug!("Search visiting ty: {:?}", ty); let (adt_def, substs) = match *ty.kind() { ty::Adt(adt_def, substs) => (adt_def, substs), ty::Param(_) => { return ControlFlow::Break(NonStructuralMatchTy::Param); } ty::Dynamic(..) => { return ControlFlow::Break(NonStructuralMatchTy::Dynamic); } ty::Foreign(_) => { return ControlFlow::Break(NonStructuralMatchTy::Foreign); } ty::Opaque(..) =>
ty::Projection(..) => { return ControlFlow::Break(NonStructuralMatchTy::Projection); } ty::Generator(..) | ty::GeneratorWitness(..) => { return ControlFlow::Break(NonStructuralMatchTy::Generator); } ty::RawPtr(..) => { // structural-match ignores substructure of // `*const _`/`*mut _`, so skip `super_visit_with`. // // For example, if you have: // ``` // struct NonStructural; // #[derive(PartialEq, Eq)] // struct T(*const NonStructural); // const C: T = T(std::ptr::null()); // ``` // // Even though `NonStructural` does not implement `PartialEq`, // structural equality on `T` does not recur into the raw // pointer. Therefore, one can still use `C` in a pattern. return ControlFlow::CONTINUE; } ty::FnDef(..) | ty::FnPtr(..) => { // Types of formals and return in `fn(_) -> _` are also irrelevant; // so we do not recur into them via `super_visit_with` return ControlFlow::CONTINUE; } ty::Array(_, n) if { n.try_eval_usize(self.tcx(), ty::ParamEnv::reveal_all()) == Some(0) } => { // rust-lang/rust#62336: ignore type of contents // for empty array. return ControlFlow::CONTINUE; } ty::Bool | ty::Char | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Str | ty::Never => { // These primitive types are always structural match. // // `Never` is kind of special here, but as it is not inhabitable, this should be fine. return ControlFlow::CONTINUE; } ty::Array(..) | ty::Slice(_) | ty::Ref(..) | ty::Tuple(..) => { // First check all contained types and then tell the caller to continue searching. return ty.super_visit_with(self); } ty::Closure(..) | ty::Infer(_) | ty::Placeholder(_) | ty::Bound(..) => { bug!("unexpected type during structural-match checking: {:?}", ty); } ty::Error(_) => { self.tcx().sess.delay_span_bug(self.span, "ty::Error in structural-match check"); // We still want to check other types after encountering an error, // as this may still emit relevant errors. return ControlFlow::CONTINUE; } }; if!self.seen.insert(adt_def.did) { debug!("Search already seen adt_def: {:?}", adt_def); return ControlFlow::CONTINUE; } if!self.type_marked_structural(ty) { debug!("Search found ty: {:?}", ty); return ControlFlow::Break(NonStructuralMatchTy::Adt(&adt_def)); } // structural-match does not care about the // instantiation of the generics in an ADT (it // instead looks directly at its fields outside // this match), so we skip super_visit_with. // // (Must not recur on substs for `PhantomData<T>` cf // rust-lang/rust#55028 and rust-lang/rust#55837; but also // want to skip substs when only uses of generic are // behind unsafe pointers `*const T`/`*mut T`.) // even though we skip super_visit_with, we must recur on // fields of ADT. let tcx = self.tcx(); adt_def.all_fields().map(|field| field.ty(tcx, substs)).try_for_each(|field_ty| { let ty = self.tcx().normalize_erasing_regions(ty::ParamEnv::empty(), field_ty); debug!("structural-match ADT: field_ty={:?}, ty={:?}", field_ty, ty); ty.visit_with(self) }) } } pub fn provide(providers: &mut Providers) { providers.has_structural_eq_impls = |tcx, ty| { tcx.infer_ctxt().enter(|infcx| { let cause = ObligationCause::dummy(); type_marked_structural(&infcx, ty, cause) }) }; }
{ return ControlFlow::Break(NonStructuralMatchTy::Opaque); }
conditional_block
bridge.rs
extern crate serde_json; use std; use std::str; use std::io::Result as IoResult; use std::time::Duration; use std::net::IpAddr; use std::net::Ipv4Addr; use std::net::UdpSocket; use std::collections::HashSet; use hyper::Client; use serde_json::Value; use user::User; use utils; use error::Result; /// Returns a HashSet of hue bridge SocketAddr's pub fn discover() -> IoResult<HashSet<Ipv4Addr>> { let string_list = vec![ "M-SEARCH * HTTP/1.1",
let joined = string_list.join("\r\n"); let socket = UdpSocket::bind("0.0.0.0:0")?; let two_second_timeout = Duration::new(2, 0); let _ = socket.set_read_timeout(Some(two_second_timeout)); socket.send_to(joined.as_bytes(), "239.255.255.250:1900")?; let mut bridges = HashSet::new(); loop { let mut buf = [0;255]; let sockread = match socket.recv_from(&mut buf) { Ok(val) => val, Err(e) => { match e.kind() { // a timeout on unix is considered a WouldBlock std::io::ErrorKind::WouldBlock => break, _ => panic!(e), } } }; let _ = str::from_utf8(&buf).and_then(|s| { // Hue docs say to use "IpBridge" over "hue-bridgeid" if s.contains("IpBridge") { if let IpAddr::V4(addr) = sockread.1.ip() { bridges.insert(addr); } } Ok(s) }); } Ok(bridges) } /// Hue Bridge #[derive(Debug)] pub struct Bridge { ip: Ipv4Addr, client: Client, } impl Bridge { /// Returns a hue bridge with the given ip pub fn new(addr: Ipv4Addr) -> Bridge { let mut client = Client::new(); client.set_read_timeout(Some(Duration::new(2,0))); client.set_write_timeout(Some(Duration::new(2,0))); Bridge { ip: addr, client: client, } } /// Attempt to register with the hue bridge pub fn register(&self, name: &str) -> Result<User>{ #[derive(Debug, Serialize, Deserialize)] struct Devicetype { devicetype: String, } let url = format!("http://{}/api", self.ip); let payload = Devicetype { devicetype: name.to_owned() }; let body = serde_json::to_string(&payload)?; let response = self.client.post(&url).body(&body).send()?; let json: Value = serde_json::from_reader(response)?; utils::hue_result(json).and_then(|json| { let user: User = serde_json::from_value(json)?; Ok(user) }) } }
"HOST:239.255.255.250:1900", "MAN:\"ssdp:discover\"", "ST:ssdp:all", "MX:1" ];
random_line_split
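Note on reading these rows: the preview appears to follow the column order file_name, prefix, suffix, middle, fim_type, and in the examples shown here the original source file is recovered by concatenating prefix + middle + suffix. A `random_line_split` row such as the bridge.rs sample above simply masks a contiguous run of source text as the middle. The following is a minimal illustrative sketch of that reassembly; it is not part of the dataset, it assumes the concatenation order just described, and it uses short toy strings rather than the full cells above.

// Rebuild the original file text from one row's three string columns.
fn reassemble(prefix: &str, middle: &str, suffix: &str) -> String {
    // The file named in `file_name` is prefix followed by middle followed by suffix.
    format!("{}{}{}", prefix, middle, suffix)
}

fn main() {
    // Toy stand-ins for the three columns of a `random_line_split` row.
    let prefix = "let string_list = vec![\n    \"M-SEARCH * HTTP/1.1\",\n";
    let middle = "    \"HOST:239.255.255.250:1900\",\n";
    let suffix = "];\n";
    let rebuilt = reassemble(prefix, middle, suffix);
    assert!(rebuilt.starts_with("let string_list"));
    assert!(rebuilt.contains("HOST:239.255.255.250:1900"));
}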
bridge.rs
extern crate serde_json; use std; use std::str; use std::io::Result as IoResult; use std::time::Duration; use std::net::IpAddr; use std::net::Ipv4Addr; use std::net::UdpSocket; use std::collections::HashSet; use hyper::Client; use serde_json::Value; use user::User; use utils; use error::Result; /// Returns a HashSet of hue bridge SocketAddr's pub fn discover() -> IoResult<HashSet<Ipv4Addr>> { let string_list = vec![ "M-SEARCH * HTTP/1.1", "HOST:239.255.255.250:1900", "MAN:\"ssdp:discover\"", "ST:ssdp:all", "MX:1" ]; let joined = string_list.join("\r\n"); let socket = UdpSocket::bind("0.0.0.0:0")?; let two_second_timeout = Duration::new(2, 0); let _ = socket.set_read_timeout(Some(two_second_timeout)); socket.send_to(joined.as_bytes(), "239.255.255.250:1900")?; let mut bridges = HashSet::new(); loop { let mut buf = [0;255]; let sockread = match socket.recv_from(&mut buf) { Ok(val) => val, Err(e) => { match e.kind() { // a timeout on unix is considered a WouldBlock std::io::ErrorKind::WouldBlock => break, _ => panic!(e), } } }; let _ = str::from_utf8(&buf).and_then(|s| { // Hue docs say to use "IpBridge" over "hue-bridgeid" if s.contains("IpBridge") { if let IpAddr::V4(addr) = sockread.1.ip() { bridges.insert(addr); } } Ok(s) }); } Ok(bridges) } /// Hue Bridge #[derive(Debug)] pub struct Bridge { ip: Ipv4Addr, client: Client, } impl Bridge { /// Returns a hue bridge with the given ip pub fn new(addr: Ipv4Addr) -> Bridge { let mut client = Client::new(); client.set_read_timeout(Some(Duration::new(2,0))); client.set_write_timeout(Some(Duration::new(2,0))); Bridge { ip: addr, client: client, } } /// Attempt to register with the hue bridge pub fn register(&self, name: &str) -> Result<User>
}
{ #[derive(Debug, Serialize, Deserialize)] struct Devicetype { devicetype: String, } let url = format!("http://{}/api", self.ip); let payload = Devicetype { devicetype: name.to_owned() }; let body = serde_json::to_string(&payload)?; let response = self.client.post(&url).body(&body).send()?; let json: Value = serde_json::from_reader(response)?; utils::hue_result(json).and_then(|json| { let user: User = serde_json::from_value(json)?; Ok(user) }) }
identifier_body
bridge.rs
extern crate serde_json; use std; use std::str; use std::io::Result as IoResult; use std::time::Duration; use std::net::IpAddr; use std::net::Ipv4Addr; use std::net::UdpSocket; use std::collections::HashSet; use hyper::Client; use serde_json::Value; use user::User; use utils; use error::Result; /// Returns a HashSet of hue bridge SocketAddr's pub fn discover() -> IoResult<HashSet<Ipv4Addr>> { let string_list = vec![ "M-SEARCH * HTTP/1.1", "HOST:239.255.255.250:1900", "MAN:\"ssdp:discover\"", "ST:ssdp:all", "MX:1" ]; let joined = string_list.join("\r\n"); let socket = UdpSocket::bind("0.0.0.0:0")?; let two_second_timeout = Duration::new(2, 0); let _ = socket.set_read_timeout(Some(two_second_timeout)); socket.send_to(joined.as_bytes(), "239.255.255.250:1900")?; let mut bridges = HashSet::new(); loop { let mut buf = [0;255]; let sockread = match socket.recv_from(&mut buf) { Ok(val) => val, Err(e) => { match e.kind() { // a timeout on unix is considered a WouldBlock std::io::ErrorKind::WouldBlock => break, _ => panic!(e), } } }; let _ = str::from_utf8(&buf).and_then(|s| { // Hue docs say to use "IpBridge" over "hue-bridgeid" if s.contains("IpBridge") { if let IpAddr::V4(addr) = sockread.1.ip() { bridges.insert(addr); } } Ok(s) }); } Ok(bridges) } /// Hue Bridge #[derive(Debug)] pub struct Bridge { ip: Ipv4Addr, client: Client, } impl Bridge { /// Returns a hue bridge with the given ip pub fn new(addr: Ipv4Addr) -> Bridge { let mut client = Client::new(); client.set_read_timeout(Some(Duration::new(2,0))); client.set_write_timeout(Some(Duration::new(2,0))); Bridge { ip: addr, client: client, } } /// Attempt to register with the hue bridge pub fn register(&self, name: &str) -> Result<User>{ #[derive(Debug, Serialize, Deserialize)] struct
{ devicetype: String, } let url = format!("http://{}/api", self.ip); let payload = Devicetype { devicetype: name.to_owned() }; let body = serde_json::to_string(&payload)?; let response = self.client.post(&url).body(&body).send()?; let json: Value = serde_json::from_reader(response)?; utils::hue_result(json).and_then(|json| { let user: User = serde_json::from_value(json)?; Ok(user) }) } }
Devicetype
identifier_name
derive-hash-struct-with-float-array.rs
#![allow( dead_code, non_snake_case, non_camel_case_types, non_upper_case_globals )] /// A struct containing an array of floats that cannot derive Hash/Eq/Ord but can derive PartialEq/PartialOrd #[repr(C)] #[derive(Debug, Default, Copy, Clone, PartialOrd, PartialEq)] pub struct foo { pub bar: [f32; 3usize], } #[test] fn bindgen_test_layout_foo() { assert_eq!( ::std::mem::size_of::<foo>(), 12usize, concat!("Size of: ", stringify!(foo)) ); assert_eq!( ::std::mem::align_of::<foo>(), 4usize, concat!("Alignment of ", stringify!(foo)) );
concat!("Offset of field: ", stringify!(foo), "::", stringify!(bar)) ); }
assert_eq!( unsafe { &(*(::std::ptr::null::<foo>())).bar as *const _ as usize }, 0usize,
random_line_split
derive-hash-struct-with-float-array.rs
#![allow( dead_code, non_snake_case, non_camel_case_types, non_upper_case_globals )] /// A struct containing an array of floats that cannot derive Hash/Eq/Ord but can derive PartialEq/PartialOrd #[repr(C)] #[derive(Debug, Default, Copy, Clone, PartialOrd, PartialEq)] pub struct foo { pub bar: [f32; 3usize], } #[test] fn
() { assert_eq!( ::std::mem::size_of::<foo>(), 12usize, concat!("Size of: ", stringify!(foo)) ); assert_eq!( ::std::mem::align_of::<foo>(), 4usize, concat!("Alignment of ", stringify!(foo)) ); assert_eq!( unsafe { &(*(::std::ptr::null::<foo>())).bar as *const _ as usize }, 0usize, concat!("Offset of field: ", stringify!(foo), "::", stringify!(bar)) ); }
bindgen_test_layout_foo
identifier_name
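`identifier_name` rows like the one above mask a single identifier; here the middle column holds the test function's name. For fill-in-the-middle training, such triples are typically serialized with sentinel tokens around the three pieces. The sketch below shows one prefix-suffix-middle (PSM) layout as an illustration only; the sentinel strings are placeholders, since the dataset itself does not specify which tokens a downstream model would use.

/// Serialize one row into a PSM-style fill-in-the-middle training string.
/// The three sentinel strings are illustrative placeholders only.
fn to_fim_example(prefix: &str, middle: &str, suffix: &str) -> String {
    // PSM layout: prefix, then suffix, then the middle to be predicted last.
    format!("<fim_prefix>{}<fim_suffix>{}<fim_middle>{}", prefix, suffix, middle)
}

fn main() {
    // The middle is taken from the row above; prefix and suffix are truncated stand-ins.
    let example = to_fim_example("#[test] fn ", "bindgen_test_layout_foo", "() { /* ... */ }");
    assert!(example.ends_with("bindgen_test_layout_foo"));
}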
owned.rs
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! A unique pointer type use core::any::{Any, AnyRefExt}; use core::clone::Clone; use core::cmp::{Eq, Ord, TotalEq, TotalOrd, Ordering}; use core::default::Default; use core::fmt; use core::intrinsics; use core::mem; use core::raw::TraitObject; use core::result::{Ok, Err, Result}; /// A value that represents the global exchange heap. This is the default /// place that the `box` keyword allocates into when no place is supplied. /// /// The following two examples are equivalent: /// /// let foo = box(HEAP) Bar::new(...); /// let foo = box Bar::new(...); #[lang="exchange_heap"] pub static HEAP: () = (); /// A type that represents a uniquely-owned value. #[lang="owned_box"] pub struct Box<T>(*T); impl<T: Default> Default for Box<T> { fn default() -> Box<T> { box Default::default() } } impl<T: Clone> Clone for Box<T> { /// Return a copy of the owned box. #[inline] fn clone(&self) -> Box<T> { box {(**self).clone()} } /// Perform copy-assignment from `source` by reusing the existing allocation. #[inline] fn clone_from(&mut self, source: &Box<T>) { (**self).clone_from(&(**source)); } } // box pointers impl<T:Eq> Eq for Box<T> { #[inline] fn eq(&self, other: &Box<T>) -> bool { *(*self) == *(*other) } #[inline] fn ne(&self, other: &Box<T>) -> bool { *(*self)!= *(*other) } } impl<T:Ord> Ord for Box<T> { #[inline] fn lt(&self, other: &Box<T>) -> bool { *(*self) < *(*other) } #[inline] fn le(&self, other: &Box<T>) -> bool { *(*self) <= *(*other) } #[inline] fn ge(&self, other: &Box<T>) -> bool { *(*self) >= *(*other) } #[inline] fn gt(&self, other: &Box<T>) -> bool { *(*self) > *(*other) } } impl<T: TotalOrd> TotalOrd for Box<T> { #[inline] fn cmp(&self, other: &Box<T>) -> Ordering { (**self).cmp(*other) } } impl<T: TotalEq> TotalEq for Box<T> {} /// Extension methods for an owning `Any` trait object pub trait AnyOwnExt { /// Returns the boxed value if it is of type `T`, or /// `Err(Self)` if it isn't. fn move<T:'static>(self) -> Result<Box<T>, Self>; } impl AnyOwnExt for Box<Any> { #[inline] fn move<T:'static>(self) -> Result<Box<T>, Box<Any>>
} impl<T: fmt::Show> fmt::Show for Box<T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { (**self).fmt(f) } } impl fmt::Show for Box<Any> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.pad("Box<Any>") } }
{ if self.is::<T>() { unsafe { // Get the raw representation of the trait object let to: TraitObject = *mem::transmute::<&Box<Any>, &TraitObject>(&self); // Prevent destructor on self being run intrinsics::forget(self); // Extract the data pointer Ok(mem::transmute(to.data)) } } else { Err(self) } }
identifier_body
owned.rs
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! A unique pointer type use core::any::{Any, AnyRefExt}; use core::clone::Clone; use core::cmp::{Eq, Ord, TotalEq, TotalOrd, Ordering}; use core::default::Default; use core::fmt; use core::intrinsics; use core::mem; use core::raw::TraitObject; use core::result::{Ok, Err, Result}; /// A value that represents the global exchange heap. This is the default /// place that the `box` keyword allocates into when no place is supplied. /// /// The following two examples are equivalent: /// /// let foo = box(HEAP) Bar::new(...); /// let foo = box Bar::new(...); #[lang="exchange_heap"] pub static HEAP: () = (); /// A type that represents a uniquely-owned value. #[lang="owned_box"] pub struct Box<T>(*T); impl<T: Default> Default for Box<T> { fn default() -> Box<T> { box Default::default() } } impl<T: Clone> Clone for Box<T> { /// Return a copy of the owned box. #[inline] fn clone(&self) -> Box<T> { box {(**self).clone()} } /// Perform copy-assignment from `source` by reusing the existing allocation. #[inline] fn clone_from(&mut self, source: &Box<T>) { (**self).clone_from(&(**source)); } } // box pointers impl<T:Eq> Eq for Box<T> { #[inline] fn eq(&self, other: &Box<T>) -> bool { *(*self) == *(*other) } #[inline] fn ne(&self, other: &Box<T>) -> bool { *(*self)!= *(*other) } } impl<T:Ord> Ord for Box<T> { #[inline] fn lt(&self, other: &Box<T>) -> bool { *(*self) < *(*other) } #[inline] fn le(&self, other: &Box<T>) -> bool { *(*self) <= *(*other) } #[inline] fn ge(&self, other: &Box<T>) -> bool { *(*self) >= *(*other) } #[inline] fn gt(&self, other: &Box<T>) -> bool { *(*self) > *(*other) } } impl<T: TotalOrd> TotalOrd for Box<T> { #[inline] fn cmp(&self, other: &Box<T>) -> Ordering { (**self).cmp(*other) } } impl<T: TotalEq> TotalEq for Box<T> {} /// Extension methods for an owning `Any` trait object pub trait AnyOwnExt { /// Returns the boxed value if it is of type `T`, or /// `Err(Self)` if it isn't. fn move<T:'static>(self) -> Result<Box<T>, Self>; } impl AnyOwnExt for Box<Any> { #[inline] fn move<T:'static>(self) -> Result<Box<T>, Box<Any>> { if self.is::<T>() { unsafe { // Get the raw representation of the trait object let to: TraitObject = *mem::transmute::<&Box<Any>, &TraitObject>(&self); // Prevent destructor on self being run intrinsics::forget(self); // Extract the data pointer Ok(mem::transmute(to.data)) } } else
} } impl<T: fmt::Show> fmt::Show for Box<T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { (**self).fmt(f) } } impl fmt::Show for Box<Any> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.pad("Box<Any>") } }
{ Err(self) }
conditional_block
owned.rs
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! A unique pointer type use core::any::{Any, AnyRefExt}; use core::clone::Clone; use core::cmp::{Eq, Ord, TotalEq, TotalOrd, Ordering}; use core::default::Default; use core::fmt; use core::intrinsics; use core::mem; use core::raw::TraitObject; use core::result::{Ok, Err, Result}; /// A value that represents the global exchange heap. This is the default /// place that the `box` keyword allocates into when no place is supplied. /// /// The following two examples are equivalent: /// /// let foo = box(HEAP) Bar::new(...); /// let foo = box Bar::new(...); #[lang="exchange_heap"] pub static HEAP: () = (); /// A type that represents a uniquely-owned value. #[lang="owned_box"] pub struct Box<T>(*T); impl<T: Default> Default for Box<T> { fn default() -> Box<T> { box Default::default() } } impl<T: Clone> Clone for Box<T> { /// Return a copy of the owned box. #[inline] fn clone(&self) -> Box<T> { box {(**self).clone()} } /// Perform copy-assignment from `source` by reusing the existing allocation. #[inline] fn clone_from(&mut self, source: &Box<T>) { (**self).clone_from(&(**source)); } } // box pointers impl<T:Eq> Eq for Box<T> { #[inline] fn eq(&self, other: &Box<T>) -> bool { *(*self) == *(*other) } #[inline] fn ne(&self, other: &Box<T>) -> bool { *(*self)!= *(*other) } } impl<T:Ord> Ord for Box<T> { #[inline] fn lt(&self, other: &Box<T>) -> bool { *(*self) < *(*other) } #[inline] fn
(&self, other: &Box<T>) -> bool { *(*self) <= *(*other) } #[inline] fn ge(&self, other: &Box<T>) -> bool { *(*self) >= *(*other) } #[inline] fn gt(&self, other: &Box<T>) -> bool { *(*self) > *(*other) } } impl<T: TotalOrd> TotalOrd for Box<T> { #[inline] fn cmp(&self, other: &Box<T>) -> Ordering { (**self).cmp(*other) } } impl<T: TotalEq> TotalEq for Box<T> {} /// Extension methods for an owning `Any` trait object pub trait AnyOwnExt { /// Returns the boxed value if it is of type `T`, or /// `Err(Self)` if it isn't. fn move<T:'static>(self) -> Result<Box<T>, Self>; } impl AnyOwnExt for Box<Any> { #[inline] fn move<T:'static>(self) -> Result<Box<T>, Box<Any>> { if self.is::<T>() { unsafe { // Get the raw representation of the trait object let to: TraitObject = *mem::transmute::<&Box<Any>, &TraitObject>(&self); // Prevent destructor on self being run intrinsics::forget(self); // Extract the data pointer Ok(mem::transmute(to.data)) } } else { Err(self) } } } impl<T: fmt::Show> fmt::Show for Box<T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { (**self).fmt(f) } } impl fmt::Show for Box<Any> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.pad("Box<Any>") } }
le
identifier_name
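Across the rows in this preview, the `fim_type` label describes what kind of span the middle column holds: `identifier_name` middles are bare identifiers (`le`, `Devicetype`, `bindgen_test_layout_foo`), `identifier_body` and `conditional_block` middles are brace-delimited blocks, and `random_line_split` middles are arbitrary runs of source lines. The sketch below is a hypothetical sanity check along those lines, not the labeling logic actually used to build the dataset.

/// Rough checks for two of the label classes seen above.
/// Illustrative heuristics only, not the dataset's real splitting logic.
fn is_bare_identifier(middle: &str) -> bool {
    !middle.is_empty()
        && middle.chars().all(|c| c.is_ascii_alphanumeric() || c == '_')
}

fn is_braced_block(middle: &str) -> bool {
    let t = middle.trim();
    t.starts_with('{') && t.ends_with('}')
}

fn main() {
    // Middle values quoted from the `identifier_name` and `conditional_block` rows above.
    assert!(is_bare_identifier("le"));
    assert!(is_bare_identifier("Devicetype"));
    assert!(is_braced_block("{ Err(self) }"));
    assert!(!is_bare_identifier("{ Err(self) }"));
}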