file_name
large_stringlengths 4
69
| prefix
large_stringlengths 0
26.7k
| suffix
large_stringlengths 0
24.8k
| middle
large_stringlengths 0
2.12k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
component.rs
|
use std::comm::{TryRecvError,Empty,Disconnected};
use std::fmt;
use message::{Message,MessageData};
use message::MessageData::{MsgStart};
#[deriving(PartialEq,Clone)]
pub enum ComponentType {
ManagerComponent,
ExtractorComponent,
AudioDecoderComponent,
VideoDecoderComponent,
ClockComponent,
AudioRendererComponent,
VideoRendererComponent,
UiComponent,
}
impl fmt::Show for ComponentType {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
ComponentType::ManagerComponent => write!(f, "ComponentManager"),
ComponentType::ExtractorComponent => write!(f, "Extractor"),
ComponentType::AudioDecoderComponent => write!(f, "AudioDecoder"),
ComponentType::VideoDecoderComponent => write!(f, "VideoDecoder"),
ComponentType::ClockComponent => write!(f, "Clock"),
ComponentType::AudioRendererComponent => write!(f, "AudioRenderer"),
ComponentType::VideoRendererComponent => write!(f, "VideoRenderer"),
ComponentType::UiComponent => write!(f, "UI"),
}
}
}
pub struct ComponentStruct {
pub component_type: ComponentType,
pub mgr_sender: Option<Sender<Message>>,
pub receiver: Receiver<Message>,
pub sender: Option<Sender<Message>>,
}
|
component_type: component_type,
mgr_sender: None,
receiver: receiver,
sender: Some(sender),
}
}
pub fn set_mgr_sender(&mut self, sender: Sender<Message>) {
self.mgr_sender= Some(sender);
}
pub fn take_sender(&mut self) -> Sender<Message> {
self.sender.take().unwrap()
}
pub fn send(&self, to: ComponentType, msg:MessageData) -> bool {
match self.mgr_sender.as_ref().unwrap().send_opt(Message {
from: self.component_type.clone(),
to: to,
msg: msg
}) {
Ok(_) => true,
Err(_) => false
}
}
pub fn recv(&self) -> Message {
self.receiver.recv()
}
pub fn try_recv(&self) -> Result<Message, TryRecvError> {
self.receiver.try_recv()
}
pub fn flush(&self) {
loop {
match self.receiver.try_recv() {
Ok(_msg) => {
debug!("{} flush", self.component_type);
}
Err(Empty) => {
break
}
Err(Disconnected) => {
break;
}
}
}
}
pub fn wait_for_start(&self) {
match self.recv() {
Message { from: ComponentType::ManagerComponent, msg: MsgStart,.. } => {
info!("start {}", self.component_type);
}
_ => {
panic!("unexpected message received");
}
}
}
}
pub trait Component {
fn get<'a>(&'a mut self) -> &'a mut ComponentStruct;
}
|
impl ComponentStruct {
pub fn new(component_type: ComponentType) -> ComponentStruct {
let (sender, receiver) = channel::<Message>();
ComponentStruct {
|
random_line_split
|
component.rs
|
use std::comm::{TryRecvError,Empty,Disconnected};
use std::fmt;
use message::{Message,MessageData};
use message::MessageData::{MsgStart};
#[deriving(PartialEq,Clone)]
pub enum ComponentType {
ManagerComponent,
ExtractorComponent,
AudioDecoderComponent,
VideoDecoderComponent,
ClockComponent,
AudioRendererComponent,
VideoRendererComponent,
UiComponent,
}
impl fmt::Show for ComponentType {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
ComponentType::ManagerComponent => write!(f, "ComponentManager"),
ComponentType::ExtractorComponent => write!(f, "Extractor"),
ComponentType::AudioDecoderComponent => write!(f, "AudioDecoder"),
ComponentType::VideoDecoderComponent => write!(f, "VideoDecoder"),
ComponentType::ClockComponent => write!(f, "Clock"),
ComponentType::AudioRendererComponent => write!(f, "AudioRenderer"),
ComponentType::VideoRendererComponent => write!(f, "VideoRenderer"),
ComponentType::UiComponent => write!(f, "UI"),
}
}
}
pub struct ComponentStruct {
pub component_type: ComponentType,
pub mgr_sender: Option<Sender<Message>>,
pub receiver: Receiver<Message>,
pub sender: Option<Sender<Message>>,
}
impl ComponentStruct {
pub fn new(component_type: ComponentType) -> ComponentStruct
|
pub fn set_mgr_sender(&mut self, sender: Sender<Message>) {
self.mgr_sender= Some(sender);
}
pub fn take_sender(&mut self) -> Sender<Message> {
self.sender.take().unwrap()
}
pub fn send(&self, to: ComponentType, msg:MessageData) -> bool {
match self.mgr_sender.as_ref().unwrap().send_opt(Message {
from: self.component_type.clone(),
to: to,
msg: msg
}) {
Ok(_) => true,
Err(_) => false
}
}
pub fn recv(&self) -> Message {
self.receiver.recv()
}
pub fn try_recv(&self) -> Result<Message, TryRecvError> {
self.receiver.try_recv()
}
pub fn flush(&self) {
loop {
match self.receiver.try_recv() {
Ok(_msg) => {
debug!("{} flush", self.component_type);
}
Err(Empty) => {
break
}
Err(Disconnected) => {
break;
}
}
}
}
pub fn wait_for_start(&self) {
match self.recv() {
Message { from: ComponentType::ManagerComponent, msg: MsgStart,.. } => {
info!("start {}", self.component_type);
}
_ => {
panic!("unexpected message received");
}
}
}
}
pub trait Component {
fn get<'a>(&'a mut self) -> &'a mut ComponentStruct;
}
|
{
let (sender, receiver) = channel::<Message>();
ComponentStruct {
component_type: component_type,
mgr_sender: None,
receiver: receiver,
sender: Some(sender),
}
}
|
identifier_body
|
codemap.rs
|
Add<CharPos,CharPos> for CharPos {
fn add(&self, rhs: &CharPos) -> CharPos {
CharPos(self.to_uint() + rhs.to_uint())
}
}
impl Sub<CharPos,CharPos> for CharPos {
fn sub(&self, rhs: &CharPos) -> CharPos {
CharPos(self.to_uint() - rhs.to_uint())
}
}
/**
Spans represent a region of code, used for error reporting. Positions in spans
are *absolute* positions from the beginning of the codemap, not positions
relative to FileMaps. Methods on the CodeMap can be used to relate spans back
to the original source.
*/
#[deriving(Clone, Show, Hash)]
pub struct Span {
pub lo: BytePos,
pub hi: BytePos,
/// Information about where the macro came from, if this piece of
/// code was created by a macro expansion.
pub expn_id: ExpnId
}
pub const DUMMY_SP: Span = Span { lo: BytePos(0), hi: BytePos(0), expn_id: NO_EXPANSION };
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)]
pub struct Spanned<T> {
pub node: T,
pub span: Span,
}
impl PartialEq for Span {
fn eq(&self, other: &Span) -> bool {
return (*self).lo == (*other).lo && (*self).hi == (*other).hi;
}
fn ne(&self, other: &Span) -> bool {!(*self).eq(other) }
}
impl Eq for Span {}
impl<S:Encoder<E>, E> Encodable<S, E> for Span {
/* Note #1972 -- spans are encoded but not decoded */
fn encode(&self, s: &mut S) -> Result<(), E> {
s.emit_nil()
}
}
impl<D:Decoder<E>, E> Decodable<D, E> for Span {
fn decode(_d: &mut D) -> Result<Span, E> {
Ok(DUMMY_SP)
}
}
pub fn spanned<T>(lo: BytePos, hi: BytePos, t: T) -> Spanned<T> {
respan(mk_sp(lo, hi), t)
}
pub fn respan<T>(sp: Span, t: T) -> Spanned<T> {
Spanned {node: t, span: sp}
}
pub fn dummy_spanned<T>(t: T) -> Spanned<T> {
respan(DUMMY_SP, t)
}
/* assuming that we're not in macro expansion */
pub fn mk_sp(lo: BytePos, hi: BytePos) -> Span {
Span {lo: lo, hi: hi, expn_id: NO_EXPANSION}
}
/// Return the span itself if it doesn't come from a macro expansion,
/// otherwise return the call site span up to the `enclosing_sp` by
/// following the `expn_info` chain.
pub fn original_sp(cm: &CodeMap, sp: Span, enclosing_sp: Span) -> Span {
let call_site1 = cm.with_expn_info(sp.expn_id, |ei| ei.map(|ei| ei.call_site));
let call_site2 = cm.with_expn_info(enclosing_sp.expn_id, |ei| ei.map(|ei| ei.call_site));
match (call_site1, call_site2) {
(None, _) => sp,
(Some(call_site1), Some(call_site2)) if call_site1 == call_site2 => sp,
(Some(call_site1), _) => original_sp(cm, call_site1, enclosing_sp),
}
}
/// A source code location used for error reporting
pub struct Loc {
/// Information about the original source
pub file: Rc<FileMap>,
/// The (1-based) line number
pub line: uint,
/// The (0-based) column offset
pub col: CharPos
}
/// A source code location used as the result of lookup_char_pos_adj
// Actually, *none* of the clients use the filename *or* file field;
// perhaps they should just be removed.
pub struct LocWithOpt {
pub filename: FileName,
pub line: uint,
pub col: CharPos,
pub file: Option<Rc<FileMap>>,
}
// used to be structural records. Better names, anyone?
pub struct FileMapAndLine { pub fm: Rc<FileMap>, pub line: uint }
pub struct FileMapAndBytePos { pub fm: Rc<FileMap>, pub pos: BytePos }
/// The syntax with which a macro was invoked.
#[deriving(Clone, Hash, Show)]
pub enum MacroFormat {
/// e.g. #[deriving(...)] <item>
MacroAttribute,
/// e.g. `format!()`
MacroBang
}
#[deriving(Clone, Hash, Show)]
pub struct NameAndSpan {
/// The name of the macro that was invoked to create the thing
/// with this Span.
pub name: String,
/// The format with which the macro was invoked.
pub format: MacroFormat,
/// The span of the macro definition itself. The macro may not
/// have a sensible definition span (e.g. something defined
/// completely inside libsyntax) in which case this is None.
pub span: Option<Span>
}
/// Extra information for tracking macro expansion of spans
#[deriving(Hash, Show)]
pub struct ExpnInfo {
/// The location of the actual macro invocation, e.g. `let x =
/// foo!();`
///
/// This may recursively refer to other macro invocations, e.g. if
/// `foo!()` invoked `bar!()` internally, and there was an
/// expression inside `bar!`; the call_site of the expression in
/// the expansion would point to the `bar!` invocation; that
/// call_site span would have its own ExpnInfo, with the call_site
/// pointing to the `foo!` invocation.
pub call_site: Span,
/// Information about the macro and its definition.
///
/// The `callee` of the inner expression in the `call_site`
/// example would point to the `macro_rules! bar {... }` and that
/// of the `bar!()` invocation would point to the `macro_rules!
/// foo {... }`.
pub callee: NameAndSpan
}
#[deriving(PartialEq, Eq, Clone, Show, Hash, Encodable, Decodable)]
pub struct ExpnId(u32);
pub const NO_EXPANSION: ExpnId = ExpnId(-1);
impl ExpnId {
pub fn from_llvm_cookie(cookie: c_uint) -> ExpnId {
ExpnId(cookie as u32)
}
pub fn to_llvm_cookie(self) -> i32 {
let ExpnId(cookie) = self;
cookie as i32
}
}
pub type FileName = String;
pub struct FileLines {
pub file: Rc<FileMap>,
pub lines: Vec<uint>
}
/// Identifies an offset of a multi-byte character in a FileMap
pub struct MultiByteChar {
/// The absolute offset of the character in the CodeMap
pub pos: BytePos,
/// The number of bytes, >=2
pub bytes: uint,
}
/// A single source in the CodeMap
pub struct FileMap {
/// The name of the file that the source came from, source that doesn't
/// originate from files has names between angle brackets by convention,
/// e.g. `<anon>`
pub name: FileName,
/// The complete source code
pub src: String,
/// The start position of this source in the CodeMap
pub start_pos: BytePos,
/// Locations of lines beginnings in the source code
pub lines: RefCell<Vec<BytePos> >,
/// Locations of multi-byte characters in the source code
pub multibyte_chars: RefCell<Vec<MultiByteChar> >,
}
impl FileMap {
/// EFFECT: register a start-of-line offset in the
/// table of line-beginnings.
/// UNCHECKED INVARIANT: these offsets must be added in the right
/// order and must be in the right places; there is shared knowledge
/// about what ends a line between this file and parse.rs
/// WARNING: pos param here is the offset relative to start of CodeMap,
/// and CodeMap will append a newline when adding a filemap without a newline at the end,
/// so the safe way to call this is with value calculated as
/// filemap.start_pos + newline_offset_relative_to_the_start_of_filemap.
pub fn next_line(&self, pos: BytePos) {
// the new charpos must be > the last one (or it's the first one).
let mut lines = self.lines.borrow_mut();
let line_len = lines.len();
assert!(line_len == 0 || ((*lines)[line_len - 1] < pos))
lines.push(pos);
}
/// get a line from the list of pre-computed line-beginnings
///
pub fn get_line(&self, line: int) -> String {
let lines = self.lines.borrow();
let begin: BytePos = (*lines)[line as uint] - self.start_pos;
let begin = begin.to_uint();
let slice = self.src.as_slice().slice_from(begin);
match slice.find('\n') {
Some(e) => slice.slice_to(e).to_string(),
None => slice.to_string()
}
}
pub fn record_multibyte_char(&self, pos: BytePos, bytes: uint) {
assert!(bytes >=2 && bytes <= 4);
let mbc = MultiByteChar {
pos: pos,
bytes: bytes,
};
self.multibyte_chars.borrow_mut().push(mbc);
}
pub fn is_real_file(&self) -> bool {
!(self.name.as_slice().starts_with("<") &&
self.name.as_slice().ends_with(">"))
}
}
pub struct CodeMap {
pub files: RefCell<Vec<Rc<FileMap>>>,
expansions: RefCell<Vec<ExpnInfo>>
}
impl CodeMap {
pub fn new() -> CodeMap {
CodeMap {
files: RefCell::new(Vec::new()),
expansions: RefCell::new(Vec::new()),
}
}
pub fn new_filemap(&self, filename: FileName, src: String) -> Rc<FileMap> {
let mut files = self.files.borrow_mut();
let start_pos = match files.last() {
None => 0,
Some(last) => last.start_pos.to_uint() + last.src.len(),
};
// Remove utf-8 BOM if any.
// FIXME #12884: no efficient/safe way to remove from the start of a string
// and reuse the allocation.
let mut src = if src.as_slice().starts_with("\ufeff") {
String::from_str(src.as_slice().slice_from(3))
} else {
String::from_str(src.as_slice())
};
// Append '\n' in case it's not already there.
// This is a workaround to prevent CodeMap.lookup_filemap_idx from accidentally
// overflowing into the next filemap in case the last byte of span is also the last
// byte of filemap, which leads to incorrect results from CodeMap.span_to_*.
if src.len() > 0 &&!src.as_slice().ends_with("\n") {
src.push('\n');
}
let filemap = Rc::new(FileMap {
name: filename,
src: src.to_string(),
start_pos: Pos::from_uint(start_pos),
lines: RefCell::new(Vec::new()),
multibyte_chars: RefCell::new(Vec::new()),
});
files.push(filemap.clone());
filemap
}
pub fn mk_substr_filename(&self, sp: Span) -> String {
let pos = self.lookup_char_pos(sp.lo);
(format!("<{}:{}:{}>",
pos.file.name,
pos.line,
pos.col.to_uint() + 1)).to_string()
}
/// Lookup source information about a BytePos
pub fn lookup_char_pos(&self, pos: BytePos) -> Loc {
self.lookup_pos(pos)
}
pub fn lookup_char_pos_adj(&self, pos: BytePos) -> LocWithOpt {
let loc = self.lookup_char_pos(pos);
LocWithOpt {
filename: loc.file.name.to_string(),
line: loc.line,
col: loc.col,
file: Some(loc.file)
}
}
pub fn span_to_string(&self, sp: Span) -> String {
if self.files.borrow().len() == 0 && sp == DUMMY_SP {
return "no-location".to_string();
}
let lo = self.lookup_char_pos_adj(sp.lo);
let hi = self.lookup_char_pos_adj(sp.hi);
return (format!("{}:{}:{}: {}:{}",
lo.filename,
lo.line,
lo.col.to_uint() + 1,
hi.line,
hi.col.to_uint() + 1)).to_string()
}
pub fn span_to_filename(&self, sp: Span) -> FileName {
self.lookup_char_pos(sp.lo).file.name.to_string()
}
pub fn span_to_lines(&self, sp: Span) -> FileLines {
let lo = self.lookup_char_pos(sp.lo);
let hi = self.lookup_char_pos(sp.hi);
let mut lines = Vec::new();
for i in range(lo.line - 1u, hi.line as uint) {
lines.push(i);
};
FileLines {file: lo.file, lines: lines}
}
pub fn span_to_snippet(&self, sp: Span) -> Option<String> {
let begin = self.lookup_byte_offset(sp.lo);
let end = self.lookup_byte_offset(sp.hi);
// FIXME #8256: this used to be an assert but whatever precondition
// it's testing isn't true for all spans in the AST, so to allow the
// caller to not have to panic (and it can't catch it since the CodeMap
// isn't sendable), return None
if begin.fm.start_pos!= end.fm.start_pos {
None
} else {
Some(begin.fm.src.as_slice().slice(begin.pos.to_uint(),
end.pos.to_uint()).to_string())
}
}
pub fn get_filemap(&self, filename: &str) -> Rc<FileMap> {
|
return fm.clone();
}
}
panic!("asking for {} which we don't know about", filename);
}
pub fn lookup_byte_offset(&self, bpos: BytePos) -> FileMapAndBytePos {
let idx = self.lookup_filemap_idx(bpos);
let fm = (*self.files.borrow())[idx].clone();
let offset = bpos - fm.start_pos;
FileMapAndBytePos {fm: fm, pos: offset}
}
/// Converts an absolute BytePos to a CharPos relative to the filemap and above.
pub fn bytepos_to_file_charpos(&self, bpos: BytePos) -> CharPos {
let idx = self.lookup_filemap_idx(bpos);
let files = self.files.borrow();
let map = &(*files)[idx];
// The number of extra bytes due to multibyte chars in the FileMap
let mut total_extra_bytes = 0;
for mbc in map.multibyte_chars.borrow().iter() {
debug!("{}-byte char at {}", mbc.bytes, mbc.pos);
if mbc.pos < bpos {
// every character is at least one byte, so we only
// count the actual extra bytes.
total_extra_bytes += mbc.bytes - 1;
// We should never see a byte position in the middle of a
// character
assert!(bpos.to_uint() >= mbc.pos.to_uint() + mbc.bytes);
} else {
break;
}
}
assert!(map.start_pos.to_uint() + total_extra_bytes <= bpos.to_uint());
CharPos(bpos.to_uint() - map.start_pos.to_uint() - total_extra_bytes)
}
fn lookup_filemap_idx(&self, pos: BytePos) -> uint {
let files = self.files.borrow();
let files = &*files;
let len = files.len();
let mut a = 0u;
let mut b = len;
while b - a > 1u {
let m = (a + b) / 2u;
if files[m].start_pos > pos {
b = m;
} else {
a = m;
}
}
// There can be filemaps with length 0. These have the same start_pos as
// the previous filemap, but are not the filemaps we want (because they
// are length 0, they cannot contain what we are looking for). So,
// rewind until we find a useful filemap.
loop {
let lines = files[a].lines.borrow();
let lines = lines;
if lines.len() > 0 {
break;
}
if a == 0 {
panic!("position {} does not resolve to a source location",
pos.to_uint());
}
a -= 1;
}
if a >= len {
panic!("position {} does not resolve to a source location",
pos.to_uint())
}
return a;
}
fn lookup_line(&self, pos: BytePos) -> FileMapAndLine {
let idx = self.lookup_filemap_idx(pos);
let files = self.files.borrow();
let f = (*files)[idx].clone();
let mut a = 0u;
{
let lines = f.lines.borrow();
let mut b = lines.len();
while b - a > 1u {
let m = (a + b) / 2u;
if (*lines)[m] > pos { b = m; } else { a = m; }
}
}
FileMapAndLine {fm: f, line: a}
}
fn lookup_pos(&self, pos: BytePos) -> Loc {
let FileMapAndLine {fm: f, line: a} = self.lookup_line(pos);
let line = a + 1u; // Line numbers start at 1
let chpos = self.bytepos_to_file_charpos(pos);
let linebpos = (*f.lines.borrow())[a];
let linechpos = self.bytepos_to_file_charpos(linebpos);
debug!("byte pos {} is on the line at byte pos {}",
pos, linebpos);
debug!("char pos {} is on the line at char pos {}",
chpos, linechpos);
debug!("byte is on line: {}", line);
assert!(chpos >= linechpos);
Loc {
file: f,
line: line,
col: chpos - linechpos
}
}
pub fn record_expansion(&self, expn_info: ExpnInfo) -> ExpnId {
let mut expansions = self.expansions.borrow_mut();
expansions.push(expn_info);
ExpnId(expansions.len().to_u32().expect("too many ExpnInfo's!") - 1)
}
pub fn with_expn_info<T>(&self, id: ExpnId, f: |Option<&ExpnInfo>| -> T) -> T {
match id {
NO_EXPANSION => f(None),
ExpnId(i) => f(Some(&(*self.expansions.borrow())[i as uint]))
}
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn t1 () {
let cm = CodeMap::new();
let fm = cm.new_filemap("blork.rs".to_string(),
"first line.\nsecond line".to_string());
fm.next_line(BytePos(0));
assert_eq!(&fm.get_line(0),&"first line.".to_string());
// TESTING BROKEN BEHAVIOR:
fm.next_line(BytePos(10));
assert_eq!(&fm.get_line(1), &".".to_string());
}
#[test]
#[should_fail]
fn t2 () {
let cm = CodeMap::new();
let fm = cm.new_filemap("blork.rs".to_string(),
"first line.\nsecond line".to_string());
// TESTING *REALLY* BROKEN BEHAVIOR:
fm.next_line(BytePos(0));
fm.next_line(BytePos(10));
fm.next_line(BytePos(2));
}
fn init_code_map() -> CodeMap {
let cm = CodeMap::new();
let fm1 = cm.new_filemap("blork.rs".to_string(),
"first line.\nsecond line".to_string());
let fm2 = cm.new_filemap("empty.rs".to_string(),
"".to_string());
let fm3 = cm.new_filemap("blork2.rs".to_string(),
"first line.\nsecond line".to_string());
fm1.next_line(BytePos(0));
fm1.next_line(BytePos(12));
fm2.next_line(BytePos(24));
fm3.next_line(BytePos(24));
fm3.next_line(BytePos(34));
cm
}
#[test]
fn t3() {
// Test lookup_byte_offset
let cm = init_code_map();
let fmabp1 = cm.lookup_byte_offset(BytePos(22));
assert_eq!(fmabp1.fm.name, "blork.rs".to_string());
assert_eq!(fmabp1.pos, BytePos(22));
let fmabp2 = cm.lookup_byte_offset(BytePos(24));
|
for fm in self.files.borrow().iter() {
if filename == fm.name.as_slice() {
|
random_line_split
|
codemap.rs
|
<CharPos,CharPos> for CharPos {
fn add(&self, rhs: &CharPos) -> CharPos {
CharPos(self.to_uint() + rhs.to_uint())
}
}
impl Sub<CharPos,CharPos> for CharPos {
fn sub(&self, rhs: &CharPos) -> CharPos {
CharPos(self.to_uint() - rhs.to_uint())
}
}
/**
Spans represent a region of code, used for error reporting. Positions in spans
are *absolute* positions from the beginning of the codemap, not positions
relative to FileMaps. Methods on the CodeMap can be used to relate spans back
to the original source.
*/
#[deriving(Clone, Show, Hash)]
pub struct Span {
pub lo: BytePos,
pub hi: BytePos,
/// Information about where the macro came from, if this piece of
/// code was created by a macro expansion.
pub expn_id: ExpnId
}
pub const DUMMY_SP: Span = Span { lo: BytePos(0), hi: BytePos(0), expn_id: NO_EXPANSION };
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)]
pub struct Spanned<T> {
pub node: T,
pub span: Span,
}
impl PartialEq for Span {
fn eq(&self, other: &Span) -> bool {
return (*self).lo == (*other).lo && (*self).hi == (*other).hi;
}
fn ne(&self, other: &Span) -> bool {!(*self).eq(other) }
}
impl Eq for Span {}
impl<S:Encoder<E>, E> Encodable<S, E> for Span {
/* Note #1972 -- spans are encoded but not decoded */
fn encode(&self, s: &mut S) -> Result<(), E> {
s.emit_nil()
}
}
impl<D:Decoder<E>, E> Decodable<D, E> for Span {
fn decode(_d: &mut D) -> Result<Span, E> {
Ok(DUMMY_SP)
}
}
pub fn spanned<T>(lo: BytePos, hi: BytePos, t: T) -> Spanned<T> {
respan(mk_sp(lo, hi), t)
}
pub fn respan<T>(sp: Span, t: T) -> Spanned<T> {
Spanned {node: t, span: sp}
}
pub fn dummy_spanned<T>(t: T) -> Spanned<T> {
respan(DUMMY_SP, t)
}
/* assuming that we're not in macro expansion */
pub fn mk_sp(lo: BytePos, hi: BytePos) -> Span {
Span {lo: lo, hi: hi, expn_id: NO_EXPANSION}
}
/// Return the span itself if it doesn't come from a macro expansion,
/// otherwise return the call site span up to the `enclosing_sp` by
/// following the `expn_info` chain.
pub fn original_sp(cm: &CodeMap, sp: Span, enclosing_sp: Span) -> Span {
let call_site1 = cm.with_expn_info(sp.expn_id, |ei| ei.map(|ei| ei.call_site));
let call_site2 = cm.with_expn_info(enclosing_sp.expn_id, |ei| ei.map(|ei| ei.call_site));
match (call_site1, call_site2) {
(None, _) => sp,
(Some(call_site1), Some(call_site2)) if call_site1 == call_site2 => sp,
(Some(call_site1), _) => original_sp(cm, call_site1, enclosing_sp),
}
}
/// A source code location used for error reporting
pub struct Loc {
/// Information about the original source
pub file: Rc<FileMap>,
/// The (1-based) line number
pub line: uint,
/// The (0-based) column offset
pub col: CharPos
}
/// A source code location used as the result of lookup_char_pos_adj
// Actually, *none* of the clients use the filename *or* file field;
// perhaps they should just be removed.
pub struct LocWithOpt {
pub filename: FileName,
pub line: uint,
pub col: CharPos,
pub file: Option<Rc<FileMap>>,
}
// used to be structural records. Better names, anyone?
pub struct FileMapAndLine { pub fm: Rc<FileMap>, pub line: uint }
pub struct FileMapAndBytePos { pub fm: Rc<FileMap>, pub pos: BytePos }
/// The syntax with which a macro was invoked.
#[deriving(Clone, Hash, Show)]
pub enum MacroFormat {
/// e.g. #[deriving(...)] <item>
MacroAttribute,
/// e.g. `format!()`
MacroBang
}
#[deriving(Clone, Hash, Show)]
pub struct NameAndSpan {
/// The name of the macro that was invoked to create the thing
/// with this Span.
pub name: String,
/// The format with which the macro was invoked.
pub format: MacroFormat,
/// The span of the macro definition itself. The macro may not
/// have a sensible definition span (e.g. something defined
/// completely inside libsyntax) in which case this is None.
pub span: Option<Span>
}
/// Extra information for tracking macro expansion of spans
#[deriving(Hash, Show)]
pub struct ExpnInfo {
/// The location of the actual macro invocation, e.g. `let x =
/// foo!();`
///
/// This may recursively refer to other macro invocations, e.g. if
/// `foo!()` invoked `bar!()` internally, and there was an
/// expression inside `bar!`; the call_site of the expression in
/// the expansion would point to the `bar!` invocation; that
/// call_site span would have its own ExpnInfo, with the call_site
/// pointing to the `foo!` invocation.
pub call_site: Span,
/// Information about the macro and its definition.
///
/// The `callee` of the inner expression in the `call_site`
/// example would point to the `macro_rules! bar {... }` and that
/// of the `bar!()` invocation would point to the `macro_rules!
/// foo {... }`.
pub callee: NameAndSpan
}
#[deriving(PartialEq, Eq, Clone, Show, Hash, Encodable, Decodable)]
pub struct ExpnId(u32);
pub const NO_EXPANSION: ExpnId = ExpnId(-1);
impl ExpnId {
pub fn from_llvm_cookie(cookie: c_uint) -> ExpnId {
ExpnId(cookie as u32)
}
pub fn to_llvm_cookie(self) -> i32 {
let ExpnId(cookie) = self;
cookie as i32
}
}
pub type FileName = String;
pub struct FileLines {
pub file: Rc<FileMap>,
pub lines: Vec<uint>
}
/// Identifies an offset of a multi-byte character in a FileMap
pub struct MultiByteChar {
/// The absolute offset of the character in the CodeMap
pub pos: BytePos,
/// The number of bytes, >=2
pub bytes: uint,
}
/// A single source in the CodeMap
pub struct FileMap {
/// The name of the file that the source came from, source that doesn't
/// originate from files has names between angle brackets by convention,
/// e.g. `<anon>`
pub name: FileName,
/// The complete source code
pub src: String,
/// The start position of this source in the CodeMap
pub start_pos: BytePos,
/// Locations of lines beginnings in the source code
pub lines: RefCell<Vec<BytePos> >,
/// Locations of multi-byte characters in the source code
pub multibyte_chars: RefCell<Vec<MultiByteChar> >,
}
impl FileMap {
/// EFFECT: register a start-of-line offset in the
/// table of line-beginnings.
/// UNCHECKED INVARIANT: these offsets must be added in the right
/// order and must be in the right places; there is shared knowledge
/// about what ends a line between this file and parse.rs
/// WARNING: pos param here is the offset relative to start of CodeMap,
/// and CodeMap will append a newline when adding a filemap without a newline at the end,
/// so the safe way to call this is with value calculated as
/// filemap.start_pos + newline_offset_relative_to_the_start_of_filemap.
pub fn next_line(&self, pos: BytePos) {
// the new charpos must be > the last one (or it's the first one).
let mut lines = self.lines.borrow_mut();
let line_len = lines.len();
assert!(line_len == 0 || ((*lines)[line_len - 1] < pos))
lines.push(pos);
}
/// get a line from the list of pre-computed line-beginnings
///
pub fn get_line(&self, line: int) -> String {
let lines = self.lines.borrow();
let begin: BytePos = (*lines)[line as uint] - self.start_pos;
let begin = begin.to_uint();
let slice = self.src.as_slice().slice_from(begin);
match slice.find('\n') {
Some(e) => slice.slice_to(e).to_string(),
None => slice.to_string()
}
}
pub fn record_multibyte_char(&self, pos: BytePos, bytes: uint) {
assert!(bytes >=2 && bytes <= 4);
let mbc = MultiByteChar {
pos: pos,
bytes: bytes,
};
self.multibyte_chars.borrow_mut().push(mbc);
}
pub fn is_real_file(&self) -> bool {
!(self.name.as_slice().starts_with("<") &&
self.name.as_slice().ends_with(">"))
}
}
pub struct CodeMap {
pub files: RefCell<Vec<Rc<FileMap>>>,
expansions: RefCell<Vec<ExpnInfo>>
}
impl CodeMap {
pub fn new() -> CodeMap {
CodeMap {
files: RefCell::new(Vec::new()),
expansions: RefCell::new(Vec::new()),
}
}
pub fn new_filemap(&self, filename: FileName, src: String) -> Rc<FileMap> {
let mut files = self.files.borrow_mut();
let start_pos = match files.last() {
None => 0,
Some(last) => last.start_pos.to_uint() + last.src.len(),
};
// Remove utf-8 BOM if any.
// FIXME #12884: no efficient/safe way to remove from the start of a string
// and reuse the allocation.
let mut src = if src.as_slice().starts_with("\ufeff") {
String::from_str(src.as_slice().slice_from(3))
} else {
String::from_str(src.as_slice())
};
// Append '\n' in case it's not already there.
// This is a workaround to prevent CodeMap.lookup_filemap_idx from accidentally
// overflowing into the next filemap in case the last byte of span is also the last
// byte of filemap, which leads to incorrect results from CodeMap.span_to_*.
if src.len() > 0 &&!src.as_slice().ends_with("\n") {
src.push('\n');
}
let filemap = Rc::new(FileMap {
name: filename,
src: src.to_string(),
start_pos: Pos::from_uint(start_pos),
lines: RefCell::new(Vec::new()),
multibyte_chars: RefCell::new(Vec::new()),
});
files.push(filemap.clone());
filemap
}
pub fn mk_substr_filename(&self, sp: Span) -> String {
let pos = self.lookup_char_pos(sp.lo);
(format!("<{}:{}:{}>",
pos.file.name,
pos.line,
pos.col.to_uint() + 1)).to_string()
}
/// Lookup source information about a BytePos
pub fn lookup_char_pos(&self, pos: BytePos) -> Loc {
self.lookup_pos(pos)
}
pub fn lookup_char_pos_adj(&self, pos: BytePos) -> LocWithOpt {
let loc = self.lookup_char_pos(pos);
LocWithOpt {
filename: loc.file.name.to_string(),
line: loc.line,
col: loc.col,
file: Some(loc.file)
}
}
pub fn span_to_string(&self, sp: Span) -> String {
if self.files.borrow().len() == 0 && sp == DUMMY_SP {
return "no-location".to_string();
}
let lo = self.lookup_char_pos_adj(sp.lo);
let hi = self.lookup_char_pos_adj(sp.hi);
return (format!("{}:{}:{}: {}:{}",
lo.filename,
lo.line,
lo.col.to_uint() + 1,
hi.line,
hi.col.to_uint() + 1)).to_string()
}
pub fn span_to_filename(&self, sp: Span) -> FileName {
self.lookup_char_pos(sp.lo).file.name.to_string()
}
pub fn span_to_lines(&self, sp: Span) -> FileLines {
let lo = self.lookup_char_pos(sp.lo);
let hi = self.lookup_char_pos(sp.hi);
let mut lines = Vec::new();
for i in range(lo.line - 1u, hi.line as uint) {
lines.push(i);
};
FileLines {file: lo.file, lines: lines}
}
pub fn span_to_snippet(&self, sp: Span) -> Option<String> {
let begin = self.lookup_byte_offset(sp.lo);
let end = self.lookup_byte_offset(sp.hi);
// FIXME #8256: this used to be an assert but whatever precondition
// it's testing isn't true for all spans in the AST, so to allow the
// caller to not have to panic (and it can't catch it since the CodeMap
// isn't sendable), return None
if begin.fm.start_pos!= end.fm.start_pos {
None
} else {
Some(begin.fm.src.as_slice().slice(begin.pos.to_uint(),
end.pos.to_uint()).to_string())
}
}
pub fn get_filemap(&self, filename: &str) -> Rc<FileMap> {
for fm in self.files.borrow().iter() {
if filename == fm.name.as_slice() {
return fm.clone();
}
}
panic!("asking for {} which we don't know about", filename);
}
pub fn lookup_byte_offset(&self, bpos: BytePos) -> FileMapAndBytePos
|
/// Converts an absolute BytePos to a CharPos relative to the filemap and above.
pub fn bytepos_to_file_charpos(&self, bpos: BytePos) -> CharPos {
let idx = self.lookup_filemap_idx(bpos);
let files = self.files.borrow();
let map = &(*files)[idx];
// The number of extra bytes due to multibyte chars in the FileMap
let mut total_extra_bytes = 0;
for mbc in map.multibyte_chars.borrow().iter() {
debug!("{}-byte char at {}", mbc.bytes, mbc.pos);
if mbc.pos < bpos {
// every character is at least one byte, so we only
// count the actual extra bytes.
total_extra_bytes += mbc.bytes - 1;
// We should never see a byte position in the middle of a
// character
assert!(bpos.to_uint() >= mbc.pos.to_uint() + mbc.bytes);
} else {
break;
}
}
assert!(map.start_pos.to_uint() + total_extra_bytes <= bpos.to_uint());
CharPos(bpos.to_uint() - map.start_pos.to_uint() - total_extra_bytes)
}
fn lookup_filemap_idx(&self, pos: BytePos) -> uint {
let files = self.files.borrow();
let files = &*files;
let len = files.len();
let mut a = 0u;
let mut b = len;
while b - a > 1u {
let m = (a + b) / 2u;
if files[m].start_pos > pos {
b = m;
} else {
a = m;
}
}
// There can be filemaps with length 0. These have the same start_pos as
// the previous filemap, but are not the filemaps we want (because they
// are length 0, they cannot contain what we are looking for). So,
// rewind until we find a useful filemap.
loop {
let lines = files[a].lines.borrow();
let lines = lines;
if lines.len() > 0 {
break;
}
if a == 0 {
panic!("position {} does not resolve to a source location",
pos.to_uint());
}
a -= 1;
}
if a >= len {
panic!("position {} does not resolve to a source location",
pos.to_uint())
}
return a;
}
fn lookup_line(&self, pos: BytePos) -> FileMapAndLine {
let idx = self.lookup_filemap_idx(pos);
let files = self.files.borrow();
let f = (*files)[idx].clone();
let mut a = 0u;
{
let lines = f.lines.borrow();
let mut b = lines.len();
while b - a > 1u {
let m = (a + b) / 2u;
if (*lines)[m] > pos { b = m; } else { a = m; }
}
}
FileMapAndLine {fm: f, line: a}
}
fn lookup_pos(&self, pos: BytePos) -> Loc {
let FileMapAndLine {fm: f, line: a} = self.lookup_line(pos);
let line = a + 1u; // Line numbers start at 1
let chpos = self.bytepos_to_file_charpos(pos);
let linebpos = (*f.lines.borrow())[a];
let linechpos = self.bytepos_to_file_charpos(linebpos);
debug!("byte pos {} is on the line at byte pos {}",
pos, linebpos);
debug!("char pos {} is on the line at char pos {}",
chpos, linechpos);
debug!("byte is on line: {}", line);
assert!(chpos >= linechpos);
Loc {
file: f,
line: line,
col: chpos - linechpos
}
}
pub fn record_expansion(&self, expn_info: ExpnInfo) -> ExpnId {
let mut expansions = self.expansions.borrow_mut();
expansions.push(expn_info);
ExpnId(expansions.len().to_u32().expect("too many ExpnInfo's!") - 1)
}
pub fn with_expn_info<T>(&self, id: ExpnId, f: |Option<&ExpnInfo>| -> T) -> T {
match id {
NO_EXPANSION => f(None),
ExpnId(i) => f(Some(&(*self.expansions.borrow())[i as uint]))
}
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn t1 () {
let cm = CodeMap::new();
let fm = cm.new_filemap("blork.rs".to_string(),
"first line.\nsecond line".to_string());
fm.next_line(BytePos(0));
assert_eq!(&fm.get_line(0),&"first line.".to_string());
// TESTING BROKEN BEHAVIOR:
fm.next_line(BytePos(10));
assert_eq!(&fm.get_line(1), &".".to_string());
}
#[test]
#[should_fail]
fn t2 () {
let cm = CodeMap::new();
let fm = cm.new_filemap("blork.rs".to_string(),
"first line.\nsecond line".to_string());
// TESTING *REALLY* BROKEN BEHAVIOR:
fm.next_line(BytePos(0));
fm.next_line(BytePos(10));
fm.next_line(BytePos(2));
}
fn init_code_map() -> CodeMap {
let cm = CodeMap::new();
let fm1 = cm.new_filemap("blork.rs".to_string(),
"first line.\nsecond line".to_string());
let fm2 = cm.new_filemap("empty.rs".to_string(),
"".to_string());
let fm3 = cm.new_filemap("blork2.rs".to_string(),
"first line.\nsecond line".to_string());
fm1.next_line(BytePos(0));
fm1.next_line(BytePos(12));
fm2.next_line(BytePos(24));
fm3.next_line(BytePos(24));
fm3.next_line(BytePos(34));
cm
}
#[test]
fn t3() {
// Test lookup_byte_offset
let cm = init_code_map();
let fmabp1 = cm.lookup_byte_offset(BytePos(22));
assert_eq!(fmabp1.fm.name, "blork.rs".to_string());
assert_eq!(fmabp1.pos, BytePos(22));
let fmabp2 = cm.lookup_byte_offset(BytePos(24));
|
{
let idx = self.lookup_filemap_idx(bpos);
let fm = (*self.files.borrow())[idx].clone();
let offset = bpos - fm.start_pos;
FileMapAndBytePos {fm: fm, pos: offset}
}
|
identifier_body
|
codemap.rs
|
<CharPos,CharPos> for CharPos {
fn add(&self, rhs: &CharPos) -> CharPos {
CharPos(self.to_uint() + rhs.to_uint())
}
}
impl Sub<CharPos,CharPos> for CharPos {
fn sub(&self, rhs: &CharPos) -> CharPos {
CharPos(self.to_uint() - rhs.to_uint())
}
}
/**
Spans represent a region of code, used for error reporting. Positions in spans
are *absolute* positions from the beginning of the codemap, not positions
relative to FileMaps. Methods on the CodeMap can be used to relate spans back
to the original source.
*/
#[deriving(Clone, Show, Hash)]
pub struct Span {
pub lo: BytePos,
pub hi: BytePos,
/// Information about where the macro came from, if this piece of
/// code was created by a macro expansion.
pub expn_id: ExpnId
}
pub const DUMMY_SP: Span = Span { lo: BytePos(0), hi: BytePos(0), expn_id: NO_EXPANSION };
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)]
pub struct Spanned<T> {
pub node: T,
pub span: Span,
}
impl PartialEq for Span {
fn eq(&self, other: &Span) -> bool {
return (*self).lo == (*other).lo && (*self).hi == (*other).hi;
}
fn ne(&self, other: &Span) -> bool {!(*self).eq(other) }
}
impl Eq for Span {}
impl<S:Encoder<E>, E> Encodable<S, E> for Span {
/* Note #1972 -- spans are encoded but not decoded */
fn encode(&self, s: &mut S) -> Result<(), E> {
s.emit_nil()
}
}
impl<D:Decoder<E>, E> Decodable<D, E> for Span {
fn decode(_d: &mut D) -> Result<Span, E> {
Ok(DUMMY_SP)
}
}
pub fn spanned<T>(lo: BytePos, hi: BytePos, t: T) -> Spanned<T> {
respan(mk_sp(lo, hi), t)
}
pub fn respan<T>(sp: Span, t: T) -> Spanned<T> {
Spanned {node: t, span: sp}
}
pub fn dummy_spanned<T>(t: T) -> Spanned<T> {
respan(DUMMY_SP, t)
}
/* assuming that we're not in macro expansion */
pub fn mk_sp(lo: BytePos, hi: BytePos) -> Span {
Span {lo: lo, hi: hi, expn_id: NO_EXPANSION}
}
/// Return the span itself if it doesn't come from a macro expansion,
/// otherwise return the call site span up to the `enclosing_sp` by
/// following the `expn_info` chain.
pub fn original_sp(cm: &CodeMap, sp: Span, enclosing_sp: Span) -> Span {
let call_site1 = cm.with_expn_info(sp.expn_id, |ei| ei.map(|ei| ei.call_site));
let call_site2 = cm.with_expn_info(enclosing_sp.expn_id, |ei| ei.map(|ei| ei.call_site));
match (call_site1, call_site2) {
(None, _) => sp,
(Some(call_site1), Some(call_site2)) if call_site1 == call_site2 => sp,
(Some(call_site1), _) => original_sp(cm, call_site1, enclosing_sp),
}
}
/// A source code location used for error reporting
pub struct Loc {
/// Information about the original source
pub file: Rc<FileMap>,
/// The (1-based) line number
pub line: uint,
/// The (0-based) column offset
pub col: CharPos
}
/// A source code location used as the result of lookup_char_pos_adj
// Actually, *none* of the clients use the filename *or* file field;
// perhaps they should just be removed.
pub struct LocWithOpt {
pub filename: FileName,
pub line: uint,
pub col: CharPos,
pub file: Option<Rc<FileMap>>,
}
// used to be structural records. Better names, anyone?
pub struct FileMapAndLine { pub fm: Rc<FileMap>, pub line: uint }
pub struct FileMapAndBytePos { pub fm: Rc<FileMap>, pub pos: BytePos }
/// The syntax with which a macro was invoked.
#[deriving(Clone, Hash, Show)]
pub enum MacroFormat {
/// e.g. #[deriving(...)] <item>
MacroAttribute,
/// e.g. `format!()`
MacroBang
}
#[deriving(Clone, Hash, Show)]
pub struct NameAndSpan {
/// The name of the macro that was invoked to create the thing
/// with this Span.
pub name: String,
/// The format with which the macro was invoked.
pub format: MacroFormat,
/// The span of the macro definition itself. The macro may not
/// have a sensible definition span (e.g. something defined
/// completely inside libsyntax) in which case this is None.
pub span: Option<Span>
}
/// Extra information for tracking macro expansion of spans
#[deriving(Hash, Show)]
pub struct ExpnInfo {
/// The location of the actual macro invocation, e.g. `let x =
/// foo!();`
///
/// This may recursively refer to other macro invocations, e.g. if
/// `foo!()` invoked `bar!()` internally, and there was an
/// expression inside `bar!`; the call_site of the expression in
/// the expansion would point to the `bar!` invocation; that
/// call_site span would have its own ExpnInfo, with the call_site
/// pointing to the `foo!` invocation.
pub call_site: Span,
/// Information about the macro and its definition.
///
/// The `callee` of the inner expression in the `call_site`
/// example would point to the `macro_rules! bar {... }` and that
/// of the `bar!()` invocation would point to the `macro_rules!
/// foo {... }`.
pub callee: NameAndSpan
}
#[deriving(PartialEq, Eq, Clone, Show, Hash, Encodable, Decodable)]
pub struct ExpnId(u32);
pub const NO_EXPANSION: ExpnId = ExpnId(-1);
impl ExpnId {
pub fn from_llvm_cookie(cookie: c_uint) -> ExpnId {
ExpnId(cookie as u32)
}
pub fn to_llvm_cookie(self) -> i32 {
let ExpnId(cookie) = self;
cookie as i32
}
}
pub type FileName = String;
pub struct FileLines {
pub file: Rc<FileMap>,
pub lines: Vec<uint>
}
/// Identifies an offset of a multi-byte character in a FileMap
pub struct MultiByteChar {
/// The absolute offset of the character in the CodeMap
pub pos: BytePos,
/// The number of bytes, >=2
pub bytes: uint,
}
/// A single source in the CodeMap
pub struct FileMap {
/// The name of the file that the source came from, source that doesn't
/// originate from files has names between angle brackets by convention,
/// e.g. `<anon>`
pub name: FileName,
/// The complete source code
pub src: String,
/// The start position of this source in the CodeMap
pub start_pos: BytePos,
/// Locations of lines beginnings in the source code
pub lines: RefCell<Vec<BytePos> >,
/// Locations of multi-byte characters in the source code
pub multibyte_chars: RefCell<Vec<MultiByteChar> >,
}
impl FileMap {
/// EFFECT: register a start-of-line offset in the
/// table of line-beginnings.
/// UNCHECKED INVARIANT: these offsets must be added in the right
/// order and must be in the right places; there is shared knowledge
/// about what ends a line between this file and parse.rs
/// WARNING: pos param here is the offset relative to start of CodeMap,
/// and CodeMap will append a newline when adding a filemap without a newline at the end,
/// so the safe way to call this is with value calculated as
/// filemap.start_pos + newline_offset_relative_to_the_start_of_filemap.
pub fn next_line(&self, pos: BytePos) {
// the new charpos must be > the last one (or it's the first one).
let mut lines = self.lines.borrow_mut();
let line_len = lines.len();
assert!(line_len == 0 || ((*lines)[line_len - 1] < pos))
lines.push(pos);
}
/// get a line from the list of pre-computed line-beginnings
///
pub fn get_line(&self, line: int) -> String {
let lines = self.lines.borrow();
let begin: BytePos = (*lines)[line as uint] - self.start_pos;
let begin = begin.to_uint();
let slice = self.src.as_slice().slice_from(begin);
match slice.find('\n') {
Some(e) => slice.slice_to(e).to_string(),
None => slice.to_string()
}
}
pub fn record_multibyte_char(&self, pos: BytePos, bytes: uint) {
assert!(bytes >=2 && bytes <= 4);
let mbc = MultiByteChar {
pos: pos,
bytes: bytes,
};
self.multibyte_chars.borrow_mut().push(mbc);
}
pub fn is_real_file(&self) -> bool {
!(self.name.as_slice().starts_with("<") &&
self.name.as_slice().ends_with(">"))
}
}
pub struct CodeMap {
pub files: RefCell<Vec<Rc<FileMap>>>,
expansions: RefCell<Vec<ExpnInfo>>
}
impl CodeMap {
pub fn new() -> CodeMap {
CodeMap {
files: RefCell::new(Vec::new()),
expansions: RefCell::new(Vec::new()),
}
}
pub fn new_filemap(&self, filename: FileName, src: String) -> Rc<FileMap> {
let mut files = self.files.borrow_mut();
let start_pos = match files.last() {
None => 0,
Some(last) => last.start_pos.to_uint() + last.src.len(),
};
// Remove utf-8 BOM if any.
// FIXME #12884: no efficient/safe way to remove from the start of a string
// and reuse the allocation.
let mut src = if src.as_slice().starts_with("\ufeff")
|
else {
String::from_str(src.as_slice())
};
// Append '\n' in case it's not already there.
// This is a workaround to prevent CodeMap.lookup_filemap_idx from accidentally
// overflowing into the next filemap in case the last byte of span is also the last
// byte of filemap, which leads to incorrect results from CodeMap.span_to_*.
if src.len() > 0 &&!src.as_slice().ends_with("\n") {
src.push('\n');
}
let filemap = Rc::new(FileMap {
name: filename,
src: src.to_string(),
start_pos: Pos::from_uint(start_pos),
lines: RefCell::new(Vec::new()),
multibyte_chars: RefCell::new(Vec::new()),
});
files.push(filemap.clone());
filemap
}
pub fn mk_substr_filename(&self, sp: Span) -> String {
let pos = self.lookup_char_pos(sp.lo);
(format!("<{}:{}:{}>",
pos.file.name,
pos.line,
pos.col.to_uint() + 1)).to_string()
}
/// Lookup source information about a BytePos
pub fn lookup_char_pos(&self, pos: BytePos) -> Loc {
self.lookup_pos(pos)
}
pub fn lookup_char_pos_adj(&self, pos: BytePos) -> LocWithOpt {
let loc = self.lookup_char_pos(pos);
LocWithOpt {
filename: loc.file.name.to_string(),
line: loc.line,
col: loc.col,
file: Some(loc.file)
}
}
pub fn span_to_string(&self, sp: Span) -> String {
if self.files.borrow().len() == 0 && sp == DUMMY_SP {
return "no-location".to_string();
}
let lo = self.lookup_char_pos_adj(sp.lo);
let hi = self.lookup_char_pos_adj(sp.hi);
return (format!("{}:{}:{}: {}:{}",
lo.filename,
lo.line,
lo.col.to_uint() + 1,
hi.line,
hi.col.to_uint() + 1)).to_string()
}
pub fn span_to_filename(&self, sp: Span) -> FileName {
self.lookup_char_pos(sp.lo).file.name.to_string()
}
pub fn span_to_lines(&self, sp: Span) -> FileLines {
let lo = self.lookup_char_pos(sp.lo);
let hi = self.lookup_char_pos(sp.hi);
let mut lines = Vec::new();
for i in range(lo.line - 1u, hi.line as uint) {
lines.push(i);
};
FileLines {file: lo.file, lines: lines}
}
pub fn span_to_snippet(&self, sp: Span) -> Option<String> {
let begin = self.lookup_byte_offset(sp.lo);
let end = self.lookup_byte_offset(sp.hi);
// FIXME #8256: this used to be an assert but whatever precondition
// it's testing isn't true for all spans in the AST, so to allow the
// caller to not have to panic (and it can't catch it since the CodeMap
// isn't sendable), return None
if begin.fm.start_pos!= end.fm.start_pos {
None
} else {
Some(begin.fm.src.as_slice().slice(begin.pos.to_uint(),
end.pos.to_uint()).to_string())
}
}
pub fn get_filemap(&self, filename: &str) -> Rc<FileMap> {
for fm in self.files.borrow().iter() {
if filename == fm.name.as_slice() {
return fm.clone();
}
}
panic!("asking for {} which we don't know about", filename);
}
pub fn lookup_byte_offset(&self, bpos: BytePos) -> FileMapAndBytePos {
let idx = self.lookup_filemap_idx(bpos);
let fm = (*self.files.borrow())[idx].clone();
let offset = bpos - fm.start_pos;
FileMapAndBytePos {fm: fm, pos: offset}
}
/// Converts an absolute BytePos to a CharPos relative to the filemap and above.
pub fn bytepos_to_file_charpos(&self, bpos: BytePos) -> CharPos {
let idx = self.lookup_filemap_idx(bpos);
let files = self.files.borrow();
let map = &(*files)[idx];
// The number of extra bytes due to multibyte chars in the FileMap
let mut total_extra_bytes = 0;
for mbc in map.multibyte_chars.borrow().iter() {
debug!("{}-byte char at {}", mbc.bytes, mbc.pos);
if mbc.pos < bpos {
// every character is at least one byte, so we only
// count the actual extra bytes.
total_extra_bytes += mbc.bytes - 1;
// We should never see a byte position in the middle of a
// character
assert!(bpos.to_uint() >= mbc.pos.to_uint() + mbc.bytes);
} else {
break;
}
}
assert!(map.start_pos.to_uint() + total_extra_bytes <= bpos.to_uint());
CharPos(bpos.to_uint() - map.start_pos.to_uint() - total_extra_bytes)
}
fn lookup_filemap_idx(&self, pos: BytePos) -> uint {
let files = self.files.borrow();
let files = &*files;
let len = files.len();
let mut a = 0u;
let mut b = len;
while b - a > 1u {
let m = (a + b) / 2u;
if files[m].start_pos > pos {
b = m;
} else {
a = m;
}
}
// There can be filemaps with length 0. These have the same start_pos as
// the previous filemap, but are not the filemaps we want (because they
// are length 0, they cannot contain what we are looking for). So,
// rewind until we find a useful filemap.
loop {
let lines = files[a].lines.borrow();
let lines = lines;
if lines.len() > 0 {
break;
}
if a == 0 {
panic!("position {} does not resolve to a source location",
pos.to_uint());
}
a -= 1;
}
if a >= len {
panic!("position {} does not resolve to a source location",
pos.to_uint())
}
return a;
}
fn lookup_line(&self, pos: BytePos) -> FileMapAndLine {
let idx = self.lookup_filemap_idx(pos);
let files = self.files.borrow();
let f = (*files)[idx].clone();
let mut a = 0u;
{
let lines = f.lines.borrow();
let mut b = lines.len();
while b - a > 1u {
let m = (a + b) / 2u;
if (*lines)[m] > pos { b = m; } else { a = m; }
}
}
FileMapAndLine {fm: f, line: a}
}
fn lookup_pos(&self, pos: BytePos) -> Loc {
let FileMapAndLine {fm: f, line: a} = self.lookup_line(pos);
let line = a + 1u; // Line numbers start at 1
let chpos = self.bytepos_to_file_charpos(pos);
let linebpos = (*f.lines.borrow())[a];
let linechpos = self.bytepos_to_file_charpos(linebpos);
debug!("byte pos {} is on the line at byte pos {}",
pos, linebpos);
debug!("char pos {} is on the line at char pos {}",
chpos, linechpos);
debug!("byte is on line: {}", line);
assert!(chpos >= linechpos);
Loc {
file: f,
line: line,
col: chpos - linechpos
}
}
pub fn record_expansion(&self, expn_info: ExpnInfo) -> ExpnId {
let mut expansions = self.expansions.borrow_mut();
expansions.push(expn_info);
ExpnId(expansions.len().to_u32().expect("too many ExpnInfo's!") - 1)
}
pub fn with_expn_info<T>(&self, id: ExpnId, f: |Option<&ExpnInfo>| -> T) -> T {
match id {
NO_EXPANSION => f(None),
ExpnId(i) => f(Some(&(*self.expansions.borrow())[i as uint]))
}
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn t1 () {
let cm = CodeMap::new();
let fm = cm.new_filemap("blork.rs".to_string(),
"first line.\nsecond line".to_string());
fm.next_line(BytePos(0));
assert_eq!(&fm.get_line(0),&"first line.".to_string());
// TESTING BROKEN BEHAVIOR:
fm.next_line(BytePos(10));
assert_eq!(&fm.get_line(1), &".".to_string());
}
#[test]
#[should_fail]
fn t2 () {
let cm = CodeMap::new();
let fm = cm.new_filemap("blork.rs".to_string(),
"first line.\nsecond line".to_string());
// TESTING *REALLY* BROKEN BEHAVIOR:
fm.next_line(BytePos(0));
fm.next_line(BytePos(10));
fm.next_line(BytePos(2));
}
fn init_code_map() -> CodeMap {
let cm = CodeMap::new();
let fm1 = cm.new_filemap("blork.rs".to_string(),
"first line.\nsecond line".to_string());
let fm2 = cm.new_filemap("empty.rs".to_string(),
"".to_string());
let fm3 = cm.new_filemap("blork2.rs".to_string(),
"first line.\nsecond line".to_string());
fm1.next_line(BytePos(0));
fm1.next_line(BytePos(12));
fm2.next_line(BytePos(24));
fm3.next_line(BytePos(24));
fm3.next_line(BytePos(34));
cm
}
#[test]
fn t3() {
// Test lookup_byte_offset
let cm = init_code_map();
let fmabp1 = cm.lookup_byte_offset(BytePos(22));
assert_eq!(fmabp1.fm.name, "blork.rs".to_string());
assert_eq!(fmabp1.pos, BytePos(22));
let fmabp2 = cm.lookup_byte_offset(BytePos(24));
|
{
String::from_str(src.as_slice().slice_from(3))
}
|
conditional_block
|
codemap.rs
|
<CharPos,CharPos> for CharPos {
fn add(&self, rhs: &CharPos) -> CharPos {
CharPos(self.to_uint() + rhs.to_uint())
}
}
impl Sub<CharPos,CharPos> for CharPos {
fn sub(&self, rhs: &CharPos) -> CharPos {
CharPos(self.to_uint() - rhs.to_uint())
}
}
/**
Spans represent a region of code, used for error reporting. Positions in spans
are *absolute* positions from the beginning of the codemap, not positions
relative to FileMaps. Methods on the CodeMap can be used to relate spans back
to the original source.
*/
#[deriving(Clone, Show, Hash)]
pub struct Span {
pub lo: BytePos,
pub hi: BytePos,
/// Information about where the macro came from, if this piece of
/// code was created by a macro expansion.
pub expn_id: ExpnId
}
pub const DUMMY_SP: Span = Span { lo: BytePos(0), hi: BytePos(0), expn_id: NO_EXPANSION };
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)]
pub struct Spanned<T> {
pub node: T,
pub span: Span,
}
impl PartialEq for Span {
fn eq(&self, other: &Span) -> bool {
return (*self).lo == (*other).lo && (*self).hi == (*other).hi;
}
fn ne(&self, other: &Span) -> bool {!(*self).eq(other) }
}
impl Eq for Span {}
impl<S:Encoder<E>, E> Encodable<S, E> for Span {
/* Note #1972 -- spans are encoded but not decoded */
fn encode(&self, s: &mut S) -> Result<(), E> {
s.emit_nil()
}
}
impl<D:Decoder<E>, E> Decodable<D, E> for Span {
fn decode(_d: &mut D) -> Result<Span, E> {
Ok(DUMMY_SP)
}
}
pub fn spanned<T>(lo: BytePos, hi: BytePos, t: T) -> Spanned<T> {
respan(mk_sp(lo, hi), t)
}
pub fn respan<T>(sp: Span, t: T) -> Spanned<T> {
Spanned {node: t, span: sp}
}
pub fn dummy_spanned<T>(t: T) -> Spanned<T> {
respan(DUMMY_SP, t)
}
/* assuming that we're not in macro expansion */
pub fn mk_sp(lo: BytePos, hi: BytePos) -> Span {
Span {lo: lo, hi: hi, expn_id: NO_EXPANSION}
}
/// Return the span itself if it doesn't come from a macro expansion,
/// otherwise return the call site span up to the `enclosing_sp` by
/// following the `expn_info` chain.
pub fn original_sp(cm: &CodeMap, sp: Span, enclosing_sp: Span) -> Span {
let call_site1 = cm.with_expn_info(sp.expn_id, |ei| ei.map(|ei| ei.call_site));
let call_site2 = cm.with_expn_info(enclosing_sp.expn_id, |ei| ei.map(|ei| ei.call_site));
match (call_site1, call_site2) {
(None, _) => sp,
(Some(call_site1), Some(call_site2)) if call_site1 == call_site2 => sp,
(Some(call_site1), _) => original_sp(cm, call_site1, enclosing_sp),
}
}
/// A source code location used for error reporting
pub struct Loc {
/// Information about the original source
pub file: Rc<FileMap>,
/// The (1-based) line number
pub line: uint,
/// The (0-based) column offset
pub col: CharPos
}
/// A source code location used as the result of lookup_char_pos_adj
// Actually, *none* of the clients use the filename *or* file field;
// perhaps they should just be removed.
pub struct LocWithOpt {
pub filename: FileName,
pub line: uint,
pub col: CharPos,
pub file: Option<Rc<FileMap>>,
}
// used to be structural records. Better names, anyone?
pub struct FileMapAndLine { pub fm: Rc<FileMap>, pub line: uint }
pub struct
|
{ pub fm: Rc<FileMap>, pub pos: BytePos }
/// The syntax with which a macro was invoked.
#[deriving(Clone, Hash, Show)]
pub enum MacroFormat {
/// e.g. #[deriving(...)] <item>
MacroAttribute,
/// e.g. `format!()`
MacroBang
}
#[deriving(Clone, Hash, Show)]
pub struct NameAndSpan {
/// The name of the macro that was invoked to create the thing
/// with this Span.
pub name: String,
/// The format with which the macro was invoked.
pub format: MacroFormat,
/// The span of the macro definition itself. The macro may not
/// have a sensible definition span (e.g. something defined
/// completely inside libsyntax) in which case this is None.
pub span: Option<Span>
}
/// Extra information for tracking macro expansion of spans
#[deriving(Hash, Show)]
pub struct ExpnInfo {
/// The location of the actual macro invocation, e.g. `let x =
/// foo!();`
///
/// This may recursively refer to other macro invocations, e.g. if
/// `foo!()` invoked `bar!()` internally, and there was an
/// expression inside `bar!`; the call_site of the expression in
/// the expansion would point to the `bar!` invocation; that
/// call_site span would have its own ExpnInfo, with the call_site
/// pointing to the `foo!` invocation.
pub call_site: Span,
/// Information about the macro and its definition.
///
/// The `callee` of the inner expression in the `call_site`
/// example would point to the `macro_rules! bar {... }` and that
/// of the `bar!()` invocation would point to the `macro_rules!
/// foo {... }`.
pub callee: NameAndSpan
}
#[deriving(PartialEq, Eq, Clone, Show, Hash, Encodable, Decodable)]
pub struct ExpnId(u32);
pub const NO_EXPANSION: ExpnId = ExpnId(-1);
impl ExpnId {
pub fn from_llvm_cookie(cookie: c_uint) -> ExpnId {
ExpnId(cookie as u32)
}
pub fn to_llvm_cookie(self) -> i32 {
let ExpnId(cookie) = self;
cookie as i32
}
}
pub type FileName = String;
pub struct FileLines {
pub file: Rc<FileMap>,
pub lines: Vec<uint>
}
/// Identifies an offset of a multi-byte character in a FileMap
pub struct MultiByteChar {
/// The absolute offset of the character in the CodeMap
pub pos: BytePos,
/// The number of bytes, >=2
pub bytes: uint,
}
/// A single source in the CodeMap
pub struct FileMap {
/// The name of the file that the source came from, source that doesn't
/// originate from files has names between angle brackets by convention,
/// e.g. `<anon>`
pub name: FileName,
/// The complete source code
pub src: String,
/// The start position of this source in the CodeMap
pub start_pos: BytePos,
/// Locations of lines beginnings in the source code
pub lines: RefCell<Vec<BytePos> >,
/// Locations of multi-byte characters in the source code
pub multibyte_chars: RefCell<Vec<MultiByteChar> >,
}
impl FileMap {
/// EFFECT: register a start-of-line offset in the
/// table of line-beginnings.
/// UNCHECKED INVARIANT: these offsets must be added in the right
/// order and must be in the right places; there is shared knowledge
/// about what ends a line between this file and parse.rs
/// WARNING: pos param here is the offset relative to start of CodeMap,
/// and CodeMap will append a newline when adding a filemap without a newline at the end,
/// so the safe way to call this is with value calculated as
/// filemap.start_pos + newline_offset_relative_to_the_start_of_filemap.
pub fn next_line(&self, pos: BytePos) {
// the new charpos must be > the last one (or it's the first one).
let mut lines = self.lines.borrow_mut();
let line_len = lines.len();
assert!(line_len == 0 || ((*lines)[line_len - 1] < pos))
lines.push(pos);
}
/// get a line from the list of pre-computed line-beginnings
///
pub fn get_line(&self, line: int) -> String {
let lines = self.lines.borrow();
let begin: BytePos = (*lines)[line as uint] - self.start_pos;
let begin = begin.to_uint();
let slice = self.src.as_slice().slice_from(begin);
match slice.find('\n') {
Some(e) => slice.slice_to(e).to_string(),
None => slice.to_string()
}
}
pub fn record_multibyte_char(&self, pos: BytePos, bytes: uint) {
assert!(bytes >=2 && bytes <= 4);
let mbc = MultiByteChar {
pos: pos,
bytes: bytes,
};
self.multibyte_chars.borrow_mut().push(mbc);
}
pub fn is_real_file(&self) -> bool {
!(self.name.as_slice().starts_with("<") &&
self.name.as_slice().ends_with(">"))
}
}
pub struct CodeMap {
pub files: RefCell<Vec<Rc<FileMap>>>,
expansions: RefCell<Vec<ExpnInfo>>
}
impl CodeMap {
pub fn new() -> CodeMap {
CodeMap {
files: RefCell::new(Vec::new()),
expansions: RefCell::new(Vec::new()),
}
}
pub fn new_filemap(&self, filename: FileName, src: String) -> Rc<FileMap> {
let mut files = self.files.borrow_mut();
let start_pos = match files.last() {
None => 0,
Some(last) => last.start_pos.to_uint() + last.src.len(),
};
// Remove utf-8 BOM if any.
// FIXME #12884: no efficient/safe way to remove from the start of a string
// and reuse the allocation.
let mut src = if src.as_slice().starts_with("\ufeff") {
String::from_str(src.as_slice().slice_from(3))
} else {
String::from_str(src.as_slice())
};
// Append '\n' in case it's not already there.
// This is a workaround to prevent CodeMap.lookup_filemap_idx from accidentally
// overflowing into the next filemap in case the last byte of span is also the last
// byte of filemap, which leads to incorrect results from CodeMap.span_to_*.
if src.len() > 0 &&!src.as_slice().ends_with("\n") {
src.push('\n');
}
let filemap = Rc::new(FileMap {
name: filename,
src: src.to_string(),
start_pos: Pos::from_uint(start_pos),
lines: RefCell::new(Vec::new()),
multibyte_chars: RefCell::new(Vec::new()),
});
files.push(filemap.clone());
filemap
}
pub fn mk_substr_filename(&self, sp: Span) -> String {
let pos = self.lookup_char_pos(sp.lo);
(format!("<{}:{}:{}>",
pos.file.name,
pos.line,
pos.col.to_uint() + 1)).to_string()
}
/// Lookup source information about a BytePos
pub fn lookup_char_pos(&self, pos: BytePos) -> Loc {
self.lookup_pos(pos)
}
pub fn lookup_char_pos_adj(&self, pos: BytePos) -> LocWithOpt {
let loc = self.lookup_char_pos(pos);
LocWithOpt {
filename: loc.file.name.to_string(),
line: loc.line,
col: loc.col,
file: Some(loc.file)
}
}
pub fn span_to_string(&self, sp: Span) -> String {
if self.files.borrow().len() == 0 && sp == DUMMY_SP {
return "no-location".to_string();
}
let lo = self.lookup_char_pos_adj(sp.lo);
let hi = self.lookup_char_pos_adj(sp.hi);
return (format!("{}:{}:{}: {}:{}",
lo.filename,
lo.line,
lo.col.to_uint() + 1,
hi.line,
hi.col.to_uint() + 1)).to_string()
}
pub fn span_to_filename(&self, sp: Span) -> FileName {
self.lookup_char_pos(sp.lo).file.name.to_string()
}
pub fn span_to_lines(&self, sp: Span) -> FileLines {
let lo = self.lookup_char_pos(sp.lo);
let hi = self.lookup_char_pos(sp.hi);
let mut lines = Vec::new();
for i in range(lo.line - 1u, hi.line as uint) {
lines.push(i);
};
FileLines {file: lo.file, lines: lines}
}
pub fn span_to_snippet(&self, sp: Span) -> Option<String> {
let begin = self.lookup_byte_offset(sp.lo);
let end = self.lookup_byte_offset(sp.hi);
// FIXME #8256: this used to be an assert but whatever precondition
// it's testing isn't true for all spans in the AST, so to allow the
// caller to not have to panic (and it can't catch it since the CodeMap
// isn't sendable), return None
if begin.fm.start_pos!= end.fm.start_pos {
None
} else {
Some(begin.fm.src.as_slice().slice(begin.pos.to_uint(),
end.pos.to_uint()).to_string())
}
}
pub fn get_filemap(&self, filename: &str) -> Rc<FileMap> {
for fm in self.files.borrow().iter() {
if filename == fm.name.as_slice() {
return fm.clone();
}
}
panic!("asking for {} which we don't know about", filename);
}
pub fn lookup_byte_offset(&self, bpos: BytePos) -> FileMapAndBytePos {
let idx = self.lookup_filemap_idx(bpos);
let fm = (*self.files.borrow())[idx].clone();
let offset = bpos - fm.start_pos;
FileMapAndBytePos {fm: fm, pos: offset}
}
/// Converts an absolute BytePos to a CharPos relative to the filemap and above.
pub fn bytepos_to_file_charpos(&self, bpos: BytePos) -> CharPos {
let idx = self.lookup_filemap_idx(bpos);
let files = self.files.borrow();
let map = &(*files)[idx];
// The number of extra bytes due to multibyte chars in the FileMap
let mut total_extra_bytes = 0;
for mbc in map.multibyte_chars.borrow().iter() {
debug!("{}-byte char at {}", mbc.bytes, mbc.pos);
if mbc.pos < bpos {
// every character is at least one byte, so we only
// count the actual extra bytes.
total_extra_bytes += mbc.bytes - 1;
// We should never see a byte position in the middle of a
// character
assert!(bpos.to_uint() >= mbc.pos.to_uint() + mbc.bytes);
} else {
break;
}
}
assert!(map.start_pos.to_uint() + total_extra_bytes <= bpos.to_uint());
CharPos(bpos.to_uint() - map.start_pos.to_uint() - total_extra_bytes)
}
fn lookup_filemap_idx(&self, pos: BytePos) -> uint {
let files = self.files.borrow();
let files = &*files;
let len = files.len();
let mut a = 0u;
let mut b = len;
while b - a > 1u {
let m = (a + b) / 2u;
if files[m].start_pos > pos {
b = m;
} else {
a = m;
}
}
// There can be filemaps with length 0. These have the same start_pos as
// the previous filemap, but are not the filemaps we want (because they
// are length 0, they cannot contain what we are looking for). So,
// rewind until we find a useful filemap.
loop {
let lines = files[a].lines.borrow();
let lines = lines;
if lines.len() > 0 {
break;
}
if a == 0 {
panic!("position {} does not resolve to a source location",
pos.to_uint());
}
a -= 1;
}
if a >= len {
panic!("position {} does not resolve to a source location",
pos.to_uint())
}
return a;
}
fn lookup_line(&self, pos: BytePos) -> FileMapAndLine {
let idx = self.lookup_filemap_idx(pos);
let files = self.files.borrow();
let f = (*files)[idx].clone();
let mut a = 0u;
{
let lines = f.lines.borrow();
let mut b = lines.len();
while b - a > 1u {
let m = (a + b) / 2u;
if (*lines)[m] > pos { b = m; } else { a = m; }
}
}
FileMapAndLine {fm: f, line: a}
}
fn lookup_pos(&self, pos: BytePos) -> Loc {
let FileMapAndLine {fm: f, line: a} = self.lookup_line(pos);
let line = a + 1u; // Line numbers start at 1
let chpos = self.bytepos_to_file_charpos(pos);
let linebpos = (*f.lines.borrow())[a];
let linechpos = self.bytepos_to_file_charpos(linebpos);
debug!("byte pos {} is on the line at byte pos {}",
pos, linebpos);
debug!("char pos {} is on the line at char pos {}",
chpos, linechpos);
debug!("byte is on line: {}", line);
assert!(chpos >= linechpos);
Loc {
file: f,
line: line,
col: chpos - linechpos
}
}
pub fn record_expansion(&self, expn_info: ExpnInfo) -> ExpnId {
let mut expansions = self.expansions.borrow_mut();
expansions.push(expn_info);
ExpnId(expansions.len().to_u32().expect("too many ExpnInfo's!") - 1)
}
pub fn with_expn_info<T>(&self, id: ExpnId, f: |Option<&ExpnInfo>| -> T) -> T {
match id {
NO_EXPANSION => f(None),
ExpnId(i) => f(Some(&(*self.expansions.borrow())[i as uint]))
}
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn t1 () {
let cm = CodeMap::new();
let fm = cm.new_filemap("blork.rs".to_string(),
"first line.\nsecond line".to_string());
fm.next_line(BytePos(0));
assert_eq!(&fm.get_line(0),&"first line.".to_string());
// TESTING BROKEN BEHAVIOR:
fm.next_line(BytePos(10));
assert_eq!(&fm.get_line(1), &".".to_string());
}
#[test]
#[should_fail]
fn t2 () {
let cm = CodeMap::new();
let fm = cm.new_filemap("blork.rs".to_string(),
"first line.\nsecond line".to_string());
// TESTING *REALLY* BROKEN BEHAVIOR:
fm.next_line(BytePos(0));
fm.next_line(BytePos(10));
fm.next_line(BytePos(2));
}
fn init_code_map() -> CodeMap {
let cm = CodeMap::new();
let fm1 = cm.new_filemap("blork.rs".to_string(),
"first line.\nsecond line".to_string());
let fm2 = cm.new_filemap("empty.rs".to_string(),
"".to_string());
let fm3 = cm.new_filemap("blork2.rs".to_string(),
"first line.\nsecond line".to_string());
fm1.next_line(BytePos(0));
fm1.next_line(BytePos(12));
fm2.next_line(BytePos(24));
fm3.next_line(BytePos(24));
fm3.next_line(BytePos(34));
cm
}
#[test]
fn t3() {
// Test lookup_byte_offset
let cm = init_code_map();
let fmabp1 = cm.lookup_byte_offset(BytePos(22));
assert_eq!(fmabp1.fm.name, "blork.rs".to_string());
assert_eq!(fmabp1.pos, BytePos(22));
let fmabp2 = cm.lookup_byte_offset(BytePos(24));
|
FileMapAndBytePos
|
identifier_name
|
main.rs
|
extern crate report;
use report::report_builder::ReportBuilder;
use report::report_builder::html_report_builder::HtmlReportBuilder;
use report::report_builder::markdown_report_builder::MarkdownReportBuilder;
fn main() {
let html_report = HtmlReportBuilder::new()
.with_header("Solar Deities : Hindu Mythological Story")
.with_paragraph("Let us enjoy reading this Hindu Mythological Story \
|
of Solar Deities.")
.with_paragraph("Ravana once went to challenge Surya, the Sun-God, to a fight. \
When he reached the Solar Region he saw that the sun was about to rise and sent an envoy \
to inform Surya of his arrival and the reason for his coming.")
.finish();
println!("{}", html_report);
let markdown_report = MarkdownReportBuilder::new()
.with_header("Why Snakes Have Forked Tongues")
.with_paragraph("Garuda brought the nectar after overcoming numerous obstacles, \
battling even the gods in the process.")
.with_paragraph("The nagas were delighted when he placed the pot \
containing the nectar before them. \
They let Vinata go and then they went to wash themselves before partaking of the ambrosia.")
.with_header("Second header")
.with_paragraph("Third paragraph")
.finish();
println!("{}", markdown_report);
}
|
random_line_split
|
|
main.rs
|
extern crate report;
use report::report_builder::ReportBuilder;
use report::report_builder::html_report_builder::HtmlReportBuilder;
use report::report_builder::markdown_report_builder::MarkdownReportBuilder;
fn
|
() {
let html_report = HtmlReportBuilder::new()
.with_header("Solar Deities : Hindu Mythological Story")
.with_paragraph("Let us enjoy reading this Hindu Mythological Story \
of Solar Deities.")
.with_paragraph("Ravana once went to challenge Surya, the Sun-God, to a fight. \
When he reached the Solar Region he saw that the sun was about to rise and sent an envoy \
to inform Surya of his arrival and the reason for his coming.")
.finish();
println!("{}", html_report);
let markdown_report = MarkdownReportBuilder::new()
.with_header("Why Snakes Have Forked Tongues")
.with_paragraph("Garuda brought the nectar after overcoming numerous obstacles, \
battling even the gods in the process.")
.with_paragraph("The nagas were delighted when he placed the pot \
containing the nectar before them. \
They let Vinata go and then they went to wash themselves before partaking of the ambrosia.")
.with_header("Second header")
.with_paragraph("Third paragraph")
.finish();
println!("{}", markdown_report);
}
|
main
|
identifier_name
|
main.rs
|
extern crate report;
use report::report_builder::ReportBuilder;
use report::report_builder::html_report_builder::HtmlReportBuilder;
use report::report_builder::markdown_report_builder::MarkdownReportBuilder;
fn main()
|
.with_paragraph("Third paragraph")
.finish();
println!("{}", markdown_report);
}
|
{
let html_report = HtmlReportBuilder::new()
.with_header("Solar Deities : Hindu Mythological Story")
.with_paragraph("Let us enjoy reading this Hindu Mythological Story \
of Solar Deities.")
.with_paragraph("Ravana once went to challenge Surya, the Sun-God, to a fight. \
When he reached the Solar Region he saw that the sun was about to rise and sent an envoy \
to inform Surya of his arrival and the reason for his coming.")
.finish();
println!("{}", html_report);
let markdown_report = MarkdownReportBuilder::new()
.with_header("Why Snakes Have Forked Tongues")
.with_paragraph("Garuda brought the nectar after overcoming numerous obstacles, \
battling even the gods in the process.")
.with_paragraph("The nagas were delighted when he placed the pot \
containing the nectar before them. \
They let Vinata go and then they went to wash themselves before partaking of the ambrosia.")
.with_header("Second header")
|
identifier_body
|
compound3.rs
|
extern crate nalgebra as na;
use na::{Isometry3, Point3, RealField, Vector3};
use ncollide3d::shape::{Capsule, Compound, Cuboid, ShapeHandle};
use nphysics3d::force_generator::DefaultForceGeneratorSet;
use nphysics3d::joint::DefaultJointConstraintSet;
use nphysics3d::object::{
BodyPartHandle, ColliderDesc, DefaultBodySet, DefaultColliderSet, Ground, RigidBodyDesc,
};
use nphysics3d::world::{DefaultGeometricalWorld, DefaultMechanicalWorld};
use nphysics_testbed3d::Testbed;
/*
* NOTE: The `r` macro is only here to convert from f64 to the `N` scalar type.
* This simplifies experimentation with various scalar types (f32, fixed-point numbers, etc.)
*/
pub fn init_world<N: RealField>(testbed: &mut Testbed<N>) {
/*
* World
*/
let mechanical_world = DefaultMechanicalWorld::new(Vector3::new(r!(0.0), r!(-9.81), r!(0.0)));
let geometrical_world = DefaultGeometricalWorld::new();
let mut bodies = DefaultBodySet::new();
let mut colliders = DefaultColliderSet::new();
let joint_constraints = DefaultJointConstraintSet::new();
let force_generators = DefaultForceGeneratorSet::new();
/*
* Ground.
*/
let ground_thickness = r!(0.2);
let ground_shape = ShapeHandle::new(Cuboid::new(Vector3::new(
r!(35.0),
ground_thickness,
r!(35.0),
)));
let ground_handle = bodies.insert(Ground::new());
let co = ColliderDesc::new(ground_shape)
.translation(Vector3::y() * -ground_thickness)
.build(BodyPartHandle(ground_handle, 0));
colliders.insert(co);
/*
* U-shaped geometry.
*/
let large_rad = r!(2.5);
let small_rad = r!(0.1);
let delta1 = Isometry3::new(Vector3::new(r!(0.0), -large_rad, r!(0.0)), na::zero());
let delta2 = Isometry3::new(Vector3::new(-large_rad, r!(0.0), r!(0.0)), na::zero());
let delta3 = Isometry3::new(Vector3::new(large_rad, r!(0.0), r!(0.0)), na::zero());
let mut cross_geoms = Vec::new();
let vertical = ShapeHandle::new(Capsule::new(large_rad, small_rad));
let horizontal = ShapeHandle::new(Cuboid::new(Vector3::new(large_rad, small_rad, small_rad)));
cross_geoms.push((delta1, horizontal));
cross_geoms.push((delta2, vertical.clone()));
cross_geoms.push((delta3, vertical));
let compound = Compound::new(cross_geoms);
let cross = ShapeHandle::new(compound);
/*
* Create the crosses
*/
let num = 6;
let rad = r!(5.0);
let shift = rad * r!(2.0);
let centerx = shift * r!(num as f64) / r!(2.0);
let centery = r!(3.0) + shift / r!(2.0);
let centerz = shift * r!(num as f64) / r!(2.0);
|
for i in 0usize..num {
for j in 0usize..num {
for k in 0usize..num {
let x = r!(i as f64) * shift - centerx;
let y = r!(j as f64) * shift + centery;
let z = r!(k as f64) * shift - centerz;
// Build the rigid body.
let rb = RigidBodyDesc::new()
.translation(Vector3::new(x, y, z))
.build();
let rb_handle = bodies.insert(rb);
// Build the collider.
let co = ColliderDesc::new(cross.clone())
.density(r!(1.0))
.build(BodyPartHandle(rb_handle, 0));
colliders.insert(co);
}
}
}
/*
* Set up the testbed.
*/
testbed.set_ground_handle(Some(ground_handle));
testbed.set_world(
mechanical_world,
geometrical_world,
bodies,
colliders,
joint_constraints,
force_generators,
);
testbed.look_at(Point3::new(-10.0, 10.0, -10.0), Point3::origin());
}
fn main() {
let testbed = Testbed::<f32>::from_builders(0, vec![("Compound", init_world)]);
testbed.run()
}
|
random_line_split
|
|
compound3.rs
|
extern crate nalgebra as na;
use na::{Isometry3, Point3, RealField, Vector3};
use ncollide3d::shape::{Capsule, Compound, Cuboid, ShapeHandle};
use nphysics3d::force_generator::DefaultForceGeneratorSet;
use nphysics3d::joint::DefaultJointConstraintSet;
use nphysics3d::object::{
BodyPartHandle, ColliderDesc, DefaultBodySet, DefaultColliderSet, Ground, RigidBodyDesc,
};
use nphysics3d::world::{DefaultGeometricalWorld, DefaultMechanicalWorld};
use nphysics_testbed3d::Testbed;
/*
* NOTE: The `r` macro is only here to convert from f64 to the `N` scalar type.
* This simplifies experimentation with various scalar types (f32, fixed-point numbers, etc.)
*/
pub fn init_world<N: RealField>(testbed: &mut Testbed<N>)
|
let ground_handle = bodies.insert(Ground::new());
let co = ColliderDesc::new(ground_shape)
.translation(Vector3::y() * -ground_thickness)
.build(BodyPartHandle(ground_handle, 0));
colliders.insert(co);
/*
* U-shaped geometry.
*/
let large_rad = r!(2.5);
let small_rad = r!(0.1);
let delta1 = Isometry3::new(Vector3::new(r!(0.0), -large_rad, r!(0.0)), na::zero());
let delta2 = Isometry3::new(Vector3::new(-large_rad, r!(0.0), r!(0.0)), na::zero());
let delta3 = Isometry3::new(Vector3::new(large_rad, r!(0.0), r!(0.0)), na::zero());
let mut cross_geoms = Vec::new();
let vertical = ShapeHandle::new(Capsule::new(large_rad, small_rad));
let horizontal = ShapeHandle::new(Cuboid::new(Vector3::new(large_rad, small_rad, small_rad)));
cross_geoms.push((delta1, horizontal));
cross_geoms.push((delta2, vertical.clone()));
cross_geoms.push((delta3, vertical));
let compound = Compound::new(cross_geoms);
let cross = ShapeHandle::new(compound);
/*
* Create the crosses
*/
let num = 6;
let rad = r!(5.0);
let shift = rad * r!(2.0);
let centerx = shift * r!(num as f64) / r!(2.0);
let centery = r!(3.0) + shift / r!(2.0);
let centerz = shift * r!(num as f64) / r!(2.0);
for i in 0usize..num {
for j in 0usize..num {
for k in 0usize..num {
let x = r!(i as f64) * shift - centerx;
let y = r!(j as f64) * shift + centery;
let z = r!(k as f64) * shift - centerz;
// Build the rigid body.
let rb = RigidBodyDesc::new()
.translation(Vector3::new(x, y, z))
.build();
let rb_handle = bodies.insert(rb);
// Build the collider.
let co = ColliderDesc::new(cross.clone())
.density(r!(1.0))
.build(BodyPartHandle(rb_handle, 0));
colliders.insert(co);
}
}
}
/*
* Set up the testbed.
*/
testbed.set_ground_handle(Some(ground_handle));
testbed.set_world(
mechanical_world,
geometrical_world,
bodies,
colliders,
joint_constraints,
force_generators,
);
testbed.look_at(Point3::new(-10.0, 10.0, -10.0), Point3::origin());
}
fn main() {
let testbed = Testbed::<f32>::from_builders(0, vec![("Compound", init_world)]);
testbed.run()
}
|
{
/*
* World
*/
let mechanical_world = DefaultMechanicalWorld::new(Vector3::new(r!(0.0), r!(-9.81), r!(0.0)));
let geometrical_world = DefaultGeometricalWorld::new();
let mut bodies = DefaultBodySet::new();
let mut colliders = DefaultColliderSet::new();
let joint_constraints = DefaultJointConstraintSet::new();
let force_generators = DefaultForceGeneratorSet::new();
/*
* Ground.
*/
let ground_thickness = r!(0.2);
let ground_shape = ShapeHandle::new(Cuboid::new(Vector3::new(
r!(35.0),
ground_thickness,
r!(35.0),
)));
|
identifier_body
|
compound3.rs
|
extern crate nalgebra as na;
use na::{Isometry3, Point3, RealField, Vector3};
use ncollide3d::shape::{Capsule, Compound, Cuboid, ShapeHandle};
use nphysics3d::force_generator::DefaultForceGeneratorSet;
use nphysics3d::joint::DefaultJointConstraintSet;
use nphysics3d::object::{
BodyPartHandle, ColliderDesc, DefaultBodySet, DefaultColliderSet, Ground, RigidBodyDesc,
};
use nphysics3d::world::{DefaultGeometricalWorld, DefaultMechanicalWorld};
use nphysics_testbed3d::Testbed;
/*
* NOTE: The `r` macro is only here to convert from f64 to the `N` scalar type.
* This simplifies experimentation with various scalar types (f32, fixed-point numbers, etc.)
*/
pub fn init_world<N: RealField>(testbed: &mut Testbed<N>) {
/*
* World
*/
let mechanical_world = DefaultMechanicalWorld::new(Vector3::new(r!(0.0), r!(-9.81), r!(0.0)));
let geometrical_world = DefaultGeometricalWorld::new();
let mut bodies = DefaultBodySet::new();
let mut colliders = DefaultColliderSet::new();
let joint_constraints = DefaultJointConstraintSet::new();
let force_generators = DefaultForceGeneratorSet::new();
/*
* Ground.
*/
let ground_thickness = r!(0.2);
let ground_shape = ShapeHandle::new(Cuboid::new(Vector3::new(
r!(35.0),
ground_thickness,
r!(35.0),
)));
let ground_handle = bodies.insert(Ground::new());
let co = ColliderDesc::new(ground_shape)
.translation(Vector3::y() * -ground_thickness)
.build(BodyPartHandle(ground_handle, 0));
colliders.insert(co);
/*
* U-shaped geometry.
*/
let large_rad = r!(2.5);
let small_rad = r!(0.1);
let delta1 = Isometry3::new(Vector3::new(r!(0.0), -large_rad, r!(0.0)), na::zero());
let delta2 = Isometry3::new(Vector3::new(-large_rad, r!(0.0), r!(0.0)), na::zero());
let delta3 = Isometry3::new(Vector3::new(large_rad, r!(0.0), r!(0.0)), na::zero());
let mut cross_geoms = Vec::new();
let vertical = ShapeHandle::new(Capsule::new(large_rad, small_rad));
let horizontal = ShapeHandle::new(Cuboid::new(Vector3::new(large_rad, small_rad, small_rad)));
cross_geoms.push((delta1, horizontal));
cross_geoms.push((delta2, vertical.clone()));
cross_geoms.push((delta3, vertical));
let compound = Compound::new(cross_geoms);
let cross = ShapeHandle::new(compound);
/*
* Create the crosses
*/
let num = 6;
let rad = r!(5.0);
let shift = rad * r!(2.0);
let centerx = shift * r!(num as f64) / r!(2.0);
let centery = r!(3.0) + shift / r!(2.0);
let centerz = shift * r!(num as f64) / r!(2.0);
for i in 0usize..num {
for j in 0usize..num {
for k in 0usize..num {
let x = r!(i as f64) * shift - centerx;
let y = r!(j as f64) * shift + centery;
let z = r!(k as f64) * shift - centerz;
// Build the rigid body.
let rb = RigidBodyDesc::new()
.translation(Vector3::new(x, y, z))
.build();
let rb_handle = bodies.insert(rb);
// Build the collider.
let co = ColliderDesc::new(cross.clone())
.density(r!(1.0))
.build(BodyPartHandle(rb_handle, 0));
colliders.insert(co);
}
}
}
/*
* Set up the testbed.
*/
testbed.set_ground_handle(Some(ground_handle));
testbed.set_world(
mechanical_world,
geometrical_world,
bodies,
colliders,
joint_constraints,
force_generators,
);
testbed.look_at(Point3::new(-10.0, 10.0, -10.0), Point3::origin());
}
fn
|
() {
let testbed = Testbed::<f32>::from_builders(0, vec![("Compound", init_world)]);
testbed.run()
}
|
main
|
identifier_name
|
bench-serde.rs
|
#![feature(test)]
extern crate quick_xml;
extern crate serde;
extern crate serde_xml_rs;
extern crate test;
use serde::Deserialize;
use test::Bencher;
const SOURCE: &str = include_str!("../../tests/sample_rss.xml");
#[derive(Debug, Deserialize)]
struct Rss {
channel: Channel,
}
#[derive(Debug, Deserialize)]
struct Channel {
title: String,
#[serde(rename = "item", default)]
items: Vec<Item>,
}
#[derive(Debug, Deserialize)]
struct Item {
title: String,
link: String,
#[serde(rename = "pubDate")]
pub_date: String,
enclosure: Option<Enclosure>,
}
#[derive(Debug, Deserialize)]
struct Enclosure {
url: String,
length: String,
#[serde(rename = "type")]
typ: String,
}
#[bench]
|
b.iter(|| {
let rss: Rss = quick_xml::de::from_str(SOURCE).unwrap();
assert_eq!(rss.channel.items.len(), 99);
});
}
#[bench]
fn bench_serde_xml_rs(b: &mut Bencher) {
b.iter(|| {
let rss: Rss = serde_xml_rs::from_str(SOURCE).unwrap();
assert_eq!(rss.channel.items.len(), 99);
});
}
|
fn bench_serde_quick_xml(b: &mut Bencher) {
|
random_line_split
|
bench-serde.rs
|
#![feature(test)]
extern crate quick_xml;
extern crate serde;
extern crate serde_xml_rs;
extern crate test;
use serde::Deserialize;
use test::Bencher;
const SOURCE: &str = include_str!("../../tests/sample_rss.xml");
#[derive(Debug, Deserialize)]
struct Rss {
channel: Channel,
}
#[derive(Debug, Deserialize)]
struct Channel {
title: String,
#[serde(rename = "item", default)]
items: Vec<Item>,
}
#[derive(Debug, Deserialize)]
struct Item {
title: String,
link: String,
#[serde(rename = "pubDate")]
pub_date: String,
enclosure: Option<Enclosure>,
}
#[derive(Debug, Deserialize)]
struct
|
{
url: String,
length: String,
#[serde(rename = "type")]
typ: String,
}
#[bench]
fn bench_serde_quick_xml(b: &mut Bencher) {
b.iter(|| {
let rss: Rss = quick_xml::de::from_str(SOURCE).unwrap();
assert_eq!(rss.channel.items.len(), 99);
});
}
#[bench]
fn bench_serde_xml_rs(b: &mut Bencher) {
b.iter(|| {
let rss: Rss = serde_xml_rs::from_str(SOURCE).unwrap();
assert_eq!(rss.channel.items.len(), 99);
});
}
|
Enclosure
|
identifier_name
|
deriving-hash.rs
|
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::hash::{Hash, SipHasher};
#[derive(Hash)]
struct Person {
id: uint,
name: String,
phone: uint,
}
fn hash<T: Hash<SipHasher>>(t: &T) -> u64 {
std::hash::hash::<T, SipHasher>(t)
}
fn main() {
let person1 = Person {
id: 5,
name: "Janet".to_string(),
phone: 555_666_7777
};
let person2 = Person {
id: 5,
name: "Bob".to_string(),
phone: 555_666_7777
};
assert!(hash(&person1) == hash(&person1));
assert!(hash(&person1)!= hash(&person2));
}
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
|
random_line_split
|
|
deriving-hash.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::hash::{Hash, SipHasher};
#[derive(Hash)]
struct Person {
id: uint,
name: String,
phone: uint,
}
fn hash<T: Hash<SipHasher>>(t: &T) -> u64 {
std::hash::hash::<T, SipHasher>(t)
}
fn
|
() {
let person1 = Person {
id: 5,
name: "Janet".to_string(),
phone: 555_666_7777
};
let person2 = Person {
id: 5,
name: "Bob".to_string(),
phone: 555_666_7777
};
assert!(hash(&person1) == hash(&person1));
assert!(hash(&person1)!= hash(&person2));
}
|
main
|
identifier_name
|
issue_427.rs
|
#![cfg(feature = "derive")]
/// HID-IO Packet Buffer Struct
///
/// # Remarks
/// Used to store HID-IO data chunks. Will be chunked into individual packets on transmission.
#[repr(C)]
#[derive(PartialEq, Clone, Debug, bincode::Encode)]
|
/// Type of packet (Continued is automatically set if needed)
pub ptype: u32,
/// Packet Id
pub id: u32,
/// Packet length for serialization (in bytes)
pub max_len: u32,
/// Payload data, chunking is done automatically by serializer
pub data: [u8; H],
/// Set False if buffer is not complete, True if it is
pub done: bool,
}
#[repr(u32)]
#[derive(PartialEq, Clone, Copy, Debug, bincode::Encode)]
#[allow(dead_code)]
/// Requests for to perform a specific action
pub enum HidIoCommandId {
SupportedIds = 0x00,
GetInfo = 0x01,
TestPacket = 0x02,
ResetHidIo = 0x03,
Reserved = 0x04, //... 0x0F
GetProperties = 0x10,
KeyState = 0x11,
KeyboardLayout = 0x12,
KeyLayout = 0x13,
KeyShapes = 0x14,
LedLayout = 0x15,
FlashMode = 0x16,
UnicodeText = 0x17,
UnicodeState = 0x18,
HostMacro = 0x19,
SleepMode = 0x1A,
KllState = 0x20,
PixelSetting = 0x21,
PixelSet1c8b = 0x22,
PixelSet3c8b = 0x23,
PixelSet1c16b = 0x24,
PixelSet3c16b = 0x25,
OpenUrl = 0x30,
TerminalCmd = 0x31,
GetInputLayout = 0x32,
SetInputLayout = 0x33,
TerminalOut = 0x34,
HidKeyboard = 0x40,
HidKeyboardLed = 0x41,
HidMouse = 0x42,
HidJoystick = 0x43,
HidSystemCtrl = 0x44,
HidConsumerCtrl = 0x45,
ManufacturingTest = 0x50,
ManufacturingResult = 0x51,
Unused = 0xFFFF,
}
|
pub struct HidIoPacketBuffer<const H: usize> {
|
random_line_split
|
issue_427.rs
|
#![cfg(feature = "derive")]
/// HID-IO Packet Buffer Struct
///
/// # Remarks
/// Used to store HID-IO data chunks. Will be chunked into individual packets on transmission.
#[repr(C)]
#[derive(PartialEq, Clone, Debug, bincode::Encode)]
pub struct HidIoPacketBuffer<const H: usize> {
/// Type of packet (Continued is automatically set if needed)
pub ptype: u32,
/// Packet Id
pub id: u32,
/// Packet length for serialization (in bytes)
pub max_len: u32,
/// Payload data, chunking is done automatically by serializer
pub data: [u8; H],
/// Set False if buffer is not complete, True if it is
pub done: bool,
}
#[repr(u32)]
#[derive(PartialEq, Clone, Copy, Debug, bincode::Encode)]
#[allow(dead_code)]
/// Requests for to perform a specific action
pub enum
|
{
SupportedIds = 0x00,
GetInfo = 0x01,
TestPacket = 0x02,
ResetHidIo = 0x03,
Reserved = 0x04, //... 0x0F
GetProperties = 0x10,
KeyState = 0x11,
KeyboardLayout = 0x12,
KeyLayout = 0x13,
KeyShapes = 0x14,
LedLayout = 0x15,
FlashMode = 0x16,
UnicodeText = 0x17,
UnicodeState = 0x18,
HostMacro = 0x19,
SleepMode = 0x1A,
KllState = 0x20,
PixelSetting = 0x21,
PixelSet1c8b = 0x22,
PixelSet3c8b = 0x23,
PixelSet1c16b = 0x24,
PixelSet3c16b = 0x25,
OpenUrl = 0x30,
TerminalCmd = 0x31,
GetInputLayout = 0x32,
SetInputLayout = 0x33,
TerminalOut = 0x34,
HidKeyboard = 0x40,
HidKeyboardLed = 0x41,
HidMouse = 0x42,
HidJoystick = 0x43,
HidSystemCtrl = 0x44,
HidConsumerCtrl = 0x45,
ManufacturingTest = 0x50,
ManufacturingResult = 0x51,
Unused = 0xFFFF,
}
|
HidIoCommandId
|
identifier_name
|
random.rs
|
extern crate rand;
use std::thread;
use rand::Rng;
static NTHREADS: i32 = 15;
const LEN: usize = 10000;
fn main() {
let mut child = vec![];
//writeln!(w, "{}: {}", i, test()).unwrap()
// println!("{}", i)
for i in 0..NTHREADS {
child.push(thread::spawn(move || {
println!("{}: {}", i, test())
}));
}
for c in child {
let _ = c.join();
}
}
fn test() -> i32 {
let mut t: Vec<bool> = Vec::new();
for _i in 0..LEN {
t.push(false);
}
let mut counter = 0;
loop {
rand(&mut t);
let (_, fc) = count(&t);
counter += 1;
if fc == 0
|
}
counter
}
fn rand(t: &mut Vec<bool>) {
let mut rng = rand::thread_rng();
for _i in 0..t.len() {
let rand: usize = rng.gen_range(0, t.len());
t[rand] = true;
}
}
fn count(t: &Vec<bool>) -> (i32, i32) {
let mut tc = 0;
let mut fc = 0;
for x in 0..t.len() {
if t[x] {
tc += 1;
} else {
fc += 1;
}
}
(tc, fc)
}
|
{
break;
}
|
conditional_block
|
random.rs
|
extern crate rand;
use std::thread;
use rand::Rng;
static NTHREADS: i32 = 15;
|
fn main() {
let mut child = vec![];
//writeln!(w, "{}: {}", i, test()).unwrap()
// println!("{}", i)
for i in 0..NTHREADS {
child.push(thread::spawn(move || {
println!("{}: {}", i, test())
}));
}
for c in child {
let _ = c.join();
}
}
fn test() -> i32 {
let mut t: Vec<bool> = Vec::new();
for _i in 0..LEN {
t.push(false);
}
let mut counter = 0;
loop {
rand(&mut t);
let (_, fc) = count(&t);
counter += 1;
if fc == 0 {
break;
}
}
counter
}
fn rand(t: &mut Vec<bool>) {
let mut rng = rand::thread_rng();
for _i in 0..t.len() {
let rand: usize = rng.gen_range(0, t.len());
t[rand] = true;
}
}
fn count(t: &Vec<bool>) -> (i32, i32) {
let mut tc = 0;
let mut fc = 0;
for x in 0..t.len() {
if t[x] {
tc += 1;
} else {
fc += 1;
}
}
(tc, fc)
}
|
const LEN: usize = 10000;
|
random_line_split
|
random.rs
|
extern crate rand;
use std::thread;
use rand::Rng;
static NTHREADS: i32 = 15;
const LEN: usize = 10000;
fn main() {
let mut child = vec![];
//writeln!(w, "{}: {}", i, test()).unwrap()
// println!("{}", i)
for i in 0..NTHREADS {
child.push(thread::spawn(move || {
println!("{}: {}", i, test())
}));
}
for c in child {
let _ = c.join();
}
}
fn test() -> i32 {
let mut t: Vec<bool> = Vec::new();
for _i in 0..LEN {
t.push(false);
}
let mut counter = 0;
loop {
rand(&mut t);
let (_, fc) = count(&t);
counter += 1;
if fc == 0 {
break;
}
}
counter
}
fn rand(t: &mut Vec<bool>)
|
fn count(t: &Vec<bool>) -> (i32, i32) {
let mut tc = 0;
let mut fc = 0;
for x in 0..t.len() {
if t[x] {
tc += 1;
} else {
fc += 1;
}
}
(tc, fc)
}
|
{
let mut rng = rand::thread_rng();
for _i in 0..t.len() {
let rand: usize = rng.gen_range(0, t.len());
t[rand] = true;
}
}
|
identifier_body
|
random.rs
|
extern crate rand;
use std::thread;
use rand::Rng;
static NTHREADS: i32 = 15;
const LEN: usize = 10000;
fn main() {
let mut child = vec![];
//writeln!(w, "{}: {}", i, test()).unwrap()
// println!("{}", i)
for i in 0..NTHREADS {
child.push(thread::spawn(move || {
println!("{}: {}", i, test())
}));
}
for c in child {
let _ = c.join();
}
}
fn test() -> i32 {
let mut t: Vec<bool> = Vec::new();
for _i in 0..LEN {
t.push(false);
}
let mut counter = 0;
loop {
rand(&mut t);
let (_, fc) = count(&t);
counter += 1;
if fc == 0 {
break;
}
}
counter
}
fn rand(t: &mut Vec<bool>) {
let mut rng = rand::thread_rng();
for _i in 0..t.len() {
let rand: usize = rng.gen_range(0, t.len());
t[rand] = true;
}
}
fn
|
(t: &Vec<bool>) -> (i32, i32) {
let mut tc = 0;
let mut fc = 0;
for x in 0..t.len() {
if t[x] {
tc += 1;
} else {
fc += 1;
}
}
(tc, fc)
}
|
count
|
identifier_name
|
main.rs
|
//! main.rs - The entry point for the Ante compiler.
//! Handles command-line argument parsing and dataflow between
//! each compiler phase. The compiler as a whole is separated into
//! the following phases (in order):
//!
//! lexing -> parsing -> name resolution -> type inference -> lifetime inference -> codegen
//!
//! Each phase corresponds to a source folder with roughly the same name (though the codegen
//! folder is named "llvm"), and each phase after parsing operates by traversing the AST.
//! This AST traversal is usually defined in the mod.rs file for that phase and is a good
//! place to start if you're trying to learn how that phase works. An exception is type
//! inference which has its AST pass defined in types/typechecker.rs rather than types/mod.rs.
//! Note that sometimes "phases" are sometimes called "passes" and vice-versa - the terms are
//! interchangeable.
#[macro_use]
mod parser;
mod lexer;
#[macro_use]
mod util;
#[macro_use]
mod error;
mod cache;
mod nameresolution;
mod types;
mod lifetimes;
mod llvm;
use lexer::Lexer;
use nameresolution::NameResolver;
use cache::ModuleCache;
use clap::{App, Arg};
use std::fs::File;
use std::path::Path;
use std::io::{BufReader, Read};
#[global_allocator]
static ALLOCATOR: mimalloc::MiMalloc = mimalloc::MiMalloc;
/// Called when the "--check --show-types" command-line flags are given.
/// Iterates through each Definition from the first compiled module (so excluding imports)
/// and prints the type and required traits for each.
fn print_definition_types<'a>(cache: &ModuleCache<'a>) {
let resolver = cache.name_resolvers.get_mut(0).unwrap();
let mut definitions = resolver.exports.definitions.iter().collect::<Vec<_>>();
// Make sure the output has a deterministic order for testing
definitions.sort();
for (name, definition_id) in definitions {
let info = &cache.definition_infos[definition_id.0];
let typ = info.typ.clone().unwrap_or(types::Type::Primitive(types::PrimitiveType::UnitType));
print!("{} : ", name);
types::typeprinter::show_type_and_traits(&typ, &info.required_traits, cache);
}
}
/// Convenience macro for unwrapping a Result or printing an error message and returning () on Err.
macro_rules! expect {( $result:expr, $fmt_string:expr $(, $($msg:tt)* )? ) => ({
match $result {
Ok(t) => t,
Err(_) => {
print!($fmt_string $(, $($msg)* )? );
return ();
},
}
});}
fn validate_opt_argument(arg: String) -> Result<(), String> {
match arg.as_str() {
"0" | "1" | "2" | "3" | "s" | "z" => Ok(()),
_ => Err("Argument to -O must be one of: 0, 1, 2, 3, s, or z".to_owned()),
}
}
pub fn
|
() {
let args = App::new("ante")
.version("0.1.1")
.author("Jake Fecher <[email protected]>")
.about("Compiler for the Ante programming language")
.arg(Arg::with_name("lex").long("lex").help("Lex the file and output the resulting list of tokens"))
.arg(Arg::with_name("parse").long("parse").help("Parse the file and output the resulting Ast"))
.arg(Arg::with_name("check").long("check").help("Check the file for errors without compiling"))
.arg(Arg::with_name("run").long("run").help("Run the resulting binary"))
.arg(Arg::with_name("O").short("O").value_name("level").default_value("0").validator(validate_opt_argument).help("Sets the current optimization level from 0 (no optimization) to 3 (aggressive optimization). Set to s or z to optimize for size."))
.arg(Arg::with_name("no-color").long("no-color").help("Use plaintext and an indicator line instead of color for pointing out error locations"))
.arg(Arg::with_name("emit-llvm").long("emit-llvm").help("Print out the LLVM-IR of the compiled program"))
.arg(Arg::with_name("delete-binary").long("delete-binary").help("Delete the resulting binary after compiling"))
.arg(Arg::with_name("show-time").long("show-time").help("Print out the time each compiler pass takes for the given program"))
.arg(Arg::with_name("show-types").long("show-types").help("Print out the type of each definition"))
.arg(Arg::with_name("show-lifetimes").long("show-lifetimes").help("Print out the input file annotated with inferred lifetimes of heap allocations"))
.arg(Arg::with_name("file").help("The file to compile").required(true))
.get_matches();
// Setup the cache and read from the first file
let filename = Path::new(args.value_of("file").unwrap());
let file = expect!(File::open(filename), "Could not open file {}\n", filename.display());
let mut cache = ModuleCache::new(filename.parent().unwrap());
let mut reader = BufReader::new(file);
let mut contents = String::new();
expect!(reader.read_to_string(&mut contents), "Failed to read {} into a string\n", filename.display());
error::color_output(!args.is_present("no-color"));
util::timing::time_passes(args.is_present("show-time"));
// Phase 1: Lexing
util::timing::start_time("Lexing");
let tokens = Lexer::new(filename, &contents).collect::<Vec<_>>();
if args.is_present("lex") {
tokens.iter().for_each(|(token, _)| println!("{}", token));
return;
}
// Phase 2: Parsing
util::timing::start_time("Parsing");
let root = expect!(parser::parse(&tokens), "");
if args.is_present("parse") {
println!("{}", root);
return;
}
// Phase 3: Name resolution
// Timing for name resolution is within the start method to
// break up the declare and define passes
expect!(NameResolver::start(root, &mut cache), "");
// Phase 4: Type inference
util::timing::start_time("Type Inference");
let ast = cache.parse_trees.get_mut(0).unwrap();
types::typechecker::infer_ast(ast, &mut cache);
if args.is_present("show-types") {
print_definition_types(&cache);
}
if args.is_present("check") {
return;
}
// Phase 5: Lifetime inference
util::timing::start_time("Lifetime Inference");
lifetimes::infer(ast, &mut cache);
if args.is_present("show-lifetimes") {
println!("{}", ast);
}
// Phase 6: Codegen
if error::get_error_count() == 0 {
llvm::run(&filename, ast, &mut cache,
args.is_present("emit-llvm"),
args.is_present("run"),
args.is_present("delete-binary"),
args.value_of("O").unwrap());
}
// Print out the time each compiler pass took to complete if the --show-time flag was passed
util::timing::show_timings();
}
|
main
|
identifier_name
|
main.rs
|
//! main.rs - The entry point for the Ante compiler.
//! Handles command-line argument parsing and dataflow between
//! each compiler phase. The compiler as a whole is separated into
//! the following phases (in order):
//!
//! lexing -> parsing -> name resolution -> type inference -> lifetime inference -> codegen
//!
//! Each phase corresponds to a source folder with roughly the same name (though the codegen
//! folder is named "llvm"), and each phase after parsing operates by traversing the AST.
//! This AST traversal is usually defined in the mod.rs file for that phase and is a good
//! place to start if you're trying to learn how that phase works. An exception is type
//! inference which has its AST pass defined in types/typechecker.rs rather than types/mod.rs.
//! Note that sometimes "phases" are sometimes called "passes" and vice-versa - the terms are
//! interchangeable.
#[macro_use]
mod parser;
mod lexer;
#[macro_use]
mod util;
#[macro_use]
mod error;
mod cache;
mod nameresolution;
mod types;
mod lifetimes;
mod llvm;
use lexer::Lexer;
use nameresolution::NameResolver;
use cache::ModuleCache;
use clap::{App, Arg};
use std::fs::File;
use std::path::Path;
use std::io::{BufReader, Read};
#[global_allocator]
static ALLOCATOR: mimalloc::MiMalloc = mimalloc::MiMalloc;
/// Called when the "--check --show-types" command-line flags are given.
/// Iterates through each Definition from the first compiled module (so excluding imports)
/// and prints the type and required traits for each.
fn print_definition_types<'a>(cache: &ModuleCache<'a>) {
let resolver = cache.name_resolvers.get_mut(0).unwrap();
let mut definitions = resolver.exports.definitions.iter().collect::<Vec<_>>();
// Make sure the output has a deterministic order for testing
definitions.sort();
for (name, definition_id) in definitions {
let info = &cache.definition_infos[definition_id.0];
let typ = info.typ.clone().unwrap_or(types::Type::Primitive(types::PrimitiveType::UnitType));
print!("{} : ", name);
types::typeprinter::show_type_and_traits(&typ, &info.required_traits, cache);
}
}
/// Convenience macro for unwrapping a Result or printing an error message and returning () on Err.
macro_rules! expect {( $result:expr, $fmt_string:expr $(, $($msg:tt)* )? ) => ({
match $result {
Ok(t) => t,
Err(_) => {
print!($fmt_string $(, $($msg)* )? );
return ();
},
}
});}
fn validate_opt_argument(arg: String) -> Result<(), String> {
match arg.as_str() {
"0" | "1" | "2" | "3" | "s" | "z" => Ok(()),
_ => Err("Argument to -O must be one of: 0, 1, 2, 3, s, or z".to_owned()),
}
}
pub fn main() {
let args = App::new("ante")
.version("0.1.1")
.author("Jake Fecher <[email protected]>")
.about("Compiler for the Ante programming language")
.arg(Arg::with_name("lex").long("lex").help("Lex the file and output the resulting list of tokens"))
.arg(Arg::with_name("parse").long("parse").help("Parse the file and output the resulting Ast"))
.arg(Arg::with_name("check").long("check").help("Check the file for errors without compiling"))
.arg(Arg::with_name("run").long("run").help("Run the resulting binary"))
.arg(Arg::with_name("O").short("O").value_name("level").default_value("0").validator(validate_opt_argument).help("Sets the current optimization level from 0 (no optimization) to 3 (aggressive optimization). Set to s or z to optimize for size."))
.arg(Arg::with_name("no-color").long("no-color").help("Use plaintext and an indicator line instead of color for pointing out error locations"))
.arg(Arg::with_name("emit-llvm").long("emit-llvm").help("Print out the LLVM-IR of the compiled program"))
.arg(Arg::with_name("delete-binary").long("delete-binary").help("Delete the resulting binary after compiling"))
.arg(Arg::with_name("show-time").long("show-time").help("Print out the time each compiler pass takes for the given program"))
.arg(Arg::with_name("show-types").long("show-types").help("Print out the type of each definition"))
.arg(Arg::with_name("show-lifetimes").long("show-lifetimes").help("Print out the input file annotated with inferred lifetimes of heap allocations"))
.arg(Arg::with_name("file").help("The file to compile").required(true))
.get_matches();
// Setup the cache and read from the first file
let filename = Path::new(args.value_of("file").unwrap());
let file = expect!(File::open(filename), "Could not open file {}\n", filename.display());
let mut cache = ModuleCache::new(filename.parent().unwrap());
let mut reader = BufReader::new(file);
let mut contents = String::new();
expect!(reader.read_to_string(&mut contents), "Failed to read {} into a string\n", filename.display());
error::color_output(!args.is_present("no-color"));
util::timing::time_passes(args.is_present("show-time"));
// Phase 1: Lexing
util::timing::start_time("Lexing");
let tokens = Lexer::new(filename, &contents).collect::<Vec<_>>();
if args.is_present("lex") {
tokens.iter().for_each(|(token, _)| println!("{}", token));
return;
}
// Phase 2: Parsing
util::timing::start_time("Parsing");
let root = expect!(parser::parse(&tokens), "");
if args.is_present("parse") {
println!("{}", root);
return;
}
// Phase 3: Name resolution
// Timing for name resolution is within the start method to
// break up the declare and define passes
expect!(NameResolver::start(root, &mut cache), "");
// Phase 4: Type inference
util::timing::start_time("Type Inference");
let ast = cache.parse_trees.get_mut(0).unwrap();
types::typechecker::infer_ast(ast, &mut cache);
if args.is_present("show-types") {
print_definition_types(&cache);
}
if args.is_present("check")
|
// Phase 5: Lifetime inference
util::timing::start_time("Lifetime Inference");
lifetimes::infer(ast, &mut cache);
if args.is_present("show-lifetimes") {
println!("{}", ast);
}
// Phase 6: Codegen
if error::get_error_count() == 0 {
llvm::run(&filename, ast, &mut cache,
args.is_present("emit-llvm"),
args.is_present("run"),
args.is_present("delete-binary"),
args.value_of("O").unwrap());
}
// Print out the time each compiler pass took to complete if the --show-time flag was passed
util::timing::show_timings();
}
|
{
return;
}
|
conditional_block
|
main.rs
|
//! main.rs - The entry point for the Ante compiler.
//! Handles command-line argument parsing and dataflow between
//! each compiler phase. The compiler as a whole is separated into
//! the following phases (in order):
//!
//! lexing -> parsing -> name resolution -> type inference -> lifetime inference -> codegen
//!
//! Each phase corresponds to a source folder with roughly the same name (though the codegen
//! folder is named "llvm"), and each phase after parsing operates by traversing the AST.
//! This AST traversal is usually defined in the mod.rs file for that phase and is a good
//! place to start if you're trying to learn how that phase works. An exception is type
//! inference which has its AST pass defined in types/typechecker.rs rather than types/mod.rs.
//! Note that sometimes "phases" are sometimes called "passes" and vice-versa - the terms are
//! interchangeable.
#[macro_use]
mod parser;
mod lexer;
#[macro_use]
mod util;
#[macro_use]
mod error;
mod cache;
mod nameresolution;
mod types;
mod lifetimes;
mod llvm;
use lexer::Lexer;
use nameresolution::NameResolver;
use cache::ModuleCache;
use clap::{App, Arg};
use std::fs::File;
use std::path::Path;
use std::io::{BufReader, Read};
#[global_allocator]
static ALLOCATOR: mimalloc::MiMalloc = mimalloc::MiMalloc;
/// Called when the "--check --show-types" command-line flags are given.
/// Iterates through each Definition from the first compiled module (so excluding imports)
/// and prints the type and required traits for each.
fn print_definition_types<'a>(cache: &ModuleCache<'a>)
|
/// Convenience macro for unwrapping a Result or printing an error message and returning () on Err.
macro_rules! expect {( $result:expr, $fmt_string:expr $(, $($msg:tt)* )? ) => ({
match $result {
Ok(t) => t,
Err(_) => {
print!($fmt_string $(, $($msg)* )? );
return ();
},
}
});}
fn validate_opt_argument(arg: String) -> Result<(), String> {
match arg.as_str() {
"0" | "1" | "2" | "3" | "s" | "z" => Ok(()),
_ => Err("Argument to -O must be one of: 0, 1, 2, 3, s, or z".to_owned()),
}
}
pub fn main() {
let args = App::new("ante")
.version("0.1.1")
.author("Jake Fecher <[email protected]>")
.about("Compiler for the Ante programming language")
.arg(Arg::with_name("lex").long("lex").help("Lex the file and output the resulting list of tokens"))
.arg(Arg::with_name("parse").long("parse").help("Parse the file and output the resulting Ast"))
.arg(Arg::with_name("check").long("check").help("Check the file for errors without compiling"))
.arg(Arg::with_name("run").long("run").help("Run the resulting binary"))
.arg(Arg::with_name("O").short("O").value_name("level").default_value("0").validator(validate_opt_argument).help("Sets the current optimization level from 0 (no optimization) to 3 (aggressive optimization). Set to s or z to optimize for size."))
.arg(Arg::with_name("no-color").long("no-color").help("Use plaintext and an indicator line instead of color for pointing out error locations"))
.arg(Arg::with_name("emit-llvm").long("emit-llvm").help("Print out the LLVM-IR of the compiled program"))
.arg(Arg::with_name("delete-binary").long("delete-binary").help("Delete the resulting binary after compiling"))
.arg(Arg::with_name("show-time").long("show-time").help("Print out the time each compiler pass takes for the given program"))
.arg(Arg::with_name("show-types").long("show-types").help("Print out the type of each definition"))
.arg(Arg::with_name("show-lifetimes").long("show-lifetimes").help("Print out the input file annotated with inferred lifetimes of heap allocations"))
.arg(Arg::with_name("file").help("The file to compile").required(true))
.get_matches();
// Setup the cache and read from the first file
let filename = Path::new(args.value_of("file").unwrap());
let file = expect!(File::open(filename), "Could not open file {}\n", filename.display());
let mut cache = ModuleCache::new(filename.parent().unwrap());
let mut reader = BufReader::new(file);
let mut contents = String::new();
expect!(reader.read_to_string(&mut contents), "Failed to read {} into a string\n", filename.display());
error::color_output(!args.is_present("no-color"));
util::timing::time_passes(args.is_present("show-time"));
// Phase 1: Lexing
util::timing::start_time("Lexing");
let tokens = Lexer::new(filename, &contents).collect::<Vec<_>>();
if args.is_present("lex") {
tokens.iter().for_each(|(token, _)| println!("{}", token));
return;
}
// Phase 2: Parsing
util::timing::start_time("Parsing");
let root = expect!(parser::parse(&tokens), "");
if args.is_present("parse") {
println!("{}", root);
return;
}
// Phase 3: Name resolution
// Timing for name resolution is within the start method to
// break up the declare and define passes
expect!(NameResolver::start(root, &mut cache), "");
// Phase 4: Type inference
util::timing::start_time("Type Inference");
let ast = cache.parse_trees.get_mut(0).unwrap();
types::typechecker::infer_ast(ast, &mut cache);
if args.is_present("show-types") {
print_definition_types(&cache);
}
if args.is_present("check") {
return;
}
// Phase 5: Lifetime inference
util::timing::start_time("Lifetime Inference");
lifetimes::infer(ast, &mut cache);
if args.is_present("show-lifetimes") {
println!("{}", ast);
}
// Phase 6: Codegen
if error::get_error_count() == 0 {
llvm::run(&filename, ast, &mut cache,
args.is_present("emit-llvm"),
args.is_present("run"),
args.is_present("delete-binary"),
args.value_of("O").unwrap());
}
// Print out the time each compiler pass took to complete if the --show-time flag was passed
util::timing::show_timings();
}
|
{
let resolver = cache.name_resolvers.get_mut(0).unwrap();
let mut definitions = resolver.exports.definitions.iter().collect::<Vec<_>>();
// Make sure the output has a deterministic order for testing
definitions.sort();
for (name, definition_id) in definitions {
let info = &cache.definition_infos[definition_id.0];
let typ = info.typ.clone().unwrap_or(types::Type::Primitive(types::PrimitiveType::UnitType));
print!("{} : ", name);
types::typeprinter::show_type_and_traits(&typ, &info.required_traits, cache);
}
}
|
identifier_body
|
main.rs
|
//! main.rs - The entry point for the Ante compiler.
//! Handles command-line argument parsing and dataflow between
//! each compiler phase. The compiler as a whole is separated into
//! the following phases (in order):
//!
//! lexing -> parsing -> name resolution -> type inference -> lifetime inference -> codegen
//!
//! Each phase corresponds to a source folder with roughly the same name (though the codegen
//! folder is named "llvm"), and each phase after parsing operates by traversing the AST.
//! This AST traversal is usually defined in the mod.rs file for that phase and is a good
//! place to start if you're trying to learn how that phase works. An exception is type
//! inference which has its AST pass defined in types/typechecker.rs rather than types/mod.rs.
//! Note that sometimes "phases" are sometimes called "passes" and vice-versa - the terms are
//! interchangeable.
#[macro_use]
mod parser;
mod lexer;
#[macro_use]
mod util;
#[macro_use]
mod error;
mod cache;
mod nameresolution;
mod types;
mod lifetimes;
mod llvm;
use lexer::Lexer;
use nameresolution::NameResolver;
use cache::ModuleCache;
use clap::{App, Arg};
use std::fs::File;
use std::path::Path;
use std::io::{BufReader, Read};
#[global_allocator]
static ALLOCATOR: mimalloc::MiMalloc = mimalloc::MiMalloc;
/// Called when the "--check --show-types" command-line flags are given.
/// Iterates through each Definition from the first compiled module (so excluding imports)
/// and prints the type and required traits for each.
fn print_definition_types<'a>(cache: &ModuleCache<'a>) {
let resolver = cache.name_resolvers.get_mut(0).unwrap();
let mut definitions = resolver.exports.definitions.iter().collect::<Vec<_>>();
// Make sure the output has a deterministic order for testing
definitions.sort();
for (name, definition_id) in definitions {
let info = &cache.definition_infos[definition_id.0];
let typ = info.typ.clone().unwrap_or(types::Type::Primitive(types::PrimitiveType::UnitType));
print!("{} : ", name);
types::typeprinter::show_type_and_traits(&typ, &info.required_traits, cache);
}
}
/// Convenience macro for unwrapping a Result or printing an error message and returning () on Err.
macro_rules! expect {( $result:expr, $fmt_string:expr $(, $($msg:tt)* )? ) => ({
match $result {
Ok(t) => t,
Err(_) => {
print!($fmt_string $(, $($msg)* )? );
return ();
},
}
});}
fn validate_opt_argument(arg: String) -> Result<(), String> {
match arg.as_str() {
"0" | "1" | "2" | "3" | "s" | "z" => Ok(()),
_ => Err("Argument to -O must be one of: 0, 1, 2, 3, s, or z".to_owned()),
}
}
pub fn main() {
let args = App::new("ante")
.version("0.1.1")
.author("Jake Fecher <[email protected]>")
.about("Compiler for the Ante programming language")
.arg(Arg::with_name("lex").long("lex").help("Lex the file and output the resulting list of tokens"))
.arg(Arg::with_name("parse").long("parse").help("Parse the file and output the resulting Ast"))
.arg(Arg::with_name("check").long("check").help("Check the file for errors without compiling"))
.arg(Arg::with_name("run").long("run").help("Run the resulting binary"))
|
.arg(Arg::with_name("show-types").long("show-types").help("Print out the type of each definition"))
.arg(Arg::with_name("show-lifetimes").long("show-lifetimes").help("Print out the input file annotated with inferred lifetimes of heap allocations"))
.arg(Arg::with_name("file").help("The file to compile").required(true))
.get_matches();
// Setup the cache and read from the first file
let filename = Path::new(args.value_of("file").unwrap());
let file = expect!(File::open(filename), "Could not open file {}\n", filename.display());
let mut cache = ModuleCache::new(filename.parent().unwrap());
let mut reader = BufReader::new(file);
let mut contents = String::new();
expect!(reader.read_to_string(&mut contents), "Failed to read {} into a string\n", filename.display());
error::color_output(!args.is_present("no-color"));
util::timing::time_passes(args.is_present("show-time"));
// Phase 1: Lexing
util::timing::start_time("Lexing");
let tokens = Lexer::new(filename, &contents).collect::<Vec<_>>();
if args.is_present("lex") {
tokens.iter().for_each(|(token, _)| println!("{}", token));
return;
}
// Phase 2: Parsing
util::timing::start_time("Parsing");
let root = expect!(parser::parse(&tokens), "");
if args.is_present("parse") {
println!("{}", root);
return;
}
// Phase 3: Name resolution
// Timing for name resolution is within the start method to
// break up the declare and define passes
expect!(NameResolver::start(root, &mut cache), "");
// Phase 4: Type inference
util::timing::start_time("Type Inference");
let ast = cache.parse_trees.get_mut(0).unwrap();
types::typechecker::infer_ast(ast, &mut cache);
if args.is_present("show-types") {
print_definition_types(&cache);
}
if args.is_present("check") {
return;
}
// Phase 5: Lifetime inference
util::timing::start_time("Lifetime Inference");
lifetimes::infer(ast, &mut cache);
if args.is_present("show-lifetimes") {
println!("{}", ast);
}
// Phase 6: Codegen
if error::get_error_count() == 0 {
llvm::run(&filename, ast, &mut cache,
args.is_present("emit-llvm"),
args.is_present("run"),
args.is_present("delete-binary"),
args.value_of("O").unwrap());
}
// Print out the time each compiler pass took to complete if the --show-time flag was passed
util::timing::show_timings();
}
|
.arg(Arg::with_name("O").short("O").value_name("level").default_value("0").validator(validate_opt_argument).help("Sets the current optimization level from 0 (no optimization) to 3 (aggressive optimization). Set to s or z to optimize for size."))
.arg(Arg::with_name("no-color").long("no-color").help("Use plaintext and an indicator line instead of color for pointing out error locations"))
.arg(Arg::with_name("emit-llvm").long("emit-llvm").help("Print out the LLVM-IR of the compiled program"))
.arg(Arg::with_name("delete-binary").long("delete-binary").help("Delete the resulting binary after compiling"))
.arg(Arg::with_name("show-time").long("show-time").help("Print out the time each compiler pass takes for the given program"))
|
random_line_split
|
exports.rs
|
use crate::ty;
use rustc_data_structures::fx::FxHashMap;
use rustc_hir::def::Res;
use rustc_hir::def_id::LocalDefId;
use rustc_macros::HashStable;
use rustc_span::symbol::Ident;
use rustc_span::Span;
use std::fmt::Debug;
/// This is the replacement export map. It maps a module to all of the exports
/// within.
pub type ExportMap<Id> = FxHashMap<LocalDefId, Vec<Export<Id>>>;
#[derive(Copy, Clone, Debug, TyEncodable, TyDecodable, HashStable)]
pub struct Export<Id> {
/// The name of the target.
pub ident: Ident,
/// The resolution of the target.
pub res: Res<Id>,
/// The span of the target.
pub span: Span,
/// The visibility of the export.
/// We include non-`pub` exports for hygienic macros that get used from extern crates.
pub vis: ty::Visibility,
}
impl<Id> Export<Id> {
pub fn map_id<R>(self, map: impl FnMut(Id) -> R) -> Export<R>
|
}
|
{
Export { ident: self.ident, res: self.res.map_id(map), span: self.span, vis: self.vis }
}
|
identifier_body
|
exports.rs
|
use crate::ty;
use rustc_data_structures::fx::FxHashMap;
use rustc_hir::def::Res;
use rustc_hir::def_id::LocalDefId;
use rustc_macros::HashStable;
use rustc_span::symbol::Ident;
|
/// This is the replacement export map. It maps a module to all of the exports
/// within.
pub type ExportMap<Id> = FxHashMap<LocalDefId, Vec<Export<Id>>>;
#[derive(Copy, Clone, Debug, TyEncodable, TyDecodable, HashStable)]
pub struct Export<Id> {
/// The name of the target.
pub ident: Ident,
/// The resolution of the target.
pub res: Res<Id>,
/// The span of the target.
pub span: Span,
/// The visibility of the export.
/// We include non-`pub` exports for hygienic macros that get used from extern crates.
pub vis: ty::Visibility,
}
impl<Id> Export<Id> {
pub fn map_id<R>(self, map: impl FnMut(Id) -> R) -> Export<R> {
Export { ident: self.ident, res: self.res.map_id(map), span: self.span, vis: self.vis }
}
}
|
use rustc_span::Span;
use std::fmt::Debug;
|
random_line_split
|
exports.rs
|
use crate::ty;
use rustc_data_structures::fx::FxHashMap;
use rustc_hir::def::Res;
use rustc_hir::def_id::LocalDefId;
use rustc_macros::HashStable;
use rustc_span::symbol::Ident;
use rustc_span::Span;
use std::fmt::Debug;
/// This is the replacement export map. It maps a module to all of the exports
/// within.
pub type ExportMap<Id> = FxHashMap<LocalDefId, Vec<Export<Id>>>;
#[derive(Copy, Clone, Debug, TyEncodable, TyDecodable, HashStable)]
pub struct
|
<Id> {
/// The name of the target.
pub ident: Ident,
/// The resolution of the target.
pub res: Res<Id>,
/// The span of the target.
pub span: Span,
/// The visibility of the export.
/// We include non-`pub` exports for hygienic macros that get used from extern crates.
pub vis: ty::Visibility,
}
impl<Id> Export<Id> {
pub fn map_id<R>(self, map: impl FnMut(Id) -> R) -> Export<R> {
Export { ident: self.ident, res: self.res.map_id(map), span: self.span, vis: self.vis }
}
}
|
Export
|
identifier_name
|
simple.rs
|
pub fn
|
() {
let a = true;
let b = false;
if a {
print!("a");
} else if b {
print!("b");
} else {
print!("c")
}
let mut a: int = 0;
let b: bool = true;
let c: bool = false;
let _numbers = [1, 2, 3];
let t = _numbers[a..];
// As a naked if
if b || c {
a = 1;
}
a << 32 > a << 16;
a >> 32 < a >> 16;
a >>= 1;
println!("{}", a);
// As an expression
a = if!c {
10
} else {
20
};
'foo: loop {
println!("{}", a);
break 'foo;
}
println!("{}", a);
// Tuple expressions. They are not at all ambiguious.
(0,);
(0.0, 4.5);
("a", 4u, true);
// Tuple destructuring let with a statement block!
let (_cap, _cap_name, _oldflags) = {
(1, 2, 3)
};
}
|
main
|
identifier_name
|
simple.rs
|
pub fn main() {
let a = true;
let b = false;
if a {
print!("a");
} else if b {
print!("b");
} else {
print!("c")
}
let mut a: int = 0;
let b: bool = true;
let c: bool = false;
let _numbers = [1, 2, 3];
let t = _numbers[a..];
// As a naked if
if b || c {
a = 1;
}
a << 32 > a << 16;
a >> 32 < a >> 16;
a >>= 1;
println!("{}", a);
// As an expression
a = if!c {
10
} else {
20
};
'foo: loop {
println!("{}", a);
break 'foo;
|
// Tuple expressions. They are not at all ambiguious.
(0,);
(0.0, 4.5);
("a", 4u, true);
// Tuple destructuring let with a statement block!
let (_cap, _cap_name, _oldflags) = {
(1, 2, 3)
};
}
|
}
println!("{}", a);
|
random_line_split
|
simple.rs
|
pub fn main()
|
a = 1;
}
a << 32 > a << 16;
a >> 32 < a >> 16;
a >>= 1;
println!("{}", a);
// As an expression
a = if!c {
10
} else {
20
};
'foo: loop {
println!("{}", a);
break 'foo;
}
println!("{}", a);
// Tuple expressions. They are not at all ambiguious.
(0,);
(0.0, 4.5);
("a", 4u, true);
// Tuple destructuring let with a statement block!
let (_cap, _cap_name, _oldflags) = {
(1, 2, 3)
};
}
|
{
let a = true;
let b = false;
if a {
print!("a");
} else if b {
print!("b");
} else {
print!("c")
}
let mut a: int = 0;
let b: bool = true;
let c: bool = false;
let _numbers = [1, 2, 3];
let t = _numbers[a..];
// As a naked if
if b || c {
|
identifier_body
|
simple.rs
|
pub fn main() {
let a = true;
let b = false;
if a {
print!("a");
} else if b
|
else {
print!("c")
}
let mut a: int = 0;
let b: bool = true;
let c: bool = false;
let _numbers = [1, 2, 3];
let t = _numbers[a..];
// As a naked if
if b || c {
a = 1;
}
a << 32 > a << 16;
a >> 32 < a >> 16;
a >>= 1;
println!("{}", a);
// As an expression
a = if!c {
10
} else {
20
};
'foo: loop {
println!("{}", a);
break 'foo;
}
println!("{}", a);
// Tuple expressions. They are not at all ambiguious.
(0,);
(0.0, 4.5);
("a", 4u, true);
// Tuple destructuring let with a statement block!
let (_cap, _cap_name, _oldflags) = {
(1, 2, 3)
};
}
|
{
print!("b");
}
|
conditional_block
|
sync-rwlock-write-mode-shouldnt-escape.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
|
fn main() {
let x = ~sync::RWLock::new();
let mut y = None;
do x.write_downgrade |write_mode| {
y = Some(write_mode);
}
// Adding this line causes a method unification failure instead
// do (&option::unwrap(y)).write { }
}
|
// error-pattern: lifetime of variable does not enclose its declaration
extern mod extra;
use extra::sync;
|
random_line_split
|
sync-rwlock-write-mode-shouldnt-escape.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// error-pattern: lifetime of variable does not enclose its declaration
extern mod extra;
use extra::sync;
fn main()
|
{
let x = ~sync::RWLock::new();
let mut y = None;
do x.write_downgrade |write_mode| {
y = Some(write_mode);
}
// Adding this line causes a method unification failure instead
// do (&option::unwrap(y)).write { }
}
|
identifier_body
|
|
sync-rwlock-write-mode-shouldnt-escape.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// error-pattern: lifetime of variable does not enclose its declaration
extern mod extra;
use extra::sync;
fn
|
() {
let x = ~sync::RWLock::new();
let mut y = None;
do x.write_downgrade |write_mode| {
y = Some(write_mode);
}
// Adding this line causes a method unification failure instead
// do (&option::unwrap(y)).write { }
}
|
main
|
identifier_name
|
main.rs
|
#![feature(slice_concat_ext)]
extern crate ramp;
extern crate bufstream;
extern crate regex;
use std::sync::mpsc::channel;
use std::thread;
use std::sync::{RwLock, Arc};
use std::net::{TcpListener, TcpStream};
use std::io::{Read, BufRead, Write};
use regex::Regex;
use std::slice::SliceConcatExt;
#[macro_use]
extern crate log;
extern crate env_logger;
use bufstream::BufStream;
use ramp::Database;
type DB = Arc<RwLock<Database>>;
fn main() {
/*
Logger notes:
RUST_LOG=error./main
RUST_LOG=info
http://rust-lang.github.io/log/env_logger/
*/
info!("Starting up RAMP socket server!");
let db = Arc::new(RwLock::new(Database::new()));
let listener = TcpListener::bind("127.0.0.1:6000").unwrap();
info!("Socket bound");
for stream in listener.incoming() {
// info!("connection established, spawning new thread");
let db2 = db.clone();
match stream {
Ok(stream) => {
thread::spawn(move || {
handle_client(stream, db2)
});
},
Err(e) => {}
}
}
info!("Goodbye forever.");
}
fn handle_client(mut stream: TcpStream, mut db: DB ) {
info!("Starting new client thread, creating regexes");
let prepare = Regex::new(r"prepare\s+([:alpha:]+)\s+([:alpha:]+)\s+(\d+)\s?([a-z,]*)").unwrap();
let commit = Regex::new(r"commit (\d+)").unwrap();
let get_version = Regex::new(r"get\s+([:alpha:]+)\s+(\d+)").unwrap();
let get_current = Regex::new(r"get\s+([:alpha:]+)").unwrap();
let mut buf = BufStream::new(stream.try_clone().unwrap());
let mut buffer = String::new();
for line in buf.lines() {
let l = line.unwrap();
println!("Line: {}", l);
if prepare.is_match(&l) {
println!("prepare statement");
let cap = prepare.captures(&l).unwrap();
let key = cap.at(1).unwrap();
let value = cap.at(2).unwrap();
let timestamp = cap.at(3).unwrap().parse::<i64>().unwrap();
println!("Key, value, timestamp, deps: {} : {} : {} : {}",
key, value, timestamp, cap.at(4).unwrap());
let deps : Vec<String> = cap.at(4).unwrap()
.split(",").map(|x| x.to_string())
.collect();
println!("depencencies: {:?}", deps );
{
let mut writer = (*db).write().unwrap();
writer.prepare(key.to_string(),
value.to_string(),
deps,
timestamp);
}
stream.write("PREPARED\n".as_bytes());
continue;
} else if commit.is_match(&l) {
let cap = commit.captures(&l).unwrap();
let timestamp = cap.at(1).unwrap().parse::<i64>().unwrap();
{
let mut writer = (*db).write().unwrap();
writer.commit(timestamp);
}
stream.write("COMMITTED\n".as_bytes());
continue;
} else if get_version.is_match(&l) {
let cap = get_version.captures(&l).unwrap();
let key = cap.at(1).unwrap().to_string();
let timestamp = cap.at(2).unwrap().parse::<i64>().unwrap();
println!("Get version");
{
let mut reader = (*db).read().unwrap();
match reader.get_version(key, timestamp) {
Some(version) => {
let d = version.dependencies.join(",");
let response = format!("{} {} {}\n",
version.value,
version.timestamp,
d);
stream.write(response.as_bytes());
},
None => {
stream.write("NOT FOUND\n".as_bytes());
}
};
}
continue;
} else if get_current.is_match(&l) {
println!("Get current");
let cap = get_current.captures(&l).unwrap();
let key = cap.at(1).unwrap().to_string();
// let key =
{
let mut reader = (*db).read().unwrap();
match reader.get(key) {
Some(version) =>
|
,
None => {
stream.write("NOT FOUND\n".as_bytes());
}
};
}
continue
}
}
}
|
{
let d = version.dependencies.join(",");
let response = format!("{} {} {}\n",
version.value,
version.timestamp,
d);
stream.write(response.as_bytes());
}
|
conditional_block
|
main.rs
|
#![feature(slice_concat_ext)]
extern crate ramp;
extern crate bufstream;
extern crate regex;
use std::sync::mpsc::channel;
use std::thread;
use std::sync::{RwLock, Arc};
use std::net::{TcpListener, TcpStream};
use std::io::{Read, BufRead, Write};
use regex::Regex;
use std::slice::SliceConcatExt;
#[macro_use]
extern crate log;
extern crate env_logger;
use bufstream::BufStream;
use ramp::Database;
type DB = Arc<RwLock<Database>>;
fn
|
() {
/*
Logger notes:
RUST_LOG=error./main
RUST_LOG=info
http://rust-lang.github.io/log/env_logger/
*/
info!("Starting up RAMP socket server!");
let db = Arc::new(RwLock::new(Database::new()));
let listener = TcpListener::bind("127.0.0.1:6000").unwrap();
info!("Socket bound");
for stream in listener.incoming() {
// info!("connection established, spawning new thread");
let db2 = db.clone();
match stream {
Ok(stream) => {
thread::spawn(move || {
handle_client(stream, db2)
});
},
Err(e) => {}
}
}
info!("Goodbye forever.");
}
fn handle_client(mut stream: TcpStream, mut db: DB ) {
info!("Starting new client thread, creating regexes");
let prepare = Regex::new(r"prepare\s+([:alpha:]+)\s+([:alpha:]+)\s+(\d+)\s?([a-z,]*)").unwrap();
let commit = Regex::new(r"commit (\d+)").unwrap();
let get_version = Regex::new(r"get\s+([:alpha:]+)\s+(\d+)").unwrap();
let get_current = Regex::new(r"get\s+([:alpha:]+)").unwrap();
let mut buf = BufStream::new(stream.try_clone().unwrap());
let mut buffer = String::new();
for line in buf.lines() {
let l = line.unwrap();
println!("Line: {}", l);
if prepare.is_match(&l) {
println!("prepare statement");
let cap = prepare.captures(&l).unwrap();
let key = cap.at(1).unwrap();
let value = cap.at(2).unwrap();
let timestamp = cap.at(3).unwrap().parse::<i64>().unwrap();
println!("Key, value, timestamp, deps: {} : {} : {} : {}",
key, value, timestamp, cap.at(4).unwrap());
let deps : Vec<String> = cap.at(4).unwrap()
.split(",").map(|x| x.to_string())
.collect();
println!("depencencies: {:?}", deps );
{
let mut writer = (*db).write().unwrap();
writer.prepare(key.to_string(),
value.to_string(),
deps,
timestamp);
}
stream.write("PREPARED\n".as_bytes());
continue;
} else if commit.is_match(&l) {
let cap = commit.captures(&l).unwrap();
let timestamp = cap.at(1).unwrap().parse::<i64>().unwrap();
{
let mut writer = (*db).write().unwrap();
writer.commit(timestamp);
}
stream.write("COMMITTED\n".as_bytes());
continue;
} else if get_version.is_match(&l) {
let cap = get_version.captures(&l).unwrap();
let key = cap.at(1).unwrap().to_string();
let timestamp = cap.at(2).unwrap().parse::<i64>().unwrap();
println!("Get version");
{
let mut reader = (*db).read().unwrap();
match reader.get_version(key, timestamp) {
Some(version) => {
let d = version.dependencies.join(",");
let response = format!("{} {} {}\n",
version.value,
version.timestamp,
d);
stream.write(response.as_bytes());
},
None => {
stream.write("NOT FOUND\n".as_bytes());
}
};
}
continue;
} else if get_current.is_match(&l) {
println!("Get current");
let cap = get_current.captures(&l).unwrap();
let key = cap.at(1).unwrap().to_string();
// let key =
{
let mut reader = (*db).read().unwrap();
match reader.get(key) {
Some(version) => {
let d = version.dependencies.join(",");
let response = format!("{} {} {}\n",
version.value,
version.timestamp,
d);
stream.write(response.as_bytes());
},
None => {
stream.write("NOT FOUND\n".as_bytes());
}
};
}
continue
}
}
}
|
main
|
identifier_name
|
main.rs
|
#![feature(slice_concat_ext)]
extern crate ramp;
extern crate bufstream;
extern crate regex;
use std::sync::mpsc::channel;
use std::thread;
use std::sync::{RwLock, Arc};
use std::net::{TcpListener, TcpStream};
use std::io::{Read, BufRead, Write};
use regex::Regex;
use std::slice::SliceConcatExt;
#[macro_use]
extern crate log;
extern crate env_logger;
use bufstream::BufStream;
use ramp::Database;
type DB = Arc<RwLock<Database>>;
fn main()
|
handle_client(stream, db2)
});
},
Err(e) => {}
}
}
info!("Goodbye forever.");
}
fn handle_client(mut stream: TcpStream, mut db: DB ) {
info!("Starting new client thread, creating regexes");
let prepare = Regex::new(r"prepare\s+([:alpha:]+)\s+([:alpha:]+)\s+(\d+)\s?([a-z,]*)").unwrap();
let commit = Regex::new(r"commit (\d+)").unwrap();
let get_version = Regex::new(r"get\s+([:alpha:]+)\s+(\d+)").unwrap();
let get_current = Regex::new(r"get\s+([:alpha:]+)").unwrap();
let mut buf = BufStream::new(stream.try_clone().unwrap());
let mut buffer = String::new();
for line in buf.lines() {
let l = line.unwrap();
println!("Line: {}", l);
if prepare.is_match(&l) {
println!("prepare statement");
let cap = prepare.captures(&l).unwrap();
let key = cap.at(1).unwrap();
let value = cap.at(2).unwrap();
let timestamp = cap.at(3).unwrap().parse::<i64>().unwrap();
println!("Key, value, timestamp, deps: {} : {} : {} : {}",
key, value, timestamp, cap.at(4).unwrap());
let deps : Vec<String> = cap.at(4).unwrap()
.split(",").map(|x| x.to_string())
.collect();
println!("depencencies: {:?}", deps );
{
let mut writer = (*db).write().unwrap();
writer.prepare(key.to_string(),
value.to_string(),
deps,
timestamp);
}
stream.write("PREPARED\n".as_bytes());
continue;
} else if commit.is_match(&l) {
let cap = commit.captures(&l).unwrap();
let timestamp = cap.at(1).unwrap().parse::<i64>().unwrap();
{
let mut writer = (*db).write().unwrap();
writer.commit(timestamp);
}
stream.write("COMMITTED\n".as_bytes());
continue;
} else if get_version.is_match(&l) {
let cap = get_version.captures(&l).unwrap();
let key = cap.at(1).unwrap().to_string();
let timestamp = cap.at(2).unwrap().parse::<i64>().unwrap();
println!("Get version");
{
let mut reader = (*db).read().unwrap();
match reader.get_version(key, timestamp) {
Some(version) => {
let d = version.dependencies.join(",");
let response = format!("{} {} {}\n",
version.value,
version.timestamp,
d);
stream.write(response.as_bytes());
},
None => {
stream.write("NOT FOUND\n".as_bytes());
}
};
}
continue;
} else if get_current.is_match(&l) {
println!("Get current");
let cap = get_current.captures(&l).unwrap();
let key = cap.at(1).unwrap().to_string();
// let key =
{
let mut reader = (*db).read().unwrap();
match reader.get(key) {
Some(version) => {
let d = version.dependencies.join(",");
let response = format!("{} {} {}\n",
version.value,
version.timestamp,
d);
stream.write(response.as_bytes());
},
None => {
stream.write("NOT FOUND\n".as_bytes());
}
};
}
continue
}
}
}
|
{
/*
Logger notes:
RUST_LOG=error ./main
RUST_LOG=info
http://rust-lang.github.io/log/env_logger/
*/
info!("Starting up RAMP socket server!");
let db = Arc::new(RwLock::new(Database::new()));
let listener = TcpListener::bind("127.0.0.1:6000").unwrap();
info!("Socket bound");
for stream in listener.incoming() {
// info!("connection established, spawning new thread");
let db2 = db.clone();
match stream {
Ok(stream) => {
thread::spawn(move || {
|
identifier_body
|
main.rs
|
#![feature(slice_concat_ext)]
extern crate ramp;
extern crate bufstream;
extern crate regex;
use std::sync::mpsc::channel;
use std::thread;
use std::sync::{RwLock, Arc};
use std::net::{TcpListener, TcpStream};
use std::io::{Read, BufRead, Write};
use regex::Regex;
use std::slice::SliceConcatExt;
#[macro_use]
extern crate log;
extern crate env_logger;
use bufstream::BufStream;
use ramp::Database;
type DB = Arc<RwLock<Database>>;
fn main() {
/*
Logger notes:
RUST_LOG=error./main
RUST_LOG=info
http://rust-lang.github.io/log/env_logger/
*/
info!("Starting up RAMP socket server!");
let db = Arc::new(RwLock::new(Database::new()));
let listener = TcpListener::bind("127.0.0.1:6000").unwrap();
info!("Socket bound");
for stream in listener.incoming() {
// info!("connection established, spawning new thread");
let db2 = db.clone();
match stream {
Ok(stream) => {
thread::spawn(move || {
handle_client(stream, db2)
});
},
Err(e) => {}
}
}
info!("Goodbye forever.");
}
fn handle_client(mut stream: TcpStream, mut db: DB ) {
info!("Starting new client thread, creating regexes");
let prepare = Regex::new(r"prepare\s+([:alpha:]+)\s+([:alpha:]+)\s+(\d+)\s?([a-z,]*)").unwrap();
let commit = Regex::new(r"commit (\d+)").unwrap();
let get_version = Regex::new(r"get\s+([:alpha:]+)\s+(\d+)").unwrap();
let get_current = Regex::new(r"get\s+([:alpha:]+)").unwrap();
let mut buf = BufStream::new(stream.try_clone().unwrap());
let mut buffer = String::new();
for line in buf.lines() {
let l = line.unwrap();
println!("Line: {}", l);
if prepare.is_match(&l) {
println!("prepare statement");
let cap = prepare.captures(&l).unwrap();
let key = cap.at(1).unwrap();
let value = cap.at(2).unwrap();
let timestamp = cap.at(3).unwrap().parse::<i64>().unwrap();
println!("Key, value, timestamp, deps: {} : {} : {} : {}",
key, value, timestamp, cap.at(4).unwrap());
let deps : Vec<String> = cap.at(4).unwrap()
.split(",").map(|x| x.to_string())
.collect();
println!("depencencies: {:?}", deps );
{
let mut writer = (*db).write().unwrap();
writer.prepare(key.to_string(),
value.to_string(),
deps,
timestamp);
}
stream.write("PREPARED\n".as_bytes());
continue;
} else if commit.is_match(&l) {
let cap = commit.captures(&l).unwrap();
let timestamp = cap.at(1).unwrap().parse::<i64>().unwrap();
{
let mut writer = (*db).write().unwrap();
writer.commit(timestamp);
}
stream.write("COMMITTED\n".as_bytes());
continue;
} else if get_version.is_match(&l) {
let cap = get_version.captures(&l).unwrap();
let key = cap.at(1).unwrap().to_string();
let timestamp = cap.at(2).unwrap().parse::<i64>().unwrap();
println!("Get version");
{
let mut reader = (*db).read().unwrap();
match reader.get_version(key, timestamp) {
Some(version) => {
let d = version.dependencies.join(",");
let response = format!("{} {} {}\n",
version.value,
version.timestamp,
d);
stream.write(response.as_bytes());
},
None => {
stream.write("NOT FOUND\n".as_bytes());
}
};
}
continue;
} else if get_current.is_match(&l) {
println!("Get current");
let cap = get_current.captures(&l).unwrap();
let key = cap.at(1).unwrap().to_string();
// let key =
{
let mut reader = (*db).read().unwrap();
match reader.get(key) {
Some(version) => {
let d = version.dependencies.join(",");
let response = format!("{} {} {}\n",
version.value,
version.timestamp,
|
d);
stream.write(response.as_bytes());
},
None => {
stream.write("NOT FOUND\n".as_bytes());
}
};
}
continue
}
}
}
|
random_line_split
|
|
lib.rs
|
// This file is part of rust-web/twig
//
// For the copyright and license information, please view the LICENSE
// file that was distributed with this source code.
//! Twig library for rust.
//!
//! # Examples
//!
//! ```
//! use twig::engine::Setup;
//! use twig::loader;
//! use twig::runtime::Runtime;
//! use twig::template::api::Template;
//!
//! let mut loader = loader::array::Array::default();
//! loader.set_template("greetings","Hello {{name}}!");
//!
//! let mut engine = Setup::default().engine().unwrap();
//! engine.set_loader(Box::new(loader));
//!
//! let mut runtime = Runtime::default();
|
//! let compiled = engine.load_template("greetings", None).unwrap();
//! assert_eq!(&compiled.render(&runtime).unwrap(), "Hello world!")
//! ```
extern crate regex;
#[macro_use]pub mod api;
pub mod engine;
pub mod runtime;
pub mod loader;
pub mod template;
pub mod extension;
pub use engine::Engine;
pub use engine::Setup;
|
//! runtime.set("name", "world");
//!
|
random_line_split
|
msgsend-ring-mutex-arcs.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This test creates a bunch of tasks that simultaneously send to each
// other in a ring. The messages should all be basically
// independent.
// This is like msgsend-ring-pipes but adapted to use Arcs.
// This also serves as a pipes test, because Arcs are implemented with pipes.
// no-pretty-expanded FIXME #15189
// ignore-lexer-test FIXME #15679
use std::os;
use std::sync::{Arc, Future, Mutex, Condvar};
use std::time::Duration;
use std::uint;
// A poor man's pipe.
type pipe = Arc<(Mutex<Vec<uint>>, Condvar)>;
fn send(p: &pipe, msg: uint) {
let &(ref lock, ref cond) = &**p;
let mut arr = lock.lock().unwrap();
arr.push(msg);
cond.notify_one();
}
fn recv(p: &pipe) -> uint {
let &(ref lock, ref cond) = &**p;
let mut arr = lock.lock().unwrap();
while arr.is_empty() {
arr = cond.wait(arr).unwrap();
}
arr.pop().unwrap()
}
fn init() -> (pipe,pipe) {
let m = Arc::new((Mutex::new(Vec::new()), Condvar::new()));
((&m).clone(), m)
}
fn thread_ring(i: uint, count: uint, num_chan: pipe, num_port: pipe) {
let mut num_chan = Some(num_chan);
let mut num_port = Some(num_port);
// Send/Receive lots of messages.
for j in range(0u, count) {
//println!("task %?, iter %?", i, j);
let num_chan2 = num_chan.take().unwrap();
let num_port2 = num_port.take().unwrap();
send(&num_chan2, i * j);
num_chan = Some(num_chan2);
let _n = recv(&num_port2);
//log(error, _n);
num_port = Some(num_port2);
};
}
fn main()
|
let mut futures = Vec::new();
for i in range(1u, num_tasks) {
//println!("spawning %?", i);
let (new_chan, num_port) = init();
let num_chan_2 = num_chan.clone();
let new_future = Future::spawn(move|| {
thread_ring(i, msg_per_task, num_chan_2, num_port)
});
futures.push(new_future);
num_chan = new_chan;
};
// do our iteration
thread_ring(0, msg_per_task, num_chan, num_port);
// synchronize
for f in futures.iter_mut() {
f.get()
}
});
// all done, report stats.
let num_msgs = num_tasks * msg_per_task;
let rate = (num_msgs as f64) / (dur.num_milliseconds() as f64);
println!("Sent {} messages in {} ms", num_msgs, dur.num_milliseconds());
println!(" {} messages / second", rate / 1000.0);
println!(" {} μs / message", 1000000. / rate / 1000.0);
}
|
{
let args = os::args();
let args = if os::getenv("RUST_BENCH").is_some() {
vec!("".to_string(), "100".to_string(), "10000".to_string())
} else if args.len() <= 1u {
vec!("".to_string(), "10".to_string(), "100".to_string())
} else {
args.clone().into_iter().collect()
};
let num_tasks = args[1].parse::<uint>().unwrap();
let msg_per_task = args[2].parse::<uint>().unwrap();
let (mut num_chan, num_port) = init();
let mut p = Some((num_chan, num_port));
let dur = Duration::span(|| {
let (mut num_chan, num_port) = p.take().unwrap();
// create the ring
|
identifier_body
|
msgsend-ring-mutex-arcs.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This test creates a bunch of tasks that simultaneously send to each
// other in a ring. The messages should all be basically
// independent.
// This is like msgsend-ring-pipes but adapted to use Arcs.
// This also serves as a pipes test, because Arcs are implemented with pipes.
// no-pretty-expanded FIXME #15189
// ignore-lexer-test FIXME #15679
use std::os;
use std::sync::{Arc, Future, Mutex, Condvar};
use std::time::Duration;
use std::uint;
// A poor man's pipe.
type pipe = Arc<(Mutex<Vec<uint>>, Condvar)>;
fn send(p: &pipe, msg: uint) {
let &(ref lock, ref cond) = &**p;
let mut arr = lock.lock().unwrap();
arr.push(msg);
cond.notify_one();
}
fn recv(p: &pipe) -> uint {
let &(ref lock, ref cond) = &**p;
let mut arr = lock.lock().unwrap();
while arr.is_empty() {
arr = cond.wait(arr).unwrap();
}
arr.pop().unwrap()
}
fn init() -> (pipe,pipe) {
let m = Arc::new((Mutex::new(Vec::new()), Condvar::new()));
((&m).clone(), m)
}
fn thread_ring(i: uint, count: uint, num_chan: pipe, num_port: pipe) {
let mut num_chan = Some(num_chan);
let mut num_port = Some(num_port);
// Send/Receive lots of messages.
for j in range(0u, count) {
//println!("task %?, iter %?", i, j);
let num_chan2 = num_chan.take().unwrap();
let num_port2 = num_port.take().unwrap();
send(&num_chan2, i * j);
num_chan = Some(num_chan2);
let _n = recv(&num_port2);
//log(error, _n);
num_port = Some(num_port2);
};
}
fn
|
() {
let args = os::args();
let args = if os::getenv("RUST_BENCH").is_some() {
vec!("".to_string(), "100".to_string(), "10000".to_string())
} else if args.len() <= 1u {
vec!("".to_string(), "10".to_string(), "100".to_string())
} else {
args.clone().into_iter().collect()
};
let num_tasks = args[1].parse::<uint>().unwrap();
let msg_per_task = args[2].parse::<uint>().unwrap();
let (mut num_chan, num_port) = init();
let mut p = Some((num_chan, num_port));
let dur = Duration::span(|| {
let (mut num_chan, num_port) = p.take().unwrap();
// create the ring
let mut futures = Vec::new();
for i in range(1u, num_tasks) {
//println!("spawning %?", i);
let (new_chan, num_port) = init();
let num_chan_2 = num_chan.clone();
let new_future = Future::spawn(move|| {
thread_ring(i, msg_per_task, num_chan_2, num_port)
});
futures.push(new_future);
num_chan = new_chan;
};
// do our iteration
thread_ring(0, msg_per_task, num_chan, num_port);
// synchronize
for f in futures.iter_mut() {
f.get()
}
});
// all done, report stats.
let num_msgs = num_tasks * msg_per_task;
let rate = (num_msgs as f64) / (dur.num_milliseconds() as f64);
println!("Sent {} messages in {} ms", num_msgs, dur.num_milliseconds());
println!(" {} messages / second", rate / 1000.0);
println!(" {} μs / message", 1000000. / rate / 1000.0);
}
|
main
|
identifier_name
|
msgsend-ring-mutex-arcs.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This test creates a bunch of tasks that simultaneously send to each
// other in a ring. The messages should all be basically
// independent.
// This is like msgsend-ring-pipes but adapted to use Arcs.
// This also serves as a pipes test, because Arcs are implemented with pipes.
// no-pretty-expanded FIXME #15189
// ignore-lexer-test FIXME #15679
use std::os;
use std::sync::{Arc, Future, Mutex, Condvar};
use std::time::Duration;
use std::uint;
// A poor man's pipe.
type pipe = Arc<(Mutex<Vec<uint>>, Condvar)>;
fn send(p: &pipe, msg: uint) {
let &(ref lock, ref cond) = &**p;
let mut arr = lock.lock().unwrap();
arr.push(msg);
cond.notify_one();
}
fn recv(p: &pipe) -> uint {
let &(ref lock, ref cond) = &**p;
let mut arr = lock.lock().unwrap();
while arr.is_empty() {
arr = cond.wait(arr).unwrap();
}
arr.pop().unwrap()
}
fn init() -> (pipe,pipe) {
let m = Arc::new((Mutex::new(Vec::new()), Condvar::new()));
((&m).clone(), m)
}
fn thread_ring(i: uint, count: uint, num_chan: pipe, num_port: pipe) {
let mut num_chan = Some(num_chan);
let mut num_port = Some(num_port);
// Send/Receive lots of messages.
for j in range(0u, count) {
//println!("task %?, iter %?", i, j);
let num_chan2 = num_chan.take().unwrap();
let num_port2 = num_port.take().unwrap();
send(&num_chan2, i * j);
num_chan = Some(num_chan2);
let _n = recv(&num_port2);
//log(error, _n);
num_port = Some(num_port2);
};
}
fn main() {
let args = os::args();
let args = if os::getenv("RUST_BENCH").is_some() {
vec!("".to_string(), "100".to_string(), "10000".to_string())
} else if args.len() <= 1u {
vec!("".to_string(), "10".to_string(), "100".to_string())
} else {
args.clone().into_iter().collect()
};
let num_tasks = args[1].parse::<uint>().unwrap();
let msg_per_task = args[2].parse::<uint>().unwrap();
let (mut num_chan, num_port) = init();
let mut p = Some((num_chan, num_port));
let dur = Duration::span(|| {
let (mut num_chan, num_port) = p.take().unwrap();
// create the ring
let mut futures = Vec::new();
for i in range(1u, num_tasks) {
//println!("spawning %?", i);
|
});
futures.push(new_future);
num_chan = new_chan;
};
// do our iteration
thread_ring(0, msg_per_task, num_chan, num_port);
// synchronize
for f in futures.iter_mut() {
f.get()
}
});
// all done, report stats.
let num_msgs = num_tasks * msg_per_task;
let rate = (num_msgs as f64) / (dur.num_milliseconds() as f64);
println!("Sent {} messages in {} ms", num_msgs, dur.num_milliseconds());
println!(" {} messages / second", rate / 1000.0);
println!(" {} μs / message", 1000000. / rate / 1000.0);
}
|
let (new_chan, num_port) = init();
let num_chan_2 = num_chan.clone();
let new_future = Future::spawn(move|| {
thread_ring(i, msg_per_task, num_chan_2, num_port)
|
random_line_split
|
lib.rs
|
//! A very simple HTTP server which responds with the plain text "Hello, World!" to every request.
#![crate_name = "ruster"]
extern crate time;
extern crate http;
use std::io::net::ip::{SocketAddr, Ipv4Addr};
use std::io::Writer;
use http::server::{Config, Server, Request, ResponseWriter};
use http::headers::content_type::MediaType;
#[deriving(Clone)]
struct HelloWorldServer;
|
impl Server for HelloWorldServer {
fn get_config(&self) -> Config {
Config { bind_address: SocketAddr { ip: Ipv4Addr(127, 0, 0, 1), port: 8001 } }
}
fn handle_request(&self, _r: Request, w: &mut ResponseWriter) {
w.headers.date = Some(time::now_utc());
w.headers.content_length = Some(14);
w.headers.content_type = Some(MediaType {
type_: String::from_str("text"),
subtype: String::from_str("plain"),
parameters: vec!((String::from_str("charset"), String::from_str("UTF-8")))
});
w.headers.server = Some(String::from_str("Example"));
w.write(b"Hello, World!\n").unwrap();
}
}
|
random_line_split
|
|
lib.rs
|
//! A very simple HTTP server which responds with the plain text "Hello, World!" to every request.
#![crate_name = "ruster"]
extern crate time;
extern crate http;
use std::io::net::ip::{SocketAddr, Ipv4Addr};
use std::io::Writer;
use http::server::{Config, Server, Request, ResponseWriter};
use http::headers::content_type::MediaType;
#[deriving(Clone)]
struct HelloWorldServer;
impl Server for HelloWorldServer {
fn get_config(&self) -> Config {
Config { bind_address: SocketAddr { ip: Ipv4Addr(127, 0, 0, 1), port: 8001 } }
}
fn handle_request(&self, _r: Request, w: &mut ResponseWriter)
|
}
|
{
w.headers.date = Some(time::now_utc());
w.headers.content_length = Some(14);
w.headers.content_type = Some(MediaType {
type_: String::from_str("text"),
subtype: String::from_str("plain"),
parameters: vec!((String::from_str("charset"), String::from_str("UTF-8")))
});
w.headers.server = Some(String::from_str("Example"));
w.write(b"Hello, World!\n").unwrap();
}
|
identifier_body
|
lib.rs
|
//! A very simple HTTP server which responds with the plain text "Hello, World!" to every request.
#![crate_name = "ruster"]
extern crate time;
extern crate http;
use std::io::net::ip::{SocketAddr, Ipv4Addr};
use std::io::Writer;
use http::server::{Config, Server, Request, ResponseWriter};
use http::headers::content_type::MediaType;
#[deriving(Clone)]
struct HelloWorldServer;
impl Server for HelloWorldServer {
fn
|
(&self) -> Config {
Config { bind_address: SocketAddr { ip: Ipv4Addr(127, 0, 0, 1), port: 8001 } }
}
fn handle_request(&self, _r: Request, w: &mut ResponseWriter) {
w.headers.date = Some(time::now_utc());
w.headers.content_length = Some(14);
w.headers.content_type = Some(MediaType {
type_: String::from_str("text"),
subtype: String::from_str("plain"),
parameters: vec!((String::from_str("charset"), String::from_str("UTF-8")))
});
w.headers.server = Some(String::from_str("Example"));
w.write(b"Hello, World!\n").unwrap();
}
}
|
get_config
|
identifier_name
|
deadline.rs
|
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::io;
use std::time::Duration;
use futures::{Future, Select, Poll, Async};
use tokio_core::reactor::{Handle, Timeout};
type DeadlineBox<F> = Box<Future<Item = DeadlineStatus<<F as Future>::Item>, Error = <F as Future>::Error> + Send>;
/// Complete a passed future or fail if it is not completed within timeout.
pub fn deadline<F, T>(duration: Duration, handle: &Handle, future: F) -> Result<Deadline<F>, io::Error>
where F: Future<Item = T, Error = io::Error> + Send +'static, T:'static {
let timeout: DeadlineBox<F> = Box::new(Timeout::new(duration, handle)?.map(|_| DeadlineStatus::Timeout));
let future: DeadlineBox<F> = Box::new(future.map(DeadlineStatus::Meet));
let deadline = Deadline {
future: timeout.select(future),
};
Ok(deadline)
}
/// Deadline future completion status.
#[derive(Debug, PartialEq)]
pub enum DeadlineStatus<T> {
/// Completed a future.
Meet(T),
/// Faled with timeout.
Timeout,
}
/// Future, which waits for passed future completion within given period, or fails with timeout.
pub struct Deadline<F> where F: Future {
future: Select<DeadlineBox<F>, DeadlineBox<F>>,
}
impl<F, T> Future for Deadline<F> where F: Future<Item = T, Error = io::Error> {
type Item = DeadlineStatus<T>;
type Error = io::Error;
fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
match self.future.poll() {
Ok(Async::Ready((result, _other))) => Ok(Async::Ready(result)),
|
}
#[cfg(test)]
mod tests {
use std::io;
use std::time::Duration;
use futures::{Future, empty, done};
use tokio_core::reactor::Core;
use super::{deadline, DeadlineStatus};
//#[test] TODO: not working
fn _deadline_timeout_works() {
let mut core = Core::new().unwrap();
let deadline = deadline(Duration::from_millis(1), &core.handle(), empty::<(), io::Error>()).unwrap();
core.turn(Some(Duration::from_millis(3)));
assert_eq!(deadline.wait().unwrap(), DeadlineStatus::Timeout);
}
#[test]
fn deadline_result_works() {
let mut core = Core::new().unwrap();
let deadline = deadline(Duration::from_millis(1000), &core.handle(), done(Ok(()))).unwrap();
core.turn(Some(Duration::from_millis(3)));
assert_eq!(deadline.wait().unwrap(), DeadlineStatus::Meet(()));
}
}
|
Ok(Async::NotReady) => Ok(Async::NotReady),
Err((err, _other)) => Err(err),
}
}
|
random_line_split
|
deadline.rs
|
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::io;
use std::time::Duration;
use futures::{Future, Select, Poll, Async};
use tokio_core::reactor::{Handle, Timeout};
type DeadlineBox<F> = Box<Future<Item = DeadlineStatus<<F as Future>::Item>, Error = <F as Future>::Error> + Send>;
/// Complete a passed future or fail if it is not completed within timeout.
pub fn deadline<F, T>(duration: Duration, handle: &Handle, future: F) -> Result<Deadline<F>, io::Error>
where F: Future<Item = T, Error = io::Error> + Send +'static, T:'static
|
/// Deadline future completion status.
#[derive(Debug, PartialEq)]
pub enum DeadlineStatus<T> {
/// Completed a future.
Meet(T),
/// Faled with timeout.
Timeout,
}
/// Future, which waits for passed future completion within given period, or fails with timeout.
pub struct Deadline<F> where F: Future {
future: Select<DeadlineBox<F>, DeadlineBox<F>>,
}
impl<F, T> Future for Deadline<F> where F: Future<Item = T, Error = io::Error> {
type Item = DeadlineStatus<T>;
type Error = io::Error;
fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
match self.future.poll() {
Ok(Async::Ready((result, _other))) => Ok(Async::Ready(result)),
Ok(Async::NotReady) => Ok(Async::NotReady),
Err((err, _other)) => Err(err),
}
}
}
#[cfg(test)]
mod tests {
use std::io;
use std::time::Duration;
use futures::{Future, empty, done};
use tokio_core::reactor::Core;
use super::{deadline, DeadlineStatus};
//#[test] TODO: not working
fn _deadline_timeout_works() {
let mut core = Core::new().unwrap();
let deadline = deadline(Duration::from_millis(1), &core.handle(), empty::<(), io::Error>()).unwrap();
core.turn(Some(Duration::from_millis(3)));
assert_eq!(deadline.wait().unwrap(), DeadlineStatus::Timeout);
}
#[test]
fn deadline_result_works() {
let mut core = Core::new().unwrap();
let deadline = deadline(Duration::from_millis(1000), &core.handle(), done(Ok(()))).unwrap();
core.turn(Some(Duration::from_millis(3)));
assert_eq!(deadline.wait().unwrap(), DeadlineStatus::Meet(()));
}
}
|
{
let timeout: DeadlineBox<F> = Box::new(Timeout::new(duration, handle)?.map(|_| DeadlineStatus::Timeout));
let future: DeadlineBox<F> = Box::new(future.map(DeadlineStatus::Meet));
let deadline = Deadline {
future: timeout.select(future),
};
Ok(deadline)
}
|
identifier_body
|
deadline.rs
|
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::io;
use std::time::Duration;
use futures::{Future, Select, Poll, Async};
use tokio_core::reactor::{Handle, Timeout};
type DeadlineBox<F> = Box<Future<Item = DeadlineStatus<<F as Future>::Item>, Error = <F as Future>::Error> + Send>;
/// Complete a passed future or fail if it is not completed within timeout.
pub fn
|
<F, T>(duration: Duration, handle: &Handle, future: F) -> Result<Deadline<F>, io::Error>
where F: Future<Item = T, Error = io::Error> + Send +'static, T:'static {
let timeout: DeadlineBox<F> = Box::new(Timeout::new(duration, handle)?.map(|_| DeadlineStatus::Timeout));
let future: DeadlineBox<F> = Box::new(future.map(DeadlineStatus::Meet));
let deadline = Deadline {
future: timeout.select(future),
};
Ok(deadline)
}
/// Deadline future completion status.
#[derive(Debug, PartialEq)]
pub enum DeadlineStatus<T> {
/// Completed a future.
Meet(T),
/// Faled with timeout.
Timeout,
}
/// Future, which waits for passed future completion within given period, or fails with timeout.
pub struct Deadline<F> where F: Future {
future: Select<DeadlineBox<F>, DeadlineBox<F>>,
}
impl<F, T> Future for Deadline<F> where F: Future<Item = T, Error = io::Error> {
type Item = DeadlineStatus<T>;
type Error = io::Error;
fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
match self.future.poll() {
Ok(Async::Ready((result, _other))) => Ok(Async::Ready(result)),
Ok(Async::NotReady) => Ok(Async::NotReady),
Err((err, _other)) => Err(err),
}
}
}
#[cfg(test)]
mod tests {
use std::io;
use std::time::Duration;
use futures::{Future, empty, done};
use tokio_core::reactor::Core;
use super::{deadline, DeadlineStatus};
//#[test] TODO: not working
fn _deadline_timeout_works() {
let mut core = Core::new().unwrap();
let deadline = deadline(Duration::from_millis(1), &core.handle(), empty::<(), io::Error>()).unwrap();
core.turn(Some(Duration::from_millis(3)));
assert_eq!(deadline.wait().unwrap(), DeadlineStatus::Timeout);
}
#[test]
fn deadline_result_works() {
let mut core = Core::new().unwrap();
let deadline = deadline(Duration::from_millis(1000), &core.handle(), done(Ok(()))).unwrap();
core.turn(Some(Duration::from_millis(3)));
assert_eq!(deadline.wait().unwrap(), DeadlineStatus::Meet(()));
}
}
|
deadline
|
identifier_name
|
unwind-no-uwtable.rs
|
// run-pass
// needs-unwind
// ignore-windows target requires uwtable
// ignore-wasm32-bare no proper panic=unwind support
// compile-flags: -C panic=unwind -C force-unwind-tables=n
use std::panic::{self, AssertUnwindSafe};
struct Increase<'a>(&'a mut u8);
impl Drop for Increase<'_> {
fn drop(&mut self) {
*self.0 += 1;
}
}
#[inline(never)]
fn unwind() {
panic!();
}
#[inline(never)]
fn increase(count: &mut u8) {
let _increase = Increase(count);
unwind();
}
fn
|
() {
let mut count = 0;
assert!(panic::catch_unwind(AssertUnwindSafe(
#[inline(never)]
|| increase(&mut count)
)).is_err());
assert_eq!(count, 1);
}
|
main
|
identifier_name
|
unwind-no-uwtable.rs
|
// run-pass
// needs-unwind
// ignore-windows target requires uwtable
// ignore-wasm32-bare no proper panic=unwind support
// compile-flags: -C panic=unwind -C force-unwind-tables=n
use std::panic::{self, AssertUnwindSafe};
struct Increase<'a>(&'a mut u8);
impl Drop for Increase<'_> {
fn drop(&mut self) {
*self.0 += 1;
}
}
#[inline(never)]
fn unwind()
|
#[inline(never)]
fn increase(count: &mut u8) {
let _increase = Increase(count);
unwind();
}
fn main() {
let mut count = 0;
assert!(panic::catch_unwind(AssertUnwindSafe(
#[inline(never)]
|| increase(&mut count)
)).is_err());
assert_eq!(count, 1);
}
|
{
panic!();
}
|
identifier_body
|
unwind-no-uwtable.rs
|
// run-pass
// needs-unwind
// ignore-windows target requires uwtable
// ignore-wasm32-bare no proper panic=unwind support
// compile-flags: -C panic=unwind -C force-unwind-tables=n
use std::panic::{self, AssertUnwindSafe};
struct Increase<'a>(&'a mut u8);
impl Drop for Increase<'_> {
fn drop(&mut self) {
*self.0 += 1;
}
}
|
#[inline(never)]
fn increase(count: &mut u8) {
let _increase = Increase(count);
unwind();
}
fn main() {
let mut count = 0;
assert!(panic::catch_unwind(AssertUnwindSafe(
#[inline(never)]
|| increase(&mut count)
)).is_err());
assert_eq!(count, 1);
}
|
#[inline(never)]
fn unwind() {
panic!();
}
|
random_line_split
|
scheduler.rs
|
#![allow(unused)]
extern crate fringe;
use self::fringe::generator::{Generator, Yielder};
use self::fringe::OwnedStack;
use alloc::vec::Vec;
use time::{Duration, Instant};
#[derive(Debug)]
pub struct WaitRequest {
timeout: Option<Instant>,
event: Option<WaitEvent>,
}
#[derive(Debug)]
pub enum WaitResult {
Completed,
TimedOut,
Interrupted,
}
#[derive(Debug)]
struct Thread<'a> {
generator: Generator<'a, WaitResult, WaitRequest, OwnedStack>,
waiting_for: WaitRequest,
interrupted: bool,
}
#[derive(Debug)]
pub struct Scheduler<'a> {
threads: Vec<Thread<'a>>,
index: usize,
}
impl<'a> Scheduler<'a> {
pub fn new() -> Scheduler<'a> {
Scheduler {
threads: Vec::new(),
index: 0,
}
}
pub unsafe fn
|
<F: FnOnce(Io) + Send + 'a>(&mut self, stack_size: usize, f: F) {
let stack = OwnedStack::new(stack_size);
let thread = Thread {
generator: Generator::unsafe_new(stack, move |yielder, _| f(Io(yielder))),
waiting_for: WaitRequest {
timeout: None,
event: None,
},
interrupted: false,
};
self.threads.push(thread)
}
pub fn run(&mut self) {
if self.threads.len() == 0 {
return;
}
let now = Instant::now();
let start_index = self.index;
loop {
self.index = (self.index + 1) % self.threads.len();
let result = {
let thread = &mut self.threads[self.index];
match thread.waiting_for {
_ if thread.interrupted => {
thread.interrupted = false;
thread.generator.resume(WaitResult::Interrupted)
}
WaitRequest {
timeout: Some(instant),
..
}
if now >= instant =>
{
thread.generator.resume(WaitResult::TimedOut)
}
WaitRequest {
event: Some(ref event),
..
}
if event.completed() =>
{
thread.generator.resume(WaitResult::Completed)
}
WaitRequest {
timeout: None,
event: None,
} => thread.generator.resume(WaitResult::Completed),
_ => {
if self.index == start_index {
// We've checked every thread and none of them are runnable.
break;
} else {
continue;
}
}
}
};
match result {
None => {
// The thread has terminated.
self.threads.remove(self.index);
self.index = 0
}
Some(wait_request) => {
// The thread has suspended itself.
self.threads[self.index].waiting_for = wait_request
}
}
break;
}
}
}
#[derive(Debug)]
pub enum WaitEvent {}
impl WaitEvent {
fn completed(&self) -> bool {
match *self {}
}
}
pub type IoResult<T> = Result<T, ()>;
#[derive(Debug)]
pub struct Io<'a>(&'a Yielder<WaitResult, WaitRequest>);
impl<'a> Io<'a> {
pub fn sleep(&mut self, duration: Duration) -> IoResult<()> {
let request = WaitRequest {
timeout: Some(Instant::now() + duration),
event: None,
};
match self.0.suspend(request) {
WaitResult::TimedOut => Ok(()),
WaitResult::Interrupted => Err(()),
_ => unreachable!(),
}
}
}
|
spawn
|
identifier_name
|
scheduler.rs
|
#![allow(unused)]
extern crate fringe;
use self::fringe::generator::{Generator, Yielder};
use self::fringe::OwnedStack;
use alloc::vec::Vec;
use time::{Duration, Instant};
#[derive(Debug)]
pub struct WaitRequest {
timeout: Option<Instant>,
event: Option<WaitEvent>,
}
#[derive(Debug)]
pub enum WaitResult {
Completed,
TimedOut,
Interrupted,
}
#[derive(Debug)]
struct Thread<'a> {
generator: Generator<'a, WaitResult, WaitRequest, OwnedStack>,
waiting_for: WaitRequest,
interrupted: bool,
}
#[derive(Debug)]
pub struct Scheduler<'a> {
threads: Vec<Thread<'a>>,
index: usize,
}
impl<'a> Scheduler<'a> {
pub fn new() -> Scheduler<'a> {
Scheduler {
threads: Vec::new(),
index: 0,
}
}
pub unsafe fn spawn<F: FnOnce(Io) + Send + 'a>(&mut self, stack_size: usize, f: F) {
let stack = OwnedStack::new(stack_size);
let thread = Thread {
generator: Generator::unsafe_new(stack, move |yielder, _| f(Io(yielder))),
waiting_for: WaitRequest {
timeout: None,
event: None,
},
interrupted: false,
};
self.threads.push(thread)
}
pub fn run(&mut self) {
if self.threads.len() == 0 {
return;
}
let now = Instant::now();
let start_index = self.index;
loop {
self.index = (self.index + 1) % self.threads.len();
let result = {
let thread = &mut self.threads[self.index];
match thread.waiting_for {
_ if thread.interrupted => {
thread.interrupted = false;
thread.generator.resume(WaitResult::Interrupted)
}
WaitRequest {
timeout: Some(instant),
..
}
if now >= instant =>
{
|
}
if event.completed() =>
{
thread.generator.resume(WaitResult::Completed)
}
WaitRequest {
timeout: None,
event: None,
} => thread.generator.resume(WaitResult::Completed),
_ => {
if self.index == start_index {
// We've checked every thread and none of them are runnable.
break;
} else {
continue;
}
}
}
};
match result {
None => {
// The thread has terminated.
self.threads.remove(self.index);
self.index = 0
}
Some(wait_request) => {
// The thread has suspended itself.
self.threads[self.index].waiting_for = wait_request
}
}
break;
}
}
}
#[derive(Debug)]
pub enum WaitEvent {}
impl WaitEvent {
fn completed(&self) -> bool {
match *self {}
}
}
pub type IoResult<T> = Result<T, ()>;
#[derive(Debug)]
pub struct Io<'a>(&'a Yielder<WaitResult, WaitRequest>);
impl<'a> Io<'a> {
pub fn sleep(&mut self, duration: Duration) -> IoResult<()> {
let request = WaitRequest {
timeout: Some(Instant::now() + duration),
event: None,
};
match self.0.suspend(request) {
WaitResult::TimedOut => Ok(()),
WaitResult::Interrupted => Err(()),
_ => unreachable!(),
}
}
}
|
thread.generator.resume(WaitResult::TimedOut)
}
WaitRequest {
event: Some(ref event),
..
|
random_line_split
|
lib.rs
|
#![deny(unused)]
#![feature(collections, hash, io, libc, os, path, std_misc, unicode, env, core)]
#![cfg_attr(test, deny(warnings))]
extern crate libc;
extern crate "rustc-serialize" as rustc_serialize;
extern crate regex;
extern crate term;
extern crate time;
#[macro_use] extern crate log;
extern crate curl;
extern crate docopt;
extern crate flate2;
extern crate git2;
extern crate glob;
extern crate semver;
extern crate tar;
extern crate toml;
extern crate url;
#[cfg(test)] extern crate hamcrest;
extern crate registry;
use std::env;
use std::error::Error;
use std::old_io::stdio::{stdout_raw, stderr_raw};
use std::old_io::{self, stdout, stderr};
use rustc_serialize::{Decodable, Encodable};
use rustc_serialize::json::{self, Json};
use docopt::Docopt;
use core::{Shell, MultiShell, ShellConfig};
use term::color::{BLACK, RED};
pub use util::{CargoError, CliError, CliResult, human, Config, ChainError};
pub mod core;
pub mod ops;
pub mod sources;
pub mod util;
pub fn execute_main<T, U, V>(
exec: fn(T, U, &Config) -> CliResult<Option<V>>,
options_first: bool,
usage: &str)
where V: Encodable, T: Decodable, U: Decodable
{
process::<V, _>(|rest, shell| {
call_main(exec, shell, usage, rest, options_first)
});
}
pub fn call_main<T, U, V>(
exec: fn(T, U, &Config) -> CliResult<Option<V>>,
shell: &Config,
usage: &str,
|
let json = try!(json_from_stdin::<U>());
exec(flags, json, shell)
}
pub fn execute_main_without_stdin<T, V>(
exec: fn(T, &Config) -> CliResult<Option<V>>,
options_first: bool,
usage: &str)
where V: Encodable, T: Decodable
{
process::<V, _>(|rest, shell| {
call_main_without_stdin(exec, shell, usage, rest, options_first)
});
}
pub fn call_main_without_stdin<T, V>(
exec: fn(T, &Config) -> CliResult<Option<V>>,
shell: &Config,
usage: &str,
args: &[String],
options_first: bool) -> CliResult<Option<V>>
where V: Encodable, T: Decodable
{
let flags = try!(flags_from_args::<T>(usage, args, options_first));
exec(flags, shell)
}
fn process<V, F>(mut callback: F)
where F: FnMut(&[String], &Config) -> CliResult<Option<V>>,
V: Encodable
{
let mut shell = shell(true);
process_executed((|| {
let config = try!(Config::new(&mut shell));
let args: Vec<_> = try!(env::args_os().map(|s| {
s.into_string().map_err(|s| {
human(format!("invalid unicode in argument: {:?}", s))
})
}).collect());
callback(&args, &config)
})(), &mut shell)
}
pub fn process_executed<T>(result: CliResult<Option<T>>, shell: &mut MultiShell)
where T: Encodable
{
match result {
Err(e) => handle_error(e, shell),
Ok(Some(encodable)) => {
let encoded = json::encode(&encodable).unwrap();
println!("{}", encoded);
}
_ => {}
}
}
pub fn shell(verbose: bool) -> MultiShell {
let tty = stderr_raw().isatty();
let stderr = Box::new(stderr()) as Box<Writer + Send>;
let config = ShellConfig { color: true, verbose: verbose, tty: tty };
let err = Shell::create(stderr, config);
let tty = stdout_raw().isatty();
let stdout = Box::new(stdout()) as Box<Writer + Send>;
let config = ShellConfig { color: true, verbose: verbose, tty: tty };
let out = Shell::create(stdout, config);
MultiShell::new(out, err, verbose)
}
// `output` print variant error strings to either stderr or stdout.
// For fatal errors, print to stderr;
// and for others, e.g. docopt version info, print to stdout.
fn output(err: String, shell: &mut MultiShell, fatal: bool) {
let std_shell = if fatal {shell.err()} else {shell.out()};
let color = if fatal {RED} else {BLACK};
let _ = std_shell.say(err, color);
}
pub fn handle_error(err: CliError, shell: &mut MultiShell) {
debug!("handle_error; err={:?}", err);
let CliError { error, exit_code, unknown } = err;
let fatal = exit_code!= 0; // exit_code == 0 is non-fatal error
let hide = unknown &&!shell.get_verbose();
if hide {
let _ = shell.err().say("An unknown error occurred", RED);
} else {
output(error.to_string(), shell, fatal);
}
if!handle_cause(&error, shell) || hide {
let _ = shell.err().say("\nTo learn more, run the command again \
with --verbose.".to_string(), BLACK);
}
std::env::set_exit_status(exit_code);
}
fn handle_cause(mut cargo_err: &CargoError, shell: &mut MultiShell) -> bool {
let verbose = shell.get_verbose();
let mut err;
loop {
cargo_err = match cargo_err.cargo_cause() {
Some(cause) => cause,
None => { err = cargo_err.cause(); break }
};
if!verbose &&!cargo_err.is_human() { return false }
print(cargo_err.to_string(), shell);
}
loop {
let cause = match err { Some(err) => err, None => return true };
if!verbose { return false }
print(cause.to_string(), shell);
err = cause.cause();
}
fn print(error: String, shell: &mut MultiShell) {
let _ = shell.err().say("\nCaused by:", BLACK);
let _ = shell.err().say(format!(" {}", error), BLACK);
}
}
pub fn version() -> String {
format!("cargo {}", match option_env!("CFG_VERSION") {
Some(s) => s.to_string(),
None => format!("{}.{}.{}{}",
env!("CARGO_PKG_VERSION_MAJOR"),
env!("CARGO_PKG_VERSION_MINOR"),
env!("CARGO_PKG_VERSION_PATCH"),
option_env!("CARGO_PKG_VERSION_PRE").unwrap_or(""))
})
}
fn flags_from_args<'a, T>(usage: &str, args: &[String],
options_first: bool) -> CliResult<T>
where T: Decodable
{
let docopt = Docopt::new(usage).unwrap()
.options_first(options_first)
.argv(args.iter().map(|s| s.as_slice()))
.help(true)
.version(Some(version()));
docopt.decode().map_err(|e| {
let code = if e.fatal() {1} else {0};
CliError::from_error(human(e.to_string()), code)
})
}
fn json_from_stdin<T: Decodable>() -> CliResult<T> {
let mut reader = old_io::stdin();
let input = try!(reader.read_to_string().map_err(|_| {
CliError::new("Standard in did not exist or was not UTF-8", 1)
}));
let json = try!(Json::from_str(&input).map_err(|_| {
CliError::new("Could not parse standard in as JSON", 1)
}));
let mut decoder = json::Decoder::new(json);
Decodable::decode(&mut decoder).map_err(|_| {
CliError::new("Could not process standard in as input", 1)
})
}
|
args: &[String],
options_first: bool) -> CliResult<Option<V>>
where V: Encodable, T: Decodable, U: Decodable
{
let flags = try!(flags_from_args::<T>(usage, args, options_first));
|
random_line_split
|
lib.rs
|
#![deny(unused)]
#![feature(collections, hash, io, libc, os, path, std_misc, unicode, env, core)]
#![cfg_attr(test, deny(warnings))]
extern crate libc;
extern crate "rustc-serialize" as rustc_serialize;
extern crate regex;
extern crate term;
extern crate time;
#[macro_use] extern crate log;
extern crate curl;
extern crate docopt;
extern crate flate2;
extern crate git2;
extern crate glob;
extern crate semver;
extern crate tar;
extern crate toml;
extern crate url;
#[cfg(test)] extern crate hamcrest;
extern crate registry;
use std::env;
use std::error::Error;
use std::old_io::stdio::{stdout_raw, stderr_raw};
use std::old_io::{self, stdout, stderr};
use rustc_serialize::{Decodable, Encodable};
use rustc_serialize::json::{self, Json};
use docopt::Docopt;
use core::{Shell, MultiShell, ShellConfig};
use term::color::{BLACK, RED};
pub use util::{CargoError, CliError, CliResult, human, Config, ChainError};
pub mod core;
pub mod ops;
pub mod sources;
pub mod util;
pub fn execute_main<T, U, V>(
exec: fn(T, U, &Config) -> CliResult<Option<V>>,
options_first: bool,
usage: &str)
where V: Encodable, T: Decodable, U: Decodable
{
process::<V, _>(|rest, shell| {
call_main(exec, shell, usage, rest, options_first)
});
}
pub fn call_main<T, U, V>(
exec: fn(T, U, &Config) -> CliResult<Option<V>>,
shell: &Config,
usage: &str,
args: &[String],
options_first: bool) -> CliResult<Option<V>>
where V: Encodable, T: Decodable, U: Decodable
{
let flags = try!(flags_from_args::<T>(usage, args, options_first));
let json = try!(json_from_stdin::<U>());
exec(flags, json, shell)
}
pub fn execute_main_without_stdin<T, V>(
exec: fn(T, &Config) -> CliResult<Option<V>>,
options_first: bool,
usage: &str)
where V: Encodable, T: Decodable
{
process::<V, _>(|rest, shell| {
call_main_without_stdin(exec, shell, usage, rest, options_first)
});
}
pub fn call_main_without_stdin<T, V>(
exec: fn(T, &Config) -> CliResult<Option<V>>,
shell: &Config,
usage: &str,
args: &[String],
options_first: bool) -> CliResult<Option<V>>
where V: Encodable, T: Decodable
{
let flags = try!(flags_from_args::<T>(usage, args, options_first));
exec(flags, shell)
}
fn process<V, F>(mut callback: F)
where F: FnMut(&[String], &Config) -> CliResult<Option<V>>,
V: Encodable
{
let mut shell = shell(true);
process_executed((|| {
let config = try!(Config::new(&mut shell));
let args: Vec<_> = try!(env::args_os().map(|s| {
s.into_string().map_err(|s| {
human(format!("invalid unicode in argument: {:?}", s))
})
}).collect());
callback(&args, &config)
})(), &mut shell)
}
pub fn process_executed<T>(result: CliResult<Option<T>>, shell: &mut MultiShell)
where T: Encodable
{
match result {
Err(e) => handle_error(e, shell),
Ok(Some(encodable)) => {
let encoded = json::encode(&encodable).unwrap();
println!("{}", encoded);
}
_ => {}
}
}
pub fn shell(verbose: bool) -> MultiShell {
let tty = stderr_raw().isatty();
let stderr = Box::new(stderr()) as Box<Writer + Send>;
let config = ShellConfig { color: true, verbose: verbose, tty: tty };
let err = Shell::create(stderr, config);
let tty = stdout_raw().isatty();
let stdout = Box::new(stdout()) as Box<Writer + Send>;
let config = ShellConfig { color: true, verbose: verbose, tty: tty };
let out = Shell::create(stdout, config);
MultiShell::new(out, err, verbose)
}
// `output` print variant error strings to either stderr or stdout.
// For fatal errors, print to stderr;
// and for others, e.g. docopt version info, print to stdout.
fn output(err: String, shell: &mut MultiShell, fatal: bool) {
let std_shell = if fatal {shell.err()} else {shell.out()};
let color = if fatal {RED} else {BLACK};
let _ = std_shell.say(err, color);
}
pub fn handle_error(err: CliError, shell: &mut MultiShell) {
debug!("handle_error; err={:?}", err);
let CliError { error, exit_code, unknown } = err;
let fatal = exit_code!= 0; // exit_code == 0 is non-fatal error
let hide = unknown &&!shell.get_verbose();
if hide {
let _ = shell.err().say("An unknown error occurred", RED);
} else {
output(error.to_string(), shell, fatal);
}
if!handle_cause(&error, shell) || hide {
let _ = shell.err().say("\nTo learn more, run the command again \
with --verbose.".to_string(), BLACK);
}
std::env::set_exit_status(exit_code);
}
fn handle_cause(mut cargo_err: &CargoError, shell: &mut MultiShell) -> bool {
let verbose = shell.get_verbose();
let mut err;
loop {
cargo_err = match cargo_err.cargo_cause() {
Some(cause) => cause,
None => { err = cargo_err.cause(); break }
};
if!verbose &&!cargo_err.is_human() { return false }
print(cargo_err.to_string(), shell);
}
loop {
let cause = match err { Some(err) => err, None => return true };
if!verbose
|
print(cause.to_string(), shell);
err = cause.cause();
}
fn print(error: String, shell: &mut MultiShell) {
let _ = shell.err().say("\nCaused by:", BLACK);
let _ = shell.err().say(format!(" {}", error), BLACK);
}
}
pub fn version() -> String {
format!("cargo {}", match option_env!("CFG_VERSION") {
Some(s) => s.to_string(),
None => format!("{}.{}.{}{}",
env!("CARGO_PKG_VERSION_MAJOR"),
env!("CARGO_PKG_VERSION_MINOR"),
env!("CARGO_PKG_VERSION_PATCH"),
option_env!("CARGO_PKG_VERSION_PRE").unwrap_or(""))
})
}
fn flags_from_args<'a, T>(usage: &str, args: &[String],
options_first: bool) -> CliResult<T>
where T: Decodable
{
let docopt = Docopt::new(usage).unwrap()
.options_first(options_first)
.argv(args.iter().map(|s| s.as_slice()))
.help(true)
.version(Some(version()));
docopt.decode().map_err(|e| {
let code = if e.fatal() {1} else {0};
CliError::from_error(human(e.to_string()), code)
})
}
fn json_from_stdin<T: Decodable>() -> CliResult<T> {
let mut reader = old_io::stdin();
let input = try!(reader.read_to_string().map_err(|_| {
CliError::new("Standard in did not exist or was not UTF-8", 1)
}));
let json = try!(Json::from_str(&input).map_err(|_| {
CliError::new("Could not parse standard in as JSON", 1)
}));
let mut decoder = json::Decoder::new(json);
Decodable::decode(&mut decoder).map_err(|_| {
CliError::new("Could not process standard in as input", 1)
})
}
|
{ return false }
|
conditional_block
|
lib.rs
|
#![deny(unused)]
#![feature(collections, hash, io, libc, os, path, std_misc, unicode, env, core)]
#![cfg_attr(test, deny(warnings))]
extern crate libc;
extern crate "rustc-serialize" as rustc_serialize;
extern crate regex;
extern crate term;
extern crate time;
#[macro_use] extern crate log;
extern crate curl;
extern crate docopt;
extern crate flate2;
extern crate git2;
extern crate glob;
extern crate semver;
extern crate tar;
extern crate toml;
extern crate url;
#[cfg(test)] extern crate hamcrest;
extern crate registry;
use std::env;
use std::error::Error;
use std::old_io::stdio::{stdout_raw, stderr_raw};
use std::old_io::{self, stdout, stderr};
use rustc_serialize::{Decodable, Encodable};
use rustc_serialize::json::{self, Json};
use docopt::Docopt;
use core::{Shell, MultiShell, ShellConfig};
use term::color::{BLACK, RED};
pub use util::{CargoError, CliError, CliResult, human, Config, ChainError};
pub mod core;
pub mod ops;
pub mod sources;
pub mod util;
pub fn execute_main<T, U, V>(
exec: fn(T, U, &Config) -> CliResult<Option<V>>,
options_first: bool,
usage: &str)
where V: Encodable, T: Decodable, U: Decodable
|
pub fn call_main<T, U, V>(
exec: fn(T, U, &Config) -> CliResult<Option<V>>,
shell: &Config,
usage: &str,
args: &[String],
options_first: bool) -> CliResult<Option<V>>
where V: Encodable, T: Decodable, U: Decodable
{
let flags = try!(flags_from_args::<T>(usage, args, options_first));
let json = try!(json_from_stdin::<U>());
exec(flags, json, shell)
}
pub fn execute_main_without_stdin<T, V>(
exec: fn(T, &Config) -> CliResult<Option<V>>,
options_first: bool,
usage: &str)
where V: Encodable, T: Decodable
{
process::<V, _>(|rest, shell| {
call_main_without_stdin(exec, shell, usage, rest, options_first)
});
}
pub fn call_main_without_stdin<T, V>(
exec: fn(T, &Config) -> CliResult<Option<V>>,
shell: &Config,
usage: &str,
args: &[String],
options_first: bool) -> CliResult<Option<V>>
where V: Encodable, T: Decodable
{
let flags = try!(flags_from_args::<T>(usage, args, options_first));
exec(flags, shell)
}
fn process<V, F>(mut callback: F)
where F: FnMut(&[String], &Config) -> CliResult<Option<V>>,
V: Encodable
{
let mut shell = shell(true);
process_executed((|| {
let config = try!(Config::new(&mut shell));
let args: Vec<_> = try!(env::args_os().map(|s| {
s.into_string().map_err(|s| {
human(format!("invalid unicode in argument: {:?}", s))
})
}).collect());
callback(&args, &config)
})(), &mut shell)
}
pub fn process_executed<T>(result: CliResult<Option<T>>, shell: &mut MultiShell)
where T: Encodable
{
match result {
Err(e) => handle_error(e, shell),
Ok(Some(encodable)) => {
let encoded = json::encode(&encodable).unwrap();
println!("{}", encoded);
}
_ => {}
}
}
pub fn shell(verbose: bool) -> MultiShell {
let tty = stderr_raw().isatty();
let stderr = Box::new(stderr()) as Box<Writer + Send>;
let config = ShellConfig { color: true, verbose: verbose, tty: tty };
let err = Shell::create(stderr, config);
let tty = stdout_raw().isatty();
let stdout = Box::new(stdout()) as Box<Writer + Send>;
let config = ShellConfig { color: true, verbose: verbose, tty: tty };
let out = Shell::create(stdout, config);
MultiShell::new(out, err, verbose)
}
// `output` print variant error strings to either stderr or stdout.
// For fatal errors, print to stderr;
// and for others, e.g. docopt version info, print to stdout.
fn output(err: String, shell: &mut MultiShell, fatal: bool) {
let std_shell = if fatal {shell.err()} else {shell.out()};
let color = if fatal {RED} else {BLACK};
let _ = std_shell.say(err, color);
}
pub fn handle_error(err: CliError, shell: &mut MultiShell) {
debug!("handle_error; err={:?}", err);
let CliError { error, exit_code, unknown } = err;
let fatal = exit_code!= 0; // exit_code == 0 is non-fatal error
let hide = unknown &&!shell.get_verbose();
if hide {
let _ = shell.err().say("An unknown error occurred", RED);
} else {
output(error.to_string(), shell, fatal);
}
if!handle_cause(&error, shell) || hide {
let _ = shell.err().say("\nTo learn more, run the command again \
with --verbose.".to_string(), BLACK);
}
std::env::set_exit_status(exit_code);
}
fn handle_cause(mut cargo_err: &CargoError, shell: &mut MultiShell) -> bool {
let verbose = shell.get_verbose();
let mut err;
loop {
cargo_err = match cargo_err.cargo_cause() {
Some(cause) => cause,
None => { err = cargo_err.cause(); break }
};
if!verbose &&!cargo_err.is_human() { return false }
print(cargo_err.to_string(), shell);
}
loop {
let cause = match err { Some(err) => err, None => return true };
if!verbose { return false }
print(cause.to_string(), shell);
err = cause.cause();
}
fn print(error: String, shell: &mut MultiShell) {
let _ = shell.err().say("\nCaused by:", BLACK);
let _ = shell.err().say(format!(" {}", error), BLACK);
}
}
pub fn version() -> String {
format!("cargo {}", match option_env!("CFG_VERSION") {
Some(s) => s.to_string(),
None => format!("{}.{}.{}{}",
env!("CARGO_PKG_VERSION_MAJOR"),
env!("CARGO_PKG_VERSION_MINOR"),
env!("CARGO_PKG_VERSION_PATCH"),
option_env!("CARGO_PKG_VERSION_PRE").unwrap_or(""))
})
}
fn flags_from_args<'a, T>(usage: &str, args: &[String],
options_first: bool) -> CliResult<T>
where T: Decodable
{
let docopt = Docopt::new(usage).unwrap()
.options_first(options_first)
.argv(args.iter().map(|s| s.as_slice()))
.help(true)
.version(Some(version()));
docopt.decode().map_err(|e| {
let code = if e.fatal() {1} else {0};
CliError::from_error(human(e.to_string()), code)
})
}
fn json_from_stdin<T: Decodable>() -> CliResult<T> {
let mut reader = old_io::stdin();
let input = try!(reader.read_to_string().map_err(|_| {
CliError::new("Standard in did not exist or was not UTF-8", 1)
}));
let json = try!(Json::from_str(&input).map_err(|_| {
CliError::new("Could not parse standard in as JSON", 1)
}));
let mut decoder = json::Decoder::new(json);
Decodable::decode(&mut decoder).map_err(|_| {
CliError::new("Could not process standard in as input", 1)
})
}
|
{
process::<V, _>(|rest, shell| {
call_main(exec, shell, usage, rest, options_first)
});
}
|
identifier_body
|
lib.rs
|
#![deny(unused)]
#![feature(collections, hash, io, libc, os, path, std_misc, unicode, env, core)]
#![cfg_attr(test, deny(warnings))]
extern crate libc;
extern crate "rustc-serialize" as rustc_serialize;
extern crate regex;
extern crate term;
extern crate time;
#[macro_use] extern crate log;
extern crate curl;
extern crate docopt;
extern crate flate2;
extern crate git2;
extern crate glob;
extern crate semver;
extern crate tar;
extern crate toml;
extern crate url;
#[cfg(test)] extern crate hamcrest;
extern crate registry;
use std::env;
use std::error::Error;
use std::old_io::stdio::{stdout_raw, stderr_raw};
use std::old_io::{self, stdout, stderr};
use rustc_serialize::{Decodable, Encodable};
use rustc_serialize::json::{self, Json};
use docopt::Docopt;
use core::{Shell, MultiShell, ShellConfig};
use term::color::{BLACK, RED};
pub use util::{CargoError, CliError, CliResult, human, Config, ChainError};
pub mod core;
pub mod ops;
pub mod sources;
pub mod util;
pub fn execute_main<T, U, V>(
exec: fn(T, U, &Config) -> CliResult<Option<V>>,
options_first: bool,
usage: &str)
where V: Encodable, T: Decodable, U: Decodable
{
process::<V, _>(|rest, shell| {
call_main(exec, shell, usage, rest, options_first)
});
}
pub fn call_main<T, U, V>(
exec: fn(T, U, &Config) -> CliResult<Option<V>>,
shell: &Config,
usage: &str,
args: &[String],
options_first: bool) -> CliResult<Option<V>>
where V: Encodable, T: Decodable, U: Decodable
{
let flags = try!(flags_from_args::<T>(usage, args, options_first));
let json = try!(json_from_stdin::<U>());
exec(flags, json, shell)
}
pub fn execute_main_without_stdin<T, V>(
exec: fn(T, &Config) -> CliResult<Option<V>>,
options_first: bool,
usage: &str)
where V: Encodable, T: Decodable
{
process::<V, _>(|rest, shell| {
call_main_without_stdin(exec, shell, usage, rest, options_first)
});
}
pub fn call_main_without_stdin<T, V>(
exec: fn(T, &Config) -> CliResult<Option<V>>,
shell: &Config,
usage: &str,
args: &[String],
options_first: bool) -> CliResult<Option<V>>
where V: Encodable, T: Decodable
{
let flags = try!(flags_from_args::<T>(usage, args, options_first));
exec(flags, shell)
}
fn process<V, F>(mut callback: F)
where F: FnMut(&[String], &Config) -> CliResult<Option<V>>,
V: Encodable
{
let mut shell = shell(true);
process_executed((|| {
let config = try!(Config::new(&mut shell));
let args: Vec<_> = try!(env::args_os().map(|s| {
s.into_string().map_err(|s| {
human(format!("invalid unicode in argument: {:?}", s))
})
}).collect());
callback(&args, &config)
})(), &mut shell)
}
pub fn process_executed<T>(result: CliResult<Option<T>>, shell: &mut MultiShell)
where T: Encodable
{
match result {
Err(e) => handle_error(e, shell),
Ok(Some(encodable)) => {
let encoded = json::encode(&encodable).unwrap();
println!("{}", encoded);
}
_ => {}
}
}
pub fn shell(verbose: bool) -> MultiShell {
let tty = stderr_raw().isatty();
let stderr = Box::new(stderr()) as Box<Writer + Send>;
let config = ShellConfig { color: true, verbose: verbose, tty: tty };
let err = Shell::create(stderr, config);
let tty = stdout_raw().isatty();
let stdout = Box::new(stdout()) as Box<Writer + Send>;
let config = ShellConfig { color: true, verbose: verbose, tty: tty };
let out = Shell::create(stdout, config);
MultiShell::new(out, err, verbose)
}
// `output` print variant error strings to either stderr or stdout.
// For fatal errors, print to stderr;
// and for others, e.g. docopt version info, print to stdout.
fn output(err: String, shell: &mut MultiShell, fatal: bool) {
let std_shell = if fatal {shell.err()} else {shell.out()};
let color = if fatal {RED} else {BLACK};
let _ = std_shell.say(err, color);
}
pub fn handle_error(err: CliError, shell: &mut MultiShell) {
debug!("handle_error; err={:?}", err);
let CliError { error, exit_code, unknown } = err;
let fatal = exit_code!= 0; // exit_code == 0 is non-fatal error
let hide = unknown &&!shell.get_verbose();
if hide {
let _ = shell.err().say("An unknown error occurred", RED);
} else {
output(error.to_string(), shell, fatal);
}
if!handle_cause(&error, shell) || hide {
let _ = shell.err().say("\nTo learn more, run the command again \
with --verbose.".to_string(), BLACK);
}
std::env::set_exit_status(exit_code);
}
fn handle_cause(mut cargo_err: &CargoError, shell: &mut MultiShell) -> bool {
let verbose = shell.get_verbose();
let mut err;
loop {
cargo_err = match cargo_err.cargo_cause() {
Some(cause) => cause,
None => { err = cargo_err.cause(); break }
};
if!verbose &&!cargo_err.is_human() { return false }
print(cargo_err.to_string(), shell);
}
loop {
let cause = match err { Some(err) => err, None => return true };
if!verbose { return false }
print(cause.to_string(), shell);
err = cause.cause();
}
fn print(error: String, shell: &mut MultiShell) {
let _ = shell.err().say("\nCaused by:", BLACK);
let _ = shell.err().say(format!(" {}", error), BLACK);
}
}
pub fn version() -> String {
format!("cargo {}", match option_env!("CFG_VERSION") {
Some(s) => s.to_string(),
None => format!("{}.{}.{}{}",
env!("CARGO_PKG_VERSION_MAJOR"),
env!("CARGO_PKG_VERSION_MINOR"),
env!("CARGO_PKG_VERSION_PATCH"),
option_env!("CARGO_PKG_VERSION_PRE").unwrap_or(""))
})
}
fn
|
<'a, T>(usage: &str, args: &[String],
options_first: bool) -> CliResult<T>
where T: Decodable
{
let docopt = Docopt::new(usage).unwrap()
.options_first(options_first)
.argv(args.iter().map(|s| s.as_slice()))
.help(true)
.version(Some(version()));
docopt.decode().map_err(|e| {
let code = if e.fatal() {1} else {0};
CliError::from_error(human(e.to_string()), code)
})
}
fn json_from_stdin<T: Decodable>() -> CliResult<T> {
let mut reader = old_io::stdin();
let input = try!(reader.read_to_string().map_err(|_| {
CliError::new("Standard in did not exist or was not UTF-8", 1)
}));
let json = try!(Json::from_str(&input).map_err(|_| {
CliError::new("Could not parse standard in as JSON", 1)
}));
let mut decoder = json::Decoder::new(json);
Decodable::decode(&mut decoder).map_err(|_| {
CliError::new("Could not process standard in as input", 1)
})
}
|
flags_from_args
|
identifier_name
|
cmp.rs
|
use rational_sequences::RationalSequence;
use std::cmp::Ordering;
impl<T: Eq + Ord> PartialOrd for RationalSequence<T> {
/// Compares a `RationalSequence` to another `RationalSequence`.
///
/// See the documentation for the `Ord` implementation.
#[inline]
fn partial_cmp(&self, other: &RationalSequence<T>) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl<T: Eq + Ord> Ord for RationalSequence<T> {
/// Compares a `RationalSequence` to another `RationalSequence`.
///
/// The comparison is made lexicographically with respect to the element type's ordering.
///
/// # Worst-case complexity
/// $T(n) = O(n)$
///
/// $M(n) = O(1)$
///
/// where $T$ is time, $M$ is additional memory, and $n$ is `self.component_len()`.
///
/// # Examples
/// ```
/// use malachite_base::rational_sequences::RationalSequence;
///
/// assert!(
/// RationalSequence::from_slice(&[1, 2]) <
/// RationalSequence::from_slices(&[1, 2], &[1])
/// );
|
/// assert!(
/// RationalSequence::from_slice(&[1, 2, 3]) <
/// RationalSequence::from_slices(&[1, 2], &[3, 4])
/// );
/// ```
fn cmp(&self, other: &RationalSequence<T>) -> Ordering {
if self == other {
Ordering::Equal
} else {
Iterator::cmp(self.iter(), other.iter())
}
}
}
|
random_line_split
|
|
cmp.rs
|
use rational_sequences::RationalSequence;
use std::cmp::Ordering;
impl<T: Eq + Ord> PartialOrd for RationalSequence<T> {
/// Compares a `RationalSequence` to another `RationalSequence`.
///
/// See the documentation for the `Ord` implementation.
#[inline]
fn partial_cmp(&self, other: &RationalSequence<T>) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl<T: Eq + Ord> Ord for RationalSequence<T> {
/// Compares a `RationalSequence` to another `RationalSequence`.
///
/// The comparison is made lexicographically with respect to the element type's ordering.
///
/// # Worst-case complexity
/// $T(n) = O(n)$
///
/// $M(n) = O(1)$
///
/// where $T$ is time, $M$ is additional memory, and $n$ is `self.component_len()`.
///
/// # Examples
/// ```
/// use malachite_base::rational_sequences::RationalSequence;
///
/// assert!(
/// RationalSequence::from_slice(&[1, 2]) <
/// RationalSequence::from_slices(&[1, 2], &[1])
/// );
/// assert!(
/// RationalSequence::from_slice(&[1, 2, 3]) <
/// RationalSequence::from_slices(&[1, 2], &[3, 4])
/// );
/// ```
fn cmp(&self, other: &RationalSequence<T>) -> Ordering {
if self == other {
Ordering::Equal
} else
|
}
}
|
{
Iterator::cmp(self.iter(), other.iter())
}
|
conditional_block
|
cmp.rs
|
use rational_sequences::RationalSequence;
use std::cmp::Ordering;
impl<T: Eq + Ord> PartialOrd for RationalSequence<T> {
/// Compares a `RationalSequence` to another `RationalSequence`.
///
/// See the documentation for the `Ord` implementation.
#[inline]
fn partial_cmp(&self, other: &RationalSequence<T>) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl<T: Eq + Ord> Ord for RationalSequence<T> {
/// Compares a `RationalSequence` to another `RationalSequence`.
///
/// The comparison is made lexicographically with respect to the element type's ordering.
///
/// # Worst-case complexity
/// $T(n) = O(n)$
///
/// $M(n) = O(1)$
///
/// where $T$ is time, $M$ is additional memory, and $n$ is `self.component_len()`.
///
/// # Examples
/// ```
/// use malachite_base::rational_sequences::RationalSequence;
///
/// assert!(
/// RationalSequence::from_slice(&[1, 2]) <
/// RationalSequence::from_slices(&[1, 2], &[1])
/// );
/// assert!(
/// RationalSequence::from_slice(&[1, 2, 3]) <
/// RationalSequence::from_slices(&[1, 2], &[3, 4])
/// );
/// ```
fn cmp(&self, other: &RationalSequence<T>) -> Ordering
|
}
|
{
if self == other {
Ordering::Equal
} else {
Iterator::cmp(self.iter(), other.iter())
}
}
|
identifier_body
|
cmp.rs
|
use rational_sequences::RationalSequence;
use std::cmp::Ordering;
impl<T: Eq + Ord> PartialOrd for RationalSequence<T> {
/// Compares a `RationalSequence` to another `RationalSequence`.
///
/// See the documentation for the `Ord` implementation.
#[inline]
fn partial_cmp(&self, other: &RationalSequence<T>) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl<T: Eq + Ord> Ord for RationalSequence<T> {
/// Compares a `RationalSequence` to another `RationalSequence`.
///
/// The comparison is made lexicographically with respect to the element type's ordering.
///
/// # Worst-case complexity
/// $T(n) = O(n)$
///
/// $M(n) = O(1)$
///
/// where $T$ is time, $M$ is additional memory, and $n$ is `self.component_len()`.
///
/// # Examples
/// ```
/// use malachite_base::rational_sequences::RationalSequence;
///
/// assert!(
/// RationalSequence::from_slice(&[1, 2]) <
/// RationalSequence::from_slices(&[1, 2], &[1])
/// );
/// assert!(
/// RationalSequence::from_slice(&[1, 2, 3]) <
/// RationalSequence::from_slices(&[1, 2], &[3, 4])
/// );
/// ```
fn
|
(&self, other: &RationalSequence<T>) -> Ordering {
if self == other {
Ordering::Equal
} else {
Iterator::cmp(self.iter(), other.iter())
}
}
}
|
cmp
|
identifier_name
|
chap06.rs
|
/*
* Chap.6 Enum and Pattern Matching
*
*/
#[derive(Debug)]
enum MonitorState {
Success,
Warning,
Error,
}
#[derive(Debug)]
|
state: MonitorState,
}
#[derive(Debug)]
enum ServicePort {
HTTP(u32),
HTTPS(u32),
}
#[derive(Debug)]
enum Service {
API(String, ServicePort),
Frontend(String, ServicePort),
}
#[derive(Debug)]
enum NamedPort {
HTTPS = 443,
}
#[derive(Debug)]
enum ServiceAction {
Reload, // has no data associated at all
Start { p: NamedPort }, // named field like struct
Kill(String), // has simple unnamed string
}
impl ServiceAction {
fn call(&self) {
println!("ServiceAction called at {:?}", &self);
}
}
#[derive(Debug)]
enum RGB {
Red,
Blue,
Green,
}
#[derive(Debug)]
enum Style {
Dimension,
Color(RGB),
}
fn main() {
println!("chapter 6: enum");
let green = MonitorState::Success;
let yellow = MonitorState::Warning;
let red = MonitorState::Error;
print_my_state(green);
print_my_state(yellow);
print_my_state(red);
let http_server = Monitor {
service: String::from("httpd"),
state: MonitorState::Success,
};
println!("{}: {:?}", http_server.service, http_server.state);
let http = ServicePort::HTTP(80);
let https = ServicePort::HTTPS(443);
dbg!(http);
dbg!(https);
let api_port = ServicePort::HTTPS(443);
let frontend_port = ServicePort::HTTPS(443);
let api = Service::API(String::from("/opt/api"), api_port);
let frontend = Service::Frontend(String::from("/opt/frontend"), frontend_port);
dbg!(api);
dbg!(frontend);
service_action();
println!("{}", value_of_rgb(RGB::Red));
println!("{}", value_of_rgb(RGB::Green));
let style = Style::Color(RGB::Blue);
print_style(style);
print_style(Style::Dimension);
println!(
"handle_incr_option Some(1): {:?}",
handle_incr_option(Some(1))
);
println!("handle_incr_option None: {:?}", handle_incr_option(None));
println!(
"handle_option_normalization Some(10): {:?}",
handle_option_normalization(Some(10))
);
println!(
"handle_option_normalization None: {:?}",
handle_option_normalization(None)
);
}
fn print_my_state(s: MonitorState) {
dbg!(s);
}
fn service_action() {
let https = NamedPort::HTTPS;
let svc_axn = ServiceAction::Start { p: https };
svc_axn.call();
if let ServiceAction::Start { p } = svc_axn {
println!("{:?}", p);
}
let svc_axn = ServiceAction::Reload;
svc_axn.call();
let svc_axn = ServiceAction::Kill(String::from("unresponsive"));
svc_axn.call();
}
fn value_of_rgb(rgb: RGB) -> String {
match rgb {
RGB::Red => String::from("red"),
RGB::Green => String::from("green"),
RGB::Blue => String::from("blue"),
}
}
fn print_style(style: Style) {
match style {
Style::Color(color) => println!("{:?}", color),
_ => println!("{:?}", style),
}
}
fn handle_incr_option(num: Option<i32>) -> Option<i32> {
match num {
None => None,
Some(i) => Some(i + 1),
}
}
fn handle_option_normalization(num: Option<i32>) -> i32 {
match num {
None => 0,
Some(i) => i,
}
}
|
struct Monitor {
service: String,
|
random_line_split
|
chap06.rs
|
/*
* Chap.6 Enum and Pattern Matching
*
*/
#[derive(Debug)]
enum MonitorState {
Success,
Warning,
Error,
}
#[derive(Debug)]
struct Monitor {
service: String,
state: MonitorState,
}
#[derive(Debug)]
enum ServicePort {
HTTP(u32),
HTTPS(u32),
}
#[derive(Debug)]
enum Service {
API(String, ServicePort),
Frontend(String, ServicePort),
}
#[derive(Debug)]
enum NamedPort {
HTTPS = 443,
}
#[derive(Debug)]
enum ServiceAction {
Reload, // has no data associated at all
Start { p: NamedPort }, // named field like struct
Kill(String), // has simple unnamed string
}
impl ServiceAction {
fn call(&self) {
println!("ServiceAction called at {:?}", &self);
}
}
#[derive(Debug)]
enum RGB {
Red,
Blue,
Green,
}
#[derive(Debug)]
enum Style {
Dimension,
Color(RGB),
}
fn main() {
println!("chapter 6: enum");
let green = MonitorState::Success;
let yellow = MonitorState::Warning;
let red = MonitorState::Error;
print_my_state(green);
print_my_state(yellow);
print_my_state(red);
let http_server = Monitor {
service: String::from("httpd"),
state: MonitorState::Success,
};
println!("{}: {:?}", http_server.service, http_server.state);
let http = ServicePort::HTTP(80);
let https = ServicePort::HTTPS(443);
dbg!(http);
dbg!(https);
let api_port = ServicePort::HTTPS(443);
let frontend_port = ServicePort::HTTPS(443);
let api = Service::API(String::from("/opt/api"), api_port);
let frontend = Service::Frontend(String::from("/opt/frontend"), frontend_port);
dbg!(api);
dbg!(frontend);
service_action();
println!("{}", value_of_rgb(RGB::Red));
println!("{}", value_of_rgb(RGB::Green));
let style = Style::Color(RGB::Blue);
print_style(style);
print_style(Style::Dimension);
println!(
"handle_incr_option Some(1): {:?}",
handle_incr_option(Some(1))
);
println!("handle_incr_option None: {:?}", handle_incr_option(None));
println!(
"handle_option_normalization Some(10): {:?}",
handle_option_normalization(Some(10))
);
println!(
"handle_option_normalization None: {:?}",
handle_option_normalization(None)
);
}
fn print_my_state(s: MonitorState) {
dbg!(s);
}
fn service_action() {
let https = NamedPort::HTTPS;
let svc_axn = ServiceAction::Start { p: https };
svc_axn.call();
if let ServiceAction::Start { p } = svc_axn {
println!("{:?}", p);
}
let svc_axn = ServiceAction::Reload;
svc_axn.call();
let svc_axn = ServiceAction::Kill(String::from("unresponsive"));
svc_axn.call();
}
fn value_of_rgb(rgb: RGB) -> String {
match rgb {
RGB::Red => String::from("red"),
RGB::Green => String::from("green"),
RGB::Blue => String::from("blue"),
}
}
fn print_style(style: Style) {
match style {
Style::Color(color) => println!("{:?}", color),
_ => println!("{:?}", style),
}
}
fn handle_incr_option(num: Option<i32>) -> Option<i32> {
match num {
None => None,
Some(i) => Some(i + 1),
}
}
fn
|
(num: Option<i32>) -> i32 {
match num {
None => 0,
Some(i) => i,
}
}
|
handle_option_normalization
|
identifier_name
|
inheritance.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The `Castable` trait.
pub use dom::bindings::codegen::InheritTypes::*;
use dom::bindings::conversions::{DerivedFrom, IDLInterface};
use dom::bindings::conversions::get_dom_class;
use dom::bindings::reflector::DomObject;
use std::mem;
/// A trait to hold the cast functions of IDL interfaces that either derive
/// or are derived from other interfaces.
pub trait Castable: IDLInterface + DomObject + Sized {
/// Check whether a DOM object implements one of its deriving interfaces.
fn is<T>(&self) -> bool
where T: DerivedFrom<Self>
{
let class = unsafe { get_dom_class(self.reflector().get_jsobject().get()).unwrap() };
T::derives(class)
}
/// Cast a DOM object upwards to one of the interfaces it derives from.
fn upcast<T>(&self) -> &T
where T: Castable,
Self: DerivedFrom<T>
{
unsafe { mem::transmute(self) }
}
/// Cast a DOM object downwards to one of the interfaces it might implement.
fn downcast<T>(&self) -> Option<&T>
where T: DerivedFrom<Self>
{
if self.is::<T>() {
Some(unsafe { mem::transmute(self) })
} else
|
}
}
|
{
None
}
|
conditional_block
|
inheritance.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The `Castable` trait.
pub use dom::bindings::codegen::InheritTypes::*;
use dom::bindings::conversions::{DerivedFrom, IDLInterface};
use dom::bindings::conversions::get_dom_class;
use dom::bindings::reflector::DomObject;
use std::mem;
/// A trait to hold the cast functions of IDL interfaces that either derive
/// or are derived from other interfaces.
pub trait Castable: IDLInterface + DomObject + Sized {
/// Check whether a DOM object implements one of its deriving interfaces.
fn is<T>(&self) -> bool
where T: DerivedFrom<Self>
{
let class = unsafe { get_dom_class(self.reflector().get_jsobject().get()).unwrap() };
T::derives(class)
}
/// Cast a DOM object upwards to one of the interfaces it derives from.
fn upcast<T>(&self) -> &T
where T: Castable,
Self: DerivedFrom<T>
{
unsafe { mem::transmute(self) }
}
/// Cast a DOM object downwards to one of the interfaces it might implement.
fn downcast<T>(&self) -> Option<&T>
where T: DerivedFrom<Self>
|
}
|
{
if self.is::<T>() {
Some(unsafe { mem::transmute(self) })
} else {
None
}
}
|
identifier_body
|
inheritance.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The `Castable` trait.
pub use dom::bindings::codegen::InheritTypes::*;
use dom::bindings::conversions::{DerivedFrom, IDLInterface};
use dom::bindings::conversions::get_dom_class;
use dom::bindings::reflector::DomObject;
use std::mem;
/// A trait to hold the cast functions of IDL interfaces that either derive
/// or are derived from other interfaces.
pub trait Castable: IDLInterface + DomObject + Sized {
/// Check whether a DOM object implements one of its deriving interfaces.
fn is<T>(&self) -> bool
where T: DerivedFrom<Self>
{
let class = unsafe { get_dom_class(self.reflector().get_jsobject().get()).unwrap() };
T::derives(class)
}
/// Cast a DOM object upwards to one of the interfaces it derives from.
fn upcast<T>(&self) -> &T
where T: Castable,
Self: DerivedFrom<T>
{
unsafe { mem::transmute(self) }
}
/// Cast a DOM object downwards to one of the interfaces it might implement.
fn
|
<T>(&self) -> Option<&T>
where T: DerivedFrom<Self>
{
if self.is::<T>() {
Some(unsafe { mem::transmute(self) })
} else {
None
}
}
}
|
downcast
|
identifier_name
|
inheritance.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
//! The `Castable` trait.
pub use dom::bindings::codegen::InheritTypes::*;
use dom::bindings::conversions::{DerivedFrom, IDLInterface};
use dom::bindings::conversions::get_dom_class;
use dom::bindings::reflector::DomObject;
use std::mem;
/// A trait to hold the cast functions of IDL interfaces that either derive
/// or are derived from other interfaces.
pub trait Castable: IDLInterface + DomObject + Sized {
/// Check whether a DOM object implements one of its deriving interfaces.
fn is<T>(&self) -> bool
where T: DerivedFrom<Self>
{
let class = unsafe { get_dom_class(self.reflector().get_jsobject().get()).unwrap() };
T::derives(class)
}
/// Cast a DOM object upwards to one of the interfaces it derives from.
fn upcast<T>(&self) -> &T
where T: Castable,
Self: DerivedFrom<T>
{
unsafe { mem::transmute(self) }
}
/// Cast a DOM object downwards to one of the interfaces it might implement.
fn downcast<T>(&self) -> Option<&T>
where T: DerivedFrom<Self>
{
if self.is::<T>() {
Some(unsafe { mem::transmute(self) })
} else {
None
}
}
}
|
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
random_line_split
|
intrinsic-return-address.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(intrinsics)]
use std::ptr;
struct Point {
x: f32,
y: f32,
z: f32,
}
extern "rust-intrinsic" {
fn return_address() -> *const u8;
}
fn f(result: &mut uint) -> Point
|
fn main() {
let mut intrinsic_reported_address = 0;
let pt = f(&mut intrinsic_reported_address);
let actual_address = &pt as *const Point as uint;
assert_eq!(intrinsic_reported_address, actual_address);
}
|
{
unsafe {
*result = return_address() as uint;
Point {
x: 1.0,
y: 2.0,
z: 3.0,
}
}
}
|
identifier_body
|
intrinsic-return-address.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(intrinsics)]
use std::ptr;
struct Point {
x: f32,
y: f32,
z: f32,
}
extern "rust-intrinsic" {
fn return_address() -> *const u8;
}
fn f(result: &mut uint) -> Point {
unsafe {
*result = return_address() as uint;
Point {
x: 1.0,
|
}
fn main() {
let mut intrinsic_reported_address = 0;
let pt = f(&mut intrinsic_reported_address);
let actual_address = &pt as *const Point as uint;
assert_eq!(intrinsic_reported_address, actual_address);
}
|
y: 2.0,
z: 3.0,
}
}
|
random_line_split
|
intrinsic-return-address.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(intrinsics)]
use std::ptr;
struct
|
{
x: f32,
y: f32,
z: f32,
}
extern "rust-intrinsic" {
fn return_address() -> *const u8;
}
fn f(result: &mut uint) -> Point {
unsafe {
*result = return_address() as uint;
Point {
x: 1.0,
y: 2.0,
z: 3.0,
}
}
}
fn main() {
let mut intrinsic_reported_address = 0;
let pt = f(&mut intrinsic_reported_address);
let actual_address = &pt as *const Point as uint;
assert_eq!(intrinsic_reported_address, actual_address);
}
|
Point
|
identifier_name
|
linker.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::ffi::OsString;
use std::fs::{self, File};
use std::io::{self, BufWriter};
use std::io::prelude::*;
use std::path::{Path, PathBuf};
use std::process::Command;
use back::archive;
use metadata::csearch;
use middle::dependency_format::Linkage;
use session::Session;
use session::config::DebugInfoLevel::{NoDebugInfo, LimitedDebugInfo, FullDebugInfo};
use session::config::CrateTypeDylib;
use session::config;
use syntax::ast;
use trans::CrateTranslation;
/// Linker abstraction used by back::link to build up the command to invoke a
/// linker.
///
/// This trait is the total list of requirements needed by `back::link` and
/// represents the meaning of each option being passed down. This trait is then
/// used to dispatch on whether a GNU-like linker (generally `ld.exe`) or an
/// MSVC linker (e.g. `link.exe`) is being used.
pub trait Linker {
fn link_dylib(&mut self, lib: &str);
fn link_rust_dylib(&mut self, lib: &str, path: &Path);
fn link_framework(&mut self, framework: &str);
fn link_staticlib(&mut self, lib: &str);
fn link_rlib(&mut self, lib: &Path);
fn link_whole_rlib(&mut self, lib: &Path);
fn link_whole_staticlib(&mut self, lib: &str, search_path: &[PathBuf]);
fn include_path(&mut self, path: &Path);
fn framework_path(&mut self, path: &Path);
fn output_filename(&mut self, path: &Path);
fn add_object(&mut self, path: &Path);
fn gc_sections(&mut self, is_dylib: bool);
fn position_independent_executable(&mut self);
fn optimize(&mut self);
fn debuginfo(&mut self);
fn no_default_libraries(&mut self);
fn build_dylib(&mut self, out_filename: &Path);
fn args(&mut self, args: &[String]);
fn hint_static(&mut self);
fn hint_dynamic(&mut self);
fn whole_archives(&mut self);
fn no_whole_archives(&mut self);
fn export_symbols(&mut self, sess: &Session, trans: &CrateTranslation,
tmpdir: &Path);
}
pub struct GnuLinker<'a> {
pub cmd: &'a mut Command,
pub sess: &'a Session,
}
impl<'a> GnuLinker<'a> {
fn takes_hints(&self) -> bool {
!self.sess.target.target.options.is_like_osx
}
}
impl<'a> Linker for GnuLinker<'a> {
fn link_dylib(&mut self, lib: &str) { self.cmd.arg("-l").arg(lib); }
fn link_staticlib(&mut self, lib: &str) { self.cmd.arg("-l").arg(lib); }
fn link_rlib(&mut self, lib: &Path) { self.cmd.arg(lib); }
fn include_path(&mut self, path: &Path) { self.cmd.arg("-L").arg(path); }
fn framework_path(&mut self, path: &Path) { self.cmd.arg("-F").arg(path); }
fn output_filename(&mut self, path: &Path) { self.cmd.arg("-o").arg(path); }
fn add_object(&mut self, path: &Path) { self.cmd.arg(path); }
fn position_independent_executable(&mut self) { self.cmd.arg("-pie"); }
fn args(&mut self, args: &[String]) { self.cmd.args(args); }
fn link_rust_dylib(&mut self, lib: &str, _path: &Path) {
self.cmd.arg("-l").arg(lib);
}
fn link_framework(&mut self, framework: &str) {
self.cmd.arg("-framework").arg(framework);
}
fn link_whole_staticlib(&mut self, lib: &str, search_path: &[PathBuf]) {
let target = &self.sess.target.target;
if!target.options.is_like_osx {
self.cmd.arg("-Wl,--whole-archive")
.arg("-l").arg(lib)
.arg("-Wl,--no-whole-archive");
} else {
// -force_load is the OSX equivalent of --whole-archive, but it
// involves passing the full path to the library to link.
let mut v = OsString::from("-Wl,-force_load,");
v.push(&archive::find_library(lib, search_path, &self.sess));
self.cmd.arg(&v);
}
}
fn link_whole_rlib(&mut self, lib: &Path) {
if self.sess.target.target.options.is_like_osx {
let mut v = OsString::from("-Wl,-force_load,");
v.push(lib);
self.cmd.arg(&v);
} else {
self.cmd.arg("-Wl,--whole-archive").arg(lib)
.arg("-Wl,--no-whole-archive");
}
}
fn gc_sections(&mut self, is_dylib: bool) {
// The dead_strip option to the linker specifies that functions and data
// unreachable by the entry point will be removed. This is quite useful
// with Rust's compilation model of compiling libraries at a time into
// one object file. For example, this brings hello world from 1.7MB to
// 458K.
//
// Note that this is done for both executables and dynamic libraries. We
// won't get much benefit from dylibs because LLVM will have already
// stripped away as much as it could. This has not been seen to impact
// link times negatively.
//
// -dead_strip can't be part of the pre_link_args because it's also used
// for partial linking when using multiple codegen units (-r). So we
// insert it here.
if self.sess.target.target.options.is_like_osx {
self.cmd.arg("-Wl,-dead_strip");
// If we're building a dylib, we don't use --gc-sections because LLVM
// has already done the best it can do, and we also don't want to
// eliminate the metadata. If we're building an executable, however,
// --gc-sections drops the size of hello world from 1.8MB to 597K, a 67%
// reduction.
} else if!is_dylib {
self.cmd.arg("-Wl,--gc-sections");
}
}
fn optimize(&mut self) {
if!self.sess.target.target.options.linker_is_gnu { return }
// GNU-style linkers support optimization with -O. GNU ld doesn't
// need a numeric argument, but other linkers do.
if self.sess.opts.optimize == config::Default ||
self.sess.opts.optimize == config::Aggressive {
self.cmd.arg("-Wl,-O1");
}
}
fn debuginfo(&mut self) {
// Don't do anything special here for GNU-style linkers.
}
fn no_default_libraries(&mut self) {
// Unfortunately right now passing -nodefaultlibs to gcc on windows
// doesn't work so hot (in terms of native dependencies). This if
// statement should hopefully be removed one day though!
if!self.sess.target.target.options.is_like_windows {
self.cmd.arg("-nodefaultlibs");
}
}
fn build_dylib(&mut self, out_filename: &Path) {
// On mac we need to tell the linker to let this library be rpathed
if self.sess.target.target.options.is_like_osx {
self.cmd.args(&["-dynamiclib", "-Wl,-dylib"]);
if self.sess.opts.cg.rpath {
let mut v = OsString::from("-Wl,-install_name,@rpath/");
v.push(out_filename.file_name().unwrap());
self.cmd.arg(&v);
}
} else {
self.cmd.arg("-shared");
}
}
fn whole_archives(&mut self) {
if!self.takes_hints() { return }
self.cmd.arg("-Wl,--whole-archive");
}
fn no_whole_archives(&mut self) {
if!self.takes_hints() { return }
self.cmd.arg("-Wl,--no-whole-archive");
}
fn hint_static(&mut self) {
if!self.takes_hints()
|
self.cmd.arg("-Wl,-Bstatic");
}
fn hint_dynamic(&mut self) {
if!self.takes_hints() { return }
self.cmd.arg("-Wl,-Bdynamic");
}
fn export_symbols(&mut self, _: &Session, _: &CrateTranslation, _: &Path) {
// noop, visibility in object files takes care of this
}
}
pub struct MsvcLinker<'a> {
pub cmd: &'a mut Command,
pub sess: &'a Session,
}
impl<'a> Linker for MsvcLinker<'a> {
fn link_rlib(&mut self, lib: &Path) { self.cmd.arg(lib); }
fn add_object(&mut self, path: &Path) { self.cmd.arg(path); }
fn args(&mut self, args: &[String]) { self.cmd.args(args); }
fn build_dylib(&mut self, _out_filename: &Path) { self.cmd.arg("/DLL"); }
fn gc_sections(&mut self, _is_dylib: bool) { self.cmd.arg("/OPT:REF,ICF"); }
fn link_dylib(&mut self, lib: &str) {
self.cmd.arg(&format!("{}.lib", lib));
}
fn link_rust_dylib(&mut self, lib: &str, path: &Path) {
// When producing a dll, the MSVC linker may not actually emit a
// `foo.lib` file if the dll doesn't actually export any symbols, so we
// check to see if the file is there and just omit linking to it if it's
// not present.
let name = format!("{}.lib", lib);
if fs::metadata(&path.join(&name)).is_ok() {
self.cmd.arg(name);
}
}
fn link_staticlib(&mut self, lib: &str) {
self.cmd.arg(&format!("{}.lib", lib));
}
fn position_independent_executable(&mut self) {
// noop
}
fn no_default_libraries(&mut self) {
// Currently we don't pass the /NODEFAULTLIB flag to the linker on MSVC
// as there's been trouble in the past of linking the C++ standard
// library required by LLVM. This likely needs to happen one day, but
// in general Windows is also a more controlled environment than
// Unix, so it's not necessarily as critical that this be implemented.
//
// Note that there are also some licensing worries about statically
// linking some libraries which require a specific agreement, so it may
// not ever be possible for us to pass this flag.
}
fn include_path(&mut self, path: &Path) {
let mut arg = OsString::from("/LIBPATH:");
arg.push(path);
self.cmd.arg(&arg);
}
fn output_filename(&mut self, path: &Path) {
let mut arg = OsString::from("/OUT:");
arg.push(path);
self.cmd.arg(&arg);
}
fn framework_path(&mut self, _path: &Path) {
panic!("frameworks are not supported on windows")
}
fn link_framework(&mut self, _framework: &str) {
panic!("frameworks are not supported on windows")
}
fn link_whole_staticlib(&mut self, lib: &str, _search_path: &[PathBuf]) {
// not supported?
self.link_staticlib(lib);
}
fn link_whole_rlib(&mut self, path: &Path) {
// not supported?
self.link_rlib(path);
}
fn optimize(&mut self) {
// Needs more investigation of `/OPT` arguments
}
fn debuginfo(&mut self) {
match self.sess.opts.debuginfo {
NoDebugInfo => {
// Do nothing if debuginfo is disabled
},
LimitedDebugInfo |
FullDebugInfo => {
// This will cause the Microsoft linker to generate a PDB file
// from the CodeView line tables in the object files.
self.cmd.arg("/DEBUG");
}
}
}
fn whole_archives(&mut self) {
// hints not supported?
}
fn no_whole_archives(&mut self) {
// hints not supported?
}
// On windows static libraries are of the form `foo.lib` and dynamic
// libraries are not linked against directly, but rather through their
// import libraries also called `foo.lib`. As a result there's no
// possibility for a native library to appear both dynamically and
// statically in the same folder so we don't have to worry about hints like
// we do on Unix platforms.
fn hint_static(&mut self) {}
fn hint_dynamic(&mut self) {}
// Currently the compiler doesn't use `dllexport` (an LLVM attribute) to
// export symbols from a dynamic library. When building a dynamic library,
// however, we're going to want some symbols exported, so this function
// generates a DEF file which lists all the symbols.
//
// The linker will read this `*.def` file and export all the symbols from
// the dynamic library. Note that this is not as simple as just exporting
// all the symbols in the current crate (as specified by `trans.reachable`)
// but rather we also need to possibly export the symbols of upstream
// crates. Upstream rlibs may be linked statically to this dynamic library,
// in which case they may continue to transitively be used and hence need
// their symbols exported.
fn export_symbols(&mut self, sess: &Session, trans: &CrateTranslation,
tmpdir: &Path) {
let path = tmpdir.join("lib.def");
let res = (|| -> io::Result<()> {
let mut f = BufWriter::new(try!(File::create(&path)));
// Start off with the standard module name header and then go
// straight to exports.
try!(writeln!(f, "LIBRARY"));
try!(writeln!(f, "EXPORTS"));
// Write out all our local symbols
for sym in trans.reachable.iter() {
try!(writeln!(f, " {}", sym));
}
// Take a look at how all upstream crates are linked into this
// dynamic library. For all statically linked libraries we take all
// their reachable symbols and emit them as well.
let cstore = &sess.cstore;
let formats = sess.dependency_formats.borrow();
let symbols = formats[&CrateTypeDylib].iter();
let symbols = symbols.enumerate().filter_map(|(i, f)| {
if *f == Linkage::Static {
Some((i + 1) as ast::CrateNum)
} else {
None
}
}).flat_map(|cnum| {
csearch::get_reachable_ids(cstore, cnum)
}).map(|did| {
csearch::get_symbol(cstore, did)
});
for symbol in symbols {
try!(writeln!(f, " {}", symbol));
}
Ok(())
})();
if let Err(e) = res {
sess.fatal(&format!("failed to write lib.def file: {}", e));
}
let mut arg = OsString::from("/DEF:");
arg.push(path);
self.cmd.arg(&arg);
}
}
|
{ return }
|
conditional_block
|
linker.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::ffi::OsString;
use std::fs::{self, File};
use std::io::{self, BufWriter};
use std::io::prelude::*;
use std::path::{Path, PathBuf};
use std::process::Command;
use back::archive;
use metadata::csearch;
use middle::dependency_format::Linkage;
use session::Session;
use session::config::DebugInfoLevel::{NoDebugInfo, LimitedDebugInfo, FullDebugInfo};
use session::config::CrateTypeDylib;
use session::config;
use syntax::ast;
use trans::CrateTranslation;
/// Linker abstraction used by back::link to build up the command to invoke a
/// linker.
///
/// This trait is the total list of requirements needed by `back::link` and
/// represents the meaning of each option being passed down. This trait is then
/// used to dispatch on whether a GNU-like linker (generally `ld.exe`) or an
/// MSVC linker (e.g. `link.exe`) is being used.
pub trait Linker {
fn link_dylib(&mut self, lib: &str);
fn link_rust_dylib(&mut self, lib: &str, path: &Path);
fn link_framework(&mut self, framework: &str);
fn link_staticlib(&mut self, lib: &str);
fn link_rlib(&mut self, lib: &Path);
fn link_whole_rlib(&mut self, lib: &Path);
fn link_whole_staticlib(&mut self, lib: &str, search_path: &[PathBuf]);
fn include_path(&mut self, path: &Path);
fn framework_path(&mut self, path: &Path);
fn output_filename(&mut self, path: &Path);
fn add_object(&mut self, path: &Path);
fn gc_sections(&mut self, is_dylib: bool);
fn position_independent_executable(&mut self);
fn optimize(&mut self);
fn debuginfo(&mut self);
fn no_default_libraries(&mut self);
fn build_dylib(&mut self, out_filename: &Path);
fn args(&mut self, args: &[String]);
fn hint_static(&mut self);
fn hint_dynamic(&mut self);
fn whole_archives(&mut self);
fn no_whole_archives(&mut self);
fn export_symbols(&mut self, sess: &Session, trans: &CrateTranslation,
tmpdir: &Path);
}
pub struct GnuLinker<'a> {
pub cmd: &'a mut Command,
pub sess: &'a Session,
}
impl<'a> GnuLinker<'a> {
    /// Whether this linker understands GNU-style `-Bstatic`/`-Bdynamic` and
    /// whole-archive bracketing hints; the OSX linker does not.
    fn takes_hints(&self) -> bool {
        let is_osx = self.sess.target.target.options.is_like_osx;
        !is_osx
    }
}
impl<'a> Linker for GnuLinker<'a> {
    // Simple one-flag options map directly onto cc-driver arguments.
    fn link_dylib(&mut self, lib: &str) { self.cmd.arg("-l").arg(lib); }
    fn link_staticlib(&mut self, lib: &str) { self.cmd.arg("-l").arg(lib); }
    fn link_rlib(&mut self, lib: &Path) { self.cmd.arg(lib); }
    fn include_path(&mut self, path: &Path) { self.cmd.arg("-L").arg(path); }
    fn framework_path(&mut self, path: &Path) { self.cmd.arg("-F").arg(path); }
    fn output_filename(&mut self, path: &Path) { self.cmd.arg("-o").arg(path); }
    fn add_object(&mut self, path: &Path) { self.cmd.arg(path); }
    fn position_independent_executable(&mut self) { self.cmd.arg("-pie"); }
    fn args(&mut self, args: &[String]) { self.cmd.args(args); }
    // A Rust dylib links like any other dynamic library; `_path` is unused
    // because search paths are added separately through `include_path`.
    fn link_rust_dylib(&mut self, lib: &str, _path: &Path) {
        self.cmd.arg("-l").arg(lib);
    }
    fn link_framework(&mut self, framework: &str) {
        self.cmd.arg("-framework").arg(framework);
    }
    fn link_whole_staticlib(&mut self, lib: &str, search_path: &[PathBuf]) {
        let target = &self.sess.target.target;
        if !target.options.is_like_osx {
            // Bracket the `-l` with --whole-archive/--no-whole-archive so
            // only this one library is pulled in wholesale.
            self.cmd.arg("-Wl,--whole-archive")
                    .arg("-l").arg(lib)
                    .arg("-Wl,--no-whole-archive");
        } else {
            // -force_load is the OSX equivalent of --whole-archive, but it
            // involves passing the full path to the library to link.
            let mut v = OsString::from("-Wl,-force_load,");
            v.push(&archive::find_library(lib, search_path, &self.sess));
            self.cmd.arg(&v);
        }
    }
    fn link_whole_rlib(&mut self, lib: &Path) {
        // Same OSX/GNU split as `link_whole_staticlib`, except the full path
        // is already in hand here.
        if self.sess.target.target.options.is_like_osx {
            let mut v = OsString::from("-Wl,-force_load,");
            v.push(lib);
            self.cmd.arg(&v);
        } else {
            self.cmd.arg("-Wl,--whole-archive").arg(lib)
                    .arg("-Wl,--no-whole-archive");
        }
    }
    fn gc_sections(&mut self, is_dylib: bool) {
        // The dead_strip option to the linker specifies that functions and data
        // unreachable by the entry point will be removed. This is quite useful
        // with Rust's compilation model of compiling libraries at a time into
        // one object file. For example, this brings hello world from 1.7MB to
        // 458K.
        //
        // Note that this is done for both executables and dynamic libraries. We
        // won't get much benefit from dylibs because LLVM will have already
        // stripped away as much as it could. This has not been seen to impact
        // link times negatively.
        //
        // -dead_strip can't be part of the pre_link_args because it's also used
        // for partial linking when using multiple codegen units (-r). So we
        // insert it here.
        if self.sess.target.target.options.is_like_osx {
            self.cmd.arg("-Wl,-dead_strip");
        // If we're building a dylib, we don't use --gc-sections because LLVM
        // has already done the best it can do, and we also don't want to
        // eliminate the metadata. If we're building an executable, however,
        // --gc-sections drops the size of hello world from 1.8MB to 597K, a 67%
        // reduction.
        } else if !is_dylib {
            self.cmd.arg("-Wl,--gc-sections");
        }
    }
    fn optimize(&mut self) {
        if !self.sess.target.target.options.linker_is_gnu { return }
        // GNU-style linkers support optimization with -O. GNU ld doesn't
        // need a numeric argument, but other linkers do.
        if self.sess.opts.optimize == config::Default ||
           self.sess.opts.optimize == config::Aggressive {
            self.cmd.arg("-Wl,-O1");
        }
    }
    fn debuginfo(&mut self) {
        // Don't do anything special here for GNU-style linkers.
    }
    fn no_default_libraries(&mut self) {
        // Unfortunately right now passing -nodefaultlibs to gcc on windows
        // doesn't work so hot (in terms of native dependencies). This if
        // statement should hopefully be removed one day though!
        if !self.sess.target.target.options.is_like_windows {
            self.cmd.arg("-nodefaultlibs");
        }
    }
    fn build_dylib(&mut self, out_filename: &Path) {
        // On mac we need to tell the linker to let this library be rpathed
        if self.sess.target.target.options.is_like_osx {
            self.cmd.args(&["-dynamiclib", "-Wl,-dylib"]);
            if self.sess.opts.cg.rpath {
                let mut v = OsString::from("-Wl,-install_name,@rpath/");
                v.push(out_filename.file_name().unwrap());
                self.cmd.arg(&v);
            }
        } else {
            self.cmd.arg("-shared");
        }
    }
    // The four hint methods below are all skipped on linkers that don't
    // understand GNU-style flags (see `takes_hints`).
    fn whole_archives(&mut self) {
        if !self.takes_hints() { return }
        self.cmd.arg("-Wl,--whole-archive");
    }
    fn no_whole_archives(&mut self) {
        if !self.takes_hints() { return }
        self.cmd.arg("-Wl,--no-whole-archive");
    }
    fn hint_static(&mut self) {
        if !self.takes_hints() { return }
        self.cmd.arg("-Wl,-Bstatic");
    }
    fn hint_dynamic(&mut self) {
        if !self.takes_hints() { return }
        self.cmd.arg("-Wl,-Bdynamic");
    }
    fn export_symbols(&mut self, _: &Session, _: &CrateTranslation, _: &Path) {
        // noop, visibility in object files takes care of this
    }
}
/// `Linker` implementation driving Microsoft's `link.exe`-style linker
/// (slash-prefixed options, `.lib` naming for all native libraries).
pub struct MsvcLinker<'a> {
    pub cmd: &'a mut Command,
    pub sess: &'a Session,
}
impl<'a> Linker for MsvcLinker<'a> {
fn link_rlib(&mut self, lib: &Path) { self.cmd.arg(lib); }
fn add_object(&mut self, path: &Path) { self.cmd.arg(path); }
fn args(&mut self, args: &[String]) { self.cmd.args(args); }
fn build_dylib(&mut self, _out_filename: &Path) { self.cmd.arg("/DLL"); }
fn gc_sections(&mut self, _is_dylib: bool) { self.cmd.arg("/OPT:REF,ICF"); }
fn link_dylib(&mut self, lib: &str) {
self.cmd.arg(&format!("{}.lib", lib));
}
fn link_rust_dylib(&mut self, lib: &str, path: &Path) {
// When producing a dll, the MSVC linker may not actually emit a
// `foo.lib` file if the dll doesn't actually export any symbols, so we
// check to see if the file is there and just omit linking to it if it's
// not present.
let name = format!("{}.lib", lib);
if fs::metadata(&path.join(&name)).is_ok() {
self.cmd.arg(name);
}
}
fn link_staticlib(&mut self, lib: &str) {
self.cmd.arg(&format!("{}.lib", lib));
}
fn position_independent_executable(&mut self) {
// noop
}
fn no_default_libraries(&mut self) {
// Currently we don't pass the /NODEFAULTLIB flag to the linker on MSVC
// as there's been trouble in the past of linking the C++ standard
// library required by LLVM. This likely needs to happen one day, but
// in general Windows is also a more controlled environment than
// Unix, so it's not necessarily as critical that this be implemented.
//
// Note that there are also some licensing worries about statically
// linking some libraries which require a specific agreement, so it may
// not ever be possible for us to pass this flag.
}
fn include_path(&mut self, path: &Path) {
let mut arg = OsString::from("/LIBPATH:");
arg.push(path);
self.cmd.arg(&arg);
}
fn output_filename(&mut self, path: &Path) {
let mut arg = OsString::from("/OUT:");
arg.push(path);
self.cmd.arg(&arg);
|
fn framework_path(&mut self, _path: &Path) {
panic!("frameworks are not supported on windows")
}
fn link_framework(&mut self, _framework: &str) {
panic!("frameworks are not supported on windows")
}
fn link_whole_staticlib(&mut self, lib: &str, _search_path: &[PathBuf]) {
// not supported?
self.link_staticlib(lib);
}
fn link_whole_rlib(&mut self, path: &Path) {
// not supported?
self.link_rlib(path);
}
fn optimize(&mut self) {
// Needs more investigation of `/OPT` arguments
}
fn debuginfo(&mut self) {
match self.sess.opts.debuginfo {
NoDebugInfo => {
// Do nothing if debuginfo is disabled
},
LimitedDebugInfo |
FullDebugInfo => {
// This will cause the Microsoft linker to generate a PDB file
// from the CodeView line tables in the object files.
self.cmd.arg("/DEBUG");
}
}
}
fn whole_archives(&mut self) {
// hints not supported?
}
fn no_whole_archives(&mut self) {
// hints not supported?
}
// On windows static libraries are of the form `foo.lib` and dynamic
// libraries are not linked against directly, but rather through their
// import libraries also called `foo.lib`. As a result there's no
// possibility for a native library to appear both dynamically and
// statically in the same folder so we don't have to worry about hints like
// we do on Unix platforms.
fn hint_static(&mut self) {}
fn hint_dynamic(&mut self) {}
// Currently the compiler doesn't use `dllexport` (an LLVM attribute) to
// export symbols from a dynamic library. When building a dynamic library,
// however, we're going to want some symbols exported, so this function
// generates a DEF file which lists all the symbols.
//
// The linker will read this `*.def` file and export all the symbols from
// the dynamic library. Note that this is not as simple as just exporting
// all the symbols in the current crate (as specified by `trans.reachable`)
// but rather we also need to possibly export the symbols of upstream
// crates. Upstream rlibs may be linked statically to this dynamic library,
// in which case they may continue to transitively be used and hence need
// their symbols exported.
fn export_symbols(&mut self, sess: &Session, trans: &CrateTranslation,
tmpdir: &Path) {
let path = tmpdir.join("lib.def");
let res = (|| -> io::Result<()> {
let mut f = BufWriter::new(try!(File::create(&path)));
// Start off with the standard module name header and then go
// straight to exports.
try!(writeln!(f, "LIBRARY"));
try!(writeln!(f, "EXPORTS"));
// Write out all our local symbols
for sym in trans.reachable.iter() {
try!(writeln!(f, " {}", sym));
}
// Take a look at how all upstream crates are linked into this
// dynamic library. For all statically linked libraries we take all
// their reachable symbols and emit them as well.
let cstore = &sess.cstore;
let formats = sess.dependency_formats.borrow();
let symbols = formats[&CrateTypeDylib].iter();
let symbols = symbols.enumerate().filter_map(|(i, f)| {
if *f == Linkage::Static {
Some((i + 1) as ast::CrateNum)
} else {
None
}
}).flat_map(|cnum| {
csearch::get_reachable_ids(cstore, cnum)
}).map(|did| {
csearch::get_symbol(cstore, did)
});
for symbol in symbols {
try!(writeln!(f, " {}", symbol));
}
Ok(())
})();
if let Err(e) = res {
sess.fatal(&format!("failed to write lib.def file: {}", e));
}
let mut arg = OsString::from("/DEF:");
arg.push(path);
self.cmd.arg(&arg);
}
}
|
}
|
random_line_split
|
linker.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::ffi::OsString;
use std::fs::{self, File};
use std::io::{self, BufWriter};
use std::io::prelude::*;
use std::path::{Path, PathBuf};
use std::process::Command;
use back::archive;
use metadata::csearch;
use middle::dependency_format::Linkage;
use session::Session;
use session::config::DebugInfoLevel::{NoDebugInfo, LimitedDebugInfo, FullDebugInfo};
use session::config::CrateTypeDylib;
use session::config;
use syntax::ast;
use trans::CrateTranslation;
/// Linker abstraction used by back::link to build up the command to invoke a
/// linker.
///
/// This trait is the total list of requirements needed by `back::link` and
/// represents the meaning of each option being passed down. This trait is then
/// used to dispatch on whether a GNU-like linker (generally `ld.exe`) or an
/// MSVC linker (e.g. `link.exe`) is being used.
pub trait Linker {
fn link_dylib(&mut self, lib: &str);
fn link_rust_dylib(&mut self, lib: &str, path: &Path);
fn link_framework(&mut self, framework: &str);
fn link_staticlib(&mut self, lib: &str);
fn link_rlib(&mut self, lib: &Path);
fn link_whole_rlib(&mut self, lib: &Path);
fn link_whole_staticlib(&mut self, lib: &str, search_path: &[PathBuf]);
fn include_path(&mut self, path: &Path);
fn framework_path(&mut self, path: &Path);
fn output_filename(&mut self, path: &Path);
fn add_object(&mut self, path: &Path);
fn gc_sections(&mut self, is_dylib: bool);
fn position_independent_executable(&mut self);
fn optimize(&mut self);
fn debuginfo(&mut self);
fn no_default_libraries(&mut self);
fn build_dylib(&mut self, out_filename: &Path);
fn args(&mut self, args: &[String]);
fn hint_static(&mut self);
fn hint_dynamic(&mut self);
fn whole_archives(&mut self);
fn no_whole_archives(&mut self);
fn export_symbols(&mut self, sess: &Session, trans: &CrateTranslation,
tmpdir: &Path);
}
pub struct GnuLinker<'a> {
pub cmd: &'a mut Command,
pub sess: &'a Session,
}
impl<'a> GnuLinker<'a> {
fn takes_hints(&self) -> bool {
!self.sess.target.target.options.is_like_osx
}
}
impl<'a> Linker for GnuLinker<'a> {
fn link_dylib(&mut self, lib: &str) { self.cmd.arg("-l").arg(lib); }
fn link_staticlib(&mut self, lib: &str) { self.cmd.arg("-l").arg(lib); }
fn link_rlib(&mut self, lib: &Path) { self.cmd.arg(lib); }
fn include_path(&mut self, path: &Path) { self.cmd.arg("-L").arg(path); }
fn framework_path(&mut self, path: &Path) { self.cmd.arg("-F").arg(path); }
fn output_filename(&mut self, path: &Path) { self.cmd.arg("-o").arg(path); }
fn add_object(&mut self, path: &Path) { self.cmd.arg(path); }
fn position_independent_executable(&mut self) { self.cmd.arg("-pie"); }
fn args(&mut self, args: &[String]) { self.cmd.args(args); }
fn link_rust_dylib(&mut self, lib: &str, _path: &Path) {
self.cmd.arg("-l").arg(lib);
}
fn link_framework(&mut self, framework: &str) {
self.cmd.arg("-framework").arg(framework);
}
fn link_whole_staticlib(&mut self, lib: &str, search_path: &[PathBuf]) {
let target = &self.sess.target.target;
if!target.options.is_like_osx {
self.cmd.arg("-Wl,--whole-archive")
.arg("-l").arg(lib)
.arg("-Wl,--no-whole-archive");
} else {
// -force_load is the OSX equivalent of --whole-archive, but it
// involves passing the full path to the library to link.
let mut v = OsString::from("-Wl,-force_load,");
v.push(&archive::find_library(lib, search_path, &self.sess));
self.cmd.arg(&v);
}
}
fn link_whole_rlib(&mut self, lib: &Path) {
if self.sess.target.target.options.is_like_osx {
let mut v = OsString::from("-Wl,-force_load,");
v.push(lib);
self.cmd.arg(&v);
} else {
self.cmd.arg("-Wl,--whole-archive").arg(lib)
.arg("-Wl,--no-whole-archive");
}
}
fn gc_sections(&mut self, is_dylib: bool) {
// The dead_strip option to the linker specifies that functions and data
// unreachable by the entry point will be removed. This is quite useful
// with Rust's compilation model of compiling libraries at a time into
// one object file. For example, this brings hello world from 1.7MB to
// 458K.
//
// Note that this is done for both executables and dynamic libraries. We
// won't get much benefit from dylibs because LLVM will have already
// stripped away as much as it could. This has not been seen to impact
// link times negatively.
//
// -dead_strip can't be part of the pre_link_args because it's also used
// for partial linking when using multiple codegen units (-r). So we
// insert it here.
if self.sess.target.target.options.is_like_osx {
self.cmd.arg("-Wl,-dead_strip");
// If we're building a dylib, we don't use --gc-sections because LLVM
// has already done the best it can do, and we also don't want to
// eliminate the metadata. If we're building an executable, however,
// --gc-sections drops the size of hello world from 1.8MB to 597K, a 67%
// reduction.
} else if!is_dylib {
self.cmd.arg("-Wl,--gc-sections");
}
}
fn optimize(&mut self) {
if!self.sess.target.target.options.linker_is_gnu { return }
// GNU-style linkers support optimization with -O. GNU ld doesn't
// need a numeric argument, but other linkers do.
if self.sess.opts.optimize == config::Default ||
self.sess.opts.optimize == config::Aggressive {
self.cmd.arg("-Wl,-O1");
}
}
fn debuginfo(&mut self) {
// Don't do anything special here for GNU-style linkers.
}
fn no_default_libraries(&mut self) {
// Unfortunately right now passing -nodefaultlibs to gcc on windows
// doesn't work so hot (in terms of native dependencies). This if
// statement should hopefully be removed one day though!
if!self.sess.target.target.options.is_like_windows {
self.cmd.arg("-nodefaultlibs");
}
}
fn build_dylib(&mut self, out_filename: &Path) {
// On mac we need to tell the linker to let this library be rpathed
if self.sess.target.target.options.is_like_osx {
self.cmd.args(&["-dynamiclib", "-Wl,-dylib"]);
if self.sess.opts.cg.rpath {
let mut v = OsString::from("-Wl,-install_name,@rpath/");
v.push(out_filename.file_name().unwrap());
self.cmd.arg(&v);
}
} else {
self.cmd.arg("-shared");
}
}
fn whole_archives(&mut self) {
if!self.takes_hints() { return }
self.cmd.arg("-Wl,--whole-archive");
}
fn no_whole_archives(&mut self) {
if!self.takes_hints() { return }
self.cmd.arg("-Wl,--no-whole-archive");
}
fn hint_static(&mut self) {
if!self.takes_hints() { return }
self.cmd.arg("-Wl,-Bstatic");
}
fn hint_dynamic(&mut self)
|
fn export_symbols(&mut self, _: &Session, _: &CrateTranslation, _: &Path) {
// noop, visibility in object files takes care of this
}
}
pub struct MsvcLinker<'a> {
pub cmd: &'a mut Command,
pub sess: &'a Session,
}
impl<'a> Linker for MsvcLinker<'a> {
fn link_rlib(&mut self, lib: &Path) { self.cmd.arg(lib); }
fn add_object(&mut self, path: &Path) { self.cmd.arg(path); }
fn args(&mut self, args: &[String]) { self.cmd.args(args); }
fn build_dylib(&mut self, _out_filename: &Path) { self.cmd.arg("/DLL"); }
fn gc_sections(&mut self, _is_dylib: bool) { self.cmd.arg("/OPT:REF,ICF"); }
fn link_dylib(&mut self, lib: &str) {
self.cmd.arg(&format!("{}.lib", lib));
}
fn link_rust_dylib(&mut self, lib: &str, path: &Path) {
// When producing a dll, the MSVC linker may not actually emit a
// `foo.lib` file if the dll doesn't actually export any symbols, so we
// check to see if the file is there and just omit linking to it if it's
// not present.
let name = format!("{}.lib", lib);
if fs::metadata(&path.join(&name)).is_ok() {
self.cmd.arg(name);
}
}
fn link_staticlib(&mut self, lib: &str) {
self.cmd.arg(&format!("{}.lib", lib));
}
fn position_independent_executable(&mut self) {
// noop
}
fn no_default_libraries(&mut self) {
// Currently we don't pass the /NODEFAULTLIB flag to the linker on MSVC
// as there's been trouble in the past of linking the C++ standard
// library required by LLVM. This likely needs to happen one day, but
// in general Windows is also a more controlled environment than
// Unix, so it's not necessarily as critical that this be implemented.
//
// Note that there are also some licensing worries about statically
// linking some libraries which require a specific agreement, so it may
// not ever be possible for us to pass this flag.
}
fn include_path(&mut self, path: &Path) {
let mut arg = OsString::from("/LIBPATH:");
arg.push(path);
self.cmd.arg(&arg);
}
fn output_filename(&mut self, path: &Path) {
let mut arg = OsString::from("/OUT:");
arg.push(path);
self.cmd.arg(&arg);
}
fn framework_path(&mut self, _path: &Path) {
panic!("frameworks are not supported on windows")
}
fn link_framework(&mut self, _framework: &str) {
panic!("frameworks are not supported on windows")
}
fn link_whole_staticlib(&mut self, lib: &str, _search_path: &[PathBuf]) {
// not supported?
self.link_staticlib(lib);
}
fn link_whole_rlib(&mut self, path: &Path) {
// not supported?
self.link_rlib(path);
}
fn optimize(&mut self) {
// Needs more investigation of `/OPT` arguments
}
fn debuginfo(&mut self) {
match self.sess.opts.debuginfo {
NoDebugInfo => {
// Do nothing if debuginfo is disabled
},
LimitedDebugInfo |
FullDebugInfo => {
// This will cause the Microsoft linker to generate a PDB file
// from the CodeView line tables in the object files.
self.cmd.arg("/DEBUG");
}
}
}
fn whole_archives(&mut self) {
// hints not supported?
}
fn no_whole_archives(&mut self) {
// hints not supported?
}
// On windows static libraries are of the form `foo.lib` and dynamic
// libraries are not linked against directly, but rather through their
// import libraries also called `foo.lib`. As a result there's no
// possibility for a native library to appear both dynamically and
// statically in the same folder so we don't have to worry about hints like
// we do on Unix platforms.
fn hint_static(&mut self) {}
fn hint_dynamic(&mut self) {}
// Currently the compiler doesn't use `dllexport` (an LLVM attribute) to
// export symbols from a dynamic library. When building a dynamic library,
// however, we're going to want some symbols exported, so this function
// generates a DEF file which lists all the symbols.
//
// The linker will read this `*.def` file and export all the symbols from
// the dynamic library. Note that this is not as simple as just exporting
// all the symbols in the current crate (as specified by `trans.reachable`)
// but rather we also need to possibly export the symbols of upstream
// crates. Upstream rlibs may be linked statically to this dynamic library,
// in which case they may continue to transitively be used and hence need
// their symbols exported.
fn export_symbols(&mut self, sess: &Session, trans: &CrateTranslation,
tmpdir: &Path) {
let path = tmpdir.join("lib.def");
let res = (|| -> io::Result<()> {
let mut f = BufWriter::new(try!(File::create(&path)));
// Start off with the standard module name header and then go
// straight to exports.
try!(writeln!(f, "LIBRARY"));
try!(writeln!(f, "EXPORTS"));
// Write out all our local symbols
for sym in trans.reachable.iter() {
try!(writeln!(f, " {}", sym));
}
// Take a look at how all upstream crates are linked into this
// dynamic library. For all statically linked libraries we take all
// their reachable symbols and emit them as well.
let cstore = &sess.cstore;
let formats = sess.dependency_formats.borrow();
let symbols = formats[&CrateTypeDylib].iter();
let symbols = symbols.enumerate().filter_map(|(i, f)| {
if *f == Linkage::Static {
Some((i + 1) as ast::CrateNum)
} else {
None
}
}).flat_map(|cnum| {
csearch::get_reachable_ids(cstore, cnum)
}).map(|did| {
csearch::get_symbol(cstore, did)
});
for symbol in symbols {
try!(writeln!(f, " {}", symbol));
}
Ok(())
})();
if let Err(e) = res {
sess.fatal(&format!("failed to write lib.def file: {}", e));
}
let mut arg = OsString::from("/DEF:");
arg.push(path);
self.cmd.arg(&arg);
}
}
|
{
if !self.takes_hints() { return }
self.cmd.arg("-Wl,-Bdynamic");
}
|
identifier_body
|
linker.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::ffi::OsString;
use std::fs::{self, File};
use std::io::{self, BufWriter};
use std::io::prelude::*;
use std::path::{Path, PathBuf};
use std::process::Command;
use back::archive;
use metadata::csearch;
use middle::dependency_format::Linkage;
use session::Session;
use session::config::DebugInfoLevel::{NoDebugInfo, LimitedDebugInfo, FullDebugInfo};
use session::config::CrateTypeDylib;
use session::config;
use syntax::ast;
use trans::CrateTranslation;
/// Linker abstraction used by back::link to build up the command to invoke a
/// linker.
///
/// This trait is the total list of requirements needed by `back::link` and
/// represents the meaning of each option being passed down. This trait is then
/// used to dispatch on whether a GNU-like linker (generally `ld.exe`) or an
/// MSVC linker (e.g. `link.exe`) is being used.
pub trait Linker {
fn link_dylib(&mut self, lib: &str);
fn link_rust_dylib(&mut self, lib: &str, path: &Path);
fn link_framework(&mut self, framework: &str);
fn link_staticlib(&mut self, lib: &str);
fn link_rlib(&mut self, lib: &Path);
fn link_whole_rlib(&mut self, lib: &Path);
fn link_whole_staticlib(&mut self, lib: &str, search_path: &[PathBuf]);
fn include_path(&mut self, path: &Path);
fn framework_path(&mut self, path: &Path);
fn output_filename(&mut self, path: &Path);
fn add_object(&mut self, path: &Path);
fn gc_sections(&mut self, is_dylib: bool);
fn position_independent_executable(&mut self);
fn optimize(&mut self);
fn debuginfo(&mut self);
fn no_default_libraries(&mut self);
fn build_dylib(&mut self, out_filename: &Path);
fn args(&mut self, args: &[String]);
fn hint_static(&mut self);
fn hint_dynamic(&mut self);
fn whole_archives(&mut self);
fn no_whole_archives(&mut self);
fn export_symbols(&mut self, sess: &Session, trans: &CrateTranslation,
tmpdir: &Path);
}
pub struct GnuLinker<'a> {
pub cmd: &'a mut Command,
pub sess: &'a Session,
}
impl<'a> GnuLinker<'a> {
fn takes_hints(&self) -> bool {
!self.sess.target.target.options.is_like_osx
}
}
impl<'a> Linker for GnuLinker<'a> {
fn link_dylib(&mut self, lib: &str) { self.cmd.arg("-l").arg(lib); }
fn link_staticlib(&mut self, lib: &str) { self.cmd.arg("-l").arg(lib); }
fn link_rlib(&mut self, lib: &Path) { self.cmd.arg(lib); }
fn include_path(&mut self, path: &Path) { self.cmd.arg("-L").arg(path); }
fn framework_path(&mut self, path: &Path) { self.cmd.arg("-F").arg(path); }
fn output_filename(&mut self, path: &Path) { self.cmd.arg("-o").arg(path); }
fn add_object(&mut self, path: &Path) { self.cmd.arg(path); }
fn position_independent_executable(&mut self) { self.cmd.arg("-pie"); }
fn args(&mut self, args: &[String]) { self.cmd.args(args); }
fn link_rust_dylib(&mut self, lib: &str, _path: &Path) {
self.cmd.arg("-l").arg(lib);
}
fn link_framework(&mut self, framework: &str) {
self.cmd.arg("-framework").arg(framework);
}
fn link_whole_staticlib(&mut self, lib: &str, search_path: &[PathBuf]) {
let target = &self.sess.target.target;
if!target.options.is_like_osx {
self.cmd.arg("-Wl,--whole-archive")
.arg("-l").arg(lib)
.arg("-Wl,--no-whole-archive");
} else {
// -force_load is the OSX equivalent of --whole-archive, but it
// involves passing the full path to the library to link.
let mut v = OsString::from("-Wl,-force_load,");
v.push(&archive::find_library(lib, search_path, &self.sess));
self.cmd.arg(&v);
}
}
fn link_whole_rlib(&mut self, lib: &Path) {
if self.sess.target.target.options.is_like_osx {
let mut v = OsString::from("-Wl,-force_load,");
v.push(lib);
self.cmd.arg(&v);
} else {
self.cmd.arg("-Wl,--whole-archive").arg(lib)
.arg("-Wl,--no-whole-archive");
}
}
fn gc_sections(&mut self, is_dylib: bool) {
// The dead_strip option to the linker specifies that functions and data
// unreachable by the entry point will be removed. This is quite useful
// with Rust's compilation model of compiling libraries at a time into
// one object file. For example, this brings hello world from 1.7MB to
// 458K.
//
// Note that this is done for both executables and dynamic libraries. We
// won't get much benefit from dylibs because LLVM will have already
// stripped away as much as it could. This has not been seen to impact
// link times negatively.
//
// -dead_strip can't be part of the pre_link_args because it's also used
// for partial linking when using multiple codegen units (-r). So we
// insert it here.
if self.sess.target.target.options.is_like_osx {
self.cmd.arg("-Wl,-dead_strip");
// If we're building a dylib, we don't use --gc-sections because LLVM
// has already done the best it can do, and we also don't want to
// eliminate the metadata. If we're building an executable, however,
// --gc-sections drops the size of hello world from 1.8MB to 597K, a 67%
// reduction.
} else if!is_dylib {
self.cmd.arg("-Wl,--gc-sections");
}
}
fn optimize(&mut self) {
if!self.sess.target.target.options.linker_is_gnu { return }
// GNU-style linkers support optimization with -O. GNU ld doesn't
// need a numeric argument, but other linkers do.
if self.sess.opts.optimize == config::Default ||
self.sess.opts.optimize == config::Aggressive {
self.cmd.arg("-Wl,-O1");
}
}
fn debuginfo(&mut self) {
// Don't do anything special here for GNU-style linkers.
}
fn no_default_libraries(&mut self) {
// Unfortunately right now passing -nodefaultlibs to gcc on windows
// doesn't work so hot (in terms of native dependencies). This if
// statement should hopefully be removed one day though!
if!self.sess.target.target.options.is_like_windows {
self.cmd.arg("-nodefaultlibs");
}
}
fn build_dylib(&mut self, out_filename: &Path) {
// On mac we need to tell the linker to let this library be rpathed
if self.sess.target.target.options.is_like_osx {
self.cmd.args(&["-dynamiclib", "-Wl,-dylib"]);
if self.sess.opts.cg.rpath {
let mut v = OsString::from("-Wl,-install_name,@rpath/");
v.push(out_filename.file_name().unwrap());
self.cmd.arg(&v);
}
} else {
self.cmd.arg("-shared");
}
}
fn whole_archives(&mut self) {
if!self.takes_hints() { return }
self.cmd.arg("-Wl,--whole-archive");
}
fn no_whole_archives(&mut self) {
if!self.takes_hints() { return }
self.cmd.arg("-Wl,--no-whole-archive");
}
fn hint_static(&mut self) {
if!self.takes_hints() { return }
self.cmd.arg("-Wl,-Bstatic");
}
fn hint_dynamic(&mut self) {
if!self.takes_hints() { return }
self.cmd.arg("-Wl,-Bdynamic");
}
fn export_symbols(&mut self, _: &Session, _: &CrateTranslation, _: &Path) {
// noop, visibility in object files takes care of this
}
}
pub struct MsvcLinker<'a> {
pub cmd: &'a mut Command,
pub sess: &'a Session,
}
impl<'a> Linker for MsvcLinker<'a> {
fn link_rlib(&mut self, lib: &Path) { self.cmd.arg(lib); }
fn add_object(&mut self, path: &Path) { self.cmd.arg(path); }
fn
|
(&mut self, args: &[String]) { self.cmd.args(args); }
fn build_dylib(&mut self, _out_filename: &Path) { self.cmd.arg("/DLL"); }
fn gc_sections(&mut self, _is_dylib: bool) { self.cmd.arg("/OPT:REF,ICF"); }
fn link_dylib(&mut self, lib: &str) {
self.cmd.arg(&format!("{}.lib", lib));
}
fn link_rust_dylib(&mut self, lib: &str, path: &Path) {
// When producing a dll, the MSVC linker may not actually emit a
// `foo.lib` file if the dll doesn't actually export any symbols, so we
// check to see if the file is there and just omit linking to it if it's
// not present.
let name = format!("{}.lib", lib);
if fs::metadata(&path.join(&name)).is_ok() {
self.cmd.arg(name);
}
}
fn link_staticlib(&mut self, lib: &str) {
self.cmd.arg(&format!("{}.lib", lib));
}
fn position_independent_executable(&mut self) {
// noop
}
fn no_default_libraries(&mut self) {
// Currently we don't pass the /NODEFAULTLIB flag to the linker on MSVC
// as there's been trouble in the past of linking the C++ standard
// library required by LLVM. This likely needs to happen one day, but
// in general Windows is also a more controlled environment than
// Unix, so it's not necessarily as critical that this be implemented.
//
// Note that there are also some licensing worries about statically
// linking some libraries which require a specific agreement, so it may
// not ever be possible for us to pass this flag.
}
fn include_path(&mut self, path: &Path) {
let mut arg = OsString::from("/LIBPATH:");
arg.push(path);
self.cmd.arg(&arg);
}
fn output_filename(&mut self, path: &Path) {
let mut arg = OsString::from("/OUT:");
arg.push(path);
self.cmd.arg(&arg);
}
fn framework_path(&mut self, _path: &Path) {
panic!("frameworks are not supported on windows")
}
fn link_framework(&mut self, _framework: &str) {
panic!("frameworks are not supported on windows")
}
fn link_whole_staticlib(&mut self, lib: &str, _search_path: &[PathBuf]) {
// not supported?
self.link_staticlib(lib);
}
fn link_whole_rlib(&mut self, path: &Path) {
// not supported?
self.link_rlib(path);
}
fn optimize(&mut self) {
// Needs more investigation of `/OPT` arguments
}
fn debuginfo(&mut self) {
match self.sess.opts.debuginfo {
NoDebugInfo => {
// Do nothing if debuginfo is disabled
},
LimitedDebugInfo |
FullDebugInfo => {
// This will cause the Microsoft linker to generate a PDB file
// from the CodeView line tables in the object files.
self.cmd.arg("/DEBUG");
}
}
}
fn whole_archives(&mut self) {
// hints not supported?
}
fn no_whole_archives(&mut self) {
// hints not supported?
}
    // On windows static libraries are of the form `foo.lib` and dynamic
    // libraries are not linked against directly, but rather through their
    // import libraries also called `foo.lib`. As a result there's no
    // possibility for a native library to appear both dynamically and
    // statically in the same folder so we don't have to worry about hints like
    // we do on Unix platforms.
    // Both hints are therefore deliberate no-ops on MSVC.
    fn hint_static(&mut self) {}
    fn hint_dynamic(&mut self) {}
    // Currently the compiler doesn't use `dllexport` (an LLVM attribute) to
    // export symbols from a dynamic library. When building a dynamic library,
    // however, we're going to want some symbols exported, so this function
    // generates a DEF file which lists all the symbols.
    //
    // The linker will read this `*.def` file and export all the symbols from
    // the dynamic library. Note that this is not as simple as just exporting
    // all the symbols in the current crate (as specified by `trans.reachable`)
    // but rather we also need to possibly export the symbols of upstream
    // crates. Upstream rlibs may be linked statically to this dynamic library,
    // in which case they may continue to transitively be used and hence need
    // their symbols exported.
    fn export_symbols(&mut self, sess: &Session, trans: &CrateTranslation,
                      tmpdir: &Path) {
        // The DEF file is written next to the other temporaries; only the
        // resulting `/DEF:` flag outlives this call.
        let path = tmpdir.join("lib.def");
        // Immediately-invoked closure so every fallible write can use `try!`
        // and the single error check below covers them all.
        let res = (|| -> io::Result<()> {
            let mut f = BufWriter::new(try!(File::create(&path)));
            // Start off with the standard module name header and then go
            // straight to exports.
            try!(writeln!(f, "LIBRARY"));
            try!(writeln!(f, "EXPORTS"));
            // Write out all our local symbols
            for sym in trans.reachable.iter() {
                try!(writeln!(f, " {}", sym));
            }
            // Take a look at how all upstream crates are linked into this
            // dynamic library. For all statically linked libraries we take all
            // their reachable symbols and emit them as well.
            let cstore = &sess.cstore;
            let formats = sess.dependency_formats.borrow();
            let symbols = formats[&CrateTypeDylib].iter();
            // Crate numbers are 1-based, hence the `i + 1` below.
            let symbols = symbols.enumerate().filter_map(|(i, f)| {
                if *f == Linkage::Static {
                    Some((i + 1) as ast::CrateNum)
                } else {
                    None
                }
            }).flat_map(|cnum| {
                csearch::get_reachable_ids(cstore, cnum)
            }).map(|did| {
                csearch::get_symbol(cstore, did)
            });
            for symbol in symbols {
                try!(writeln!(f, " {}", symbol));
            }
            Ok(())
        })();
        // Failing to write the DEF file is unrecoverable for the link step.
        if let Err(e) = res {
            sess.fatal(&format!("failed to write lib.def file: {}", e));
        }
        // Finally point link.exe at the generated file.
        let mut arg = OsString::from("/DEF:");
        arg.push(path);
        self.cmd.arg(&arg);
    }
}
|
args
|
identifier_name
|
dropck.rs
|
use crate::check::regionck::RegionCtxt;
use crate::hir;
use crate::hir::def_id::{DefId, LocalDefId};
use rustc_errors::{struct_span_err, ErrorReported};
use rustc_infer::infer::outlives::env::OutlivesEnvironment;
use rustc_infer::infer::{InferOk, RegionckMode, TyCtxtInferExt};
use rustc_infer::traits::TraitEngineExt as _;
use rustc_middle::ty::error::TypeError;
use rustc_middle::ty::relate::{Relate, RelateResult, TypeRelation};
use rustc_middle::ty::subst::{Subst, SubstsRef};
use rustc_middle::ty::{self, Predicate, Ty, TyCtxt};
use rustc_span::Span;
use rustc_trait_selection::traits::error_reporting::InferCtxtExt;
use rustc_trait_selection::traits::query::dropck_outlives::AtExt;
use rustc_trait_selection::traits::{ObligationCause, TraitEngine, TraitEngineExt};
/// This function confirms that the `Drop` implementation identified by
/// `drop_impl_did` is not any more specialized than the type it is
/// attached to (Issue #8142).
///
/// This means:
///
/// 1. The self type must be nominal (this is already checked during
/// coherence),
///
/// 2. The generic region/type parameters of the impl's self type must
/// all be parameters of the Drop impl itself (i.e., no
/// specialization like `impl Drop for Foo<i32>`), and,
///
/// 3. Any bounds on the generic parameters must be reflected in the
/// struct/enum definition for the nominal type itself (i.e.
/// cannot do `struct S<T>; impl<T:Clone> Drop for S<T> {... }`).
///
pub fn check_drop_impl(tcx: TyCtxt<'_>, drop_impl_did: DefId) -> Result<(), ErrorReported> {
    // Self type of the `Drop` impl and the where-clauses written on it.
    let dtor_self_type = tcx.type_of(drop_impl_did);
    let dtor_predicates = tcx.predicates_of(drop_impl_did);
    match dtor_self_type.kind() {
        ty::Adt(adt_def, self_to_impl_substs) => {
            // Check 2 of the doc comment: the impl's self type must not be
            // any more specific than the ADT definition itself.
            ensure_drop_params_and_item_params_correspond(
                tcx,
                drop_impl_did.expect_local(),
                dtor_self_type,
                adt_def.did,
            )?;
            // Check 3: every predicate on the impl must already be implied
            // by the predicates on the type definition.
            ensure_drop_predicates_are_implied_by_item_defn(
                tcx,
                dtor_predicates,
                adt_def.did.expect_local(),
                self_to_impl_substs,
            )
        }
        _ => {
            // Destructors only work on nominal types. This was
            // already checked by coherence, but compilation may
            // not have been terminated.
            let span = tcx.def_span(drop_impl_did);
            tcx.sess.delay_span_bug(
                span,
                &format!("should have been rejected by coherence check: {}", dtor_self_type),
            );
            Err(ErrorReported)
        }
    }
}
// Rejects specialized `Drop` impls (E0366): the impl's self type must
// unify with the ADT's own type, i.e. the impl may not pin any of the
// type's generic parameters to concrete types/lifetimes/consts.
fn ensure_drop_params_and_item_params_correspond<'tcx>(
    tcx: TyCtxt<'tcx>,
    drop_impl_did: LocalDefId,
    drop_impl_ty: Ty<'tcx>,
    self_type_did: DefId,
) -> Result<(), ErrorReported> {
    let drop_impl_hir_id = tcx.hir().local_def_id_to_hir_id(drop_impl_did);
    // check that the impl type can be made to match the trait type.
    tcx.infer_ctxt().enter(|ref infcx| {
        let impl_param_env = tcx.param_env(self_type_did);
        let tcx = infcx.tcx;
        let mut fulfillment_cx = <dyn TraitEngine<'_>>::new(tcx);
        let named_type = tcx.type_of(self_type_did);
        let drop_impl_span = tcx.def_span(drop_impl_did);
        // Instantiate the impl's generics with fresh inference variables so
        // that the `eq` below decides whether the self type can be made to
        // match the ADT for *some* choice of the impl's parameters.
        let fresh_impl_substs =
            infcx.fresh_substs_for_item(drop_impl_span, drop_impl_did.to_def_id());
        let fresh_impl_self_ty = drop_impl_ty.subst(tcx, fresh_impl_substs);
        let cause = &ObligationCause::misc(drop_impl_span, drop_impl_hir_id);
        match infcx.at(cause, impl_param_env).eq(named_type, fresh_impl_self_ty) {
            Ok(InferOk { obligations,.. }) => {
                fulfillment_cx.register_predicate_obligations(infcx, obligations);
            }
            Err(_) => {
                // Unification failed: the self type is more specific than
                // the definition, i.e. a specialized Drop impl.
                let item_span = tcx.def_span(self_type_did);
                let self_descr = tcx.def_kind(self_type_did).descr(self_type_did);
                struct_span_err!(
                    tcx.sess,
                    drop_impl_span,
                    E0366,
                    "`Drop` impls cannot be specialized"
                )
                .span_note(
                    item_span,
                    &format!(
                        "use the same sequence of generic type, lifetime and const parameters \
                        as the {} definition",
                        self_descr,
                    ),
                )
                .emit();
                return Err(ErrorReported);
            }
        }
        let errors = fulfillment_cx.select_all_or_error(&infcx);
        if!errors.is_empty() {
            // this could be reached when we get lazy normalization
            infcx.report_fulfillment_errors(&errors, None, false);
            return Err(ErrorReported);
        }
        // NB. It seems a bit... suspicious to use an empty param-env
        // here. The correct thing, I imagine, would be
        // `OutlivesEnvironment::new(impl_param_env)`, which would
        // allow region solving to take any `a: 'b` relations on the
        // impl into account. But I could not create a test case where
        // it did the wrong thing, so I chose to preserve existing
        // behavior, since it ought to be simply more
        // conservative. -nmatsakis
        let outlives_env = OutlivesEnvironment::new(ty::ParamEnv::empty());
        infcx.resolve_regions_and_report_errors(
            drop_impl_did.to_def_id(),
            &outlives_env,
            RegionckMode::default(),
        );
        Ok(())
    })
}
/// Confirms that every predicate imposed by dtor_predicates is
/// implied by assuming the predicates attached to self_type_did.
fn ensure_drop_predicates_are_implied_by_item_defn<'tcx>(
tcx: TyCtxt<'tcx>,
dtor_predicates: ty::GenericPredicates<'tcx>,
self_type_did: LocalDefId,
self_to_impl_substs: SubstsRef<'tcx>,
) -> Result<(), ErrorReported> {
let mut result = Ok(());
// Here is an example, analogous to that from
// `compare_impl_method`.
//
// Consider a struct type:
//
// struct Type<'c, 'b:'c, 'a> {
// x: &'a Contents // (contents are irrelevant;
// y: &'c Cell<&'b Contents>, // only the bounds matter for our purposes.)
// }
//
// and a Drop impl:
//
// impl<'z, 'y:'z, 'x:'y> Drop for P<'z, 'y, 'x> {
// fn drop(&mut self) { self.y.set(self.x); } // (only legal if 'x: 'y)
// }
//
// We start out with self_to_impl_substs, that maps the generic
// parameters of Type to that of the Drop impl.
//
// self_to_impl_substs = {'c => 'z, 'b => 'y, 'a => 'x}
//
// Applying this to the predicates (i.e., assumptions) provided by the item
// definition yields the instantiated assumptions:
//
// ['y : 'z]
//
// We then check all of the predicates of the Drop impl:
//
// ['y:'z, 'x:'y]
//
// and ensure each is in the list of instantiated
// assumptions. Here, `'y:'z` is present, but `'x:'y` is
// absent. So we report an error that the Drop impl injected a
// predicate that is not present on the struct definition.
let self_type_hir_id = tcx.hir().local_def_id_to_hir_id(self_type_did);
// We can assume the predicates attached to struct/enum definition
// hold.
let generic_assumptions = tcx.predicates_of(self_type_did);
let assumptions_in_impl_context = generic_assumptions.instantiate(tcx, &self_to_impl_substs);
let assumptions_in_impl_context = assumptions_in_impl_context.predicates;
let self_param_env = tcx.param_env(self_type_did);
// An earlier version of this code attempted to do this checking
// via the traits::fulfill machinery. However, it ran into trouble
// since the fulfill machinery merely turns outlives-predicates
// 'a:'b and T:'b into region inference constraints. It is simpler
// just to look for all the predicates directly.
assert_eq!(dtor_predicates.parent, None);
for &(predicate, predicate_sp) in dtor_predicates.predicates {
// (We do not need to worry about deep analysis of type
|
// to take on a structure that is roughly an alpha-renaming of
// the generic parameters of the item definition.)
// This path now just checks *all* predicates via an instantiation of
// the `SimpleEqRelation`, which simply forwards to the `relate` machinery
// after taking care of anonymizing late bound regions.
//
// However, it may be more efficient in the future to batch
// the analysis together via the fulfill (see comment above regarding
// the usage of the fulfill machinery), rather than the
// repeated `.iter().any(..)` calls.
// This closure is a more robust way to check `Predicate` equality
// than simple `==` checks (which were the previous implementation).
// It relies on `ty::relate` for `TraitPredicate`, `ProjectionPredicate`,
// `ConstEvaluatable` and `TypeOutlives` (which implement the Relate trait),
// while delegating on simple equality for the other `Predicate`.
// This implementation solves (Issue #59497) and (Issue #58311).
// It is unclear to me at the moment whether the approach based on `relate`
// could be extended easily also to the other `Predicate`.
let predicate_matches_closure = |p: Predicate<'tcx>| {
let mut relator: SimpleEqRelation<'tcx> = SimpleEqRelation::new(tcx, self_param_env);
let predicate = predicate.kind();
let p = p.kind();
match (predicate.skip_binder(), p.skip_binder()) {
(ty::PredicateKind::Trait(a), ty::PredicateKind::Trait(b)) => {
relator.relate(predicate.rebind(a), p.rebind(b)).is_ok()
}
(ty::PredicateKind::Projection(a), ty::PredicateKind::Projection(b)) => {
relator.relate(predicate.rebind(a), p.rebind(b)).is_ok()
}
(
ty::PredicateKind::ConstEvaluatable(a),
ty::PredicateKind::ConstEvaluatable(b),
) => tcx.try_unify_abstract_consts((a, b)),
(
ty::PredicateKind::TypeOutlives(ty::OutlivesPredicate(ty_a, lt_a)),
ty::PredicateKind::TypeOutlives(ty::OutlivesPredicate(ty_b, lt_b)),
) => {
relator.relate(predicate.rebind(ty_a), p.rebind(ty_b)).is_ok()
&& relator.relate(predicate.rebind(lt_a), p.rebind(lt_b)).is_ok()
}
_ => predicate == p,
}
};
if!assumptions_in_impl_context.iter().copied().any(predicate_matches_closure) {
let item_span = tcx.hir().span(self_type_hir_id);
let self_descr = tcx.def_kind(self_type_did).descr(self_type_did.to_def_id());
struct_span_err!(
tcx.sess,
predicate_sp,
E0367,
"`Drop` impl requires `{}` but the {} it is implemented for does not",
predicate,
self_descr,
)
.span_note(item_span, "the implementor must specify the same requirement")
.emit();
result = Err(ErrorReported);
}
}
result
}
/// Registers the dropck-outlives obligations for `ty` at `span`.
///
/// Beyond checking the dropck obligations themselves, this also currently
/// prevents non-regular recursion in types from causing stack overflows
/// (dropck_no_diverge_on_nonregular_*.rs).
crate fn check_drop_obligations<'a, 'tcx>(
    rcx: &mut RegionCtxt<'a, 'tcx>,
    ty: Ty<'tcx>,
    span: Span,
    body_id: hir::HirId,
) {
    debug!("check_drop_obligations typ: {:?}", ty);
    // Run the dropck-outlives query and hand every resulting obligation
    // over to the enclosing function context.
    let infer_ok = rcx
        .infcx
        .at(&ObligationCause::misc(span, body_id), rcx.fcx.param_env)
        .dropck_outlives(ty);
    debug!("dropck_outlives = {:#?}", infer_ok);
    rcx.fcx.register_infer_ok_obligations(infer_ok);
}
// This is an implementation of the TypeRelation trait with the
// aim of simply comparing for equality (without side-effects).
// It is not intended to be used anywhere else other than here.
crate struct SimpleEqRelation<'tcx> {
    // Type context used to drive `ty::relate`'s structural comparisons.
    tcx: TyCtxt<'tcx>,
    // Parameter environment the comparison is performed within.
    param_env: ty::ParamEnv<'tcx>,
}
impl<'tcx> SimpleEqRelation<'tcx> {
    // Constructs a relation that checks for exact, side-effect-free
    // equality under `param_env`.
    fn new(tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> SimpleEqRelation<'tcx> {
        SimpleEqRelation { tcx, param_env }
    }
}
impl TypeRelation<'tcx> for SimpleEqRelation<'tcx> {
fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx
}
fn param_env(&self) -> ty::ParamEnv<'tcx> {
self.param_env
}
fn tag(&self) -> &'static str {
"dropck::SimpleEqRelation"
}
fn a_is_expected(&self) -> bool {
true
}
fn relate_with_variance<T: Relate<'tcx>>(
&mut self,
_: ty::Variance,
_info: ty::VarianceDiagInfo<'tcx>,
a: T,
b: T,
) -> RelateResult<'tcx, T> {
// Here we ignore variance because we require drop impl's types
// to be *exactly* the same as to the ones in the struct definition.
self.relate(a, b)
}
fn tys(&mut self, a: Ty<'tcx>, b: Ty<'tcx>) -> RelateResult<'tcx, Ty<'tcx>> {
debug!("SimpleEqRelation::tys(a={:?}, b={:?})", a, b);
ty::relate::super_relate_tys(self, a, b)
}
fn regions(
&mut self,
a: ty::Region<'tcx>,
b: ty::Region<'tcx>,
) -> RelateResult<'tcx, ty::Region<'tcx>> {
debug!("SimpleEqRelation::regions(a={:?}, b={:?})", a, b);
// We can just equate the regions because LBRs have been
// already anonymized.
if a == b {
Ok(a)
} else {
// I'm not sure is this `TypeError` is the right one, but
// it should not matter as it won't be checked (the dropck
// will emit its own, more informative and higher-level errors
// in case anything goes wrong).
Err(TypeError::RegionsPlaceholderMismatch)
}
}
fn consts(
&mut self,
a: &'tcx ty::Const<'tcx>,
b: &'tcx ty::Const<'tcx>,
) -> RelateResult<'tcx, &'tcx ty::Const<'tcx>> {
debug!("SimpleEqRelation::consts(a={:?}, b={:?})", a, b);
ty::relate::super_relate_consts(self, a, b)
}
fn binders<T>(
&mut self,
a: ty::Binder<'tcx, T>,
b: ty::Binder<'tcx, T>,
) -> RelateResult<'tcx, ty::Binder<'tcx, T>>
where
T: Relate<'tcx>,
{
debug!("SimpleEqRelation::binders({:?}: {:?}", a, b);
// Anonymizing the LBRs is necessary to solve (Issue #59497).
// After we do so, it should be totally fine to skip the binders.
let anon_a = self.tcx.anonymize_late_bound_regions(a);
let anon_b = self.tcx.anonymize_late_bound_regions(b);
self.relate(anon_a.skip_binder(), anon_b.skip_binder())?;
Ok(a)
}
}
|
// expressions etc because the Drop impls are already forced
|
random_line_split
|
dropck.rs
|
use crate::check::regionck::RegionCtxt;
use crate::hir;
use crate::hir::def_id::{DefId, LocalDefId};
use rustc_errors::{struct_span_err, ErrorReported};
use rustc_infer::infer::outlives::env::OutlivesEnvironment;
use rustc_infer::infer::{InferOk, RegionckMode, TyCtxtInferExt};
use rustc_infer::traits::TraitEngineExt as _;
use rustc_middle::ty::error::TypeError;
use rustc_middle::ty::relate::{Relate, RelateResult, TypeRelation};
use rustc_middle::ty::subst::{Subst, SubstsRef};
use rustc_middle::ty::{self, Predicate, Ty, TyCtxt};
use rustc_span::Span;
use rustc_trait_selection::traits::error_reporting::InferCtxtExt;
use rustc_trait_selection::traits::query::dropck_outlives::AtExt;
use rustc_trait_selection::traits::{ObligationCause, TraitEngine, TraitEngineExt};
/// This function confirms that the `Drop` implementation identified by
/// `drop_impl_did` is not any more specialized than the type it is
/// attached to (Issue #8142).
///
/// This means:
///
/// 1. The self type must be nominal (this is already checked during
/// coherence),
///
/// 2. The generic region/type parameters of the impl's self type must
/// all be parameters of the Drop impl itself (i.e., no
/// specialization like `impl Drop for Foo<i32>`), and,
///
/// 3. Any bounds on the generic parameters must be reflected in the
/// struct/enum definition for the nominal type itself (i.e.
/// cannot do `struct S<T>; impl<T:Clone> Drop for S<T> {... }`).
///
pub fn check_drop_impl(tcx: TyCtxt<'_>, drop_impl_did: DefId) -> Result<(), ErrorReported> {
let dtor_self_type = tcx.type_of(drop_impl_did);
let dtor_predicates = tcx.predicates_of(drop_impl_did);
match dtor_self_type.kind() {
ty::Adt(adt_def, self_to_impl_substs) => {
ensure_drop_params_and_item_params_correspond(
tcx,
drop_impl_did.expect_local(),
dtor_self_type,
adt_def.did,
)?;
ensure_drop_predicates_are_implied_by_item_defn(
tcx,
dtor_predicates,
adt_def.did.expect_local(),
self_to_impl_substs,
)
}
_ => {
// Destructors only work on nominal types. This was
// already checked by coherence, but compilation may
// not have been terminated.
let span = tcx.def_span(drop_impl_did);
tcx.sess.delay_span_bug(
span,
&format!("should have been rejected by coherence check: {}", dtor_self_type),
);
Err(ErrorReported)
}
}
}
fn ensure_drop_params_and_item_params_correspond<'tcx>(
tcx: TyCtxt<'tcx>,
drop_impl_did: LocalDefId,
drop_impl_ty: Ty<'tcx>,
self_type_did: DefId,
) -> Result<(), ErrorReported> {
let drop_impl_hir_id = tcx.hir().local_def_id_to_hir_id(drop_impl_did);
// check that the impl type can be made to match the trait type.
tcx.infer_ctxt().enter(|ref infcx| {
let impl_param_env = tcx.param_env(self_type_did);
let tcx = infcx.tcx;
let mut fulfillment_cx = <dyn TraitEngine<'_>>::new(tcx);
let named_type = tcx.type_of(self_type_did);
let drop_impl_span = tcx.def_span(drop_impl_did);
let fresh_impl_substs =
infcx.fresh_substs_for_item(drop_impl_span, drop_impl_did.to_def_id());
let fresh_impl_self_ty = drop_impl_ty.subst(tcx, fresh_impl_substs);
let cause = &ObligationCause::misc(drop_impl_span, drop_impl_hir_id);
match infcx.at(cause, impl_param_env).eq(named_type, fresh_impl_self_ty) {
Ok(InferOk { obligations,.. }) => {
fulfillment_cx.register_predicate_obligations(infcx, obligations);
}
Err(_) => {
let item_span = tcx.def_span(self_type_did);
let self_descr = tcx.def_kind(self_type_did).descr(self_type_did);
struct_span_err!(
tcx.sess,
drop_impl_span,
E0366,
"`Drop` impls cannot be specialized"
)
.span_note(
item_span,
&format!(
"use the same sequence of generic type, lifetime and const parameters \
as the {} definition",
self_descr,
),
)
.emit();
return Err(ErrorReported);
}
}
let errors = fulfillment_cx.select_all_or_error(&infcx);
if!errors.is_empty() {
// this could be reached when we get lazy normalization
infcx.report_fulfillment_errors(&errors, None, false);
return Err(ErrorReported);
}
// NB. It seems a bit... suspicious to use an empty param-env
// here. The correct thing, I imagine, would be
// `OutlivesEnvironment::new(impl_param_env)`, which would
// allow region solving to take any `a: 'b` relations on the
// impl into account. But I could not create a test case where
// it did the wrong thing, so I chose to preserve existing
// behavior, since it ought to be simply more
// conservative. -nmatsakis
let outlives_env = OutlivesEnvironment::new(ty::ParamEnv::empty());
infcx.resolve_regions_and_report_errors(
drop_impl_did.to_def_id(),
&outlives_env,
RegionckMode::default(),
);
Ok(())
})
}
/// Confirms that every predicate imposed by dtor_predicates is
/// implied by assuming the predicates attached to self_type_did.
fn ensure_drop_predicates_are_implied_by_item_defn<'tcx>(
tcx: TyCtxt<'tcx>,
dtor_predicates: ty::GenericPredicates<'tcx>,
self_type_did: LocalDefId,
self_to_impl_substs: SubstsRef<'tcx>,
) -> Result<(), ErrorReported> {
let mut result = Ok(());
// Here is an example, analogous to that from
// `compare_impl_method`.
//
// Consider a struct type:
//
// struct Type<'c, 'b:'c, 'a> {
// x: &'a Contents // (contents are irrelevant;
// y: &'c Cell<&'b Contents>, // only the bounds matter for our purposes.)
// }
//
// and a Drop impl:
//
// impl<'z, 'y:'z, 'x:'y> Drop for P<'z, 'y, 'x> {
// fn drop(&mut self) { self.y.set(self.x); } // (only legal if 'x: 'y)
// }
//
// We start out with self_to_impl_substs, that maps the generic
// parameters of Type to that of the Drop impl.
//
// self_to_impl_substs = {'c => 'z, 'b => 'y, 'a => 'x}
//
// Applying this to the predicates (i.e., assumptions) provided by the item
// definition yields the instantiated assumptions:
//
// ['y : 'z]
//
// We then check all of the predicates of the Drop impl:
//
// ['y:'z, 'x:'y]
//
// and ensure each is in the list of instantiated
// assumptions. Here, `'y:'z` is present, but `'x:'y` is
// absent. So we report an error that the Drop impl injected a
// predicate that is not present on the struct definition.
let self_type_hir_id = tcx.hir().local_def_id_to_hir_id(self_type_did);
// We can assume the predicates attached to struct/enum definition
// hold.
let generic_assumptions = tcx.predicates_of(self_type_did);
let assumptions_in_impl_context = generic_assumptions.instantiate(tcx, &self_to_impl_substs);
let assumptions_in_impl_context = assumptions_in_impl_context.predicates;
let self_param_env = tcx.param_env(self_type_did);
// An earlier version of this code attempted to do this checking
// via the traits::fulfill machinery. However, it ran into trouble
// since the fulfill machinery merely turns outlives-predicates
// 'a:'b and T:'b into region inference constraints. It is simpler
// just to look for all the predicates directly.
assert_eq!(dtor_predicates.parent, None);
for &(predicate, predicate_sp) in dtor_predicates.predicates {
// (We do not need to worry about deep analysis of type
// expressions etc because the Drop impls are already forced
// to take on a structure that is roughly an alpha-renaming of
// the generic parameters of the item definition.)
// This path now just checks *all* predicates via an instantiation of
// the `SimpleEqRelation`, which simply forwards to the `relate` machinery
// after taking care of anonymizing late bound regions.
//
// However, it may be more efficient in the future to batch
// the analysis together via the fulfill (see comment above regarding
// the usage of the fulfill machinery), rather than the
// repeated `.iter().any(..)` calls.
// This closure is a more robust way to check `Predicate` equality
// than simple `==` checks (which were the previous implementation).
// It relies on `ty::relate` for `TraitPredicate`, `ProjectionPredicate`,
// `ConstEvaluatable` and `TypeOutlives` (which implement the Relate trait),
// while delegating on simple equality for the other `Predicate`.
// This implementation solves (Issue #59497) and (Issue #58311).
// It is unclear to me at the moment whether the approach based on `relate`
// could be extended easily also to the other `Predicate`.
let predicate_matches_closure = |p: Predicate<'tcx>| {
let mut relator: SimpleEqRelation<'tcx> = SimpleEqRelation::new(tcx, self_param_env);
let predicate = predicate.kind();
let p = p.kind();
match (predicate.skip_binder(), p.skip_binder()) {
(ty::PredicateKind::Trait(a), ty::PredicateKind::Trait(b)) => {
relator.relate(predicate.rebind(a), p.rebind(b)).is_ok()
}
(ty::PredicateKind::Projection(a), ty::PredicateKind::Projection(b)) => {
relator.relate(predicate.rebind(a), p.rebind(b)).is_ok()
}
(
ty::PredicateKind::ConstEvaluatable(a),
ty::PredicateKind::ConstEvaluatable(b),
) => tcx.try_unify_abstract_consts((a, b)),
(
ty::PredicateKind::TypeOutlives(ty::OutlivesPredicate(ty_a, lt_a)),
ty::PredicateKind::TypeOutlives(ty::OutlivesPredicate(ty_b, lt_b)),
) =>
|
_ => predicate == p,
}
};
if!assumptions_in_impl_context.iter().copied().any(predicate_matches_closure) {
let item_span = tcx.hir().span(self_type_hir_id);
let self_descr = tcx.def_kind(self_type_did).descr(self_type_did.to_def_id());
struct_span_err!(
tcx.sess,
predicate_sp,
E0367,
"`Drop` impl requires `{}` but the {} it is implemented for does not",
predicate,
self_descr,
)
.span_note(item_span, "the implementor must specify the same requirement")
.emit();
result = Err(ErrorReported);
}
}
result
}
/// This function is not only checking that the dropck obligations are met for
/// the given type, but it's also currently preventing non-regular recursion in
/// types from causing stack overflows (dropck_no_diverge_on_nonregular_*.rs).
crate fn check_drop_obligations<'a, 'tcx>(
rcx: &mut RegionCtxt<'a, 'tcx>,
ty: Ty<'tcx>,
span: Span,
body_id: hir::HirId,
) {
debug!("check_drop_obligations typ: {:?}", ty);
let cause = &ObligationCause::misc(span, body_id);
let infer_ok = rcx.infcx.at(cause, rcx.fcx.param_env).dropck_outlives(ty);
debug!("dropck_outlives = {:#?}", infer_ok);
rcx.fcx.register_infer_ok_obligations(infer_ok);
}
// This is an implementation of the TypeRelation trait with the
// aim of simply comparing for equality (without side-effects).
// It is not intended to be used anywhere else other than here.
crate struct SimpleEqRelation<'tcx> {
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
}
impl<'tcx> SimpleEqRelation<'tcx> {
fn new(tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> SimpleEqRelation<'tcx> {
SimpleEqRelation { tcx, param_env }
}
}
impl TypeRelation<'tcx> for SimpleEqRelation<'tcx> {
fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx
}
fn param_env(&self) -> ty::ParamEnv<'tcx> {
self.param_env
}
fn tag(&self) -> &'static str {
"dropck::SimpleEqRelation"
}
fn a_is_expected(&self) -> bool {
true
}
fn relate_with_variance<T: Relate<'tcx>>(
&mut self,
_: ty::Variance,
_info: ty::VarianceDiagInfo<'tcx>,
a: T,
b: T,
) -> RelateResult<'tcx, T> {
// Here we ignore variance because we require drop impl's types
// to be *exactly* the same as to the ones in the struct definition.
self.relate(a, b)
}
fn tys(&mut self, a: Ty<'tcx>, b: Ty<'tcx>) -> RelateResult<'tcx, Ty<'tcx>> {
debug!("SimpleEqRelation::tys(a={:?}, b={:?})", a, b);
ty::relate::super_relate_tys(self, a, b)
}
fn regions(
&mut self,
a: ty::Region<'tcx>,
b: ty::Region<'tcx>,
) -> RelateResult<'tcx, ty::Region<'tcx>> {
debug!("SimpleEqRelation::regions(a={:?}, b={:?})", a, b);
// We can just equate the regions because LBRs have been
// already anonymized.
if a == b {
Ok(a)
} else {
// I'm not sure is this `TypeError` is the right one, but
// it should not matter as it won't be checked (the dropck
// will emit its own, more informative and higher-level errors
// in case anything goes wrong).
Err(TypeError::RegionsPlaceholderMismatch)
}
}
fn consts(
&mut self,
a: &'tcx ty::Const<'tcx>,
b: &'tcx ty::Const<'tcx>,
) -> RelateResult<'tcx, &'tcx ty::Const<'tcx>> {
debug!("SimpleEqRelation::consts(a={:?}, b={:?})", a, b);
ty::relate::super_relate_consts(self, a, b)
}
fn binders<T>(
&mut self,
a: ty::Binder<'tcx, T>,
b: ty::Binder<'tcx, T>,
) -> RelateResult<'tcx, ty::Binder<'tcx, T>>
where
T: Relate<'tcx>,
{
debug!("SimpleEqRelation::binders({:?}: {:?}", a, b);
// Anonymizing the LBRs is necessary to solve (Issue #59497).
// After we do so, it should be totally fine to skip the binders.
let anon_a = self.tcx.anonymize_late_bound_regions(a);
let anon_b = self.tcx.anonymize_late_bound_regions(b);
self.relate(anon_a.skip_binder(), anon_b.skip_binder())?;
Ok(a)
}
}
|
{
relator.relate(predicate.rebind(ty_a), p.rebind(ty_b)).is_ok()
&& relator.relate(predicate.rebind(lt_a), p.rebind(lt_b)).is_ok()
}
|
conditional_block
|
dropck.rs
|
use crate::check::regionck::RegionCtxt;
use crate::hir;
use crate::hir::def_id::{DefId, LocalDefId};
use rustc_errors::{struct_span_err, ErrorReported};
use rustc_infer::infer::outlives::env::OutlivesEnvironment;
use rustc_infer::infer::{InferOk, RegionckMode, TyCtxtInferExt};
use rustc_infer::traits::TraitEngineExt as _;
use rustc_middle::ty::error::TypeError;
use rustc_middle::ty::relate::{Relate, RelateResult, TypeRelation};
use rustc_middle::ty::subst::{Subst, SubstsRef};
use rustc_middle::ty::{self, Predicate, Ty, TyCtxt};
use rustc_span::Span;
use rustc_trait_selection::traits::error_reporting::InferCtxtExt;
use rustc_trait_selection::traits::query::dropck_outlives::AtExt;
use rustc_trait_selection::traits::{ObligationCause, TraitEngine, TraitEngineExt};
/// This function confirms that the `Drop` implementation identified by
/// `drop_impl_did` is not any more specialized than the type it is
/// attached to (Issue #8142).
///
/// This means:
///
/// 1. The self type must be nominal (this is already checked during
/// coherence),
///
/// 2. The generic region/type parameters of the impl's self type must
/// all be parameters of the Drop impl itself (i.e., no
/// specialization like `impl Drop for Foo<i32>`), and,
///
/// 3. Any bounds on the generic parameters must be reflected in the
/// struct/enum definition for the nominal type itself (i.e.
/// cannot do `struct S<T>; impl<T:Clone> Drop for S<T> {... }`).
///
pub fn check_drop_impl(tcx: TyCtxt<'_>, drop_impl_did: DefId) -> Result<(), ErrorReported> {
let dtor_self_type = tcx.type_of(drop_impl_did);
let dtor_predicates = tcx.predicates_of(drop_impl_did);
match dtor_self_type.kind() {
ty::Adt(adt_def, self_to_impl_substs) => {
ensure_drop_params_and_item_params_correspond(
tcx,
drop_impl_did.expect_local(),
dtor_self_type,
adt_def.did,
)?;
ensure_drop_predicates_are_implied_by_item_defn(
tcx,
dtor_predicates,
adt_def.did.expect_local(),
self_to_impl_substs,
)
}
_ => {
// Destructors only work on nominal types. This was
// already checked by coherence, but compilation may
// not have been terminated.
let span = tcx.def_span(drop_impl_did);
tcx.sess.delay_span_bug(
span,
&format!("should have been rejected by coherence check: {}", dtor_self_type),
);
Err(ErrorReported)
}
}
}
fn ensure_drop_params_and_item_params_correspond<'tcx>(
tcx: TyCtxt<'tcx>,
drop_impl_did: LocalDefId,
drop_impl_ty: Ty<'tcx>,
self_type_did: DefId,
) -> Result<(), ErrorReported> {
let drop_impl_hir_id = tcx.hir().local_def_id_to_hir_id(drop_impl_did);
// check that the impl type can be made to match the trait type.
tcx.infer_ctxt().enter(|ref infcx| {
let impl_param_env = tcx.param_env(self_type_did);
let tcx = infcx.tcx;
let mut fulfillment_cx = <dyn TraitEngine<'_>>::new(tcx);
let named_type = tcx.type_of(self_type_did);
let drop_impl_span = tcx.def_span(drop_impl_did);
let fresh_impl_substs =
infcx.fresh_substs_for_item(drop_impl_span, drop_impl_did.to_def_id());
let fresh_impl_self_ty = drop_impl_ty.subst(tcx, fresh_impl_substs);
let cause = &ObligationCause::misc(drop_impl_span, drop_impl_hir_id);
match infcx.at(cause, impl_param_env).eq(named_type, fresh_impl_self_ty) {
Ok(InferOk { obligations,.. }) => {
fulfillment_cx.register_predicate_obligations(infcx, obligations);
}
Err(_) => {
let item_span = tcx.def_span(self_type_did);
let self_descr = tcx.def_kind(self_type_did).descr(self_type_did);
struct_span_err!(
tcx.sess,
drop_impl_span,
E0366,
"`Drop` impls cannot be specialized"
)
.span_note(
item_span,
&format!(
"use the same sequence of generic type, lifetime and const parameters \
as the {} definition",
self_descr,
),
)
.emit();
return Err(ErrorReported);
}
}
let errors = fulfillment_cx.select_all_or_error(&infcx);
if!errors.is_empty() {
// this could be reached when we get lazy normalization
infcx.report_fulfillment_errors(&errors, None, false);
return Err(ErrorReported);
}
// NB. It seems a bit... suspicious to use an empty param-env
// here. The correct thing, I imagine, would be
// `OutlivesEnvironment::new(impl_param_env)`, which would
// allow region solving to take any `a: 'b` relations on the
// impl into account. But I could not create a test case where
// it did the wrong thing, so I chose to preserve existing
// behavior, since it ought to be simply more
// conservative. -nmatsakis
let outlives_env = OutlivesEnvironment::new(ty::ParamEnv::empty());
infcx.resolve_regions_and_report_errors(
drop_impl_did.to_def_id(),
&outlives_env,
RegionckMode::default(),
);
Ok(())
})
}
/// Confirms that every predicate imposed by dtor_predicates is
/// implied by assuming the predicates attached to self_type_did.
fn ensure_drop_predicates_are_implied_by_item_defn<'tcx>(
tcx: TyCtxt<'tcx>,
dtor_predicates: ty::GenericPredicates<'tcx>,
self_type_did: LocalDefId,
self_to_impl_substs: SubstsRef<'tcx>,
) -> Result<(), ErrorReported> {
let mut result = Ok(());
// Here is an example, analogous to that from
// `compare_impl_method`.
//
// Consider a struct type:
//
// struct Type<'c, 'b:'c, 'a> {
// x: &'a Contents // (contents are irrelevant;
// y: &'c Cell<&'b Contents>, // only the bounds matter for our purposes.)
// }
//
// and a Drop impl:
//
// impl<'z, 'y:'z, 'x:'y> Drop for P<'z, 'y, 'x> {
// fn drop(&mut self) { self.y.set(self.x); } // (only legal if 'x: 'y)
// }
//
// We start out with self_to_impl_substs, that maps the generic
// parameters of Type to that of the Drop impl.
//
// self_to_impl_substs = {'c => 'z, 'b => 'y, 'a => 'x}
//
// Applying this to the predicates (i.e., assumptions) provided by the item
// definition yields the instantiated assumptions:
//
// ['y : 'z]
//
// We then check all of the predicates of the Drop impl:
//
// ['y:'z, 'x:'y]
//
// and ensure each is in the list of instantiated
// assumptions. Here, `'y:'z` is present, but `'x:'y` is
// absent. So we report an error that the Drop impl injected a
// predicate that is not present on the struct definition.
let self_type_hir_id = tcx.hir().local_def_id_to_hir_id(self_type_did);
// We can assume the predicates attached to struct/enum definition
// hold.
let generic_assumptions = tcx.predicates_of(self_type_did);
let assumptions_in_impl_context = generic_assumptions.instantiate(tcx, &self_to_impl_substs);
let assumptions_in_impl_context = assumptions_in_impl_context.predicates;
let self_param_env = tcx.param_env(self_type_did);
// An earlier version of this code attempted to do this checking
// via the traits::fulfill machinery. However, it ran into trouble
// since the fulfill machinery merely turns outlives-predicates
// 'a:'b and T:'b into region inference constraints. It is simpler
// just to look for all the predicates directly.
assert_eq!(dtor_predicates.parent, None);
for &(predicate, predicate_sp) in dtor_predicates.predicates {
// (We do not need to worry about deep analysis of type
// expressions etc because the Drop impls are already forced
// to take on a structure that is roughly an alpha-renaming of
// the generic parameters of the item definition.)
// This path now just checks *all* predicates via an instantiation of
// the `SimpleEqRelation`, which simply forwards to the `relate` machinery
// after taking care of anonymizing late bound regions.
//
// However, it may be more efficient in the future to batch
// the analysis together via the fulfill (see comment above regarding
// the usage of the fulfill machinery), rather than the
// repeated `.iter().any(..)` calls.
// This closure is a more robust way to check `Predicate` equality
// than simple `==` checks (which were the previous implementation).
// It relies on `ty::relate` for `TraitPredicate`, `ProjectionPredicate`,
// `ConstEvaluatable` and `TypeOutlives` (which implement the Relate trait),
// while delegating on simple equality for the other `Predicate`.
// This implementation solves (Issue #59497) and (Issue #58311).
// It is unclear to me at the moment whether the approach based on `relate`
// could be extended easily also to the other `Predicate`.
let predicate_matches_closure = |p: Predicate<'tcx>| {
let mut relator: SimpleEqRelation<'tcx> = SimpleEqRelation::new(tcx, self_param_env);
let predicate = predicate.kind();
let p = p.kind();
match (predicate.skip_binder(), p.skip_binder()) {
(ty::PredicateKind::Trait(a), ty::PredicateKind::Trait(b)) => {
relator.relate(predicate.rebind(a), p.rebind(b)).is_ok()
}
(ty::PredicateKind::Projection(a), ty::PredicateKind::Projection(b)) => {
relator.relate(predicate.rebind(a), p.rebind(b)).is_ok()
}
(
ty::PredicateKind::ConstEvaluatable(a),
ty::PredicateKind::ConstEvaluatable(b),
) => tcx.try_unify_abstract_consts((a, b)),
(
ty::PredicateKind::TypeOutlives(ty::OutlivesPredicate(ty_a, lt_a)),
ty::PredicateKind::TypeOutlives(ty::OutlivesPredicate(ty_b, lt_b)),
) => {
relator.relate(predicate.rebind(ty_a), p.rebind(ty_b)).is_ok()
&& relator.relate(predicate.rebind(lt_a), p.rebind(lt_b)).is_ok()
}
_ => predicate == p,
}
};
if!assumptions_in_impl_context.iter().copied().any(predicate_matches_closure) {
let item_span = tcx.hir().span(self_type_hir_id);
let self_descr = tcx.def_kind(self_type_did).descr(self_type_did.to_def_id());
struct_span_err!(
tcx.sess,
predicate_sp,
E0367,
"`Drop` impl requires `{}` but the {} it is implemented for does not",
predicate,
self_descr,
)
.span_note(item_span, "the implementor must specify the same requirement")
.emit();
result = Err(ErrorReported);
}
}
result
}
/// This function is not only checking that the dropck obligations are met for
/// the given type, but it's also currently preventing non-regular recursion in
/// types from causing stack overflows (dropck_no_diverge_on_nonregular_*.rs).
crate fn check_drop_obligations<'a, 'tcx>(
rcx: &mut RegionCtxt<'a, 'tcx>,
ty: Ty<'tcx>,
span: Span,
body_id: hir::HirId,
) {
debug!("check_drop_obligations typ: {:?}", ty);
let cause = &ObligationCause::misc(span, body_id);
let infer_ok = rcx.infcx.at(cause, rcx.fcx.param_env).dropck_outlives(ty);
debug!("dropck_outlives = {:#?}", infer_ok);
rcx.fcx.register_infer_ok_obligations(infer_ok);
}
// This is an implementation of the TypeRelation trait with the
// aim of simply comparing for equality (without side-effects).
// It is not intended to be used anywhere else other than here.
crate struct SimpleEqRelation<'tcx> {
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
}
impl<'tcx> SimpleEqRelation<'tcx> {
fn new(tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> SimpleEqRelation<'tcx>
|
}
impl TypeRelation<'tcx> for SimpleEqRelation<'tcx> {
fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx
}
fn param_env(&self) -> ty::ParamEnv<'tcx> {
self.param_env
}
fn tag(&self) -> &'static str {
"dropck::SimpleEqRelation"
}
fn a_is_expected(&self) -> bool {
true
}
fn relate_with_variance<T: Relate<'tcx>>(
&mut self,
_: ty::Variance,
_info: ty::VarianceDiagInfo<'tcx>,
a: T,
b: T,
) -> RelateResult<'tcx, T> {
// Here we ignore variance because we require drop impl's types
// to be *exactly* the same as to the ones in the struct definition.
self.relate(a, b)
}
fn tys(&mut self, a: Ty<'tcx>, b: Ty<'tcx>) -> RelateResult<'tcx, Ty<'tcx>> {
debug!("SimpleEqRelation::tys(a={:?}, b={:?})", a, b);
ty::relate::super_relate_tys(self, a, b)
}
fn regions(
&mut self,
a: ty::Region<'tcx>,
b: ty::Region<'tcx>,
) -> RelateResult<'tcx, ty::Region<'tcx>> {
debug!("SimpleEqRelation::regions(a={:?}, b={:?})", a, b);
// We can just equate the regions because LBRs have been
// already anonymized.
if a == b {
Ok(a)
} else {
// I'm not sure is this `TypeError` is the right one, but
// it should not matter as it won't be checked (the dropck
// will emit its own, more informative and higher-level errors
// in case anything goes wrong).
Err(TypeError::RegionsPlaceholderMismatch)
}
}
fn consts(
&mut self,
a: &'tcx ty::Const<'tcx>,
b: &'tcx ty::Const<'tcx>,
) -> RelateResult<'tcx, &'tcx ty::Const<'tcx>> {
debug!("SimpleEqRelation::consts(a={:?}, b={:?})", a, b);
ty::relate::super_relate_consts(self, a, b)
}
fn binders<T>(
&mut self,
a: ty::Binder<'tcx, T>,
b: ty::Binder<'tcx, T>,
) -> RelateResult<'tcx, ty::Binder<'tcx, T>>
where
T: Relate<'tcx>,
{
debug!("SimpleEqRelation::binders({:?}: {:?}", a, b);
// Anonymizing the LBRs is necessary to solve (Issue #59497).
// After we do so, it should be totally fine to skip the binders.
let anon_a = self.tcx.anonymize_late_bound_regions(a);
let anon_b = self.tcx.anonymize_late_bound_regions(b);
self.relate(anon_a.skip_binder(), anon_b.skip_binder())?;
Ok(a)
}
}
|
{
SimpleEqRelation { tcx, param_env }
}
|
identifier_body
|
dropck.rs
|
use crate::check::regionck::RegionCtxt;
use crate::hir;
use crate::hir::def_id::{DefId, LocalDefId};
use rustc_errors::{struct_span_err, ErrorReported};
use rustc_infer::infer::outlives::env::OutlivesEnvironment;
use rustc_infer::infer::{InferOk, RegionckMode, TyCtxtInferExt};
use rustc_infer::traits::TraitEngineExt as _;
use rustc_middle::ty::error::TypeError;
use rustc_middle::ty::relate::{Relate, RelateResult, TypeRelation};
use rustc_middle::ty::subst::{Subst, SubstsRef};
use rustc_middle::ty::{self, Predicate, Ty, TyCtxt};
use rustc_span::Span;
use rustc_trait_selection::traits::error_reporting::InferCtxtExt;
use rustc_trait_selection::traits::query::dropck_outlives::AtExt;
use rustc_trait_selection::traits::{ObligationCause, TraitEngine, TraitEngineExt};
/// This function confirms that the `Drop` implementation identified by
/// `drop_impl_did` is not any more specialized than the type it is
/// attached to (Issue #8142).
///
/// This means:
///
/// 1. The self type must be nominal (this is already checked during
/// coherence),
///
/// 2. The generic region/type parameters of the impl's self type must
/// all be parameters of the Drop impl itself (i.e., no
/// specialization like `impl Drop for Foo<i32>`), and,
///
/// 3. Any bounds on the generic parameters must be reflected in the
/// struct/enum definition for the nominal type itself (i.e.
/// cannot do `struct S<T>; impl<T:Clone> Drop for S<T> {... }`).
///
pub fn check_drop_impl(tcx: TyCtxt<'_>, drop_impl_did: DefId) -> Result<(), ErrorReported> {
let dtor_self_type = tcx.type_of(drop_impl_did);
let dtor_predicates = tcx.predicates_of(drop_impl_did);
match dtor_self_type.kind() {
ty::Adt(adt_def, self_to_impl_substs) => {
ensure_drop_params_and_item_params_correspond(
tcx,
drop_impl_did.expect_local(),
dtor_self_type,
adt_def.did,
)?;
ensure_drop_predicates_are_implied_by_item_defn(
tcx,
dtor_predicates,
adt_def.did.expect_local(),
self_to_impl_substs,
)
}
_ => {
// Destructors only work on nominal types. This was
// already checked by coherence, but compilation may
// not have been terminated.
let span = tcx.def_span(drop_impl_did);
tcx.sess.delay_span_bug(
span,
&format!("should have been rejected by coherence check: {}", dtor_self_type),
);
Err(ErrorReported)
}
}
}
fn ensure_drop_params_and_item_params_correspond<'tcx>(
tcx: TyCtxt<'tcx>,
drop_impl_did: LocalDefId,
drop_impl_ty: Ty<'tcx>,
self_type_did: DefId,
) -> Result<(), ErrorReported> {
let drop_impl_hir_id = tcx.hir().local_def_id_to_hir_id(drop_impl_did);
// check that the impl type can be made to match the trait type.
tcx.infer_ctxt().enter(|ref infcx| {
let impl_param_env = tcx.param_env(self_type_did);
let tcx = infcx.tcx;
let mut fulfillment_cx = <dyn TraitEngine<'_>>::new(tcx);
let named_type = tcx.type_of(self_type_did);
let drop_impl_span = tcx.def_span(drop_impl_did);
let fresh_impl_substs =
infcx.fresh_substs_for_item(drop_impl_span, drop_impl_did.to_def_id());
let fresh_impl_self_ty = drop_impl_ty.subst(tcx, fresh_impl_substs);
let cause = &ObligationCause::misc(drop_impl_span, drop_impl_hir_id);
match infcx.at(cause, impl_param_env).eq(named_type, fresh_impl_self_ty) {
Ok(InferOk { obligations,.. }) => {
fulfillment_cx.register_predicate_obligations(infcx, obligations);
}
Err(_) => {
let item_span = tcx.def_span(self_type_did);
let self_descr = tcx.def_kind(self_type_did).descr(self_type_did);
struct_span_err!(
tcx.sess,
drop_impl_span,
E0366,
"`Drop` impls cannot be specialized"
)
.span_note(
item_span,
&format!(
"use the same sequence of generic type, lifetime and const parameters \
as the {} definition",
self_descr,
),
)
.emit();
return Err(ErrorReported);
}
}
let errors = fulfillment_cx.select_all_or_error(&infcx);
if!errors.is_empty() {
// this could be reached when we get lazy normalization
infcx.report_fulfillment_errors(&errors, None, false);
return Err(ErrorReported);
}
// NB. It seems a bit... suspicious to use an empty param-env
// here. The correct thing, I imagine, would be
// `OutlivesEnvironment::new(impl_param_env)`, which would
// allow region solving to take any `a: 'b` relations on the
// impl into account. But I could not create a test case where
// it did the wrong thing, so I chose to preserve existing
// behavior, since it ought to be simply more
// conservative. -nmatsakis
let outlives_env = OutlivesEnvironment::new(ty::ParamEnv::empty());
infcx.resolve_regions_and_report_errors(
drop_impl_did.to_def_id(),
&outlives_env,
RegionckMode::default(),
);
Ok(())
})
}
/// Confirms that every predicate imposed by dtor_predicates is
/// implied by assuming the predicates attached to self_type_did.
fn ensure_drop_predicates_are_implied_by_item_defn<'tcx>(
tcx: TyCtxt<'tcx>,
dtor_predicates: ty::GenericPredicates<'tcx>,
self_type_did: LocalDefId,
self_to_impl_substs: SubstsRef<'tcx>,
) -> Result<(), ErrorReported> {
let mut result = Ok(());
// Here is an example, analogous to that from
// `compare_impl_method`.
//
// Consider a struct type:
//
// struct Type<'c, 'b:'c, 'a> {
// x: &'a Contents // (contents are irrelevant;
// y: &'c Cell<&'b Contents>, // only the bounds matter for our purposes.)
// }
//
// and a Drop impl:
//
// impl<'z, 'y:'z, 'x:'y> Drop for P<'z, 'y, 'x> {
// fn drop(&mut self) { self.y.set(self.x); } // (only legal if 'x: 'y)
// }
//
// We start out with self_to_impl_substs, that maps the generic
// parameters of Type to that of the Drop impl.
//
// self_to_impl_substs = {'c => 'z, 'b => 'y, 'a => 'x}
//
// Applying this to the predicates (i.e., assumptions) provided by the item
// definition yields the instantiated assumptions:
//
// ['y : 'z]
//
// We then check all of the predicates of the Drop impl:
//
// ['y:'z, 'x:'y]
//
// and ensure each is in the list of instantiated
// assumptions. Here, `'y:'z` is present, but `'x:'y` is
// absent. So we report an error that the Drop impl injected a
// predicate that is not present on the struct definition.
let self_type_hir_id = tcx.hir().local_def_id_to_hir_id(self_type_did);
// We can assume the predicates attached to struct/enum definition
// hold.
let generic_assumptions = tcx.predicates_of(self_type_did);
let assumptions_in_impl_context = generic_assumptions.instantiate(tcx, &self_to_impl_substs);
let assumptions_in_impl_context = assumptions_in_impl_context.predicates;
let self_param_env = tcx.param_env(self_type_did);
// An earlier version of this code attempted to do this checking
// via the traits::fulfill machinery. However, it ran into trouble
// since the fulfill machinery merely turns outlives-predicates
// 'a:'b and T:'b into region inference constraints. It is simpler
// just to look for all the predicates directly.
assert_eq!(dtor_predicates.parent, None);
for &(predicate, predicate_sp) in dtor_predicates.predicates {
// (We do not need to worry about deep analysis of type
// expressions etc because the Drop impls are already forced
// to take on a structure that is roughly an alpha-renaming of
// the generic parameters of the item definition.)
// This path now just checks *all* predicates via an instantiation of
// the `SimpleEqRelation`, which simply forwards to the `relate` machinery
// after taking care of anonymizing late bound regions.
//
// However, it may be more efficient in the future to batch
// the analysis together via the fulfill (see comment above regarding
// the usage of the fulfill machinery), rather than the
// repeated `.iter().any(..)` calls.
// This closure is a more robust way to check `Predicate` equality
// than simple `==` checks (which were the previous implementation).
// It relies on `ty::relate` for `TraitPredicate`, `ProjectionPredicate`,
// `ConstEvaluatable` and `TypeOutlives` (which implement the Relate trait),
// while delegating on simple equality for the other `Predicate`.
// This implementation solves (Issue #59497) and (Issue #58311).
// It is unclear to me at the moment whether the approach based on `relate`
// could be extended easily also to the other `Predicate`.
let predicate_matches_closure = |p: Predicate<'tcx>| {
let mut relator: SimpleEqRelation<'tcx> = SimpleEqRelation::new(tcx, self_param_env);
let predicate = predicate.kind();
let p = p.kind();
match (predicate.skip_binder(), p.skip_binder()) {
(ty::PredicateKind::Trait(a), ty::PredicateKind::Trait(b)) => {
relator.relate(predicate.rebind(a), p.rebind(b)).is_ok()
}
(ty::PredicateKind::Projection(a), ty::PredicateKind::Projection(b)) => {
relator.relate(predicate.rebind(a), p.rebind(b)).is_ok()
}
(
ty::PredicateKind::ConstEvaluatable(a),
ty::PredicateKind::ConstEvaluatable(b),
) => tcx.try_unify_abstract_consts((a, b)),
(
ty::PredicateKind::TypeOutlives(ty::OutlivesPredicate(ty_a, lt_a)),
ty::PredicateKind::TypeOutlives(ty::OutlivesPredicate(ty_b, lt_b)),
) => {
relator.relate(predicate.rebind(ty_a), p.rebind(ty_b)).is_ok()
&& relator.relate(predicate.rebind(lt_a), p.rebind(lt_b)).is_ok()
}
_ => predicate == p,
}
};
if!assumptions_in_impl_context.iter().copied().any(predicate_matches_closure) {
let item_span = tcx.hir().span(self_type_hir_id);
let self_descr = tcx.def_kind(self_type_did).descr(self_type_did.to_def_id());
struct_span_err!(
tcx.sess,
predicate_sp,
E0367,
"`Drop` impl requires `{}` but the {} it is implemented for does not",
predicate,
self_descr,
)
.span_note(item_span, "the implementor must specify the same requirement")
.emit();
result = Err(ErrorReported);
}
}
result
}
/// This function is not only checking that the dropck obligations are met for
/// the given type, but it's also currently preventing non-regular recursion in
/// types from causing stack overflows (dropck_no_diverge_on_nonregular_*.rs).
crate fn check_drop_obligations<'a, 'tcx>(
rcx: &mut RegionCtxt<'a, 'tcx>,
ty: Ty<'tcx>,
span: Span,
body_id: hir::HirId,
) {
debug!("check_drop_obligations typ: {:?}", ty);
let cause = &ObligationCause::misc(span, body_id);
let infer_ok = rcx.infcx.at(cause, rcx.fcx.param_env).dropck_outlives(ty);
debug!("dropck_outlives = {:#?}", infer_ok);
rcx.fcx.register_infer_ok_obligations(infer_ok);
}
// This is an implementation of the TypeRelation trait with the
// aim of simply comparing for equality (without side-effects).
// It is not intended to be used anywhere else other than here.
crate struct SimpleEqRelation<'tcx> {
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
}
impl<'tcx> SimpleEqRelation<'tcx> {
fn new(tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> SimpleEqRelation<'tcx> {
SimpleEqRelation { tcx, param_env }
}
}
impl TypeRelation<'tcx> for SimpleEqRelation<'tcx> {
fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx
}
fn param_env(&self) -> ty::ParamEnv<'tcx> {
self.param_env
}
fn tag(&self) -> &'static str {
"dropck::SimpleEqRelation"
}
fn a_is_expected(&self) -> bool {
true
}
fn relate_with_variance<T: Relate<'tcx>>(
&mut self,
_: ty::Variance,
_info: ty::VarianceDiagInfo<'tcx>,
a: T,
b: T,
) -> RelateResult<'tcx, T> {
// Here we ignore variance because we require drop impl's types
// to be *exactly* the same as to the ones in the struct definition.
self.relate(a, b)
}
fn
|
(&mut self, a: Ty<'tcx>, b: Ty<'tcx>) -> RelateResult<'tcx, Ty<'tcx>> {
debug!("SimpleEqRelation::tys(a={:?}, b={:?})", a, b);
ty::relate::super_relate_tys(self, a, b)
}
fn regions(
&mut self,
a: ty::Region<'tcx>,
b: ty::Region<'tcx>,
) -> RelateResult<'tcx, ty::Region<'tcx>> {
debug!("SimpleEqRelation::regions(a={:?}, b={:?})", a, b);
// We can just equate the regions because LBRs have been
// already anonymized.
if a == b {
Ok(a)
} else {
// I'm not sure is this `TypeError` is the right one, but
// it should not matter as it won't be checked (the dropck
// will emit its own, more informative and higher-level errors
// in case anything goes wrong).
Err(TypeError::RegionsPlaceholderMismatch)
}
}
fn consts(
&mut self,
a: &'tcx ty::Const<'tcx>,
b: &'tcx ty::Const<'tcx>,
) -> RelateResult<'tcx, &'tcx ty::Const<'tcx>> {
debug!("SimpleEqRelation::consts(a={:?}, b={:?})", a, b);
ty::relate::super_relate_consts(self, a, b)
}
fn binders<T>(
&mut self,
a: ty::Binder<'tcx, T>,
b: ty::Binder<'tcx, T>,
) -> RelateResult<'tcx, ty::Binder<'tcx, T>>
where
T: Relate<'tcx>,
{
debug!("SimpleEqRelation::binders({:?}: {:?}", a, b);
// Anonymizing the LBRs is necessary to solve (Issue #59497).
// After we do so, it should be totally fine to skip the binders.
let anon_a = self.tcx.anonymize_late_bound_regions(a);
let anon_b = self.tcx.anonymize_late_bound_regions(b);
self.relate(anon_a.skip_binder(), anon_b.skip_binder())?;
Ok(a)
}
}
|
tys
|
identifier_name
|
pwm1.rs
|
//! Output a PWM with a duty cycle of ~6% on all the channels of TIM1
// FIXME doesn't seem to work :-(
#![deny(warnings)]
#![feature(const_fn)]
#![feature(used)]
#![no_std]
extern crate blue_pill;
extern crate embedded_hal as hal;
// version = "0.2.3"
extern crate cortex_m_rt;
// version = "0.1.0"
#[macro_use]
extern crate cortex_m_rtfm as rtfm;
use blue_pill::{Channel, Pwm, stm32f103xx};
use blue_pill::time::Hertz;
use hal::prelude::*;
use rtfm::{P0, T0, TMax};
// CONFIGURATION
const FREQUENCY: Hertz = Hertz(1_000);
// RESOURCES
peripherals!(stm32f103xx, {
AFIO: Peripheral {
ceiling: C0,
},
GPIOA: Peripheral {
ceiling: C0,
},
RCC: Peripheral {
ceiling: C0,
},
TIM1: Peripheral {
ceiling: C0,
},
});
// INITIALIZATION PHASE
fn init(ref prio: P0, thr: &TMax) {
let afio = &AFIO.access(prio, thr);
let gpioa = &GPIOA.access(prio, thr);
|
let tim1 = TIM1.access(prio, thr);
let pwm = Pwm(&*tim1);
pwm.init(FREQUENCY.invert(), afio, gpioa, rcc);
let duty = pwm.get_max_duty() / 16;
const CHANNELS: [Channel; 4] =
[Channel::_1, Channel::_2, Channel::_3, Channel::_4];
for c in &CHANNELS {
pwm.set_duty(*c, duty);
}
for c in &CHANNELS {
pwm.enable(*c);
rtfm::bkpt();
}
}
// IDLE LOOP
fn idle(_prio: P0, _thr: T0) ->! {
// Sleep
loop {
rtfm::wfi();
}
}
// TASKS
tasks!(stm32f103xx, {});
|
let rcc = &RCC.access(prio, thr);
|
random_line_split
|
pwm1.rs
|
//! Output a PWM with a duty cycle of ~6% on all the channels of TIM1
// FIXME doesn't seem to work :-(
#![deny(warnings)]
#![feature(const_fn)]
#![feature(used)]
#![no_std]
extern crate blue_pill;
extern crate embedded_hal as hal;
// version = "0.2.3"
extern crate cortex_m_rt;
// version = "0.1.0"
#[macro_use]
extern crate cortex_m_rtfm as rtfm;
use blue_pill::{Channel, Pwm, stm32f103xx};
use blue_pill::time::Hertz;
use hal::prelude::*;
use rtfm::{P0, T0, TMax};
// CONFIGURATION
const FREQUENCY: Hertz = Hertz(1_000);
// RESOURCES
peripherals!(stm32f103xx, {
AFIO: Peripheral {
ceiling: C0,
},
GPIOA: Peripheral {
ceiling: C0,
},
RCC: Peripheral {
ceiling: C0,
},
TIM1: Peripheral {
ceiling: C0,
},
});
// INITIALIZATION PHASE
fn init(ref prio: P0, thr: &TMax) {
let afio = &AFIO.access(prio, thr);
let gpioa = &GPIOA.access(prio, thr);
let rcc = &RCC.access(prio, thr);
let tim1 = TIM1.access(prio, thr);
let pwm = Pwm(&*tim1);
pwm.init(FREQUENCY.invert(), afio, gpioa, rcc);
let duty = pwm.get_max_duty() / 16;
const CHANNELS: [Channel; 4] =
[Channel::_1, Channel::_2, Channel::_3, Channel::_4];
for c in &CHANNELS {
pwm.set_duty(*c, duty);
}
for c in &CHANNELS {
pwm.enable(*c);
rtfm::bkpt();
}
}
// IDLE LOOP
fn idle(_prio: P0, _thr: T0) ->!
|
// TASKS
tasks!(stm32f103xx, {});
|
{
// Sleep
loop {
rtfm::wfi();
}
}
|
identifier_body
|
pwm1.rs
|
//! Output a PWM with a duty cycle of ~6% on all the channels of TIM1
// FIXME doesn't seem to work :-(
#![deny(warnings)]
#![feature(const_fn)]
#![feature(used)]
#![no_std]
extern crate blue_pill;
extern crate embedded_hal as hal;
// version = "0.2.3"
extern crate cortex_m_rt;
// version = "0.1.0"
#[macro_use]
extern crate cortex_m_rtfm as rtfm;
use blue_pill::{Channel, Pwm, stm32f103xx};
use blue_pill::time::Hertz;
use hal::prelude::*;
use rtfm::{P0, T0, TMax};
// CONFIGURATION
const FREQUENCY: Hertz = Hertz(1_000);
// RESOURCES
peripherals!(stm32f103xx, {
AFIO: Peripheral {
ceiling: C0,
},
GPIOA: Peripheral {
ceiling: C0,
},
RCC: Peripheral {
ceiling: C0,
},
TIM1: Peripheral {
ceiling: C0,
},
});
// INITIALIZATION PHASE
fn
|
(ref prio: P0, thr: &TMax) {
let afio = &AFIO.access(prio, thr);
let gpioa = &GPIOA.access(prio, thr);
let rcc = &RCC.access(prio, thr);
let tim1 = TIM1.access(prio, thr);
let pwm = Pwm(&*tim1);
pwm.init(FREQUENCY.invert(), afio, gpioa, rcc);
let duty = pwm.get_max_duty() / 16;
const CHANNELS: [Channel; 4] =
[Channel::_1, Channel::_2, Channel::_3, Channel::_4];
for c in &CHANNELS {
pwm.set_duty(*c, duty);
}
for c in &CHANNELS {
pwm.enable(*c);
rtfm::bkpt();
}
}
// IDLE LOOP
fn idle(_prio: P0, _thr: T0) ->! {
// Sleep
loop {
rtfm::wfi();
}
}
// TASKS
tasks!(stm32f103xx, {});
|
init
|
identifier_name
|
fullscreen.rs
|
mod support;
use glutin::event::{ElementState, Event, KeyboardInput, VirtualKeyCode, WindowEvent};
use glutin::event_loop::{ControlFlow, EventLoop};
use glutin::monitor::{MonitorHandle, VideoMode};
use glutin::window::{Fullscreen, WindowBuilder};
use std::io::{stdin, stdout, Write};
fn main()
|
let wb = WindowBuilder::new().with_title("Hello world!").with_fullscreen(fullscreen.clone());
let windowed_context = glutin::ContextBuilder::new().build_windowed(wb, &el).unwrap();
let windowed_context = unsafe { windowed_context.make_current().unwrap() };
let gl = support::load(&windowed_context.context());
el.run(move |event, _, control_flow| {
*control_flow = ControlFlow::Wait;
match event {
Event::WindowEvent { event,.. } => match event {
WindowEvent::CloseRequested => *control_flow = ControlFlow::Exit,
WindowEvent::Resized(physical_size) => {
windowed_context.resize(physical_size);
}
WindowEvent::KeyboardInput {
input: KeyboardInput { virtual_keycode: Some(virtual_code), state,.. },
..
} => match (virtual_code, state) {
(VirtualKeyCode::Escape, _) => *control_flow = ControlFlow::Exit,
(VirtualKeyCode::F, ElementState::Pressed) => {
if windowed_context.window().fullscreen().is_some() {
windowed_context.window().set_fullscreen(None);
} else {
windowed_context.window().set_fullscreen(fullscreen.clone());
}
}
(VirtualKeyCode::S, ElementState::Pressed) => {
println!("window.fullscreen {:?}", windowed_context.window().fullscreen());
}
(VirtualKeyCode::M, ElementState::Pressed) => {
is_maximized =!is_maximized;
windowed_context.window().set_maximized(is_maximized);
}
(VirtualKeyCode::D, ElementState::Pressed) => {
decorations =!decorations;
windowed_context.window().set_decorations(decorations);
}
_ => (),
},
_ => (),
},
Event::RedrawRequested(_) => {
gl.draw_frame([1.0, 0.5, 0.7, 1.0]);
windowed_context.swap_buffers().unwrap();
}
_ => {}
}
});
}
// Enumerate monitors and prompt user to choose one
fn prompt_for_monitor(el: &EventLoop<()>) -> MonitorHandle {
for (num, monitor) in el.available_monitors().enumerate() {
println!("Monitor #{}: {:?}", num, monitor.name());
}
print!("Please write the number of the monitor to use: ");
stdout().flush().unwrap();
let mut num = String::new();
stdin().read_line(&mut num).unwrap();
let num = num.trim().parse().ok().expect("Please enter a number");
let monitor = el.available_monitors().nth(num).expect("Please enter a valid ID");
println!("Using {:?}", monitor.name());
monitor
}
fn prompt_for_video_mode(monitor: &MonitorHandle) -> VideoMode {
for (i, video_mode) in monitor.video_modes().enumerate() {
println!("Video mode #{}: {}", i, video_mode);
}
print!("Please write the number of the video mode to use: ");
stdout().flush().unwrap();
let mut num = String::new();
stdin().read_line(&mut num).unwrap();
let num = num.trim().parse().ok().expect("Please enter a number");
let video_mode = monitor.video_modes().nth(num).expect("Please enter a valid ID");
println!("Using {}", video_mode);
video_mode
}
|
{
let el = EventLoop::new();
print!("Please choose the fullscreen mode: (1) exclusive, (2) borderless: ");
stdout().flush().unwrap();
let mut num = String::new();
stdin().read_line(&mut num).unwrap();
let num = num.trim().parse().ok().expect("Please enter a number");
let fullscreen = Some(match num {
1 => Fullscreen::Exclusive(prompt_for_video_mode(&prompt_for_monitor(&el))),
2 => Fullscreen::Borderless(Some(prompt_for_monitor(&el))),
_ => panic!("Please enter a valid number"),
});
println!("Press (F) to toggle fullscreen, (D) to toggle window decorations, and (M) to toggle maximized/minimized.");
let mut is_maximized = false;
let mut decorations = true;
|
identifier_body
|
fullscreen.rs
|
mod support;
use glutin::event::{ElementState, Event, KeyboardInput, VirtualKeyCode, WindowEvent};
use glutin::event_loop::{ControlFlow, EventLoop};
use glutin::monitor::{MonitorHandle, VideoMode};
use glutin::window::{Fullscreen, WindowBuilder};
use std::io::{stdin, stdout, Write};
fn main() {
let el = EventLoop::new();
print!("Please choose the fullscreen mode: (1) exclusive, (2) borderless: ");
stdout().flush().unwrap();
let mut num = String::new();
stdin().read_line(&mut num).unwrap();
let num = num.trim().parse().ok().expect("Please enter a number");
let fullscreen = Some(match num {
1 => Fullscreen::Exclusive(prompt_for_video_mode(&prompt_for_monitor(&el))),
2 => Fullscreen::Borderless(Some(prompt_for_monitor(&el))),
_ => panic!("Please enter a valid number"),
});
println!("Press (F) to toggle fullscreen, (D) to toggle window decorations, and (M) to toggle maximized/minimized.");
let mut is_maximized = false;
let mut decorations = true;
let wb = WindowBuilder::new().with_title("Hello world!").with_fullscreen(fullscreen.clone());
let windowed_context = glutin::ContextBuilder::new().build_windowed(wb, &el).unwrap();
let windowed_context = unsafe { windowed_context.make_current().unwrap() };
let gl = support::load(&windowed_context.context());
el.run(move |event, _, control_flow| {
*control_flow = ControlFlow::Wait;
match event {
Event::WindowEvent { event,.. } => match event {
WindowEvent::CloseRequested => *control_flow = ControlFlow::Exit,
WindowEvent::Resized(physical_size) => {
windowed_context.resize(physical_size);
}
WindowEvent::KeyboardInput {
input: KeyboardInput { virtual_keycode: Some(virtual_code), state,.. },
..
} => match (virtual_code, state) {
(VirtualKeyCode::Escape, _) => *control_flow = ControlFlow::Exit,
(VirtualKeyCode::F, ElementState::Pressed) => {
if windowed_context.window().fullscreen().is_some() {
windowed_context.window().set_fullscreen(None);
} else {
windowed_context.window().set_fullscreen(fullscreen.clone());
}
}
(VirtualKeyCode::S, ElementState::Pressed) => {
println!("window.fullscreen {:?}", windowed_context.window().fullscreen());
}
(VirtualKeyCode::M, ElementState::Pressed) => {
is_maximized =!is_maximized;
windowed_context.window().set_maximized(is_maximized);
}
(VirtualKeyCode::D, ElementState::Pressed) => {
decorations =!decorations;
windowed_context.window().set_decorations(decorations);
}
_ => (),
},
_ => (),
},
Event::RedrawRequested(_) => {
gl.draw_frame([1.0, 0.5, 0.7, 1.0]);
windowed_context.swap_buffers().unwrap();
}
_ => {}
}
});
}
// Enumerate monitors and prompt user to choose one
fn prompt_for_monitor(el: &EventLoop<()>) -> MonitorHandle {
for (num, monitor) in el.available_monitors().enumerate() {
println!("Monitor #{}: {:?}", num, monitor.name());
}
print!("Please write the number of the monitor to use: ");
stdout().flush().unwrap();
let mut num = String::new();
stdin().read_line(&mut num).unwrap();
let num = num.trim().parse().ok().expect("Please enter a number");
|
println!("Using {:?}", monitor.name());
monitor
}
fn prompt_for_video_mode(monitor: &MonitorHandle) -> VideoMode {
for (i, video_mode) in monitor.video_modes().enumerate() {
println!("Video mode #{}: {}", i, video_mode);
}
print!("Please write the number of the video mode to use: ");
stdout().flush().unwrap();
let mut num = String::new();
stdin().read_line(&mut num).unwrap();
let num = num.trim().parse().ok().expect("Please enter a number");
let video_mode = monitor.video_modes().nth(num).expect("Please enter a valid ID");
println!("Using {}", video_mode);
video_mode
}
|
let monitor = el.available_monitors().nth(num).expect("Please enter a valid ID");
|
random_line_split
|
fullscreen.rs
|
mod support;
use glutin::event::{ElementState, Event, KeyboardInput, VirtualKeyCode, WindowEvent};
use glutin::event_loop::{ControlFlow, EventLoop};
use glutin::monitor::{MonitorHandle, VideoMode};
use glutin::window::{Fullscreen, WindowBuilder};
use std::io::{stdin, stdout, Write};
fn main() {
let el = EventLoop::new();
print!("Please choose the fullscreen mode: (1) exclusive, (2) borderless: ");
stdout().flush().unwrap();
let mut num = String::new();
stdin().read_line(&mut num).unwrap();
let num = num.trim().parse().ok().expect("Please enter a number");
let fullscreen = Some(match num {
1 => Fullscreen::Exclusive(prompt_for_video_mode(&prompt_for_monitor(&el))),
2 => Fullscreen::Borderless(Some(prompt_for_monitor(&el))),
_ => panic!("Please enter a valid number"),
});
println!("Press (F) to toggle fullscreen, (D) to toggle window decorations, and (M) to toggle maximized/minimized.");
let mut is_maximized = false;
let mut decorations = true;
let wb = WindowBuilder::new().with_title("Hello world!").with_fullscreen(fullscreen.clone());
let windowed_context = glutin::ContextBuilder::new().build_windowed(wb, &el).unwrap();
let windowed_context = unsafe { windowed_context.make_current().unwrap() };
let gl = support::load(&windowed_context.context());
el.run(move |event, _, control_flow| {
*control_flow = ControlFlow::Wait;
match event {
Event::WindowEvent { event,.. } => match event {
WindowEvent::CloseRequested => *control_flow = ControlFlow::Exit,
WindowEvent::Resized(physical_size) => {
windowed_context.resize(physical_size);
}
WindowEvent::KeyboardInput {
input: KeyboardInput { virtual_keycode: Some(virtual_code), state,.. },
..
} => match (virtual_code, state) {
(VirtualKeyCode::Escape, _) => *control_flow = ControlFlow::Exit,
(VirtualKeyCode::F, ElementState::Pressed) => {
if windowed_context.window().fullscreen().is_some() {
windowed_context.window().set_fullscreen(None);
} else {
windowed_context.window().set_fullscreen(fullscreen.clone());
}
}
(VirtualKeyCode::S, ElementState::Pressed) => {
println!("window.fullscreen {:?}", windowed_context.window().fullscreen());
}
(VirtualKeyCode::M, ElementState::Pressed) => {
is_maximized =!is_maximized;
windowed_context.window().set_maximized(is_maximized);
}
(VirtualKeyCode::D, ElementState::Pressed) => {
decorations =!decorations;
windowed_context.window().set_decorations(decorations);
}
_ => (),
},
_ => (),
},
Event::RedrawRequested(_) => {
gl.draw_frame([1.0, 0.5, 0.7, 1.0]);
windowed_context.swap_buffers().unwrap();
}
_ => {}
}
});
}
// Enumerate monitors and prompt user to choose one
fn prompt_for_monitor(el: &EventLoop<()>) -> MonitorHandle {
for (num, monitor) in el.available_monitors().enumerate() {
println!("Monitor #{}: {:?}", num, monitor.name());
}
print!("Please write the number of the monitor to use: ");
stdout().flush().unwrap();
let mut num = String::new();
stdin().read_line(&mut num).unwrap();
let num = num.trim().parse().ok().expect("Please enter a number");
let monitor = el.available_monitors().nth(num).expect("Please enter a valid ID");
println!("Using {:?}", monitor.name());
monitor
}
fn
|
(monitor: &MonitorHandle) -> VideoMode {
for (i, video_mode) in monitor.video_modes().enumerate() {
println!("Video mode #{}: {}", i, video_mode);
}
print!("Please write the number of the video mode to use: ");
stdout().flush().unwrap();
let mut num = String::new();
stdin().read_line(&mut num).unwrap();
let num = num.trim().parse().ok().expect("Please enter a number");
let video_mode = monitor.video_modes().nth(num).expect("Please enter a valid ID");
println!("Using {}", video_mode);
video_mode
}
|
prompt_for_video_mode
|
identifier_name
|
mysql.rs
|
use crate::prelude::*;
#[cfg(feature = "db-diesel-mysql")]
mod diesel_mysql {
use super::*;
use ::diesel::{
deserialize::{self, FromSql},
mysql::Mysql,
serialize::{self, IsNull, Output, ToSql},
sql_types::Numeric,
};
use std::io::Write;
use std::str::FromStr;
impl ToSql<Numeric, Mysql> for Decimal {
fn to_sql<W: Write>(&self, out: &mut Output<W, Mysql>) -> serialize::Result {
// From what I can ascertain, MySQL simply writes to a string format for the Decimal type.
write!(out, "{}", *self).map(|_| IsNull::No).map_err(|e| e.into())
}
}
impl FromSql<Numeric, Mysql> for Decimal {
fn from_sql(numeric: Option<&[u8]>) -> deserialize::Result<Self> {
// From what I can ascertain, MySQL simply reads from a string format for the Decimal type.
// Explicitly, it looks like it is length followed by the string. Regardless, we can leverage
// internal types.
let bytes = numeric.ok_or("Invalid decimal")?;
let s = std::str::from_utf8(bytes)?;
Decimal::from_str(s).map_err(|e| e.into())
}
}
#[cfg(test)]
mod tests {
use super::*;
use diesel::deserialize::QueryableByName;
use diesel::prelude::*;
use diesel::row::NamedRow;
use diesel::sql_query;
use diesel::sql_types::Text;
struct Test {
value: Decimal,
}
struct NullableTest {
value: Option<Decimal>,
}
impl QueryableByName<Mysql> for Test {
fn build<R: NamedRow<Mysql>>(row: &R) -> deserialize::Result<Self> {
let value = row.get("value")?;
Ok(Test { value })
}
}
impl QueryableByName<Mysql> for NullableTest {
fn build<R: NamedRow<Mysql>>(row: &R) -> deserialize::Result<Self> {
let value = row.get("value")?;
Ok(NullableTest { value })
}
}
pub static TEST_DECIMALS: &[(u32, u32, &str, &str)] = &[
// precision, scale, sent, expected
(1, 0, "1", "1"),
(6, 2, "1", "1.00"),
(6, 2, "9999.99", "9999.99"),
(35, 6, "3950.123456", "3950.123456"),
(10, 2, "3950.123456", "3950.12"),
(35, 6, "3950", "3950.000000"),
(4, 0, "3950", "3950"),
(35, 6, "0.1", "0.100000"),
(35, 6, "0.01", "0.010000"),
(35, 6, "0.001", "0.001000"),
(35, 6, "0.0001", "0.000100"),
(35, 6, "0.00001", "0.000010"),
(35, 6, "0.000001", "0.000001"),
(35, 6, "1", "1.000000"),
(35, 6, "-100", "-100.000000"),
(35, 6, "-123.456", "-123.456000"),
(35, 6, "119996.25", "119996.250000"),
(35, 6, "1000000", "1000000.000000"),
(35, 6, "9999999.99999", "9999999.999990"),
(35, 6, "12340.56789", "12340.567890"),
];
/// Gets the URL for connecting to MySQL for testing. Set the MYSQL_URL
/// environment variable to change from the default of "mysql://root@localhost/mysql".
fn get_mysql_url() -> String {
if let Ok(url) = std::env::var("MYSQL_URL") {
return url;
}
"mysql://[email protected]/mysql".to_string()
}
#[test]
fn test_null()
|
#[test]
fn read_numeric_type() {
let connection = diesel::MysqlConnection::establish(&get_mysql_url()).expect("Establish connection");
for &(precision, scale, sent, expected) in TEST_DECIMALS.iter() {
let items: Vec<Test> = sql_query(format!(
"SELECT CAST('{}' AS DECIMAL({}, {})) AS value",
sent, precision, scale
))
.load(&connection)
.expect("Unable to query value");
assert_eq!(
expected,
items.first().unwrap().value.to_string(),
"DECIMAL({}, {}) sent: {}",
precision,
scale,
sent
);
}
}
#[test]
fn write_numeric_type() {
let connection = diesel::MysqlConnection::establish(&get_mysql_url()).expect("Establish connection");
for &(precision, scale, sent, expected) in TEST_DECIMALS.iter() {
let items: Vec<Test> =
sql_query(format!("SELECT CAST($1 AS DECIMAL({}, {})) AS value", precision, scale))
.bind::<Text, _>(sent)
.load(&connection)
.expect("Unable to query value");
assert_eq!(
expected,
items.first().unwrap().value.to_string(),
"DECIMAL({}, {}) sent: {}",
precision,
scale,
sent
);
}
}
}
}
|
{
let connection = diesel::MysqlConnection::establish(&get_mysql_url()).expect("Establish connection");
// Test NULL
let items: Vec<NullableTest> = sql_query("SELECT CAST(NULL AS DECIMAL) AS value")
.load(&connection)
.expect("Unable to query value");
let result = items.first().unwrap().value;
assert_eq!(None, result);
}
|
identifier_body
|
mysql.rs
|
use crate::prelude::*;
#[cfg(feature = "db-diesel-mysql")]
mod diesel_mysql {
use super::*;
use ::diesel::{
deserialize::{self, FromSql},
mysql::Mysql,
serialize::{self, IsNull, Output, ToSql},
sql_types::Numeric,
};
use std::io::Write;
use std::str::FromStr;
impl ToSql<Numeric, Mysql> for Decimal {
fn to_sql<W: Write>(&self, out: &mut Output<W, Mysql>) -> serialize::Result {
// From what I can ascertain, MySQL simply writes to a string format for the Decimal type.
write!(out, "{}", *self).map(|_| IsNull::No).map_err(|e| e.into())
}
}
impl FromSql<Numeric, Mysql> for Decimal {
fn from_sql(numeric: Option<&[u8]>) -> deserialize::Result<Self> {
// From what I can ascertain, MySQL simply reads from a string format for the Decimal type.
// Explicitly, it looks like it is length followed by the string. Regardless, we can leverage
// internal types.
let bytes = numeric.ok_or("Invalid decimal")?;
let s = std::str::from_utf8(bytes)?;
Decimal::from_str(s).map_err(|e| e.into())
}
}
#[cfg(test)]
mod tests {
use super::*;
use diesel::deserialize::QueryableByName;
use diesel::prelude::*;
use diesel::row::NamedRow;
use diesel::sql_query;
use diesel::sql_types::Text;
struct Test {
value: Decimal,
}
struct NullableTest {
value: Option<Decimal>,
}
impl QueryableByName<Mysql> for Test {
fn build<R: NamedRow<Mysql>>(row: &R) -> deserialize::Result<Self> {
let value = row.get("value")?;
Ok(Test { value })
}
}
impl QueryableByName<Mysql> for NullableTest {
fn build<R: NamedRow<Mysql>>(row: &R) -> deserialize::Result<Self> {
let value = row.get("value")?;
Ok(NullableTest { value })
}
}
pub static TEST_DECIMALS: &[(u32, u32, &str, &str)] = &[
// precision, scale, sent, expected
(1, 0, "1", "1"),
(6, 2, "1", "1.00"),
(6, 2, "9999.99", "9999.99"),
(35, 6, "3950.123456", "3950.123456"),
(10, 2, "3950.123456", "3950.12"),
(35, 6, "3950", "3950.000000"),
(4, 0, "3950", "3950"),
(35, 6, "0.1", "0.100000"),
(35, 6, "0.01", "0.010000"),
(35, 6, "0.001", "0.001000"),
(35, 6, "0.0001", "0.000100"),
(35, 6, "0.00001", "0.000010"),
(35, 6, "0.000001", "0.000001"),
(35, 6, "1", "1.000000"),
(35, 6, "-100", "-100.000000"),
(35, 6, "-123.456", "-123.456000"),
(35, 6, "119996.25", "119996.250000"),
(35, 6, "1000000", "1000000.000000"),
(35, 6, "9999999.99999", "9999999.999990"),
(35, 6, "12340.56789", "12340.567890"),
];
/// Gets the URL for connecting to MySQL for testing. Set the MYSQL_URL
/// environment variable to change from the default of "mysql://root@localhost/mysql".
fn get_mysql_url() -> String {
if let Ok(url) = std::env::var("MYSQL_URL") {
return url;
}
"mysql://[email protected]/mysql".to_string()
}
#[test]
fn
|
() {
let connection = diesel::MysqlConnection::establish(&get_mysql_url()).expect("Establish connection");
// Test NULL
let items: Vec<NullableTest> = sql_query("SELECT CAST(NULL AS DECIMAL) AS value")
.load(&connection)
.expect("Unable to query value");
let result = items.first().unwrap().value;
assert_eq!(None, result);
}
#[test]
fn read_numeric_type() {
let connection = diesel::MysqlConnection::establish(&get_mysql_url()).expect("Establish connection");
for &(precision, scale, sent, expected) in TEST_DECIMALS.iter() {
let items: Vec<Test> = sql_query(format!(
"SELECT CAST('{}' AS DECIMAL({}, {})) AS value",
sent, precision, scale
))
.load(&connection)
.expect("Unable to query value");
assert_eq!(
expected,
items.first().unwrap().value.to_string(),
"DECIMAL({}, {}) sent: {}",
precision,
scale,
sent
);
}
}
#[test]
fn write_numeric_type() {
let connection = diesel::MysqlConnection::establish(&get_mysql_url()).expect("Establish connection");
for &(precision, scale, sent, expected) in TEST_DECIMALS.iter() {
let items: Vec<Test> =
sql_query(format!("SELECT CAST($1 AS DECIMAL({}, {})) AS value", precision, scale))
.bind::<Text, _>(sent)
.load(&connection)
.expect("Unable to query value");
assert_eq!(
expected,
items.first().unwrap().value.to_string(),
"DECIMAL({}, {}) sent: {}",
precision,
scale,
sent
);
}
}
}
}
|
test_null
|
identifier_name
|
mysql.rs
|
use crate::prelude::*;
#[cfg(feature = "db-diesel-mysql")]
mod diesel_mysql {
use super::*;
use ::diesel::{
deserialize::{self, FromSql},
mysql::Mysql,
serialize::{self, IsNull, Output, ToSql},
sql_types::Numeric,
};
use std::io::Write;
use std::str::FromStr;
impl ToSql<Numeric, Mysql> for Decimal {
fn to_sql<W: Write>(&self, out: &mut Output<W, Mysql>) -> serialize::Result {
// From what I can ascertain, MySQL simply writes to a string format for the Decimal type.
write!(out, "{}", *self).map(|_| IsNull::No).map_err(|e| e.into())
}
}
impl FromSql<Numeric, Mysql> for Decimal {
fn from_sql(numeric: Option<&[u8]>) -> deserialize::Result<Self> {
// From what I can ascertain, MySQL simply reads from a string format for the Decimal type.
// Explicitly, it looks like it is length followed by the string. Regardless, we can leverage
// internal types.
let bytes = numeric.ok_or("Invalid decimal")?;
let s = std::str::from_utf8(bytes)?;
Decimal::from_str(s).map_err(|e| e.into())
}
}
#[cfg(test)]
mod tests {
use super::*;
use diesel::deserialize::QueryableByName;
use diesel::prelude::*;
use diesel::row::NamedRow;
use diesel::sql_query;
use diesel::sql_types::Text;
struct Test {
value: Decimal,
}
struct NullableTest {
value: Option<Decimal>,
}
impl QueryableByName<Mysql> for Test {
fn build<R: NamedRow<Mysql>>(row: &R) -> deserialize::Result<Self> {
let value = row.get("value")?;
Ok(Test { value })
}
}
impl QueryableByName<Mysql> for NullableTest {
fn build<R: NamedRow<Mysql>>(row: &R) -> deserialize::Result<Self> {
let value = row.get("value")?;
Ok(NullableTest { value })
}
}
pub static TEST_DECIMALS: &[(u32, u32, &str, &str)] = &[
// precision, scale, sent, expected
(1, 0, "1", "1"),
(6, 2, "1", "1.00"),
(6, 2, "9999.99", "9999.99"),
(35, 6, "3950.123456", "3950.123456"),
(10, 2, "3950.123456", "3950.12"),
(35, 6, "3950", "3950.000000"),
(4, 0, "3950", "3950"),
(35, 6, "0.1", "0.100000"),
(35, 6, "0.01", "0.010000"),
(35, 6, "0.001", "0.001000"),
(35, 6, "0.0001", "0.000100"),
(35, 6, "0.00001", "0.000010"),
(35, 6, "0.000001", "0.000001"),
(35, 6, "1", "1.000000"),
(35, 6, "-100", "-100.000000"),
(35, 6, "-123.456", "-123.456000"),
(35, 6, "119996.25", "119996.250000"),
(35, 6, "1000000", "1000000.000000"),
(35, 6, "9999999.99999", "9999999.999990"),
(35, 6, "12340.56789", "12340.567890"),
];
/// Gets the URL for connecting to MySQL for testing. Set the MYSQL_URL
/// environment variable to change from the default of "mysql://root@localhost/mysql".
fn get_mysql_url() -> String {
if let Ok(url) = std::env::var("MYSQL_URL")
|
"mysql://[email protected]/mysql".to_string()
}
#[test]
fn test_null() {
let connection = diesel::MysqlConnection::establish(&get_mysql_url()).expect("Establish connection");
// Test NULL
let items: Vec<NullableTest> = sql_query("SELECT CAST(NULL AS DECIMAL) AS value")
.load(&connection)
.expect("Unable to query value");
let result = items.first().unwrap().value;
assert_eq!(None, result);
}
#[test]
fn read_numeric_type() {
let connection = diesel::MysqlConnection::establish(&get_mysql_url()).expect("Establish connection");
for &(precision, scale, sent, expected) in TEST_DECIMALS.iter() {
let items: Vec<Test> = sql_query(format!(
"SELECT CAST('{}' AS DECIMAL({}, {})) AS value",
sent, precision, scale
))
.load(&connection)
.expect("Unable to query value");
assert_eq!(
expected,
items.first().unwrap().value.to_string(),
"DECIMAL({}, {}) sent: {}",
precision,
scale,
sent
);
}
}
#[test]
fn write_numeric_type() {
let connection = diesel::MysqlConnection::establish(&get_mysql_url()).expect("Establish connection");
for &(precision, scale, sent, expected) in TEST_DECIMALS.iter() {
let items: Vec<Test> =
sql_query(format!("SELECT CAST($1 AS DECIMAL({}, {})) AS value", precision, scale))
.bind::<Text, _>(sent)
.load(&connection)
.expect("Unable to query value");
assert_eq!(
expected,
items.first().unwrap().value.to_string(),
"DECIMAL({}, {}) sent: {}",
precision,
scale,
sent
);
}
}
}
}
|
{
return url;
}
|
conditional_block
|
mysql.rs
|
use crate::prelude::*;
#[cfg(feature = "db-diesel-mysql")]
mod diesel_mysql {
use super::*;
use ::diesel::{
deserialize::{self, FromSql},
mysql::Mysql,
serialize::{self, IsNull, Output, ToSql},
sql_types::Numeric,
};
use std::io::Write;
use std::str::FromStr;
impl ToSql<Numeric, Mysql> for Decimal {
fn to_sql<W: Write>(&self, out: &mut Output<W, Mysql>) -> serialize::Result {
// From what I can ascertain, MySQL simply writes to a string format for the Decimal type.
write!(out, "{}", *self).map(|_| IsNull::No).map_err(|e| e.into())
}
}
impl FromSql<Numeric, Mysql> for Decimal {
fn from_sql(numeric: Option<&[u8]>) -> deserialize::Result<Self> {
// From what I can ascertain, MySQL simply reads from a string format for the Decimal type.
// Explicitly, it looks like it is length followed by the string. Regardless, we can leverage
// internal types.
let bytes = numeric.ok_or("Invalid decimal")?;
let s = std::str::from_utf8(bytes)?;
Decimal::from_str(s).map_err(|e| e.into())
}
}
#[cfg(test)]
mod tests {
use super::*;
use diesel::deserialize::QueryableByName;
use diesel::prelude::*;
use diesel::row::NamedRow;
use diesel::sql_query;
use diesel::sql_types::Text;
struct Test {
value: Decimal,
}
struct NullableTest {
value: Option<Decimal>,
}
impl QueryableByName<Mysql> for Test {
fn build<R: NamedRow<Mysql>>(row: &R) -> deserialize::Result<Self> {
let value = row.get("value")?;
Ok(Test { value })
}
}
impl QueryableByName<Mysql> for NullableTest {
fn build<R: NamedRow<Mysql>>(row: &R) -> deserialize::Result<Self> {
|
}
pub static TEST_DECIMALS: &[(u32, u32, &str, &str)] = &[
// precision, scale, sent, expected
(1, 0, "1", "1"),
(6, 2, "1", "1.00"),
(6, 2, "9999.99", "9999.99"),
(35, 6, "3950.123456", "3950.123456"),
(10, 2, "3950.123456", "3950.12"),
(35, 6, "3950", "3950.000000"),
(4, 0, "3950", "3950"),
(35, 6, "0.1", "0.100000"),
(35, 6, "0.01", "0.010000"),
(35, 6, "0.001", "0.001000"),
(35, 6, "0.0001", "0.000100"),
(35, 6, "0.00001", "0.000010"),
(35, 6, "0.000001", "0.000001"),
(35, 6, "1", "1.000000"),
(35, 6, "-100", "-100.000000"),
(35, 6, "-123.456", "-123.456000"),
(35, 6, "119996.25", "119996.250000"),
(35, 6, "1000000", "1000000.000000"),
(35, 6, "9999999.99999", "9999999.999990"),
(35, 6, "12340.56789", "12340.567890"),
];
/// Gets the URL for connecting to MySQL for testing. Set the MYSQL_URL
/// environment variable to change from the default of "mysql://root@localhost/mysql".
fn get_mysql_url() -> String {
if let Ok(url) = std::env::var("MYSQL_URL") {
return url;
}
"mysql://[email protected]/mysql".to_string()
}
#[test]
fn test_null() {
let connection = diesel::MysqlConnection::establish(&get_mysql_url()).expect("Establish connection");
// Test NULL
let items: Vec<NullableTest> = sql_query("SELECT CAST(NULL AS DECIMAL) AS value")
.load(&connection)
.expect("Unable to query value");
let result = items.first().unwrap().value;
assert_eq!(None, result);
}
#[test]
fn read_numeric_type() {
let connection = diesel::MysqlConnection::establish(&get_mysql_url()).expect("Establish connection");
for &(precision, scale, sent, expected) in TEST_DECIMALS.iter() {
let items: Vec<Test> = sql_query(format!(
"SELECT CAST('{}' AS DECIMAL({}, {})) AS value",
sent, precision, scale
))
.load(&connection)
.expect("Unable to query value");
assert_eq!(
expected,
items.first().unwrap().value.to_string(),
"DECIMAL({}, {}) sent: {}",
precision,
scale,
sent
);
}
}
#[test]
fn write_numeric_type() {
let connection = diesel::MysqlConnection::establish(&get_mysql_url()).expect("Establish connection");
for &(precision, scale, sent, expected) in TEST_DECIMALS.iter() {
let items: Vec<Test> =
sql_query(format!("SELECT CAST($1 AS DECIMAL({}, {})) AS value", precision, scale))
.bind::<Text, _>(sent)
.load(&connection)
.expect("Unable to query value");
assert_eq!(
expected,
items.first().unwrap().value.to_string(),
"DECIMAL({}, {}) sent: {}",
precision,
scale,
sent
);
}
}
}
}
|
let value = row.get("value")?;
Ok(NullableTest { value })
}
|
random_line_split
|
gauge.rs
|
use std::f64;
use std::mem;
use std::sync::atomic::{AtomicU64, Ordering};
#[derive(Debug)]
pub struct Gauge {
value: AtomicU64,
}
impl Gauge {
pub fn new() -> Gauge {
let bits = unsafe { mem::transmute(f64::NAN) };
Gauge { value: AtomicU64::new(bits) }
}
pub fn clear(&mut self) {
let bits = unsafe { mem::transmute(f64::NAN) };
self.value.store(bits, Ordering::Relaxed);
}
pub fn set(&mut self, value: f64) {
let bits = unsafe { mem::transmute(value) };
self.value.store(bits, Ordering::Relaxed);
}
pub fn snapshot(&self) -> f64 {
let bits = self.value.load(Ordering::Relaxed);
unsafe { mem::transmute(bits) }
}
}
#[cfg(test)]
mod test {
use super::*;
use std::f64;
#[test]
fn
|
() {
let mut c: Gauge = Gauge::new();
let s1 = c.snapshot();
c.set(1f64);
let s2 = c.snapshot();
assert!(f64::is_nan(s1));
assert!(s2 == 1f64);
}
}
|
snapshot
|
identifier_name
|
gauge.rs
|
use std::f64;
use std::mem;
use std::sync::atomic::{AtomicU64, Ordering};
#[derive(Debug)]
pub struct Gauge {
value: AtomicU64,
}
impl Gauge {
pub fn new() -> Gauge {
let bits = unsafe { mem::transmute(f64::NAN) };
Gauge { value: AtomicU64::new(bits) }
}
pub fn clear(&mut self)
|
pub fn set(&mut self, value: f64) {
let bits = unsafe { mem::transmute(value) };
self.value.store(bits, Ordering::Relaxed);
}
pub fn snapshot(&self) -> f64 {
let bits = self.value.load(Ordering::Relaxed);
unsafe { mem::transmute(bits) }
}
}
#[cfg(test)]
mod test {
use super::*;
use std::f64;
#[test]
fn snapshot() {
let mut c: Gauge = Gauge::new();
let s1 = c.snapshot();
c.set(1f64);
let s2 = c.snapshot();
assert!(f64::is_nan(s1));
assert!(s2 == 1f64);
}
}
|
{
let bits = unsafe { mem::transmute(f64::NAN) };
self.value.store(bits, Ordering::Relaxed);
}
|
identifier_body
|
gauge.rs
|
use std::f64;
use std::mem;
use std::sync::atomic::{AtomicU64, Ordering};
#[derive(Debug)]
pub struct Gauge {
value: AtomicU64,
}
impl Gauge {
pub fn new() -> Gauge {
let bits = unsafe { mem::transmute(f64::NAN) };
Gauge { value: AtomicU64::new(bits) }
}
pub fn clear(&mut self) {
let bits = unsafe { mem::transmute(f64::NAN) };
self.value.store(bits, Ordering::Relaxed);
}
pub fn set(&mut self, value: f64) {
let bits = unsafe { mem::transmute(value) };
self.value.store(bits, Ordering::Relaxed);
}
pub fn snapshot(&self) -> f64 {
let bits = self.value.load(Ordering::Relaxed);
unsafe { mem::transmute(bits) }
}
}
#[cfg(test)]
mod test {
use super::*;
|
#[test]
fn snapshot() {
let mut c: Gauge = Gauge::new();
let s1 = c.snapshot();
c.set(1f64);
let s2 = c.snapshot();
assert!(f64::is_nan(s1));
assert!(s2 == 1f64);
}
}
|
use std::f64;
|
random_line_split
|
genericitesource0.rs
|
// A est un type concret.
struct A;
// Lorsque nous déclarons `Single`, la première occurrence de `A` n'est
// pas précédée du type générique `<A>`. Le type `Single` et `A` sont donc
// concrets.
struct Single(A);
// ^ Voici la première occurrence du type `A`.
// En revanche, ici, `<T>` précède la première occurrence `T`, donc le type
// `SingleGen` est générique. Puisque le type `T` est générique, cela pourrait être
// "n'importe quoi", y compris le type concret `A` déclaré au début du fichier.
struct SingleGen<T>(T);
f
|
{
// `Single` est un type concret et prend explicitement un paramètre
// de type `A`.
let _s = Single(A);
// On créé une variable nommée `_char` de type `SingleGen<char>`
// et on lui assigne la valeur `SingleGen('a')`.
// Le type requis du paramètre passé pour cette instance de `SingleGen`
// est spécifié, mais il peut être omis, exemple ---
let _char: SingleGen<char> = SingleGen('a');
// --->
let _t = SingleGen(A); // On passe une instance
// du type `A` définit en haut.
let _i32 = SingleGen(6); // On passe un entier de type `i32`.
let _char = SingleGen('a'); // On passe un `char`.
}
|
n main()
|
identifier_name
|
genericitesource0.rs
|
// A est un type concret.
struct A;
// Lorsque nous déclarons `Single`, la première occurrence de `A` n'est
// pas précédée du type générique `<A>`. Le type `Single` et `A` sont donc
// concrets.
struct Single(A);
|
// `SingleGen` est générique. Puisque le type `T` est générique, cela pourrait être
// "n'importe quoi", y compris le type concret `A` déclaré au début du fichier.
struct SingleGen<T>(T);
fn main() {
// `Single` est un type concret et prend explicitement un paramètre
// de type `A`.
let _s = Single(A);
// On créé une variable nommée `_char` de type `SingleGen<char>`
// et on lui assigne la valeur `SingleGen('a')`.
// Le type requis du paramètre passé pour cette instance de `SingleGen`
// est spécifié, mais il peut être omis, exemple ---
let _char: SingleGen<char> = SingleGen('a');
// --->
let _t = SingleGen(A); // On passe une instance
// du type `A` définit en haut.
let _i32 = SingleGen(6); // On passe un entier de type `i32`.
let _char = SingleGen('a'); // On passe un `char`.
}
|
// ^ Voici la première occurrence du type `A`.
// En revanche, ici, `<T>` précède la première occurrence `T`, donc le type
|
random_line_split
|
vreyeparameters.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use core::nonzero::NonZero;
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::VREyeParametersBinding;
use dom::bindings::codegen::Bindings::VREyeParametersBinding::VREyeParametersMethods;
use dom::bindings::conversions::slice_to_array_buffer_view;
use dom::bindings::js::{JS, Root};
use dom::bindings::reflector::{Reflector, reflect_dom_object};
use dom::globalscope::GlobalScope;
use dom::vrfieldofview::VRFieldOfView;
use js::jsapi::{Heap, JSContext, JSObject};
use std::default::Default;
use webvr_traits::WebVREyeParameters;
#[dom_struct]
pub struct VREyeParameters {
reflector_: Reflector,
#[ignore_heap_size_of = "Defined in rust-webvr"]
parameters: DOMRefCell<WebVREyeParameters>,
offset: Heap<*mut JSObject>,
fov: JS<VRFieldOfView>,
}
unsafe_no_jsmanaged_fields!(WebVREyeParameters);
impl VREyeParameters {
#[allow(unsafe_code)]
#[allow(unrooted_must_root)]
fn new_inherited(parameters: WebVREyeParameters, global: &GlobalScope) -> VREyeParameters {
let fov = VRFieldOfView::new(&global, parameters.field_of_view.clone());
let mut result = VREyeParameters {
reflector_: Reflector::new(),
parameters: DOMRefCell::new(parameters),
offset: Heap::default(),
fov: JS::from_ref(&*fov)
};
unsafe {
result.offset.set(slice_to_array_buffer_view(global.get_cx(), &result.parameters.borrow().offset));
}
result
}
pub fn new(parameters: WebVREyeParameters, global: &GlobalScope) -> Root<VREyeParameters> {
reflect_dom_object(box VREyeParameters::new_inherited(parameters, global),
global,
VREyeParametersBinding::Wrap)
}
}
impl VREyeParametersMethods for VREyeParameters {
#[allow(unsafe_code)]
// https://w3c.github.io/webvr/#dom-vreyeparameters-offset
unsafe fn
|
(&self, _cx: *mut JSContext) -> NonZero<*mut JSObject> {
NonZero::new(self.offset.get())
}
// https://w3c.github.io/webvr/#dom-vreyeparameters-fieldofview
fn FieldOfView(&self) -> Root<VRFieldOfView> {
Root::from_ref(&*self.fov)
}
// https://w3c.github.io/webvr/#dom-vreyeparameters-renderwidth
fn RenderWidth(&self) -> u32 {
self.parameters.borrow().render_width
}
// https://w3c.github.io/webvr/#dom-vreyeparameters-renderheight
fn RenderHeight(&self) -> u32 {
self.parameters.borrow().render_height
}
}
|
Offset
|
identifier_name
|
vreyeparameters.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use core::nonzero::NonZero;
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::VREyeParametersBinding;
use dom::bindings::codegen::Bindings::VREyeParametersBinding::VREyeParametersMethods;
use dom::bindings::conversions::slice_to_array_buffer_view;
use dom::bindings::js::{JS, Root};
use dom::bindings::reflector::{Reflector, reflect_dom_object};
use dom::globalscope::GlobalScope;
use dom::vrfieldofview::VRFieldOfView;
use js::jsapi::{Heap, JSContext, JSObject};
use std::default::Default;
use webvr_traits::WebVREyeParameters;
#[dom_struct]
pub struct VREyeParameters {
reflector_: Reflector,
#[ignore_heap_size_of = "Defined in rust-webvr"]
parameters: DOMRefCell<WebVREyeParameters>,
offset: Heap<*mut JSObject>,
fov: JS<VRFieldOfView>,
}
unsafe_no_jsmanaged_fields!(WebVREyeParameters);
impl VREyeParameters {
#[allow(unsafe_code)]
#[allow(unrooted_must_root)]
fn new_inherited(parameters: WebVREyeParameters, global: &GlobalScope) -> VREyeParameters {
let fov = VRFieldOfView::new(&global, parameters.field_of_view.clone());
let mut result = VREyeParameters {
reflector_: Reflector::new(),
parameters: DOMRefCell::new(parameters),
offset: Heap::default(),
fov: JS::from_ref(&*fov)
};
unsafe {
result.offset.set(slice_to_array_buffer_view(global.get_cx(), &result.parameters.borrow().offset));
}
result
}
pub fn new(parameters: WebVREyeParameters, global: &GlobalScope) -> Root<VREyeParameters>
|
}
impl VREyeParametersMethods for VREyeParameters {
#[allow(unsafe_code)]
// https://w3c.github.io/webvr/#dom-vreyeparameters-offset
unsafe fn Offset(&self, _cx: *mut JSContext) -> NonZero<*mut JSObject> {
NonZero::new(self.offset.get())
}
// https://w3c.github.io/webvr/#dom-vreyeparameters-fieldofview
fn FieldOfView(&self) -> Root<VRFieldOfView> {
Root::from_ref(&*self.fov)
}
// https://w3c.github.io/webvr/#dom-vreyeparameters-renderwidth
fn RenderWidth(&self) -> u32 {
self.parameters.borrow().render_width
}
// https://w3c.github.io/webvr/#dom-vreyeparameters-renderheight
fn RenderHeight(&self) -> u32 {
self.parameters.borrow().render_height
}
}
|
{
reflect_dom_object(box VREyeParameters::new_inherited(parameters, global),
global,
VREyeParametersBinding::Wrap)
}
|
identifier_body
|
vreyeparameters.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use core::nonzero::NonZero;
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::VREyeParametersBinding;
use dom::bindings::codegen::Bindings::VREyeParametersBinding::VREyeParametersMethods;
use dom::bindings::conversions::slice_to_array_buffer_view;
use dom::bindings::js::{JS, Root};
use dom::bindings::reflector::{Reflector, reflect_dom_object};
use dom::globalscope::GlobalScope;
use dom::vrfieldofview::VRFieldOfView;
use js::jsapi::{Heap, JSContext, JSObject};
use std::default::Default;
use webvr_traits::WebVREyeParameters;
#[dom_struct]
pub struct VREyeParameters {
reflector_: Reflector,
#[ignore_heap_size_of = "Defined in rust-webvr"]
parameters: DOMRefCell<WebVREyeParameters>,
offset: Heap<*mut JSObject>,
fov: JS<VRFieldOfView>,
}
unsafe_no_jsmanaged_fields!(WebVREyeParameters);
impl VREyeParameters {
#[allow(unsafe_code)]
#[allow(unrooted_must_root)]
fn new_inherited(parameters: WebVREyeParameters, global: &GlobalScope) -> VREyeParameters {
let fov = VRFieldOfView::new(&global, parameters.field_of_view.clone());
let mut result = VREyeParameters {
reflector_: Reflector::new(),
parameters: DOMRefCell::new(parameters),
offset: Heap::default(),
fov: JS::from_ref(&*fov)
};
unsafe {
|
pub fn new(parameters: WebVREyeParameters, global: &GlobalScope) -> Root<VREyeParameters> {
reflect_dom_object(box VREyeParameters::new_inherited(parameters, global),
global,
VREyeParametersBinding::Wrap)
}
}
impl VREyeParametersMethods for VREyeParameters {
#[allow(unsafe_code)]
// https://w3c.github.io/webvr/#dom-vreyeparameters-offset
unsafe fn Offset(&self, _cx: *mut JSContext) -> NonZero<*mut JSObject> {
NonZero::new(self.offset.get())
}
// https://w3c.github.io/webvr/#dom-vreyeparameters-fieldofview
fn FieldOfView(&self) -> Root<VRFieldOfView> {
Root::from_ref(&*self.fov)
}
// https://w3c.github.io/webvr/#dom-vreyeparameters-renderwidth
fn RenderWidth(&self) -> u32 {
self.parameters.borrow().render_width
}
// https://w3c.github.io/webvr/#dom-vreyeparameters-renderheight
fn RenderHeight(&self) -> u32 {
self.parameters.borrow().render_height
}
}
|
result.offset.set(slice_to_array_buffer_view(global.get_cx(), &result.parameters.borrow().offset));
}
result
}
|
random_line_split
|
main.rs
|
use rand::{thread_rng, seq::SliceRandom};
use mcc4::*;
|
let game = ConnectFour::<BitState>::new(7, 6).unwrap();
let human_player = HumanPlayer::new();
let ai_player = TreeSearchPlayer::new(&game);
let mut players: Vec<Box<PlayerTrait<Game=_>>> = vec![Box::new(human_player), Box::new(ai_player)];
players.shuffle(&mut thread_rng());
println!("\x1B[2J\x1B[H");
println!("{}", game.state());
for (state, player, move_, winner) in game.iter(players) {
print!("\x1B[2J\x1B[H");
println!("Player {} has moved {}", player, move_);
println!("{}", state);
match winner {
Winner::Winner(winner) => println!("Player {} has won.", winner),
Winner::Draw => println!("Draw."),
Winner::NotFinishedYet => {}
};
}
}
|
fn main() {
env_logger::init();
|
random_line_split
|
main.rs
|
use rand::{thread_rng, seq::SliceRandom};
use mcc4::*;
fn main() {
env_logger::init();
let game = ConnectFour::<BitState>::new(7, 6).unwrap();
let human_player = HumanPlayer::new();
let ai_player = TreeSearchPlayer::new(&game);
let mut players: Vec<Box<PlayerTrait<Game=_>>> = vec![Box::new(human_player), Box::new(ai_player)];
players.shuffle(&mut thread_rng());
println!("\x1B[2J\x1B[H");
println!("{}", game.state());
for (state, player, move_, winner) in game.iter(players) {
print!("\x1B[2J\x1B[H");
println!("Player {} has moved {}", player, move_);
println!("{}", state);
match winner {
Winner::Winner(winner) => println!("Player {} has won.", winner),
Winner::Draw => println!("Draw."),
Winner::NotFinishedYet =>
|
};
}
}
|
{}
|
conditional_block
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.