| file_name (large_string, lengths 4–69) | prefix (large_string, lengths 0–26.7k) | suffix (large_string, lengths 0–24.8k) | middle (large_string, lengths 0–2.12k) | fim_type (large_string, 4 classes) |
---|---|---|---|---|
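Each row below is one fill-in-the-middle (FIM) sample: `middle` is the span cut out of the original file, `prefix` and `suffix` are what remains on either side, and `fim_type` records the split strategy (`random_line_split`, `identifier_body`, `identifier_name`, or `conditional_block`). A minimal Rust sketch of how a row could be reassembled; the struct and the `<fim_prefix>`/`<fim_suffix>`/`<fim_middle>` sentinel tokens are illustrative assumptions, not part of the dataset:

```rust
/// One row of the table below; field names mirror the column headers.
struct FimRow {
    file_name: String,
    prefix: String,
    suffix: String,
    middle: String,
    // One of: random_line_split, identifier_body, identifier_name, conditional_block.
    fim_type: String,
}

impl FimRow {
    /// Reconstruct the original source file by splicing the middle back in.
    fn original_source(&self) -> String {
        format!("{}{}{}", self.prefix, self.middle, self.suffix)
    }

    /// Render a prefix-suffix-middle (PSM) style training string.
    /// The sentinel tokens are an assumption for illustration only.
    fn to_psm(&self) -> String {
        format!(
            "<fim_prefix>{}<fim_suffix>{}<fim_middle>{}",
            self.prefix, self.suffix, self.middle
        )
    }
}
```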
trait-inheritance-num.rs
|
// xfail-fast
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern mod extra;
use std::cmp::{Eq, Ord};
use std::num::NumCast;
pub trait NumExt: Num + NumCast + Eq + Ord {}
pub trait FloatExt: NumExt {}
fn greater_than_one<T:NumExt>(n: &T) -> bool { *n > NumCast::from(1).unwrap() }
fn greater_than_one_float<T:FloatExt>(n: &T) -> bool { *n > NumCast::from(1).unwrap() }
|
pub fn main() {}
|
random_line_split
|
|
scene_system.rs
|
use std::sync::{Arc, Mutex};
use render::camera::Camera;
#[derive(Clone,Debug)]
pub struct SceneManager {
pub inner: Arc<Mutex<u8>>,
cameras: Arc<Mutex<Vec<Camera>>>,
}
impl SceneManager{
fn new() -> Self {
SceneManager {
inner: Arc::new(Mutex::new(0)),
cameras: Arc::new(Mutex::new(Vec::new())),
}
}
fn init(&mut self){
self.add_camera(Camera::new());
}
fn add_camera(&mut self, camera: Camera){
self.cameras.lock().unwrap().push(camera);
// println!("length {}", self.cameras.lock().unwrap().len());
}
fn get_camera(&self, index: usize) -> Camera {
// println!("!!!!!!!{} {}", index, self.cameras.lock().unwrap().len());
(self.cameras.lock().unwrap())[index].clone()
}
fn update(&self) {
(self.cameras.lock().unwrap())[0].update()
}
// fn process_input(&self, event: &glutin::Event) {
// (self.cameras.lock().unwrap())[0].process_input(event)
// }
}
#[cfg(test)]
|
}
|
mod tests {
use super::*;
fn test_load_obj() {
}
|
random_line_split
|
scene_system.rs
|
use std::sync::{Arc, Mutex};
use render::camera::Camera;
#[derive(Clone,Debug)]
pub struct SceneManager {
pub inner: Arc<Mutex<u8>>,
cameras: Arc<Mutex<Vec<Camera>>>,
}
impl SceneManager{
fn new() -> Self {
SceneManager {
inner: Arc::new(Mutex::new(0)),
cameras: Arc::new(Mutex::new(Vec::new())),
}
}
fn init(&mut self){
self.add_camera(Camera::new());
}
fn add_camera(&mut self, camera: Camera){
self.cameras.lock().unwrap().push(camera);
// println!("length {}", self.cameras.lock().unwrap().len());
}
fn get_camera(&self, index: usize) -> Camera {
// println!("!!!!!!!{} {}", index, self.cameras.lock().unwrap().len());
(self.cameras.lock().unwrap())[index].clone()
}
fn update(&self)
|
// fn process_input(&self, event: &glutin::Event) {
// (self.cameras.lock().unwrap())[0].process_input(event)
// }
}
#[cfg(test)]
mod tests {
use super::*;
fn test_load_obj() {
}
}
|
{
(self.cameras.lock().unwrap())[0].update()
}
|
identifier_body
|
scene_system.rs
|
use std::sync::{Arc, Mutex};
use render::camera::Camera;
#[derive(Clone,Debug)]
pub struct SceneManager {
pub inner: Arc<Mutex<u8>>,
cameras: Arc<Mutex<Vec<Camera>>>,
}
impl SceneManager{
fn new() -> Self {
SceneManager {
inner: Arc::new(Mutex::new(0)),
cameras: Arc::new(Mutex::new(Vec::new())),
}
}
fn init(&mut self){
self.add_camera(Camera::new());
}
fn add_camera(&mut self, camera: Camera){
self.cameras.lock().unwrap().push(camera);
// println!("length {}", self.cameras.lock().unwrap().len());
}
fn get_camera(&self, index: usize) -> Camera {
// println!("!!!!!!!{} {}", index, self.cameras.lock().unwrap().len());
(self.cameras.lock().unwrap())[index].clone()
}
fn
|
(&self) {
(self.cameras.lock().unwrap())[0].update()
}
// fn process_input(&self, event: &glutin::Event) {
// (self.cameras.lock().unwrap())[0].process_input(event)
// }
}
#[cfg(test)]
mod tests {
use super::*;
fn test_load_obj() {
}
}
|
update
|
identifier_name
|
class-exports.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![allow(dead_code)]
#![allow(non_camel_case_types)]
/* Test that exporting a class also exports its
public fields and methods */
use kitty::cat;
mod kitty {
pub struct cat {
meows: usize,
name: String,
}
impl cat {
pub fn get_name(&self) -> String { self.name.clone() }
}
pub fn cat(in_name: String) -> cat {
cat {
name: in_name,
|
pub fn main() {
assert_eq!(cat("Spreckles".to_string()).get_name(),
"Spreckles".to_string());
}
|
meows: 0
}
}
}
|
random_line_split
|
class-exports.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![allow(dead_code)]
#![allow(non_camel_case_types)]
/* Test that exporting a class also exports its
public fields and methods */
use kitty::cat;
mod kitty {
pub struct cat {
meows: usize,
name: String,
}
impl cat {
pub fn get_name(&self) -> String { self.name.clone() }
}
pub fn cat(in_name: String) -> cat {
cat {
name: in_name,
meows: 0
}
}
}
pub fn main()
|
{
assert_eq!(cat("Spreckles".to_string()).get_name(),
"Spreckles".to_string());
}
|
identifier_body
|
|
class-exports.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![allow(dead_code)]
#![allow(non_camel_case_types)]
/* Test that exporting a class also exports its
public fields and methods */
use kitty::cat;
mod kitty {
pub struct
|
{
meows: usize,
name: String,
}
impl cat {
pub fn get_name(&self) -> String { self.name.clone() }
}
pub fn cat(in_name: String) -> cat {
cat {
name: in_name,
meows: 0
}
}
}
pub fn main() {
assert_eq!(cat("Spreckles".to_string()).get_name(),
"Spreckles".to_string());
}
|
cat
|
identifier_name
|
config.rs
|
use std::fs;
use std::io::{Read, Write};
use std::path::PathBuf;
use std::sync::{RwLock, RwLockReadGuard, RwLockWriteGuard};
use failure::format_err;
use serde_derive::{Deserialize, Serialize};
use toml;
use crate::db::Database;
use crate::errors::*;
use crate::repos;
use crate::users;
pub struct Config {
pub main: MainConfig,
pub admin: Option<AdminConfig>,
pub metrics: Option<MetricsConfig>,
pub github: GithubConfig,
pub slack: SlackConfig,
pub jira: Option<JiraConfig>,
pub ldap: Option<LdapConfig>,
pub users: RwLock<users::UserConfig>,
pub repos: RwLock<repos::RepoConfig>,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ConfigModel {
pub main: MainConfig,
pub admin: Option<AdminConfig>,
pub metrics: Option<MetricsConfig>,
pub github: GithubConfig,
pub slack: SlackConfig,
pub jira: Option<JiraConfig>,
pub ldap: Option<LdapConfig>,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct MainConfig {
pub listen_addr: Option<String>,
pub clone_root_dir: String,
pub num_http_threads: Option<usize>,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct AdminConfig {
pub name: String,
pub salt: String,
pub pass_hash: String,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
|
pub pass_hash: String,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct SlackConfig {
pub bot_token: String,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct GithubConfig {
pub webhook_secret: String,
pub host: String,
pub api_token: Option<String>,
pub app_id: Option<u32>,
pub app_key_file: Option<String>,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct JiraConfig {
pub host: String,
pub username: String,
pub password: String,
// review state that may be necessary before submitting for review (defaults to ["In Progress"])
pub progress_states: Option<Vec<String>>,
// review state to transition to when marked for review (defaults to ["Pending Review"])
pub review_states: Option<Vec<String>>,
// resolved state to transition to when PR is merged. (defaults to ["Resolved", "Done"])
pub resolved_states: Option<Vec<String>>,
// when marking as resolved, add this resolution (defaults to ["Fixed", "Done"])
pub fixed_resolutions: Option<Vec<String>>,
// the field name for where the version goes. (defaults to "fixVersions").
pub fix_versions_field: Option<String>,
// the field name for where the pending build versions go. expected to be a plain text field
pub pending_versions_field: Option<String>,
// optional name of role to restrict octobot comment visibility. (e.g. "Developers")
pub restrict_comment_visibility_to_role: Option<String>,
// optional suffix to add to the username for the login dialog (e.g. "@company.com")
pub login_suffix: Option<String>,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct LdapConfig {
// LDAP URL (e.g. ldaps://ldap.company.com)
pub url: String,
// either username for AD or bind DN for LDAP
pub bind_user: String,
// bind user's password
pub bind_pass: String,
// the base DN to bind to
pub base_dn: String,
// attributes to match logins against (e.g. ["samAccountName", "userPrincipalName"] for AD, ["uid, "mail"] for LDAP)
pub userid_attributes: Vec<String>,
// Additional LDAP search filter for user types and group membership
// e.g. (&(objectCategory=Person)(memberOf=cn=octobot-admins,ou=users,dc=company,dc=com))
pub search_filter: Option<String>,
}
impl Config {
// TODO: weird that `new` is used only by tests and the actual `new` is below...
pub fn new(db: Database) -> Config {
Config::new_with_model(ConfigModel::new(), db)
}
fn new_with_model(config: ConfigModel, db: Database) -> Config {
Config {
main: config.main,
admin: config.admin,
metrics: config.metrics,
github: config.github,
slack: config.slack,
jira: config.jira,
ldap: config.ldap,
users: RwLock::new(users::UserConfig::new(db.clone())),
repos: RwLock::new(repos::RepoConfig::new(db)),
}
}
pub fn save(&self, config_file: &str) -> Result<()> {
let model = ConfigModel {
main: self.main.clone(),
admin: self.admin.clone(),
metrics: self.metrics.clone(),
github: self.github.clone(),
slack: self.slack.clone(),
jira: self.jira.clone(),
ldap: self.ldap.clone(),
};
let serialized =
toml::to_string(&model).map_err(|e| format_err!("Error serializing config: {}", e))?;
let tmp_file = config_file.to_string() + ".tmp";
let bak_file = config_file.to_string() + ".bak";
let mut file = fs::File::create(&tmp_file)?;
file.write_all(serialized.as_bytes())?;
fs::rename(&config_file, &bak_file)?;
fs::rename(&tmp_file, &config_file)?;
fs::remove_file(&bak_file)?;
Ok(())
}
pub fn users(&self) -> RwLockReadGuard<users::UserConfig> {
self.users.read().unwrap()
}
pub fn users_write(&self) -> RwLockWriteGuard<users::UserConfig> {
self.users.write().unwrap()
}
pub fn repos(&self) -> RwLockReadGuard<repos::RepoConfig> {
self.repos.read().unwrap()
}
pub fn repos_write(&self) -> RwLockWriteGuard<repos::RepoConfig> {
self.repos.write().unwrap()
}
}
impl ConfigModel {
pub fn new() -> ConfigModel {
ConfigModel {
main: MainConfig {
listen_addr: None,
clone_root_dir: String::new(),
num_http_threads: None,
},
admin: None,
metrics: None,
github: GithubConfig {
webhook_secret: String::new(),
host: String::new(),
api_token: None,
app_id: None,
app_key_file: None,
},
slack: SlackConfig {
bot_token: String::new(),
},
jira: None,
ldap: None,
}
}
}
impl GithubConfig {
pub fn app_key(&self) -> Result<Vec<u8>> {
let key_file = &self
.app_key_file
.as_ref()
.expect("expected an app_key_file");
let mut file_open = fs::File::open(key_file)?;
let mut contents = vec![];
file_open.read_to_end(&mut contents)?;
Ok(contents)
}
}
impl JiraConfig {
pub fn base_url(&self) -> String {
if self.host.starts_with("http") {
self.host.clone()
} else {
format!("https://{}", self.host)
}
}
pub fn progress_states(&self) -> Vec<String> {
if let Some(ref states) = self.progress_states {
states.clone() // hmm. do these w/o a clone?
} else {
vec!["In Progress".into()]
}
}
pub fn review_states(&self) -> Vec<String> {
if let Some(ref states) = self.review_states {
states.clone() // hmm. do these w/o a clone?
} else {
vec!["Pending Review".into()]
}
}
pub fn resolved_states(&self) -> Vec<String> {
if let Some(ref states) = self.resolved_states {
states.clone() // hmm. do these w/o a clone?
} else {
vec!["Resolved".into(), "Done".into()]
}
}
pub fn fixed_resolutions(&self) -> Vec<String> {
if let Some(ref res) = self.fixed_resolutions {
res.clone() // hmm. do these w/o a clone?
} else {
vec!["Fixed".into(), "Done".into()]
}
}
pub fn fix_versions(&self) -> String {
if let Some(ref field) = self.fix_versions_field {
field.clone()
} else {
"fixVersions".into()
}
}
}
pub fn new(config_file: PathBuf) -> Result<Config> {
let db_file_name = "db.sqlite3";
match config_file.file_name() {
Some(name) => {
if name == db_file_name {
return Err(format_err!("Must provide toml config file"));
}
}
None => return Err(format_err!("Provided config file has no file name")),
};
let mut db_file = config_file.clone();
db_file.set_file_name(db_file_name);
let db = Database::new(&db_file.to_string_lossy())?;
let mut config_file_open = fs::File::open(&config_file)?;
let mut config_contents = String::new();
config_file_open.read_to_string(&mut config_contents)?;
let config_model = parse_string(&config_contents)?;
Ok(Config::new_with_model(config_model, db))
}
fn parse_string(config_contents: &str) -> Result<ConfigModel> {
toml::from_str::<ConfigModel>(config_contents)
.map_err(|e| format_err!("Error parsing config: {}", e))
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_parse() {
let config_str = r#"
[main]
clone_root_dir = "./repos"
[slack]
bot_token = "foo"
[github]
webhook_secret = "abcd"
host = "git.company.com"
app_id = 2
app_key_file = "some-file.key"
"#;
let config = parse_string(config_str).unwrap();
assert_eq!("foo", config.slack.bot_token);
}
}
|
pub struct MetricsConfig {
pub salt: String,
|
random_line_split
|
config.rs
|
use std::fs;
use std::io::{Read, Write};
use std::path::PathBuf;
use std::sync::{RwLock, RwLockReadGuard, RwLockWriteGuard};
use failure::format_err;
use serde_derive::{Deserialize, Serialize};
use toml;
use crate::db::Database;
use crate::errors::*;
use crate::repos;
use crate::users;
pub struct Config {
pub main: MainConfig,
pub admin: Option<AdminConfig>,
pub metrics: Option<MetricsConfig>,
pub github: GithubConfig,
pub slack: SlackConfig,
pub jira: Option<JiraConfig>,
pub ldap: Option<LdapConfig>,
pub users: RwLock<users::UserConfig>,
pub repos: RwLock<repos::RepoConfig>,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ConfigModel {
pub main: MainConfig,
pub admin: Option<AdminConfig>,
pub metrics: Option<MetricsConfig>,
pub github: GithubConfig,
pub slack: SlackConfig,
pub jira: Option<JiraConfig>,
pub ldap: Option<LdapConfig>,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct MainConfig {
pub listen_addr: Option<String>,
pub clone_root_dir: String,
pub num_http_threads: Option<usize>,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct AdminConfig {
pub name: String,
pub salt: String,
pub pass_hash: String,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct MetricsConfig {
pub salt: String,
pub pass_hash: String,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct SlackConfig {
pub bot_token: String,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct GithubConfig {
pub webhook_secret: String,
pub host: String,
pub api_token: Option<String>,
pub app_id: Option<u32>,
pub app_key_file: Option<String>,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct JiraConfig {
pub host: String,
pub username: String,
pub password: String,
// review state that may be necessary before submitting for review (defaults to ["In Progress"])
pub progress_states: Option<Vec<String>>,
// review state to transition to when marked for review (defaults to ["Pending Review"])
pub review_states: Option<Vec<String>>,
// resolved state to transition to when PR is merged. (defaults to ["Resolved", "Done"])
pub resolved_states: Option<Vec<String>>,
// when marking as resolved, add this resolution (defaults to ["Fixed", "Done"])
pub fixed_resolutions: Option<Vec<String>>,
// the field name for where the version goes. (defaults to "fixVersions").
pub fix_versions_field: Option<String>,
// the field name for where the pending build versions go. expected to be a plain text field
pub pending_versions_field: Option<String>,
// optional name of role to restrict octobot comment visibility. (e.g. "Developers")
pub restrict_comment_visibility_to_role: Option<String>,
// optional suffix to add to the username for the login dialog (e.g. "@company.com")
pub login_suffix: Option<String>,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct LdapConfig {
// LDAP URL (e.g. ldaps://ldap.company.com)
pub url: String,
// either username for AD or bind DN for LDAP
pub bind_user: String,
// bind user's password
pub bind_pass: String,
// the base DN to bind to
pub base_dn: String,
// attributes to match logins against (e.g. ["samAccountName", "userPrincipalName"] for AD, ["uid, "mail"] for LDAP)
pub userid_attributes: Vec<String>,
// Additional LDAP search filter for user types and group membership
// e.g. (&(objectCategory=Person)(memberOf=cn=octobot-admins,ou=users,dc=company,dc=com))
pub search_filter: Option<String>,
}
impl Config {
// TODO: weird that `new` is used only by tests and the actual `new` is below...
pub fn new(db: Database) -> Config {
Config::new_with_model(ConfigModel::new(), db)
}
fn new_with_model(config: ConfigModel, db: Database) -> Config {
Config {
main: config.main,
admin: config.admin,
metrics: config.metrics,
github: config.github,
slack: config.slack,
jira: config.jira,
ldap: config.ldap,
users: RwLock::new(users::UserConfig::new(db.clone())),
repos: RwLock::new(repos::RepoConfig::new(db)),
}
}
pub fn save(&self, config_file: &str) -> Result<()> {
let model = ConfigModel {
main: self.main.clone(),
admin: self.admin.clone(),
metrics: self.metrics.clone(),
github: self.github.clone(),
slack: self.slack.clone(),
jira: self.jira.clone(),
ldap: self.ldap.clone(),
};
let serialized =
toml::to_string(&model).map_err(|e| format_err!("Error serializing config: {}", e))?;
let tmp_file = config_file.to_string() + ".tmp";
let bak_file = config_file.to_string() + ".bak";
let mut file = fs::File::create(&tmp_file)?;
file.write_all(serialized.as_bytes())?;
fs::rename(&config_file, &bak_file)?;
fs::rename(&tmp_file, &config_file)?;
fs::remove_file(&bak_file)?;
Ok(())
}
pub fn users(&self) -> RwLockReadGuard<users::UserConfig> {
self.users.read().unwrap()
}
pub fn users_write(&self) -> RwLockWriteGuard<users::UserConfig> {
self.users.write().unwrap()
}
pub fn repos(&self) -> RwLockReadGuard<repos::RepoConfig> {
self.repos.read().unwrap()
}
pub fn repos_write(&self) -> RwLockWriteGuard<repos::RepoConfig> {
self.repos.write().unwrap()
}
}
impl ConfigModel {
pub fn new() -> ConfigModel {
ConfigModel {
main: MainConfig {
listen_addr: None,
clone_root_dir: String::new(),
num_http_threads: None,
},
admin: None,
metrics: None,
github: GithubConfig {
webhook_secret: String::new(),
host: String::new(),
api_token: None,
app_id: None,
app_key_file: None,
},
slack: SlackConfig {
bot_token: String::new(),
},
jira: None,
ldap: None,
}
}
}
impl GithubConfig {
pub fn app_key(&self) -> Result<Vec<u8>> {
let key_file = &self
.app_key_file
.as_ref()
.expect("expected an app_key_file");
let mut file_open = fs::File::open(key_file)?;
let mut contents = vec![];
file_open.read_to_end(&mut contents)?;
Ok(contents)
}
}
impl JiraConfig {
pub fn base_url(&self) -> String {
if self.host.starts_with("http") {
self.host.clone()
} else {
format!("https://{}", self.host)
}
}
pub fn progress_states(&self) -> Vec<String> {
if let Some(ref states) = self.progress_states {
states.clone() // hmm. do these w/o a clone?
} else {
vec!["In Progress".into()]
}
}
pub fn review_states(&self) -> Vec<String> {
if let Some(ref states) = self.review_states {
states.clone() // hmm. do these w/o a clone?
} else
|
}
pub fn resolved_states(&self) -> Vec<String> {
if let Some(ref states) = self.resolved_states {
states.clone() // hmm. do these w/o a clone?
} else {
vec!["Resolved".into(), "Done".into()]
}
}
pub fn fixed_resolutions(&self) -> Vec<String> {
if let Some(ref res) = self.fixed_resolutions {
res.clone() // hmm. do these w/o a clone?
} else {
vec!["Fixed".into(), "Done".into()]
}
}
pub fn fix_versions(&self) -> String {
if let Some(ref field) = self.fix_versions_field {
field.clone()
} else {
"fixVersions".into()
}
}
}
pub fn new(config_file: PathBuf) -> Result<Config> {
let db_file_name = "db.sqlite3";
match config_file.file_name() {
Some(name) => {
if name == db_file_name {
return Err(format_err!("Must provide toml config file"));
}
}
None => return Err(format_err!("Provided config file has no file name")),
};
let mut db_file = config_file.clone();
db_file.set_file_name(db_file_name);
let db = Database::new(&db_file.to_string_lossy())?;
let mut config_file_open = fs::File::open(&config_file)?;
let mut config_contents = String::new();
config_file_open.read_to_string(&mut config_contents)?;
let config_model = parse_string(&config_contents)?;
Ok(Config::new_with_model(config_model, db))
}
fn parse_string(config_contents: &str) -> Result<ConfigModel> {
toml::from_str::<ConfigModel>(config_contents)
.map_err(|e| format_err!("Error parsing config: {}", e))
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_parse() {
let config_str = r#"
[main]
clone_root_dir = "./repos"
[slack]
bot_token = "foo"
[github]
webhook_secret = "abcd"
host = "git.company.com"
app_id = 2
app_key_file = "some-file.key"
"#;
let config = parse_string(config_str).unwrap();
assert_eq!("foo", config.slack.bot_token);
}
}
|
{
vec!["Pending Review".into()]
}
|
conditional_block
|
config.rs
|
use std::fs;
use std::io::{Read, Write};
use std::path::PathBuf;
use std::sync::{RwLock, RwLockReadGuard, RwLockWriteGuard};
use failure::format_err;
use serde_derive::{Deserialize, Serialize};
use toml;
use crate::db::Database;
use crate::errors::*;
use crate::repos;
use crate::users;
pub struct Config {
pub main: MainConfig,
pub admin: Option<AdminConfig>,
pub metrics: Option<MetricsConfig>,
pub github: GithubConfig,
pub slack: SlackConfig,
pub jira: Option<JiraConfig>,
pub ldap: Option<LdapConfig>,
pub users: RwLock<users::UserConfig>,
pub repos: RwLock<repos::RepoConfig>,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ConfigModel {
pub main: MainConfig,
pub admin: Option<AdminConfig>,
pub metrics: Option<MetricsConfig>,
pub github: GithubConfig,
pub slack: SlackConfig,
pub jira: Option<JiraConfig>,
pub ldap: Option<LdapConfig>,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct MainConfig {
pub listen_addr: Option<String>,
pub clone_root_dir: String,
pub num_http_threads: Option<usize>,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct AdminConfig {
pub name: String,
pub salt: String,
pub pass_hash: String,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct MetricsConfig {
pub salt: String,
pub pass_hash: String,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct SlackConfig {
pub bot_token: String,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct GithubConfig {
pub webhook_secret: String,
pub host: String,
pub api_token: Option<String>,
pub app_id: Option<u32>,
pub app_key_file: Option<String>,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct JiraConfig {
pub host: String,
pub username: String,
pub password: String,
// review state that may be necessary before submitting for review (defaults to ["In Progress"])
pub progress_states: Option<Vec<String>>,
// review state to transition to when marked for review (defaults to ["Pending Review"])
pub review_states: Option<Vec<String>>,
// resolved state to transition to when PR is merged. (defaults to ["Resolved", "Done"])
pub resolved_states: Option<Vec<String>>,
// when marking as resolved, add this resolution (defaults to ["Fixed", "Done"])
pub fixed_resolutions: Option<Vec<String>>,
// the field name for where the version goes. (defaults to "fixVersions").
pub fix_versions_field: Option<String>,
// the field name for where the pending build versions go. expected to be a plain text field
pub pending_versions_field: Option<String>,
// optional name of role to restrict octobot comment visibility. (e.g. "Developers")
pub restrict_comment_visibility_to_role: Option<String>,
// optional suffix to add to the username for the login dialog (e.g. "@company.com")
pub login_suffix: Option<String>,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct LdapConfig {
// LDAP URL (e.g. ldaps://ldap.company.com)
pub url: String,
// either username for AD or bind DN for LDAP
pub bind_user: String,
// bind user's password
pub bind_pass: String,
// the base DN to bind to
pub base_dn: String,
// attributes to match logins against (e.g. ["samAccountName", "userPrincipalName"] for AD, ["uid, "mail"] for LDAP)
pub userid_attributes: Vec<String>,
// Additional LDAP search filter for user types and group membership
// e.g. (&(objectCategory=Person)(memberOf=cn=octobot-admins,ou=users,dc=company,dc=com))
pub search_filter: Option<String>,
}
impl Config {
// TODO: weird that `new` is used only by tests and the actual `new` is below...
pub fn new(db: Database) -> Config {
Config::new_with_model(ConfigModel::new(), db)
}
fn new_with_model(config: ConfigModel, db: Database) -> Config {
Config {
main: config.main,
admin: config.admin,
metrics: config.metrics,
github: config.github,
slack: config.slack,
jira: config.jira,
ldap: config.ldap,
users: RwLock::new(users::UserConfig::new(db.clone())),
repos: RwLock::new(repos::RepoConfig::new(db)),
}
}
pub fn save(&self, config_file: &str) -> Result<()> {
let model = ConfigModel {
main: self.main.clone(),
admin: self.admin.clone(),
metrics: self.metrics.clone(),
github: self.github.clone(),
slack: self.slack.clone(),
jira: self.jira.clone(),
ldap: self.ldap.clone(),
};
let serialized =
toml::to_string(&model).map_err(|e| format_err!("Error serializing config: {}", e))?;
let tmp_file = config_file.to_string() + ".tmp";
let bak_file = config_file.to_string() + ".bak";
let mut file = fs::File::create(&tmp_file)?;
file.write_all(serialized.as_bytes())?;
fs::rename(&config_file, &bak_file)?;
fs::rename(&tmp_file, &config_file)?;
fs::remove_file(&bak_file)?;
Ok(())
}
pub fn users(&self) -> RwLockReadGuard<users::UserConfig> {
self.users.read().unwrap()
}
pub fn users_write(&self) -> RwLockWriteGuard<users::UserConfig> {
self.users.write().unwrap()
}
pub fn repos(&self) -> RwLockReadGuard<repos::RepoConfig> {
self.repos.read().unwrap()
}
pub fn repos_write(&self) -> RwLockWriteGuard<repos::RepoConfig> {
self.repos.write().unwrap()
}
}
impl ConfigModel {
pub fn new() -> ConfigModel {
ConfigModel {
main: MainConfig {
listen_addr: None,
clone_root_dir: String::new(),
num_http_threads: None,
},
admin: None,
metrics: None,
github: GithubConfig {
webhook_secret: String::new(),
host: String::new(),
api_token: None,
app_id: None,
app_key_file: None,
},
slack: SlackConfig {
bot_token: String::new(),
},
jira: None,
ldap: None,
}
}
}
impl GithubConfig {
pub fn app_key(&self) -> Result<Vec<u8>> {
let key_file = &self
.app_key_file
.as_ref()
.expect("expected an app_key_file");
let mut file_open = fs::File::open(key_file)?;
let mut contents = vec![];
file_open.read_to_end(&mut contents)?;
Ok(contents)
}
}
impl JiraConfig {
pub fn base_url(&self) -> String {
if self.host.starts_with("http") {
self.host.clone()
} else {
format!("https://{}", self.host)
}
}
pub fn
|
(&self) -> Vec<String> {
if let Some(ref states) = self.progress_states {
states.clone() // hmm. do these w/o a clone?
} else {
vec!["In Progress".into()]
}
}
pub fn review_states(&self) -> Vec<String> {
if let Some(ref states) = self.review_states {
states.clone() // hmm. do these w/o a clone?
} else {
vec!["Pending Review".into()]
}
}
pub fn resolved_states(&self) -> Vec<String> {
if let Some(ref states) = self.resolved_states {
states.clone() // hmm. do these w/o a clone?
} else {
vec!["Resolved".into(), "Done".into()]
}
}
pub fn fixed_resolutions(&self) -> Vec<String> {
if let Some(ref res) = self.fixed_resolutions {
res.clone() // hmm. do these w/o a clone?
} else {
vec!["Fixed".into(), "Done".into()]
}
}
pub fn fix_versions(&self) -> String {
if let Some(ref field) = self.fix_versions_field {
field.clone()
} else {
"fixVersions".into()
}
}
}
pub fn new(config_file: PathBuf) -> Result<Config> {
let db_file_name = "db.sqlite3";
match config_file.file_name() {
Some(name) => {
if name == db_file_name {
return Err(format_err!("Must provide toml config file"));
}
}
None => return Err(format_err!("Provided config file has no file name")),
};
let mut db_file = config_file.clone();
db_file.set_file_name(db_file_name);
let db = Database::new(&db_file.to_string_lossy())?;
let mut config_file_open = fs::File::open(&config_file)?;
let mut config_contents = String::new();
config_file_open.read_to_string(&mut config_contents)?;
let config_model = parse_string(&config_contents)?;
Ok(Config::new_with_model(config_model, db))
}
fn parse_string(config_contents: &str) -> Result<ConfigModel> {
toml::from_str::<ConfigModel>(config_contents)
.map_err(|e| format_err!("Error parsing config: {}", e))
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_parse() {
let config_str = r#"
[main]
clone_root_dir = "./repos"
[slack]
bot_token = "foo"
[github]
webhook_secret = "abcd"
host = "git.company.com"
app_id = 2
app_key_file = "some-file.key"
"#;
let config = parse_string(config_str).unwrap();
assert_eq!("foo", config.slack.bot_token);
}
}
|
progress_states
|
identifier_name
|
htmlulistelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::HTMLUListElementBinding;
use dom::bindings::utils::{DOMString, ErrorResult};
use dom::document::AbstractDocument;
use dom::element::HTMLUListElementTypeId;
use dom::htmlelement::HTMLElement;
use dom::node::{AbstractNode, Node, ScriptView};
pub struct HTMLUListElement {
htmlelement: HTMLElement
}
impl HTMLUListElement {
pub fn new_inherited(localName: ~str, document: AbstractDocument) -> HTMLUListElement {
HTMLUListElement {
htmlelement: HTMLElement::new(HTMLUListElementTypeId, localName, document)
}
}
pub fn new(localName: ~str, document: AbstractDocument) -> AbstractNode<ScriptView> {
let element = HTMLUListElement::new_inherited(localName, document);
Node::reflect_node(@mut element, document, HTMLUListElementBinding::Wrap)
}
}
impl HTMLUListElement {
pub fn Compact(&self) -> bool {
false
}
pub fn SetCompact(&mut self, _compact: bool) -> ErrorResult {
Ok(())
}
pub fn Type(&self) -> DOMString
|
pub fn SetType(&mut self, _type: &DOMString) -> ErrorResult {
Ok(())
}
}
|
{
None
}
|
identifier_body
|
htmlulistelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::HTMLUListElementBinding;
|
use dom::element::HTMLUListElementTypeId;
use dom::htmlelement::HTMLElement;
use dom::node::{AbstractNode, Node, ScriptView};
pub struct HTMLUListElement {
htmlelement: HTMLElement
}
impl HTMLUListElement {
pub fn new_inherited(localName: ~str, document: AbstractDocument) -> HTMLUListElement {
HTMLUListElement {
htmlelement: HTMLElement::new(HTMLUListElementTypeId, localName, document)
}
}
pub fn new(localName: ~str, document: AbstractDocument) -> AbstractNode<ScriptView> {
let element = HTMLUListElement::new_inherited(localName, document);
Node::reflect_node(@mut element, document, HTMLUListElementBinding::Wrap)
}
}
impl HTMLUListElement {
pub fn Compact(&self) -> bool {
false
}
pub fn SetCompact(&mut self, _compact: bool) -> ErrorResult {
Ok(())
}
pub fn Type(&self) -> DOMString {
None
}
pub fn SetType(&mut self, _type: &DOMString) -> ErrorResult {
Ok(())
}
}
|
use dom::bindings::utils::{DOMString, ErrorResult};
use dom::document::AbstractDocument;
|
random_line_split
|
htmlulistelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::HTMLUListElementBinding;
use dom::bindings::utils::{DOMString, ErrorResult};
use dom::document::AbstractDocument;
use dom::element::HTMLUListElementTypeId;
use dom::htmlelement::HTMLElement;
use dom::node::{AbstractNode, Node, ScriptView};
pub struct
|
{
htmlelement: HTMLElement
}
impl HTMLUListElement {
pub fn new_inherited(localName: ~str, document: AbstractDocument) -> HTMLUListElement {
HTMLUListElement {
htmlelement: HTMLElement::new(HTMLUListElementTypeId, localName, document)
}
}
pub fn new(localName: ~str, document: AbstractDocument) -> AbstractNode<ScriptView> {
let element = HTMLUListElement::new_inherited(localName, document);
Node::reflect_node(@mut element, document, HTMLUListElementBinding::Wrap)
}
}
impl HTMLUListElement {
pub fn Compact(&self) -> bool {
false
}
pub fn SetCompact(&mut self, _compact: bool) -> ErrorResult {
Ok(())
}
pub fn Type(&self) -> DOMString {
None
}
pub fn SetType(&mut self, _type: &DOMString) -> ErrorResult {
Ok(())
}
}
|
HTMLUListElement
|
identifier_name
|
supports_rule.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! [@supports rules](https://drafts.csswg.org/css-conditional-3/#at-supports)
use crate::parser::ParserContext;
use crate::properties::{PropertyDeclaration, PropertyId, SourcePropertyDeclaration};
use crate::selector_parser::{SelectorImpl, SelectorParser};
use crate::shared_lock::{DeepCloneParams, DeepCloneWithLock, Locked};
use crate::shared_lock::{SharedRwLock, SharedRwLockReadGuard, ToCssWithGuard};
use crate::str::CssStringWriter;
use crate::stylesheets::{CssRuleType, CssRules, Namespaces};
use cssparser::parse_important;
use cssparser::{Delimiter, Parser, SourceLocation, Token};
use cssparser::{ParseError as CssParseError, ParserInput};
#[cfg(feature = "gecko")]
use malloc_size_of::{MallocSizeOfOps, MallocUnconditionalShallowSizeOf};
use selectors::parser::{Selector, SelectorParseErrorKind};
use servo_arc::Arc;
use std::ffi::{CStr, CString};
use std::fmt::{self, Write};
use std::str;
use style_traits::{CssWriter, ParseError, StyleParseErrorKind, ToCss};
/// An [`@supports`][supports] rule.
///
/// [supports]: https://drafts.csswg.org/css-conditional-3/#at-supports
#[derive(Debug, ToShmem)]
pub struct SupportsRule {
/// The parsed condition
pub condition: SupportsCondition,
/// Child rules
pub rules: Arc<Locked<CssRules>>,
/// The result of evaluating the condition
pub enabled: bool,
/// The line and column of the rule's source code.
pub source_location: SourceLocation,
}
impl SupportsRule {
/// Measure heap usage.
#[cfg(feature = "gecko")]
pub fn size_of(&self, guard: &SharedRwLockReadGuard, ops: &mut MallocSizeOfOps) -> usize {
// Measurement of other fields may be added later.
self.rules.unconditional_shallow_size_of(ops) +
self.rules.read_with(guard).size_of(guard, ops)
}
}
impl ToCssWithGuard for SupportsRule {
fn to_css(&self, guard: &SharedRwLockReadGuard, dest: &mut CssStringWriter) -> fmt::Result {
dest.write_str("@supports ")?;
self.condition.to_css(&mut CssWriter::new(dest))?;
self.rules.read_with(guard).to_css_block(guard, dest)
}
}
impl DeepCloneWithLock for SupportsRule {
fn deep_clone_with_lock(
&self,
lock: &SharedRwLock,
guard: &SharedRwLockReadGuard,
params: &DeepCloneParams,
) -> Self {
let rules = self.rules.read_with(guard);
SupportsRule {
condition: self.condition.clone(),
rules: Arc::new(lock.wrap(rules.deep_clone_with_lock(lock, guard, params))),
enabled: self.enabled,
source_location: self.source_location.clone(),
}
}
}
/// An @supports condition
///
/// <https://drafts.csswg.org/css-conditional-3/#at-supports>
#[derive(Clone, Debug, ToShmem)]
pub enum SupportsCondition {
/// `not (condition)`
Not(Box<SupportsCondition>),
/// `(condition)`
Parenthesized(Box<SupportsCondition>),
/// `(condition) and (condition) and (condition)..`
And(Vec<SupportsCondition>),
/// `(condition) or (condition) or (condition)..`
Or(Vec<SupportsCondition>),
/// `property-ident: value` (value can be any tokens)
Declaration(Declaration),
/// A `selector()` function.
Selector(RawSelector),
/// `-moz-bool-pref("pref-name")`
/// Since we need to pass it through FFI to get the pref value,
/// we store it as CString directly.
MozBoolPref(CString),
/// `(any tokens)` or `func(any tokens)`
FutureSyntax(String),
}
impl SupportsCondition {
/// Parse a condition
///
/// <https://drafts.csswg.org/css-conditional/#supports_condition>
pub fn parse<'i, 't>(input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> {
if input.try_parse(|i| i.expect_ident_matching("not")).is_ok() {
let inner = SupportsCondition::parse_in_parens(input)?;
return Ok(SupportsCondition::Not(Box::new(inner)));
}
let in_parens = SupportsCondition::parse_in_parens(input)?;
let location = input.current_source_location();
let (keyword, wrapper) = match input.next() {
// End of input
Err(..) => return Ok(in_parens),
Ok(&Token::Ident(ref ident)) => {
match_ignore_ascii_case! { &ident,
"and" => ("and", SupportsCondition::And as fn(_) -> _),
"or" => ("or", SupportsCondition::Or as fn(_) -> _),
_ => return Err(location.new_custom_error(SelectorParseErrorKind::UnexpectedIdent(ident.clone())))
}
},
Ok(t) => return Err(location.new_unexpected_token_error(t.clone())),
};
let mut conditions = Vec::with_capacity(2);
conditions.push(in_parens);
loop {
conditions.push(SupportsCondition::parse_in_parens(input)?);
if input
.try_parse(|input| input.expect_ident_matching(keyword))
.is_err()
{
// Did not find the expected keyword.
// If we found some other token, it will be rejected by
// `Parser::parse_entirely` somewhere up the stack.
return Ok(wrapper(conditions));
}
}
}
/// Parses a functional supports condition.
fn parse_functional<'i, 't>(
function: &str,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
match_ignore_ascii_case! { function,
// Although this is an internal syntax, it is not necessary
// to check parsing context as far as we accept any
// unexpected token as future syntax, and evaluate it to
// false when not in chrome / ua sheet.
// See https://drafts.csswg.org/css-conditional-3/#general_enclosed
"-moz-bool-pref" => {
let name = {
let name = input.expect_string()?;
CString::new(name.as_bytes())
}.map_err(|_| input.new_custom_error(StyleParseErrorKind::UnspecifiedError))?;
Ok(SupportsCondition::MozBoolPref(name))
},
"selector" => {
let pos = input.position();
consume_any_value(input)?;
Ok(SupportsCondition::Selector(RawSelector(
input.slice_from(pos).to_owned()
)))
},
_ => {
Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError))
},
}
}
/// <https://drafts.csswg.org/css-conditional-3/#supports_condition_in_parens>
fn parse_in_parens<'i, 't>(input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> {
// Whitespace is normally taken care of in `Parser::next`,
// but we want to not include it in `pos` for the SupportsCondition::FutureSyntax cases.
while input.try_parse(Parser::expect_whitespace).is_ok() {}
let pos = input.position();
let location = input.current_source_location();
match *input.next()? {
Token::ParenthesisBlock => {
let nested = input
.try_parse(|input| input.parse_nested_block(parse_condition_or_declaration));
if nested.is_ok() {
return nested;
}
},
Token::Function(ref ident) => {
let ident = ident.clone();
let nested = input.try_parse(|input| {
input.parse_nested_block(|input| {
SupportsCondition::parse_functional(&ident, input)
})
});
if nested.is_ok() {
return nested;
}
},
ref t => return Err(location.new_unexpected_token_error(t.clone())),
}
input.parse_nested_block(consume_any_value)?;
Ok(SupportsCondition::FutureSyntax(
input.slice_from(pos).to_owned(),
))
}
/// Evaluate a supports condition
pub fn eval(&self, cx: &ParserContext, namespaces: &Namespaces) -> bool {
match *self {
SupportsCondition::Not(ref cond) =>!cond.eval(cx, namespaces),
SupportsCondition::Parenthesized(ref cond) => cond.eval(cx, namespaces),
SupportsCondition::And(ref vec) => vec.iter().all(|c| c.eval(cx, namespaces)),
SupportsCondition::Or(ref vec) => vec.iter().any(|c| c.eval(cx, namespaces)),
SupportsCondition::Declaration(ref decl) => decl.eval(cx),
SupportsCondition::MozBoolPref(ref name) => eval_moz_bool_pref(name, cx),
SupportsCondition::Selector(ref selector) => selector.eval(cx, namespaces),
SupportsCondition::FutureSyntax(_) => false,
}
}
}
#[cfg(feature = "gecko")]
fn eval_moz_bool_pref(name: &CStr, cx: &ParserContext) -> bool {
use crate::gecko_bindings::bindings;
if!cx.in_ua_or_chrome_sheet() {
return false;
}
unsafe { bindings::Gecko_GetBoolPrefValue(name.as_ptr()) }
}
#[cfg(feature = "servo")]
fn eval_moz_bool_pref(_: &CStr, _: &ParserContext) -> bool {
false
}
/// supports_condition | declaration
/// <https://drafts.csswg.org/css-conditional/#dom-css-supports-conditiontext-conditiontext>
pub fn parse_condition_or_declaration<'i, 't>(
input: &mut Parser<'i, 't>,
) -> Result<SupportsCondition, ParseError<'i>> {
if let Ok(condition) = input.try_parse(SupportsCondition::parse) {
Ok(SupportsCondition::Parenthesized(Box::new(condition)))
} else {
Declaration::parse(input).map(SupportsCondition::Declaration)
}
}
impl ToCss for SupportsCondition {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
match *self {
SupportsCondition::Not(ref cond) => {
dest.write_str("not ")?;
cond.to_css(dest)
},
SupportsCondition::Parenthesized(ref cond) => {
dest.write_str("(")?;
cond.to_css(dest)?;
dest.write_str(")")
},
SupportsCondition::And(ref vec) => {
let mut first = true;
for cond in vec {
if!first {
dest.write_str(" and ")?;
}
first = false;
cond.to_css(dest)?;
}
Ok(())
},
SupportsCondition::Or(ref vec) => {
let mut first = true;
for cond in vec {
if!first {
dest.write_str(" or ")?;
}
first = false;
cond.to_css(dest)?;
}
Ok(())
},
SupportsCondition::Declaration(ref decl) => {
dest.write_str("(")?;
decl.to_css(dest)?;
dest.write_str(")")
},
SupportsCondition::Selector(ref selector) => {
dest.write_str("selector(")?;
selector.to_css(dest)?;
dest.write_str(")")
},
SupportsCondition::MozBoolPref(ref name) => {
dest.write_str("-moz-bool-pref(")?;
let name =
str::from_utf8(name.as_bytes()).expect("Should be parsed from valid UTF-8");
name.to_css(dest)?;
dest.write_str(")")
},
SupportsCondition::FutureSyntax(ref s) => dest.write_str(&s),
}
}
}
#[derive(Clone, Debug, ToShmem)]
/// A possibly-invalid CSS selector.
pub struct
|
(pub String);
impl ToCss for RawSelector {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
dest.write_str(&self.0)
}
}
impl RawSelector {
/// Tries to evaluate a `selector()` function.
pub fn eval(&self, context: &ParserContext, namespaces: &Namespaces) -> bool {
#[cfg(feature = "gecko")]
{
if!static_prefs::pref!("layout.css.supports-selector.enabled") {
return false;
}
}
let mut input = ParserInput::new(&self.0);
let mut input = Parser::new(&mut input);
input
.parse_entirely(|input| -> Result<(), CssParseError<()>> {
let parser = SelectorParser {
namespaces,
stylesheet_origin: context.stylesheet_origin,
url_data: Some(context.url_data),
};
#[allow(unused_variables)]
let selector = Selector::<SelectorImpl>::parse(&parser, input)
.map_err(|_| input.new_custom_error(()))?;
#[cfg(feature = "gecko")]
{
use crate::selector_parser::PseudoElement;
use selectors::parser::Component;
let has_any_unknown_webkit_pseudo = selector.has_pseudo_element() &&
selector.iter_raw_match_order().any(|component| {
matches!(
*component,
Component::PseudoElement(PseudoElement::UnknownWebkit(..))
)
});
if has_any_unknown_webkit_pseudo {
return Err(input.new_custom_error(()));
}
}
Ok(())
})
.is_ok()
}
}
#[derive(Clone, Debug, ToShmem)]
/// A possibly-invalid property declaration
pub struct Declaration(pub String);
impl ToCss for Declaration {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
dest.write_str(&self.0)
}
}
/// <https://drafts.csswg.org/css-syntax-3/#typedef-any-value>
fn consume_any_value<'i, 't>(input: &mut Parser<'i, 't>) -> Result<(), ParseError<'i>> {
input.expect_no_error_token().map_err(|err| err.into())
}
impl Declaration {
/// Parse a declaration
pub fn parse<'i, 't>(input: &mut Parser<'i, 't>) -> Result<Declaration, ParseError<'i>> {
let pos = input.position();
input.expect_ident()?;
input.expect_colon()?;
consume_any_value(input)?;
Ok(Declaration(input.slice_from(pos).to_owned()))
}
/// Determine if a declaration parses
///
/// <https://drafts.csswg.org/css-conditional-3/#support-definition>
pub fn eval(&self, context: &ParserContext) -> bool {
debug_assert_eq!(context.rule_type(), CssRuleType::Style);
let mut input = ParserInput::new(&self.0);
let mut input = Parser::new(&mut input);
input
.parse_entirely(|input| -> Result<(), CssParseError<()>> {
let prop = input.expect_ident_cloned().unwrap();
input.expect_colon().unwrap();
let id =
PropertyId::parse(&prop, context).map_err(|_| input.new_custom_error(()))?;
let mut declarations = SourcePropertyDeclaration::new();
input.parse_until_before(Delimiter::Bang, |input| {
PropertyDeclaration::parse_into(&mut declarations, id, &context, input)
.map_err(|_| input.new_custom_error(()))
})?;
let _ = input.try_parse(parse_important);
Ok(())
})
.is_ok()
}
}
|
RawSelector
|
identifier_name
|
supports_rule.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! [@supports rules](https://drafts.csswg.org/css-conditional-3/#at-supports)
use crate::parser::ParserContext;
use crate::properties::{PropertyDeclaration, PropertyId, SourcePropertyDeclaration};
use crate::selector_parser::{SelectorImpl, SelectorParser};
use crate::shared_lock::{DeepCloneParams, DeepCloneWithLock, Locked};
use crate::shared_lock::{SharedRwLock, SharedRwLockReadGuard, ToCssWithGuard};
use crate::str::CssStringWriter;
use crate::stylesheets::{CssRuleType, CssRules, Namespaces};
use cssparser::parse_important;
use cssparser::{Delimiter, Parser, SourceLocation, Token};
use cssparser::{ParseError as CssParseError, ParserInput};
#[cfg(feature = "gecko")]
use malloc_size_of::{MallocSizeOfOps, MallocUnconditionalShallowSizeOf};
use selectors::parser::{Selector, SelectorParseErrorKind};
use servo_arc::Arc;
use std::ffi::{CStr, CString};
use std::fmt::{self, Write};
use std::str;
use style_traits::{CssWriter, ParseError, StyleParseErrorKind, ToCss};
/// An [`@supports`][supports] rule.
///
/// [supports]: https://drafts.csswg.org/css-conditional-3/#at-supports
#[derive(Debug, ToShmem)]
pub struct SupportsRule {
/// The parsed condition
pub condition: SupportsCondition,
/// Child rules
pub rules: Arc<Locked<CssRules>>,
/// The result of evaluating the condition
pub enabled: bool,
/// The line and column of the rule's source code.
pub source_location: SourceLocation,
}
impl SupportsRule {
/// Measure heap usage.
#[cfg(feature = "gecko")]
pub fn size_of(&self, guard: &SharedRwLockReadGuard, ops: &mut MallocSizeOfOps) -> usize {
// Measurement of other fields may be added later.
self.rules.unconditional_shallow_size_of(ops) +
self.rules.read_with(guard).size_of(guard, ops)
}
}
impl ToCssWithGuard for SupportsRule {
fn to_css(&self, guard: &SharedRwLockReadGuard, dest: &mut CssStringWriter) -> fmt::Result {
dest.write_str("@supports ")?;
self.condition.to_css(&mut CssWriter::new(dest))?;
self.rules.read_with(guard).to_css_block(guard, dest)
}
}
impl DeepCloneWithLock for SupportsRule {
fn deep_clone_with_lock(
&self,
lock: &SharedRwLock,
guard: &SharedRwLockReadGuard,
params: &DeepCloneParams,
) -> Self {
let rules = self.rules.read_with(guard);
SupportsRule {
condition: self.condition.clone(),
rules: Arc::new(lock.wrap(rules.deep_clone_with_lock(lock, guard, params))),
enabled: self.enabled,
source_location: self.source_location.clone(),
}
}
}
/// An @supports condition
///
/// <https://drafts.csswg.org/css-conditional-3/#at-supports>
#[derive(Clone, Debug, ToShmem)]
pub enum SupportsCondition {
/// `not (condition)`
Not(Box<SupportsCondition>),
/// `(condition)`
Parenthesized(Box<SupportsCondition>),
/// `(condition) and (condition) and (condition)..`
And(Vec<SupportsCondition>),
/// `(condition) or (condition) or (condition)..`
Or(Vec<SupportsCondition>),
/// `property-ident: value` (value can be any tokens)
Declaration(Declaration),
/// A `selector()` function.
Selector(RawSelector),
/// `-moz-bool-pref("pref-name")`
/// Since we need to pass it through FFI to get the pref value,
/// we store it as CString directly.
MozBoolPref(CString),
/// `(any tokens)` or `func(any tokens)`
FutureSyntax(String),
}
impl SupportsCondition {
/// Parse a condition
///
/// <https://drafts.csswg.org/css-conditional/#supports_condition>
pub fn parse<'i, 't>(input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> {
if input.try_parse(|i| i.expect_ident_matching("not")).is_ok() {
let inner = SupportsCondition::parse_in_parens(input)?;
return Ok(SupportsCondition::Not(Box::new(inner)));
}
let in_parens = SupportsCondition::parse_in_parens(input)?;
let location = input.current_source_location();
let (keyword, wrapper) = match input.next() {
// End of input
Err(..) => return Ok(in_parens),
Ok(&Token::Ident(ref ident)) => {
match_ignore_ascii_case! { &ident,
"and" => ("and", SupportsCondition::And as fn(_) -> _),
"or" => ("or", SupportsCondition::Or as fn(_) -> _),
_ => return Err(location.new_custom_error(SelectorParseErrorKind::UnexpectedIdent(ident.clone())))
}
},
Ok(t) => return Err(location.new_unexpected_token_error(t.clone())),
};
let mut conditions = Vec::with_capacity(2);
conditions.push(in_parens);
loop {
conditions.push(SupportsCondition::parse_in_parens(input)?);
if input
.try_parse(|input| input.expect_ident_matching(keyword))
.is_err()
{
// Did not find the expected keyword.
// If we found some other token, it will be rejected by
// `Parser::parse_entirely` somewhere up the stack.
return Ok(wrapper(conditions));
}
}
}
/// Parses a functional supports condition.
fn parse_functional<'i, 't>(
function: &str,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
match_ignore_ascii_case! { function,
// Although this is an internal syntax, it is not necessary
// to check parsing context as far as we accept any
// unexpected token as future syntax, and evaluate it to
// false when not in chrome / ua sheet.
// See https://drafts.csswg.org/css-conditional-3/#general_enclosed
"-moz-bool-pref" => {
let name = {
let name = input.expect_string()?;
CString::new(name.as_bytes())
}.map_err(|_| input.new_custom_error(StyleParseErrorKind::UnspecifiedError))?;
Ok(SupportsCondition::MozBoolPref(name))
},
"selector" => {
let pos = input.position();
consume_any_value(input)?;
Ok(SupportsCondition::Selector(RawSelector(
input.slice_from(pos).to_owned()
)))
},
_ => {
Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError))
},
}
}
/// <https://drafts.csswg.org/css-conditional-3/#supports_condition_in_parens>
fn parse_in_parens<'i, 't>(input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> {
// Whitespace is normally taken care of in `Parser::next`,
// but we want to not include it in `pos` for the SupportsCondition::FutureSyntax cases.
while input.try_parse(Parser::expect_whitespace).is_ok() {}
let pos = input.position();
let location = input.current_source_location();
match *input.next()? {
Token::ParenthesisBlock => {
let nested = input
.try_parse(|input| input.parse_nested_block(parse_condition_or_declaration));
if nested.is_ok() {
return nested;
}
},
Token::Function(ref ident) => {
let ident = ident.clone();
let nested = input.try_parse(|input| {
input.parse_nested_block(|input| {
SupportsCondition::parse_functional(&ident, input)
})
});
if nested.is_ok() {
return nested;
}
},
ref t => return Err(location.new_unexpected_token_error(t.clone())),
}
input.parse_nested_block(consume_any_value)?;
Ok(SupportsCondition::FutureSyntax(
input.slice_from(pos).to_owned(),
))
}
/// Evaluate a supports condition
pub fn eval(&self, cx: &ParserContext, namespaces: &Namespaces) -> bool {
match *self {
SupportsCondition::Not(ref cond) =>!cond.eval(cx, namespaces),
SupportsCondition::Parenthesized(ref cond) => cond.eval(cx, namespaces),
SupportsCondition::And(ref vec) => vec.iter().all(|c| c.eval(cx, namespaces)),
SupportsCondition::Or(ref vec) => vec.iter().any(|c| c.eval(cx, namespaces)),
SupportsCondition::Declaration(ref decl) => decl.eval(cx),
SupportsCondition::MozBoolPref(ref name) => eval_moz_bool_pref(name, cx),
SupportsCondition::Selector(ref selector) => selector.eval(cx, namespaces),
SupportsCondition::FutureSyntax(_) => false,
|
}
}
}
#[cfg(feature = "gecko")]
fn eval_moz_bool_pref(name: &CStr, cx: &ParserContext) -> bool {
use crate::gecko_bindings::bindings;
if!cx.in_ua_or_chrome_sheet() {
return false;
}
unsafe { bindings::Gecko_GetBoolPrefValue(name.as_ptr()) }
}
#[cfg(feature = "servo")]
fn eval_moz_bool_pref(_: &CStr, _: &ParserContext) -> bool {
false
}
/// supports_condition | declaration
/// <https://drafts.csswg.org/css-conditional/#dom-css-supports-conditiontext-conditiontext>
pub fn parse_condition_or_declaration<'i, 't>(
input: &mut Parser<'i, 't>,
) -> Result<SupportsCondition, ParseError<'i>> {
if let Ok(condition) = input.try_parse(SupportsCondition::parse) {
Ok(SupportsCondition::Parenthesized(Box::new(condition)))
} else {
Declaration::parse(input).map(SupportsCondition::Declaration)
}
}
impl ToCss for SupportsCondition {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
match *self {
SupportsCondition::Not(ref cond) => {
dest.write_str("not ")?;
cond.to_css(dest)
},
SupportsCondition::Parenthesized(ref cond) => {
dest.write_str("(")?;
cond.to_css(dest)?;
dest.write_str(")")
},
SupportsCondition::And(ref vec) => {
let mut first = true;
for cond in vec {
if!first {
dest.write_str(" and ")?;
}
first = false;
cond.to_css(dest)?;
}
Ok(())
},
SupportsCondition::Or(ref vec) => {
let mut first = true;
for cond in vec {
if!first {
dest.write_str(" or ")?;
}
first = false;
cond.to_css(dest)?;
}
Ok(())
},
SupportsCondition::Declaration(ref decl) => {
dest.write_str("(")?;
decl.to_css(dest)?;
dest.write_str(")")
},
SupportsCondition::Selector(ref selector) => {
dest.write_str("selector(")?;
selector.to_css(dest)?;
dest.write_str(")")
},
SupportsCondition::MozBoolPref(ref name) => {
dest.write_str("-moz-bool-pref(")?;
let name =
str::from_utf8(name.as_bytes()).expect("Should be parsed from valid UTF-8");
name.to_css(dest)?;
dest.write_str(")")
},
SupportsCondition::FutureSyntax(ref s) => dest.write_str(&s),
}
}
}
#[derive(Clone, Debug, ToShmem)]
/// A possibly-invalid CSS selector.
pub struct RawSelector(pub String);
impl ToCss for RawSelector {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
dest.write_str(&self.0)
}
}
impl RawSelector {
/// Tries to evaluate a `selector()` function.
pub fn eval(&self, context: &ParserContext, namespaces: &Namespaces) -> bool {
#[cfg(feature = "gecko")]
{
if!static_prefs::pref!("layout.css.supports-selector.enabled") {
return false;
}
}
let mut input = ParserInput::new(&self.0);
let mut input = Parser::new(&mut input);
input
.parse_entirely(|input| -> Result<(), CssParseError<()>> {
let parser = SelectorParser {
namespaces,
stylesheet_origin: context.stylesheet_origin,
url_data: Some(context.url_data),
};
#[allow(unused_variables)]
let selector = Selector::<SelectorImpl>::parse(&parser, input)
.map_err(|_| input.new_custom_error(()))?;
#[cfg(feature = "gecko")]
{
use crate::selector_parser::PseudoElement;
use selectors::parser::Component;
let has_any_unknown_webkit_pseudo = selector.has_pseudo_element() &&
selector.iter_raw_match_order().any(|component| {
matches!(
*component,
Component::PseudoElement(PseudoElement::UnknownWebkit(..))
)
});
if has_any_unknown_webkit_pseudo {
return Err(input.new_custom_error(()));
}
}
Ok(())
})
.is_ok()
}
}
#[derive(Clone, Debug, ToShmem)]
/// A possibly-invalid property declaration
pub struct Declaration(pub String);
impl ToCss for Declaration {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
dest.write_str(&self.0)
}
}
/// <https://drafts.csswg.org/css-syntax-3/#typedef-any-value>
fn consume_any_value<'i, 't>(input: &mut Parser<'i, 't>) -> Result<(), ParseError<'i>> {
input.expect_no_error_token().map_err(|err| err.into())
}
impl Declaration {
/// Parse a declaration
pub fn parse<'i, 't>(input: &mut Parser<'i, 't>) -> Result<Declaration, ParseError<'i>> {
let pos = input.position();
input.expect_ident()?;
input.expect_colon()?;
consume_any_value(input)?;
Ok(Declaration(input.slice_from(pos).to_owned()))
}
/// Determine if a declaration parses
///
/// <https://drafts.csswg.org/css-conditional-3/#support-definition>
pub fn eval(&self, context: &ParserContext) -> bool {
debug_assert_eq!(context.rule_type(), CssRuleType::Style);
let mut input = ParserInput::new(&self.0);
let mut input = Parser::new(&mut input);
input
.parse_entirely(|input| -> Result<(), CssParseError<()>> {
let prop = input.expect_ident_cloned().unwrap();
input.expect_colon().unwrap();
let id =
PropertyId::parse(&prop, context).map_err(|_| input.new_custom_error(()))?;
let mut declarations = SourcePropertyDeclaration::new();
input.parse_until_before(Delimiter::Bang, |input| {
PropertyDeclaration::parse_into(&mut declarations, id, &context, input)
.map_err(|_| input.new_custom_error(()))
})?;
let _ = input.try_parse(parse_important);
Ok(())
})
.is_ok()
}
}
|
random_line_split
|
|
ffi.rs
|
#![allow(dead_code)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
use std::os::raw::c_ulong;
pub type dssim_px_t = f32;
#[link(name = "Accelerate", kind = "framework")]
#[cfg(target_os = "macos")]
extern "C" {
pub fn vImageConvolve_PlanarF(src: *const vImage_Buffer<*const f32>,
dest: *mut vImage_Buffer<*mut f32>,
tempBuffer: *mut f32,
srcOffsetToROI_X: vImagePixelCount,
srcOffsetToROI_Y: vImagePixelCount,
kernel: *const f32,
kernel_height: u32,
kernel_width: u32,
backgroundColor: Pixel_F,
flags: vImage_Flags) -> vImage_Error;
}
pub type vImagePixelCount = c_ulong;
pub type vImage_Error = isize;
pub type Pixel_F = f32;
#[repr(u32)]
pub enum vImage_Flags {
kvImageNoFlags = 0,
/* Operate on red, green and blue channels only. Alpha is copied from source
to destination. For Interleaved formats only. */
kvImageLeaveAlphaUnchanged = 1,
/* Copy edge pixels. Convolution Only. */
kvImageCopyInPlace = 2,
/* Use the background color for missing pixels. */
kvImageBackgroundColorFill = 4,
/* Use the nearest pixel for missing pixels. */
kvImageEdgeExtend = 8,
/* Pass to turn off internal tiling and disable internal multithreading. Use this if
you want to do your own tiling, or to use the Min/Max filters in place. */
kvImageDoNotTile = 16,
/* Use a higher quality, slower resampling filter for Geometry operations
(shear, scale, rotate, affine transform, etc.) */
kvImageHighQualityResampling = 32,
/* Use only the part of the kernel that overlaps the image. For integer kernels,
real_divisor = divisor * (sum of used kernel elements) / (sum of kernel elements).
This should preserve image brightness at the edges. Convolution only. */
kvImageTruncateKernel = 64,
/* The function will return the number of bytes required for the temp buffer.
If this value is negative, it is an error, per standard usage. */
kvImageGetTempBufferSize = 128,
/* Some functions such as vImageConverter_CreateWithCGImageFormat have so many possible error conditions
that developers may need more help than a simple error code to diagnose problems. When this
flag is set and an error is encountered, an informative error message will be logged to the Apple
System Logger (ASL). The output should be visible in Console.app. */
kvImagePrintDiagnosticsToConsole = 256,
/* Pass this flag to prevent vImage from allocating additional storage. */
kvImageNoAllocate = 512,
/* Use methods that are HDR-aware, capable of providing correct results for input images with pixel values
outside the otherwise limited (typically [-2,2]) range. This may be slower. */
kvImageHDRContent = 1024
}
#[repr(C)]
pub struct
|
<T> {
pub data: T,
pub height: vImagePixelCount,
pub width: vImagePixelCount,
pub rowBytes: usize,
}
|
vImage_Buffer
|
identifier_name
|
ffi.rs
|
#![allow(dead_code)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
use std::os::raw::c_ulong;
pub type dssim_px_t = f32;
#[link(name = "Accelerate", kind = "framework")]
#[cfg(target_os = "macos")]
extern "C" {
pub fn vImageConvolve_PlanarF(src: *const vImage_Buffer<*const f32>,
dest: *mut vImage_Buffer<*mut f32>,
tempBuffer: *mut f32,
srcOffsetToROI_X: vImagePixelCount,
srcOffsetToROI_Y: vImagePixelCount,
kernel: *const f32,
kernel_height: u32,
kernel_width: u32,
backgroundColor: Pixel_F,
flags: vImage_Flags) -> vImage_Error;
}
pub type vImagePixelCount = c_ulong;
pub type vImage_Error = isize;
pub type Pixel_F = f32;
#[repr(u32)]
pub enum vImage_Flags {
kvImageNoFlags = 0,
/* Operate on red, green and blue channels only. Alpha is copied from source
to destination. For Interleaved formats only. */
kvImageLeaveAlphaUnchanged = 1,
/* Copy edge pixels. Convolution Only. */
kvImageCopyInPlace = 2,
/* Use the background color for missing pixels. */
kvImageBackgroundColorFill = 4,
|
/* Use the nearest pixel for missing pixels. */
kvImageEdgeExtend = 8,
/* Pass to turn off internal tiling and disable internal multithreading. Use this if
you want to do your own tiling, or to use the Min/Max filters in place. */
kvImageDoNotTile = 16,
/* Use a higher quality, slower resampling filter for Geometry operations
(shear, scale, rotate, affine transform, etc.) */
kvImageHighQualityResampling = 32,
/* Use only the part of the kernel that overlaps the image. For integer kernels,
real_divisor = divisor * (sum of used kernel elements) / (sum of kernel elements).
This should preserve image brightness at the edges. Convolution only. */
kvImageTruncateKernel = 64,
/* The function will return the number of bytes required for the temp buffer.
If this value is negative, it is an error, per standard usage. */
kvImageGetTempBufferSize = 128,
/* Some functions such as vImageConverter_CreateWithCGImageFormat have so many possible error conditions
that developers may need more help than a simple error code to diagnose problems. When this
flag is set and an error is encountered, an informative error message will be logged to the Apple
System Logger (ASL). The output should be visible in Console.app. */
kvImagePrintDiagnosticsToConsole = 256,
/* Pass this flag to prevent vImage from allocating additional storage. */
kvImageNoAllocate = 512,
/* Use methods that are HDR-aware, capable of providing correct results for input images with pixel values
outside the otherwise limited (typically [-2,2]) range. This may be slower. */
kvImageHDRContent = 1024
}
#[repr(C)]
pub struct vImage_Buffer<T> {
pub data: T,
pub height: vImagePixelCount,
pub width: vImagePixelCount,
pub rowBytes: usize,
}
|
random_line_split
|
|
nested-matchs.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn baz() -> ! { panic!(); }
fn foo() {
match Some::<int>(5) {
Some::<int>(_x) => {
let mut bar;
match None::<int> { None::<int> => { bar = 5; } _ => { baz(); } }
println!("{}", bar);
}
None::<int> => { println!("hello"); }
|
}
}
pub fn main() { foo(); }
|
random_line_split
|
|
nested-matchs.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn
|
() -> ! { panic!(); }
fn foo() {
match Some::<int>(5) {
Some::<int>(_x) => {
let mut bar;
match None::<int> { None::<int> => { bar = 5; } _ => { baz(); } }
println!("{}", bar);
}
None::<int> => { println!("hello"); }
}
}
pub fn main() { foo(); }
|
baz
|
identifier_name
|
nested-matchs.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn baz() -> ! { panic!(); }
fn foo()
|
}
pub fn main() { foo(); }
|
{
match Some::<int>(5) {
Some::<int>(_x) => {
let mut bar;
match None::<int> { None::<int> => { bar = 5; } _ => { baz(); } }
println!("{}", bar);
}
None::<int> => { println!("hello"); }
}
|
identifier_body
|
file_logger.rs
|
use std::io::Write;
use std::fs::{File, OpenOptions};
use {Logger, MessageType, format_message};
/// Write log to text file.
pub struct FileLogger {
log_file: String
}
impl FileLogger {
pub fn
|
(file_path: &str) -> FileLogger {
File::create(file_path).unwrap();
FileLogger { log_file: file_path.to_string() }
}
}
impl Logger for FileLogger {
fn log(&self, msg_type:MessageType, message:&str) {
let mut file = OpenOptions::new()
.append(true)
.write(true)
.open(&self.log_file).unwrap();
file.write(format_message(msg_type, message).as_bytes()).unwrap();
}
}
#[test]
fn file_logger_works() {
let logger = FileLogger::new("test.log");
logger.info("Test info message.");
logger.warn("Test warn message.");
logger.error("Test error message.");
}
|
new
|
identifier_name
|
file_logger.rs
|
use std::io::Write;
use std::fs::{File, OpenOptions};
|
/// Write log to text file.
pub struct FileLogger {
log_file: String
}
impl FileLogger {
pub fn new(file_path: &str) -> FileLogger {
File::create(file_path).unwrap();
FileLogger { log_file: file_path.to_string() }
}
}
impl Logger for FileLogger {
fn log(&self, msg_type:MessageType, message:&str) {
let mut file = OpenOptions::new()
.append(true)
.write(true)
.open(&self.log_file).unwrap();
file.write(format_message(msg_type, message).as_bytes()).unwrap();
}
}
#[test]
fn file_logger_works() {
let logger = FileLogger::new("test.log");
logger.info("Test info message.");
logger.warn("Test warn message.");
logger.error("Test error message.");
}
|
use {Logger, MessageType, format_message};
|
random_line_split
|
file_logger.rs
|
use std::io::Write;
use std::fs::{File, OpenOptions};
use {Logger, MessageType, format_message};
/// Write log to text file.
pub struct FileLogger {
log_file: String
}
impl FileLogger {
pub fn new(file_path: &str) -> FileLogger {
File::create(file_path).unwrap();
FileLogger { log_file: file_path.to_string() }
}
}
impl Logger for FileLogger {
fn log(&self, msg_type:MessageType, message:&str) {
let mut file = OpenOptions::new()
.append(true)
.write(true)
.open(&self.log_file).unwrap();
file.write(format_message(msg_type, message).as_bytes()).unwrap();
}
}
#[test]
fn file_logger_works()
|
{
let logger = FileLogger::new("test.log");
logger.info("Test info message.");
logger.warn("Test warn message.");
logger.error("Test error message.");
}
|
identifier_body
|
|
alias-uninit-value.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Regression test for issue #374
enum sty { ty_nil, }
struct RawT {struct_: sty, cname: Option<~str>, hash: uint}
fn mk_raw_ty(st: sty, cname: Option<~str>) -> RawT {
return RawT {struct_: st, cname: cname, hash: 0u};
}
pub fn
|
() { mk_raw_ty(ty_nil, None::<~str>); }
|
main
|
identifier_name
|
alias-uninit-value.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Regression test for issue #374
enum sty { ty_nil, }
struct RawT {struct_: sty, cname: Option<~str>, hash: uint}
|
return RawT {struct_: st, cname: cname, hash: 0u};
}
pub fn main() { mk_raw_ty(ty_nil, None::<~str>); }
|
fn mk_raw_ty(st: sty, cname: Option<~str>) -> RawT {
|
random_line_split
|
utils.rs
|
use std::collections::{HashSet, HashMap};
use std::iter::FromIterator;
use std::cmp::min;
use std::error::Error;
use rand::{thread_rng, Rng};
use image::{GenericImage, DynamicImage, ImageBuffer, Luma, FilterType};
use image::imageops::rotate270;
use image::imageops::colorops::grayscale;
pub fn slice(count: u32, limit: u32) -> Result<HashSet<u32>, Box<Error>>
|
fn convert(im: &DynamicImage,
size: u32)
-> Result<(f32, ImageBuffer<Luma<u8>, Vec<u8>>), Box<Error>> {
let mut conv = im.clone();
let (w, h) = conv.dimensions();
let mul = match size > 0 && (w > size || h > size) {
true => {
conv = conv.resize(size, size, FilterType::Lanczos3);
match w > h {
true => w as f32 / size as f32,
false => h as f32 / size as f32,
}
}
false => 1.0,
};
Ok((mul, grayscale(&conv)))
}
fn chop(conv: &mut ImageBuffer<Luma<u8>, Vec<u8>>,
limit: u32)
-> Result<ImageBuffer<Luma<u8>, Vec<u8>>, Box<Error>> {
Ok(match limit > 0 {
true => {
let (w, h) = conv.dimensions();
let rows = slice(w, limit)?;
let mut strips: ImageBuffer<Luma<u8>, Vec<u8>> = ImageBuffer::new(rows.len() as u32, h);
for (i, row) in rows.iter().enumerate() {
strips.copy_from(&conv.sub_image(*row, 0, 1, h), i as u32, 0);
}
strips
}
false => conv.clone(),
})
}
fn entropy(strip: &mut ImageBuffer<Luma<u8>, Vec<u8>>,
x: u32,
y: u32,
width: u32,
height: u32)
-> Result<f32, Box<Error>> {
let sub = strip.sub_image(x, y, width, height);
let (w, h) = sub.dimensions();
let len = (w * h) as f32;
let hm = sub.pixels().fold(HashMap::new(), |mut acc, e| {
*acc.entry(e.2.data[0]).or_insert(0) += 1;
acc
});
Ok(hm.values().fold(0f32, |acc, &x| {
let f = x as f32 / len;
acc - (f * f.log2())
}))
}
pub fn scan(im: &DynamicImage,
size: Option<u32>,
columns: Option<u32>,
depth: Option<f32>,
threshold: Option<f32>,
deep: Option<bool>)
-> Result<Vec<u32>, Box<Error>> {
let threshold = threshold.unwrap_or(0.5);
let (mul, mut conv) = convert(im, size.unwrap_or(0))?;
let mut borders = Vec::new();
let depth = depth.unwrap_or(0.25);
let deep = deep.unwrap_or(true);
for side in 0..4 {
let mut strips = chop(&mut conv, columns.unwrap_or(0))?;
let (w, h) = strips.dimensions();
let height = (depth * h as f32).round() as u32;
let mut border = 0;
loop {
let mut start = border + 1;
for center in (border + 1)..height {
if entropy(&mut strips, 0, border, w, center)? > 0.0 {
start = center;
break;
}
}
let mut sub = 0;
let mut delta = threshold;
for center in (start..height).rev() {
let upper = entropy(&mut strips, 0, border, w, center - border)?;
let lower = entropy(&mut strips, 0, center, w, center - border)?;
                let diff = match lower != 0.0 {
true => upper as f32 / lower as f32,
false => delta,
};
if diff < delta && diff < threshold {
delta = diff;
sub = center;
}
}
if sub == 0 || border == sub {
break;
}
border = sub;
            if !deep {
break;
}
}
borders.push((border as f32 * mul) as u32);
        if side != 3 {
conv = rotate270(&conv);
}
}
Ok(borders)
}
|
{
let mut indexes: Vec<u32> = (0..count).collect();
if limit > 0 && limit < count {
let mut rng = thread_rng();
rng.shuffle(&mut indexes);
let len = indexes.len();
indexes.truncate(min(len, limit as usize));
}
Ok(HashSet::from_iter(indexes.iter().cloned()))
}
|
identifier_body
|
utils.rs
|
use std::collections::{HashSet, HashMap};
use std::iter::FromIterator;
use std::cmp::min;
use std::error::Error;
use rand::{thread_rng, Rng};
use image::{GenericImage, DynamicImage, ImageBuffer, Luma, FilterType};
use image::imageops::rotate270;
use image::imageops::colorops::grayscale;
pub fn slice(count: u32, limit: u32) -> Result<HashSet<u32>, Box<Error>> {
let mut indexes: Vec<u32> = (0..count).collect();
if limit > 0 && limit < count {
let mut rng = thread_rng();
rng.shuffle(&mut indexes);
let len = indexes.len();
indexes.truncate(min(len, limit as usize));
}
Ok(HashSet::from_iter(indexes.iter().cloned()))
}
fn convert(im: &DynamicImage,
size: u32)
-> Result<(f32, ImageBuffer<Luma<u8>, Vec<u8>>), Box<Error>> {
let mut conv = im.clone();
let (w, h) = conv.dimensions();
let mul = match size > 0 && (w > size || h > size) {
true => {
conv = conv.resize(size, size, FilterType::Lanczos3);
match w > h {
true => w as f32 / size as f32,
false => h as f32 / size as f32,
}
}
false => 1.0,
};
Ok((mul, grayscale(&conv)))
}
fn chop(conv: &mut ImageBuffer<Luma<u8>, Vec<u8>>,
limit: u32)
-> Result<ImageBuffer<Luma<u8>, Vec<u8>>, Box<Error>> {
Ok(match limit > 0 {
true => {
let (w, h) = conv.dimensions();
let rows = slice(w, limit)?;
let mut strips: ImageBuffer<Luma<u8>, Vec<u8>> = ImageBuffer::new(rows.len() as u32, h);
for (i, row) in rows.iter().enumerate() {
strips.copy_from(&conv.sub_image(*row, 0, 1, h), i as u32, 0);
}
strips
}
false => conv.clone(),
})
}
fn
|
(strip: &mut ImageBuffer<Luma<u8>, Vec<u8>>,
x: u32,
y: u32,
width: u32,
height: u32)
-> Result<f32, Box<Error>> {
let sub = strip.sub_image(x, y, width, height);
let (w, h) = sub.dimensions();
let len = (w * h) as f32;
let hm = sub.pixels().fold(HashMap::new(), |mut acc, e| {
*acc.entry(e.2.data[0]).or_insert(0) += 1;
acc
});
Ok(hm.values().fold(0f32, |acc, &x| {
let f = x as f32 / len;
acc - (f * f.log2())
}))
}
pub fn scan(im: &DynamicImage,
size: Option<u32>,
columns: Option<u32>,
depth: Option<f32>,
threshold: Option<f32>,
deep: Option<bool>)
-> Result<Vec<u32>, Box<Error>> {
let threshold = threshold.unwrap_or(0.5);
let (mul, mut conv) = convert(im, size.unwrap_or(0))?;
let mut borders = Vec::new();
let depth = depth.unwrap_or(0.25);
let deep = deep.unwrap_or(true);
for side in 0..4 {
let mut strips = chop(&mut conv, columns.unwrap_or(0))?;
let (w, h) = strips.dimensions();
let height = (depth * h as f32).round() as u32;
let mut border = 0;
loop {
let mut start = border + 1;
for center in (border + 1)..height {
if entropy(&mut strips, 0, border, w, center)? > 0.0 {
start = center;
break;
}
}
let mut sub = 0;
let mut delta = threshold;
for center in (start..height).rev() {
let upper = entropy(&mut strips, 0, border, w, center - border)?;
let lower = entropy(&mut strips, 0, center, w, center - border)?;
                let diff = match lower != 0.0 {
true => upper as f32 / lower as f32,
false => delta,
};
if diff < delta && diff < threshold {
delta = diff;
sub = center;
}
}
if sub == 0 || border == sub {
break;
}
border = sub;
            if !deep {
break;
}
}
borders.push((border as f32 * mul) as u32);
        if side != 3 {
conv = rotate270(&conv);
}
}
Ok(borders)
}
|
entropy
|
identifier_name
|
utils.rs
|
use std::collections::{HashSet, HashMap};
use std::iter::FromIterator;
use std::cmp::min;
use std::error::Error;
use rand::{thread_rng, Rng};
use image::{GenericImage, DynamicImage, ImageBuffer, Luma, FilterType};
use image::imageops::rotate270;
use image::imageops::colorops::grayscale;
pub fn slice(count: u32, limit: u32) -> Result<HashSet<u32>, Box<Error>> {
let mut indexes: Vec<u32> = (0..count).collect();
if limit > 0 && limit < count {
let mut rng = thread_rng();
rng.shuffle(&mut indexes);
let len = indexes.len();
indexes.truncate(min(len, limit as usize));
}
Ok(HashSet::from_iter(indexes.iter().cloned()))
}
fn convert(im: &DynamicImage,
size: u32)
-> Result<(f32, ImageBuffer<Luma<u8>, Vec<u8>>), Box<Error>> {
let mut conv = im.clone();
let (w, h) = conv.dimensions();
let mul = match size > 0 && (w > size || h > size) {
true => {
conv = conv.resize(size, size, FilterType::Lanczos3);
match w > h {
true => w as f32 / size as f32,
false => h as f32 / size as f32,
}
}
false => 1.0,
};
Ok((mul, grayscale(&conv)))
}
fn chop(conv: &mut ImageBuffer<Luma<u8>, Vec<u8>>,
limit: u32)
-> Result<ImageBuffer<Luma<u8>, Vec<u8>>, Box<Error>> {
Ok(match limit > 0 {
true => {
let (w, h) = conv.dimensions();
let rows = slice(w, limit)?;
let mut strips: ImageBuffer<Luma<u8>, Vec<u8>> = ImageBuffer::new(rows.len() as u32, h);
for (i, row) in rows.iter().enumerate() {
strips.copy_from(&conv.sub_image(*row, 0, 1, h), i as u32, 0);
}
strips
}
false => conv.clone(),
})
}
fn entropy(strip: &mut ImageBuffer<Luma<u8>, Vec<u8>>,
x: u32,
y: u32,
width: u32,
height: u32)
-> Result<f32, Box<Error>> {
let sub = strip.sub_image(x, y, width, height);
let (w, h) = sub.dimensions();
let len = (w * h) as f32;
let hm = sub.pixels().fold(HashMap::new(), |mut acc, e| {
*acc.entry(e.2.data[0]).or_insert(0) += 1;
acc
});
Ok(hm.values().fold(0f32, |acc, &x| {
let f = x as f32 / len;
acc - (f * f.log2())
}))
}
pub fn scan(im: &DynamicImage,
size: Option<u32>,
columns: Option<u32>,
depth: Option<f32>,
threshold: Option<f32>,
deep: Option<bool>)
-> Result<Vec<u32>, Box<Error>> {
let threshold = threshold.unwrap_or(0.5);
let (mul, mut conv) = convert(im, size.unwrap_or(0))?;
let mut borders = Vec::new();
let depth = depth.unwrap_or(0.25);
let deep = deep.unwrap_or(true);
for side in 0..4 {
let mut strips = chop(&mut conv, columns.unwrap_or(0))?;
let (w, h) = strips.dimensions();
let height = (depth * h as f32).round() as u32;
let mut border = 0;
loop {
let mut start = border + 1;
for center in (border + 1)..height {
if entropy(&mut strips, 0, border, w, center)? > 0.0 {
start = center;
break;
}
}
let mut sub = 0;
let mut delta = threshold;
for center in (start..height).rev() {
let upper = entropy(&mut strips, 0, border, w, center - border)?;
let lower = entropy(&mut strips, 0, center, w, center - border)?;
                let diff = match lower != 0.0 {
true => upper as f32 / lower as f32,
|
sub = center;
}
}
if sub == 0 || border == sub {
break;
}
border = sub;
            if !deep {
break;
}
}
borders.push((border as f32 * mul) as u32);
        if side != 3 {
conv = rotate270(&conv);
}
}
Ok(borders)
}
|
false => delta,
};
if diff < delta && diff < threshold {
delta = diff;
|
random_line_split
|
mod.rs
|
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use lint;
use metadata::cstore::CStore;
use metadata::filesearch;
use middle::dependency_format;
use session::search_paths::PathKind;
use util::nodemap::{NodeMap, FnvHashMap};
use syntax::ast::NodeId;
use syntax::codemap::Span;
use syntax::diagnostic::{self, Emitter};
use syntax::diagnostics;
use syntax::feature_gate;
use syntax::parse;
use syntax::parse::token;
use syntax::parse::ParseSess;
use syntax::{ast, codemap};
use syntax::feature_gate::AttributeType;
use rustc_back::target::Target;
use std::path::{Path, PathBuf};
use std::cell::{Cell, RefCell};
use std::env;
pub mod config;
pub mod search_paths;
// Represents the data associated with a compilation
// session for a single crate.
pub struct Session {
pub target: config::Config,
pub host: Target,
pub opts: config::Options,
pub cstore: CStore,
pub parse_sess: ParseSess,
// For a library crate, this is always none
pub entry_fn: RefCell<Option<(NodeId, codemap::Span)>>,
pub entry_type: Cell<Option<config::EntryFnType>>,
pub plugin_registrar_fn: Cell<Option<ast::NodeId>>,
pub default_sysroot: Option<PathBuf>,
// The name of the root source file of the crate, in the local file system.
// The path is always expected to be absolute. `None` means that there is no
// source file.
pub local_crate_source_file: Option<PathBuf>,
pub working_dir: PathBuf,
pub lint_store: RefCell<lint::LintStore>,
pub lints: RefCell<NodeMap<Vec<(lint::LintId, codemap::Span, String)>>>,
pub plugin_llvm_passes: RefCell<Vec<String>>,
pub plugin_attributes: RefCell<Vec<(String, AttributeType)>>,
pub crate_types: RefCell<Vec<config::CrateType>>,
pub dependency_formats: RefCell<dependency_format::Dependencies>,
pub crate_metadata: RefCell<Vec<String>>,
pub features: RefCell<feature_gate::Features>,
pub delayed_span_bug: RefCell<Option<(codemap::Span, String)>>,
/// The maximum recursion limit for potentially infinitely recursive
/// operations such as auto-dereference and monomorphization.
pub recursion_limit: Cell<usize>,
pub can_print_warnings: bool,
/// The metadata::creader module may inject an allocator dependency if it
/// didn't already find one, and this tracks what was injected.
pub injected_allocator: Cell<Option<ast::CrateNum>>,
next_node_id: Cell<ast::NodeId>,
}
impl Session {
    pub fn span_fatal(&self, sp: Span, msg: &str) -> ! {
if self.opts.treat_err_as_bug {
self.span_bug(sp, msg);
}
panic!(self.diagnostic().span_fatal(sp, msg))
}
    pub fn span_fatal_with_code(&self, sp: Span, msg: &str, code: &str) -> ! {
if self.opts.treat_err_as_bug {
self.span_bug(sp, msg);
}
panic!(self.diagnostic().span_fatal_with_code(sp, msg, code))
}
    pub fn fatal(&self, msg: &str) -> ! {
if self.opts.treat_err_as_bug {
self.bug(msg);
}
self.diagnostic().handler().fatal(msg)
}
pub fn span_err_or_warn(&self, is_warning: bool, sp: Span, msg: &str) {
if is_warning {
self.span_warn(sp, msg);
} else {
self.span_err(sp, msg);
}
}
pub fn span_err(&self, sp: Span, msg: &str) {
if self.opts.treat_err_as_bug {
self.span_bug(sp, msg);
}
match split_msg_into_multilines(msg) {
Some(msg) => self.diagnostic().span_err(sp, &msg[..]),
None => self.diagnostic().span_err(sp, msg)
}
}
pub fn note_rfc_1214(&self, span: Span) {
self.span_note(
span,
&format!("this warning results from recent bug fixes and clarifications; \
it will become a HARD ERROR in the next release. \
See RFC 1214 for details."));
}
pub fn span_err_with_code(&self, sp: Span, msg: &str, code: &str) {
if self.opts.treat_err_as_bug {
self.span_bug(sp, msg);
}
match split_msg_into_multilines(msg) {
Some(msg) => self.diagnostic().span_err_with_code(sp, &msg[..], code),
None => self.diagnostic().span_err_with_code(sp, msg, code)
}
}
pub fn err(&self, msg: &str) {
if self.opts.treat_err_as_bug {
self.bug(msg);
}
self.diagnostic().handler().err(msg)
}
pub fn err_count(&self) -> usize {
self.diagnostic().handler().err_count()
}
pub fn has_errors(&self) -> bool {
self.diagnostic().handler().has_errors()
}
pub fn abort_if_errors(&self) {
self.diagnostic().handler().abort_if_errors();
let delayed_bug = self.delayed_span_bug.borrow();
match *delayed_bug {
Some((span, ref errmsg)) => {
self.diagnostic().span_bug(span, errmsg);
},
_ => {}
}
}
pub fn span_warn(&self, sp: Span, msg: &str) {
if self.can_print_warnings {
self.diagnostic().span_warn(sp, msg)
}
}
pub fn span_warn_with_code(&self, sp: Span, msg: &str, code: &str) {
if self.can_print_warnings {
self.diagnostic().span_warn_with_code(sp, msg, code)
}
}
pub fn warn(&self, msg: &str) {
if self.can_print_warnings {
self.diagnostic().handler().warn(msg)
}
}
pub fn opt_span_warn(&self, opt_sp: Option<Span>, msg: &str) {
match opt_sp {
Some(sp) => self.span_warn(sp, msg),
None => self.warn(msg),
}
}
pub fn span_note(&self, sp: Span, msg: &str) {
self.diagnostic().span_note(sp, msg)
}
pub fn span_end_note(&self, sp: Span, msg: &str) {
self.diagnostic().span_end_note(sp, msg)
}
/// Prints out a message with a suggested edit of the code.
///
/// See `diagnostic::RenderSpan::Suggestion` for more information.
pub fn span_suggestion(&self, sp: Span, msg: &str, suggestion: String) {
self.diagnostic().span_suggestion(sp, msg, suggestion)
}
pub fn span_help(&self, sp: Span, msg: &str) {
self.diagnostic().span_help(sp, msg)
}
pub fn fileline_note(&self, sp: Span, msg: &str) {
self.diagnostic().fileline_note(sp, msg)
}
pub fn fileline_help(&self, sp: Span, msg: &str) {
self.diagnostic().fileline_help(sp, msg)
}
pub fn note(&self, msg: &str) {
self.diagnostic().handler().note(msg)
}
pub fn help(&self, msg: &str) {
self.diagnostic().handler().help(msg)
}
    pub fn opt_span_bug(&self, opt_sp: Option<Span>, msg: &str) -> ! {
match opt_sp {
Some(sp) => self.span_bug(sp, msg),
None => self.bug(msg),
}
}
/// Delay a span_bug() call until abort_if_errors()
pub fn delay_span_bug(&self, sp: Span, msg: &str) {
let mut delayed = self.delayed_span_bug.borrow_mut();
*delayed = Some((sp, msg.to_string()));
}
    pub fn span_bug(&self, sp: Span, msg: &str) -> ! {
self.diagnostic().span_bug(sp, msg)
}
    pub fn bug(&self, msg: &str) -> ! {
self.diagnostic().handler().bug(msg)
}
    pub fn span_unimpl(&self, sp: Span, msg: &str) -> ! {
self.diagnostic().span_unimpl(sp, msg)
}
    pub fn unimpl(&self, msg: &str) -> ! {
self.diagnostic().handler().unimpl(msg)
}
pub fn add_lint(&self,
lint: &'static lint::Lint,
id: ast::NodeId,
sp: Span,
msg: String) {
let lint_id = lint::LintId::of(lint);
let mut lints = self.lints.borrow_mut();
match lints.get_mut(&id) {
Some(arr) => { arr.push((lint_id, sp, msg)); return; }
None => {}
}
lints.insert(id, vec!((lint_id, sp, msg)));
}
pub fn next_node_id(&self) -> ast::NodeId {
self.reserve_node_ids(1)
}
pub fn reserve_node_ids(&self, count: ast::NodeId) -> ast::NodeId {
let id = self.next_node_id.get();
match id.checked_add(count) {
Some(next) => self.next_node_id.set(next),
None => self.bug("Input too large, ran out of node ids!")
}
id
}
pub fn diagnostic<'a>(&'a self) -> &'a diagnostic::SpanHandler {
&self.parse_sess.span_diagnostic
}
pub fn codemap<'a>(&'a self) -> &'a codemap::CodeMap {
self.parse_sess.codemap()
}
// This exists to help with refactoring to eliminate impossible
// cases later on
    pub fn impossible_case(&self, sp: Span, msg: &str) -> ! {
self.span_bug(sp,
&format!("impossible case reached: {}", msg));
}
pub fn verbose(&self) -> bool { self.opts.debugging_opts.verbose }
pub fn time_passes(&self) -> bool { self.opts.debugging_opts.time_passes }
pub fn count_llvm_insns(&self) -> bool {
self.opts.debugging_opts.count_llvm_insns
}
pub fn count_type_sizes(&self) -> bool {
self.opts.debugging_opts.count_type_sizes
}
pub fn time_llvm_passes(&self) -> bool {
self.opts.debugging_opts.time_llvm_passes
}
pub fn trans_stats(&self) -> bool { self.opts.debugging_opts.trans_stats }
pub fn meta_stats(&self) -> bool { self.opts.debugging_opts.meta_stats }
pub fn asm_comments(&self) -> bool { self.opts.debugging_opts.asm_comments }
pub fn no_verify(&self) -> bool { self.opts.debugging_opts.no_verify }
pub fn borrowck_stats(&self) -> bool { self.opts.debugging_opts.borrowck_stats }
pub fn print_llvm_passes(&self) -> bool {
self.opts.debugging_opts.print_llvm_passes
}
pub fn lto(&self) -> bool {
self.opts.cg.lto
}
pub fn no_landing_pads(&self) -> bool {
self.opts.debugging_opts.no_landing_pads
}
pub fn unstable_options(&self) -> bool {
self.opts.debugging_opts.unstable_options
}
pub fn print_enum_sizes(&self) -> bool {
self.opts.debugging_opts.print_enum_sizes
}
pub fn nonzeroing_move_hints(&self) -> bool {
self.opts.debugging_opts.enable_nonzeroing_move_hints
}
pub fn sysroot<'a>(&'a self) -> &'a Path {
match self.opts.maybe_sysroot {
Some (ref sysroot) => sysroot,
None => self.default_sysroot.as_ref()
.expect("missing sysroot and default_sysroot in Session")
}
}
pub fn target_filesearch(&self, kind: PathKind) -> filesearch::FileSearch {
filesearch::FileSearch::new(self.sysroot(),
&self.opts.target_triple,
&self.opts.search_paths,
kind)
}
pub fn host_filesearch(&self, kind: PathKind) -> filesearch::FileSearch {
filesearch::FileSearch::new(
self.sysroot(),
config::host_triple(),
&self.opts.search_paths,
kind)
}
}
fn split_msg_into_multilines(msg: &str) -> Option<String> {
// Conditions for enabling multi-line errors:
    if !msg.contains("mismatched types") &&
!msg.contains("type mismatch resolving") &&
!msg.contains("if and else have incompatible types") &&
!msg.contains("if may be missing an else clause") &&
!msg.contains("match arms have incompatible types") &&
!msg.contains("structure constructor specifies a structure of type") &&
!msg.contains("has an incompatible type for trait") {
return None
}
let first = msg.match_indices("expected").filter(|s| {
        s.0 > 0 && (msg.char_at_reverse(s.0) == ' ' ||
msg.char_at_reverse(s.0) == '(')
}).map(|(a, b)| (a - 1, b));
let second = msg.match_indices("found").filter(|s| {
        msg.char_at_reverse(s.0) == ' '
}).map(|(a, b)| (a - 1, b));
let mut new_msg = String::new();
let mut head = 0;
// Insert `\n` before expected and found.
for (pos1, pos2) in first.zip(second) {
new_msg = new_msg +
// A `(` may be preceded by a space and it should be trimmed
msg[head..pos1.0].trim_right() + // prefix
"\n" + // insert before first
&msg[pos1.0..pos1.1] + // insert what first matched
&msg[pos1.1..pos2.0] + // between matches
"\n " + // insert before second
// 123
// `expected` is 3 char longer than `found`. To align the types,
// `found` gets 3 spaces prepended.
&msg[pos2.0..pos2.1]; // insert what second matched
head = pos2.1;
}
let mut tail = &msg[head..];
let third = tail.find("(values differ")
.or(tail.find("(lifetime"))
.or(tail.find("(cyclic type of infinite size"));
// Insert `\n` before any remaining messages which match.
if let Some(pos) = third {
// The end of the message may just be wrapped in `()` without
// `expected`/`found`. Push this also to a new line and add the
// final tail after.
new_msg = new_msg +
// `(` is usually preceded by a space and should be trimmed.
tail[..pos].trim_right() + // prefix
"\n" + // insert before paren
&tail[pos..]; // append the tail
tail = "";
}
new_msg.push_str(tail);
return Some(new_msg);
}
pub fn build_session(sopts: config::Options,
local_crate_source_file: Option<PathBuf>,
registry: diagnostics::registry::Registry)
-> Session {
// FIXME: This is not general enough to make the warning lint completely override
// normal diagnostic warnings, since the warning lint can also be denied and changed
// later via the source code.
let can_print_warnings = sopts.lint_opts
.iter()
.filter(|&&(ref key, _)| *key == "warnings")
                             .map(|&(_, ref level)| *level != lint::Allow)
|
.unwrap_or(true);
let codemap = codemap::CodeMap::new();
let diagnostic_handler =
diagnostic::Handler::new(sopts.color, Some(registry), can_print_warnings);
let span_diagnostic_handler =
diagnostic::SpanHandler::new(diagnostic_handler, codemap);
build_session_(sopts, local_crate_source_file, span_diagnostic_handler)
}
pub fn build_session_(sopts: config::Options,
local_crate_source_file: Option<PathBuf>,
span_diagnostic: diagnostic::SpanHandler)
-> Session {
let host = match Target::search(config::host_triple()) {
Ok(t) => t,
Err(e) => {
span_diagnostic.handler()
.fatal(&format!("Error loading host specification: {}", e));
}
};
let target_cfg = config::build_target_config(&sopts, &span_diagnostic);
let p_s = parse::ParseSess::with_span_handler(span_diagnostic);
let default_sysroot = match sopts.maybe_sysroot {
Some(_) => None,
None => Some(filesearch::get_or_default_sysroot())
};
// Make the path absolute, if necessary
let local_crate_source_file = local_crate_source_file.map(|path|
if path.is_absolute() {
path.clone()
} else {
env::current_dir().unwrap().join(&path)
}
);
let can_print_warnings = sopts.lint_opts
.iter()
.filter(|&&(ref key, _)| *key == "warnings")
                             .map(|&(_, ref level)| *level != lint::Allow)
.last()
.unwrap_or(true);
let sess = Session {
target: target_cfg,
host: host,
opts: sopts,
cstore: CStore::new(token::get_ident_interner()),
parse_sess: p_s,
// For a library crate, this is always none
entry_fn: RefCell::new(None),
entry_type: Cell::new(None),
plugin_registrar_fn: Cell::new(None),
default_sysroot: default_sysroot,
local_crate_source_file: local_crate_source_file,
working_dir: env::current_dir().unwrap(),
lint_store: RefCell::new(lint::LintStore::new()),
lints: RefCell::new(NodeMap()),
plugin_llvm_passes: RefCell::new(Vec::new()),
plugin_attributes: RefCell::new(Vec::new()),
crate_types: RefCell::new(Vec::new()),
dependency_formats: RefCell::new(FnvHashMap()),
crate_metadata: RefCell::new(Vec::new()),
delayed_span_bug: RefCell::new(None),
features: RefCell::new(feature_gate::Features::new()),
recursion_limit: Cell::new(64),
can_print_warnings: can_print_warnings,
next_node_id: Cell::new(1),
injected_allocator: Cell::new(None),
};
sess
}
// Seems out of place, but it uses session, so I'm putting it here
pub fn expect<T, M>(sess: &Session, opt: Option<T>, msg: M) -> T where
M: FnOnce() -> String,
{
diagnostic::expect(sess.diagnostic(), opt, msg)
}
pub fn early_error(msg: &str) ->! {
let mut emitter = diagnostic::EmitterWriter::stderr(diagnostic::Auto, None);
emitter.emit(None, msg, None, diagnostic::Fatal);
panic!(diagnostic::FatalError);
}
pub fn early_warn(msg: &str) {
let mut emitter = diagnostic::EmitterWriter::stderr(diagnostic::Auto, None);
emitter.emit(None, msg, None, diagnostic::Warning);
}
|
.last()
|
random_line_split
|
mod.rs
|
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use lint;
use metadata::cstore::CStore;
use metadata::filesearch;
use middle::dependency_format;
use session::search_paths::PathKind;
use util::nodemap::{NodeMap, FnvHashMap};
use syntax::ast::NodeId;
use syntax::codemap::Span;
use syntax::diagnostic::{self, Emitter};
use syntax::diagnostics;
use syntax::feature_gate;
use syntax::parse;
use syntax::parse::token;
use syntax::parse::ParseSess;
use syntax::{ast, codemap};
use syntax::feature_gate::AttributeType;
use rustc_back::target::Target;
use std::path::{Path, PathBuf};
use std::cell::{Cell, RefCell};
use std::env;
pub mod config;
pub mod search_paths;
// Represents the data associated with a compilation
// session for a single crate.
pub struct Session {
pub target: config::Config,
pub host: Target,
pub opts: config::Options,
pub cstore: CStore,
pub parse_sess: ParseSess,
// For a library crate, this is always none
pub entry_fn: RefCell<Option<(NodeId, codemap::Span)>>,
pub entry_type: Cell<Option<config::EntryFnType>>,
pub plugin_registrar_fn: Cell<Option<ast::NodeId>>,
pub default_sysroot: Option<PathBuf>,
// The name of the root source file of the crate, in the local file system.
// The path is always expected to be absolute. `None` means that there is no
// source file.
pub local_crate_source_file: Option<PathBuf>,
pub working_dir: PathBuf,
pub lint_store: RefCell<lint::LintStore>,
pub lints: RefCell<NodeMap<Vec<(lint::LintId, codemap::Span, String)>>>,
pub plugin_llvm_passes: RefCell<Vec<String>>,
pub plugin_attributes: RefCell<Vec<(String, AttributeType)>>,
pub crate_types: RefCell<Vec<config::CrateType>>,
pub dependency_formats: RefCell<dependency_format::Dependencies>,
pub crate_metadata: RefCell<Vec<String>>,
pub features: RefCell<feature_gate::Features>,
pub delayed_span_bug: RefCell<Option<(codemap::Span, String)>>,
/// The maximum recursion limit for potentially infinitely recursive
/// operations such as auto-dereference and monomorphization.
pub recursion_limit: Cell<usize>,
pub can_print_warnings: bool,
/// The metadata::creader module may inject an allocator dependency if it
/// didn't already find one, and this tracks what was injected.
pub injected_allocator: Cell<Option<ast::CrateNum>>,
next_node_id: Cell<ast::NodeId>,
}
impl Session {
    pub fn span_fatal(&self, sp: Span, msg: &str) -> ! {
if self.opts.treat_err_as_bug {
self.span_bug(sp, msg);
}
panic!(self.diagnostic().span_fatal(sp, msg))
}
    pub fn span_fatal_with_code(&self, sp: Span, msg: &str, code: &str) -> ! {
if self.opts.treat_err_as_bug {
self.span_bug(sp, msg);
}
panic!(self.diagnostic().span_fatal_with_code(sp, msg, code))
}
    pub fn fatal(&self, msg: &str) -> ! {
if self.opts.treat_err_as_bug {
self.bug(msg);
}
self.diagnostic().handler().fatal(msg)
}
pub fn span_err_or_warn(&self, is_warning: bool, sp: Span, msg: &str) {
if is_warning {
self.span_warn(sp, msg);
} else {
self.span_err(sp, msg);
}
}
pub fn span_err(&self, sp: Span, msg: &str) {
if self.opts.treat_err_as_bug {
self.span_bug(sp, msg);
}
match split_msg_into_multilines(msg) {
Some(msg) => self.diagnostic().span_err(sp, &msg[..]),
None => self.diagnostic().span_err(sp, msg)
}
}
pub fn note_rfc_1214(&self, span: Span) {
self.span_note(
span,
&format!("this warning results from recent bug fixes and clarifications; \
it will become a HARD ERROR in the next release. \
See RFC 1214 for details."));
}
pub fn span_err_with_code(&self, sp: Span, msg: &str, code: &str) {
if self.opts.treat_err_as_bug {
self.span_bug(sp, msg);
}
match split_msg_into_multilines(msg) {
Some(msg) => self.diagnostic().span_err_with_code(sp, &msg[..], code),
None => self.diagnostic().span_err_with_code(sp, msg, code)
}
}
pub fn err(&self, msg: &str) {
if self.opts.treat_err_as_bug {
self.bug(msg);
}
self.diagnostic().handler().err(msg)
}
pub fn err_count(&self) -> usize {
self.diagnostic().handler().err_count()
}
pub fn has_errors(&self) -> bool {
self.diagnostic().handler().has_errors()
}
pub fn abort_if_errors(&self) {
self.diagnostic().handler().abort_if_errors();
let delayed_bug = self.delayed_span_bug.borrow();
match *delayed_bug {
Some((span, ref errmsg)) => {
self.diagnostic().span_bug(span, errmsg);
},
_ => {}
}
}
pub fn span_warn(&self, sp: Span, msg: &str) {
if self.can_print_warnings {
self.diagnostic().span_warn(sp, msg)
}
}
pub fn span_warn_with_code(&self, sp: Span, msg: &str, code: &str) {
if self.can_print_warnings {
self.diagnostic().span_warn_with_code(sp, msg, code)
}
}
pub fn warn(&self, msg: &str) {
if self.can_print_warnings {
self.diagnostic().handler().warn(msg)
}
}
pub fn opt_span_warn(&self, opt_sp: Option<Span>, msg: &str) {
match opt_sp {
Some(sp) => self.span_warn(sp, msg),
None => self.warn(msg),
}
}
pub fn span_note(&self, sp: Span, msg: &str) {
self.diagnostic().span_note(sp, msg)
}
pub fn span_end_note(&self, sp: Span, msg: &str) {
self.diagnostic().span_end_note(sp, msg)
}
/// Prints out a message with a suggested edit of the code.
///
/// See `diagnostic::RenderSpan::Suggestion` for more information.
pub fn span_suggestion(&self, sp: Span, msg: &str, suggestion: String) {
self.diagnostic().span_suggestion(sp, msg, suggestion)
}
pub fn span_help(&self, sp: Span, msg: &str) {
self.diagnostic().span_help(sp, msg)
}
pub fn
|
(&self, sp: Span, msg: &str) {
self.diagnostic().fileline_note(sp, msg)
}
pub fn fileline_help(&self, sp: Span, msg: &str) {
self.diagnostic().fileline_help(sp, msg)
}
pub fn note(&self, msg: &str) {
self.diagnostic().handler().note(msg)
}
pub fn help(&self, msg: &str) {
self.diagnostic().handler().help(msg)
}
    pub fn opt_span_bug(&self, opt_sp: Option<Span>, msg: &str) -> ! {
match opt_sp {
Some(sp) => self.span_bug(sp, msg),
None => self.bug(msg),
}
}
/// Delay a span_bug() call until abort_if_errors()
pub fn delay_span_bug(&self, sp: Span, msg: &str) {
let mut delayed = self.delayed_span_bug.borrow_mut();
*delayed = Some((sp, msg.to_string()));
}
    pub fn span_bug(&self, sp: Span, msg: &str) -> ! {
self.diagnostic().span_bug(sp, msg)
}
    pub fn bug(&self, msg: &str) -> ! {
self.diagnostic().handler().bug(msg)
}
    pub fn span_unimpl(&self, sp: Span, msg: &str) -> ! {
self.diagnostic().span_unimpl(sp, msg)
}
    pub fn unimpl(&self, msg: &str) -> ! {
self.diagnostic().handler().unimpl(msg)
}
pub fn add_lint(&self,
lint: &'static lint::Lint,
id: ast::NodeId,
sp: Span,
msg: String) {
let lint_id = lint::LintId::of(lint);
let mut lints = self.lints.borrow_mut();
match lints.get_mut(&id) {
Some(arr) => { arr.push((lint_id, sp, msg)); return; }
None => {}
}
lints.insert(id, vec!((lint_id, sp, msg)));
}
pub fn next_node_id(&self) -> ast::NodeId {
self.reserve_node_ids(1)
}
pub fn reserve_node_ids(&self, count: ast::NodeId) -> ast::NodeId {
let id = self.next_node_id.get();
match id.checked_add(count) {
Some(next) => self.next_node_id.set(next),
None => self.bug("Input too large, ran out of node ids!")
}
id
}
pub fn diagnostic<'a>(&'a self) -> &'a diagnostic::SpanHandler {
&self.parse_sess.span_diagnostic
}
pub fn codemap<'a>(&'a self) -> &'a codemap::CodeMap {
self.parse_sess.codemap()
}
// This exists to help with refactoring to eliminate impossible
// cases later on
    pub fn impossible_case(&self, sp: Span, msg: &str) -> ! {
self.span_bug(sp,
&format!("impossible case reached: {}", msg));
}
pub fn verbose(&self) -> bool { self.opts.debugging_opts.verbose }
pub fn time_passes(&self) -> bool { self.opts.debugging_opts.time_passes }
pub fn count_llvm_insns(&self) -> bool {
self.opts.debugging_opts.count_llvm_insns
}
pub fn count_type_sizes(&self) -> bool {
self.opts.debugging_opts.count_type_sizes
}
pub fn time_llvm_passes(&self) -> bool {
self.opts.debugging_opts.time_llvm_passes
}
pub fn trans_stats(&self) -> bool { self.opts.debugging_opts.trans_stats }
pub fn meta_stats(&self) -> bool { self.opts.debugging_opts.meta_stats }
pub fn asm_comments(&self) -> bool { self.opts.debugging_opts.asm_comments }
pub fn no_verify(&self) -> bool { self.opts.debugging_opts.no_verify }
pub fn borrowck_stats(&self) -> bool { self.opts.debugging_opts.borrowck_stats }
pub fn print_llvm_passes(&self) -> bool {
self.opts.debugging_opts.print_llvm_passes
}
pub fn lto(&self) -> bool {
self.opts.cg.lto
}
pub fn no_landing_pads(&self) -> bool {
self.opts.debugging_opts.no_landing_pads
}
pub fn unstable_options(&self) -> bool {
self.opts.debugging_opts.unstable_options
}
pub fn print_enum_sizes(&self) -> bool {
self.opts.debugging_opts.print_enum_sizes
}
pub fn nonzeroing_move_hints(&self) -> bool {
self.opts.debugging_opts.enable_nonzeroing_move_hints
}
pub fn sysroot<'a>(&'a self) -> &'a Path {
match self.opts.maybe_sysroot {
Some (ref sysroot) => sysroot,
None => self.default_sysroot.as_ref()
.expect("missing sysroot and default_sysroot in Session")
}
}
pub fn target_filesearch(&self, kind: PathKind) -> filesearch::FileSearch {
filesearch::FileSearch::new(self.sysroot(),
&self.opts.target_triple,
&self.opts.search_paths,
kind)
}
pub fn host_filesearch(&self, kind: PathKind) -> filesearch::FileSearch {
filesearch::FileSearch::new(
self.sysroot(),
config::host_triple(),
&self.opts.search_paths,
kind)
}
}
fn split_msg_into_multilines(msg: &str) -> Option<String> {
// Conditions for enabling multi-line errors:
    if !msg.contains("mismatched types") &&
!msg.contains("type mismatch resolving") &&
!msg.contains("if and else have incompatible types") &&
!msg.contains("if may be missing an else clause") &&
!msg.contains("match arms have incompatible types") &&
!msg.contains("structure constructor specifies a structure of type") &&
!msg.contains("has an incompatible type for trait") {
return None
}
let first = msg.match_indices("expected").filter(|s| {
        s.0 > 0 && (msg.char_at_reverse(s.0) == ' ' ||
msg.char_at_reverse(s.0) == '(')
}).map(|(a, b)| (a - 1, b));
let second = msg.match_indices("found").filter(|s| {
        msg.char_at_reverse(s.0) == ' '
}).map(|(a, b)| (a - 1, b));
let mut new_msg = String::new();
let mut head = 0;
// Insert `\n` before expected and found.
for (pos1, pos2) in first.zip(second) {
new_msg = new_msg +
// A `(` may be preceded by a space and it should be trimmed
msg[head..pos1.0].trim_right() + // prefix
"\n" + // insert before first
&msg[pos1.0..pos1.1] + // insert what first matched
&msg[pos1.1..pos2.0] + // between matches
"\n " + // insert before second
// 123
// `expected` is 3 char longer than `found`. To align the types,
// `found` gets 3 spaces prepended.
&msg[pos2.0..pos2.1]; // insert what second matched
head = pos2.1;
}
let mut tail = &msg[head..];
let third = tail.find("(values differ")
.or(tail.find("(lifetime"))
.or(tail.find("(cyclic type of infinite size"));
// Insert `\n` before any remaining messages which match.
if let Some(pos) = third {
// The end of the message may just be wrapped in `()` without
// `expected`/`found`. Push this also to a new line and add the
// final tail after.
new_msg = new_msg +
// `(` is usually preceded by a space and should be trimmed.
tail[..pos].trim_right() + // prefix
"\n" + // insert before paren
&tail[pos..]; // append the tail
tail = "";
}
new_msg.push_str(tail);
return Some(new_msg);
}
pub fn build_session(sopts: config::Options,
local_crate_source_file: Option<PathBuf>,
registry: diagnostics::registry::Registry)
-> Session {
// FIXME: This is not general enough to make the warning lint completely override
// normal diagnostic warnings, since the warning lint can also be denied and changed
// later via the source code.
let can_print_warnings = sopts.lint_opts
.iter()
.filter(|&&(ref key, _)| *key == "warnings")
                             .map(|&(_, ref level)| *level != lint::Allow)
.last()
.unwrap_or(true);
let codemap = codemap::CodeMap::new();
let diagnostic_handler =
diagnostic::Handler::new(sopts.color, Some(registry), can_print_warnings);
let span_diagnostic_handler =
diagnostic::SpanHandler::new(diagnostic_handler, codemap);
build_session_(sopts, local_crate_source_file, span_diagnostic_handler)
}
pub fn build_session_(sopts: config::Options,
local_crate_source_file: Option<PathBuf>,
span_diagnostic: diagnostic::SpanHandler)
-> Session {
let host = match Target::search(config::host_triple()) {
Ok(t) => t,
Err(e) => {
span_diagnostic.handler()
.fatal(&format!("Error loading host specification: {}", e));
}
};
let target_cfg = config::build_target_config(&sopts, &span_diagnostic);
let p_s = parse::ParseSess::with_span_handler(span_diagnostic);
let default_sysroot = match sopts.maybe_sysroot {
Some(_) => None,
None => Some(filesearch::get_or_default_sysroot())
};
// Make the path absolute, if necessary
let local_crate_source_file = local_crate_source_file.map(|path|
if path.is_absolute() {
path.clone()
} else {
env::current_dir().unwrap().join(&path)
}
);
let can_print_warnings = sopts.lint_opts
.iter()
.filter(|&&(ref key, _)| *key == "warnings")
                             .map(|&(_, ref level)| *level != lint::Allow)
.last()
.unwrap_or(true);
let sess = Session {
target: target_cfg,
host: host,
opts: sopts,
cstore: CStore::new(token::get_ident_interner()),
parse_sess: p_s,
// For a library crate, this is always none
entry_fn: RefCell::new(None),
entry_type: Cell::new(None),
plugin_registrar_fn: Cell::new(None),
default_sysroot: default_sysroot,
local_crate_source_file: local_crate_source_file,
working_dir: env::current_dir().unwrap(),
lint_store: RefCell::new(lint::LintStore::new()),
lints: RefCell::new(NodeMap()),
plugin_llvm_passes: RefCell::new(Vec::new()),
plugin_attributes: RefCell::new(Vec::new()),
crate_types: RefCell::new(Vec::new()),
dependency_formats: RefCell::new(FnvHashMap()),
crate_metadata: RefCell::new(Vec::new()),
delayed_span_bug: RefCell::new(None),
features: RefCell::new(feature_gate::Features::new()),
recursion_limit: Cell::new(64),
can_print_warnings: can_print_warnings,
next_node_id: Cell::new(1),
injected_allocator: Cell::new(None),
};
sess
}
// Seems out of place, but it uses session, so I'm putting it here
pub fn expect<T, M>(sess: &Session, opt: Option<T>, msg: M) -> T where
M: FnOnce() -> String,
{
diagnostic::expect(sess.diagnostic(), opt, msg)
}
pub fn early_error(msg: &str) ->! {
let mut emitter = diagnostic::EmitterWriter::stderr(diagnostic::Auto, None);
emitter.emit(None, msg, None, diagnostic::Fatal);
panic!(diagnostic::FatalError);
}
pub fn early_warn(msg: &str) {
let mut emitter = diagnostic::EmitterWriter::stderr(diagnostic::Auto, None);
emitter.emit(None, msg, None, diagnostic::Warning);
}
|
fileline_note
|
identifier_name
|
mod.rs
|
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use lint;
use metadata::cstore::CStore;
use metadata::filesearch;
use middle::dependency_format;
use session::search_paths::PathKind;
use util::nodemap::{NodeMap, FnvHashMap};
use syntax::ast::NodeId;
use syntax::codemap::Span;
use syntax::diagnostic::{self, Emitter};
use syntax::diagnostics;
use syntax::feature_gate;
use syntax::parse;
use syntax::parse::token;
use syntax::parse::ParseSess;
use syntax::{ast, codemap};
use syntax::feature_gate::AttributeType;
use rustc_back::target::Target;
use std::path::{Path, PathBuf};
use std::cell::{Cell, RefCell};
use std::env;
pub mod config;
pub mod search_paths;
// Represents the data associated with a compilation
// session for a single crate.
pub struct Session {
pub target: config::Config,
pub host: Target,
pub opts: config::Options,
pub cstore: CStore,
pub parse_sess: ParseSess,
// For a library crate, this is always none
pub entry_fn: RefCell<Option<(NodeId, codemap::Span)>>,
pub entry_type: Cell<Option<config::EntryFnType>>,
pub plugin_registrar_fn: Cell<Option<ast::NodeId>>,
pub default_sysroot: Option<PathBuf>,
// The name of the root source file of the crate, in the local file system.
// The path is always expected to be absolute. `None` means that there is no
// source file.
pub local_crate_source_file: Option<PathBuf>,
pub working_dir: PathBuf,
pub lint_store: RefCell<lint::LintStore>,
pub lints: RefCell<NodeMap<Vec<(lint::LintId, codemap::Span, String)>>>,
pub plugin_llvm_passes: RefCell<Vec<String>>,
pub plugin_attributes: RefCell<Vec<(String, AttributeType)>>,
pub crate_types: RefCell<Vec<config::CrateType>>,
pub dependency_formats: RefCell<dependency_format::Dependencies>,
pub crate_metadata: RefCell<Vec<String>>,
pub features: RefCell<feature_gate::Features>,
pub delayed_span_bug: RefCell<Option<(codemap::Span, String)>>,
/// The maximum recursion limit for potentially infinitely recursive
/// operations such as auto-dereference and monomorphization.
pub recursion_limit: Cell<usize>,
pub can_print_warnings: bool,
/// The metadata::creader module may inject an allocator dependency if it
/// didn't already find one, and this tracks what was injected.
pub injected_allocator: Cell<Option<ast::CrateNum>>,
next_node_id: Cell<ast::NodeId>,
}
impl Session {
    pub fn span_fatal(&self, sp: Span, msg: &str) -> ! {
if self.opts.treat_err_as_bug {
self.span_bug(sp, msg);
}
panic!(self.diagnostic().span_fatal(sp, msg))
}
    pub fn span_fatal_with_code(&self, sp: Span, msg: &str, code: &str) -> ! {
if self.opts.treat_err_as_bug {
self.span_bug(sp, msg);
}
panic!(self.diagnostic().span_fatal_with_code(sp, msg, code))
}
    pub fn fatal(&self, msg: &str) -> ! {
if self.opts.treat_err_as_bug {
self.bug(msg);
}
self.diagnostic().handler().fatal(msg)
}
pub fn span_err_or_warn(&self, is_warning: bool, sp: Span, msg: &str) {
if is_warning {
self.span_warn(sp, msg);
} else {
self.span_err(sp, msg);
}
}
pub fn span_err(&self, sp: Span, msg: &str) {
if self.opts.treat_err_as_bug {
self.span_bug(sp, msg);
}
match split_msg_into_multilines(msg) {
Some(msg) => self.diagnostic().span_err(sp, &msg[..]),
None => self.diagnostic().span_err(sp, msg)
}
}
pub fn note_rfc_1214(&self, span: Span) {
self.span_note(
span,
&format!("this warning results from recent bug fixes and clarifications; \
it will become a HARD ERROR in the next release. \
See RFC 1214 for details."));
}
pub fn span_err_with_code(&self, sp: Span, msg: &str, code: &str) {
if self.opts.treat_err_as_bug {
self.span_bug(sp, msg);
}
match split_msg_into_multilines(msg) {
Some(msg) => self.diagnostic().span_err_with_code(sp, &msg[..], code),
None => self.diagnostic().span_err_with_code(sp, msg, code)
}
}
pub fn err(&self, msg: &str) {
if self.opts.treat_err_as_bug {
self.bug(msg);
}
self.diagnostic().handler().err(msg)
}
pub fn err_count(&self) -> usize {
self.diagnostic().handler().err_count()
}
pub fn has_errors(&self) -> bool {
self.diagnostic().handler().has_errors()
}
pub fn abort_if_errors(&self) {
self.diagnostic().handler().abort_if_errors();
let delayed_bug = self.delayed_span_bug.borrow();
match *delayed_bug {
Some((span, ref errmsg)) => {
self.diagnostic().span_bug(span, errmsg);
},
_ => {}
}
}
pub fn span_warn(&self, sp: Span, msg: &str) {
if self.can_print_warnings {
self.diagnostic().span_warn(sp, msg)
}
}
pub fn span_warn_with_code(&self, sp: Span, msg: &str, code: &str) {
if self.can_print_warnings {
self.diagnostic().span_warn_with_code(sp, msg, code)
}
}
pub fn warn(&self, msg: &str) {
if self.can_print_warnings {
self.diagnostic().handler().warn(msg)
}
}
pub fn opt_span_warn(&self, opt_sp: Option<Span>, msg: &str) {
match opt_sp {
Some(sp) => self.span_warn(sp, msg),
None => self.warn(msg),
}
}
pub fn span_note(&self, sp: Span, msg: &str) {
self.diagnostic().span_note(sp, msg)
}
pub fn span_end_note(&self, sp: Span, msg: &str) {
self.diagnostic().span_end_note(sp, msg)
}
/// Prints out a message with a suggested edit of the code.
///
/// See `diagnostic::RenderSpan::Suggestion` for more information.
pub fn span_suggestion(&self, sp: Span, msg: &str, suggestion: String) {
self.diagnostic().span_suggestion(sp, msg, suggestion)
}
pub fn span_help(&self, sp: Span, msg: &str) {
self.diagnostic().span_help(sp, msg)
}
pub fn fileline_note(&self, sp: Span, msg: &str) {
self.diagnostic().fileline_note(sp, msg)
}
pub fn fileline_help(&self, sp: Span, msg: &str) {
self.diagnostic().fileline_help(sp, msg)
}
pub fn note(&self, msg: &str) {
self.diagnostic().handler().note(msg)
}
pub fn help(&self, msg: &str) {
self.diagnostic().handler().help(msg)
}
pub fn opt_span_bug(&self, opt_sp: Option<Span>, msg: &str) -> ! {
match opt_sp {
Some(sp) => self.span_bug(sp, msg),
None => self.bug(msg),
}
}
/// Delay a span_bug() call until abort_if_errors()
pub fn delay_span_bug(&self, sp: Span, msg: &str) {
let mut delayed = self.delayed_span_bug.borrow_mut();
*delayed = Some((sp, msg.to_string()));
}
pub fn span_bug(&self, sp: Span, msg: &str) -> ! {
self.diagnostic().span_bug(sp, msg)
}
pub fn bug(&self, msg: &str) -> !
|
pub fn span_unimpl(&self, sp: Span, msg: &str) -> ! {
self.diagnostic().span_unimpl(sp, msg)
}
pub fn unimpl(&self, msg: &str) -> ! {
self.diagnostic().handler().unimpl(msg)
}
pub fn add_lint(&self,
lint: &'static lint::Lint,
id: ast::NodeId,
sp: Span,
msg: String) {
let lint_id = lint::LintId::of(lint);
let mut lints = self.lints.borrow_mut();
match lints.get_mut(&id) {
Some(arr) => { arr.push((lint_id, sp, msg)); return; }
None => {}
}
lints.insert(id, vec!((lint_id, sp, msg)));
}
pub fn next_node_id(&self) -> ast::NodeId {
self.reserve_node_ids(1)
}
pub fn reserve_node_ids(&self, count: ast::NodeId) -> ast::NodeId {
let id = self.next_node_id.get();
match id.checked_add(count) {
Some(next) => self.next_node_id.set(next),
None => self.bug("Input too large, ran out of node ids!")
}
id
}
pub fn diagnostic<'a>(&'a self) -> &'a diagnostic::SpanHandler {
&self.parse_sess.span_diagnostic
}
pub fn codemap<'a>(&'a self) -> &'a codemap::CodeMap {
self.parse_sess.codemap()
}
// This exists to help with refactoring to eliminate impossible
// cases later on
pub fn impossible_case(&self, sp: Span, msg: &str) -> ! {
self.span_bug(sp,
&format!("impossible case reached: {}", msg));
}
pub fn verbose(&self) -> bool { self.opts.debugging_opts.verbose }
pub fn time_passes(&self) -> bool { self.opts.debugging_opts.time_passes }
pub fn count_llvm_insns(&self) -> bool {
self.opts.debugging_opts.count_llvm_insns
}
pub fn count_type_sizes(&self) -> bool {
self.opts.debugging_opts.count_type_sizes
}
pub fn time_llvm_passes(&self) -> bool {
self.opts.debugging_opts.time_llvm_passes
}
pub fn trans_stats(&self) -> bool { self.opts.debugging_opts.trans_stats }
pub fn meta_stats(&self) -> bool { self.opts.debugging_opts.meta_stats }
pub fn asm_comments(&self) -> bool { self.opts.debugging_opts.asm_comments }
pub fn no_verify(&self) -> bool { self.opts.debugging_opts.no_verify }
pub fn borrowck_stats(&self) -> bool { self.opts.debugging_opts.borrowck_stats }
pub fn print_llvm_passes(&self) -> bool {
self.opts.debugging_opts.print_llvm_passes
}
pub fn lto(&self) -> bool {
self.opts.cg.lto
}
pub fn no_landing_pads(&self) -> bool {
self.opts.debugging_opts.no_landing_pads
}
pub fn unstable_options(&self) -> bool {
self.opts.debugging_opts.unstable_options
}
pub fn print_enum_sizes(&self) -> bool {
self.opts.debugging_opts.print_enum_sizes
}
pub fn nonzeroing_move_hints(&self) -> bool {
self.opts.debugging_opts.enable_nonzeroing_move_hints
}
pub fn sysroot<'a>(&'a self) -> &'a Path {
match self.opts.maybe_sysroot {
Some (ref sysroot) => sysroot,
None => self.default_sysroot.as_ref()
.expect("missing sysroot and default_sysroot in Session")
}
}
pub fn target_filesearch(&self, kind: PathKind) -> filesearch::FileSearch {
filesearch::FileSearch::new(self.sysroot(),
&self.opts.target_triple,
&self.opts.search_paths,
kind)
}
pub fn host_filesearch(&self, kind: PathKind) -> filesearch::FileSearch {
filesearch::FileSearch::new(
self.sysroot(),
config::host_triple(),
&self.opts.search_paths,
kind)
}
}
fn split_msg_into_multilines(msg: &str) -> Option<String> {
// Conditions for enabling multi-line errors:
if !msg.contains("mismatched types") &&
!msg.contains("type mismatch resolving") &&
!msg.contains("if and else have incompatible types") &&
!msg.contains("if may be missing an else clause") &&
!msg.contains("match arms have incompatible types") &&
!msg.contains("structure constructor specifies a structure of type") &&
!msg.contains("has an incompatible type for trait") {
return None
}
let first = msg.match_indices("expected").filter(|s| {
s.0 > 0 && (msg.char_at_reverse(s.0) == ' ' ||
msg.char_at_reverse(s.0) == '(')
}).map(|(a, b)| (a - 1, b));
let second = msg.match_indices("found").filter(|s| {
msg.char_at_reverse(s.0) == ' '
}).map(|(a, b)| (a - 1, b));
let mut new_msg = String::new();
let mut head = 0;
// Insert `\n` before expected and found.
for (pos1, pos2) in first.zip(second) {
new_msg = new_msg +
// A `(` may be preceded by a space and it should be trimmed
msg[head..pos1.0].trim_right() + // prefix
"\n" + // insert before first
&msg[pos1.0..pos1.1] + // insert what first matched
&msg[pos1.1..pos2.0] + // between matches
"\n " + // insert before second
// 123
// `expected` is 3 char longer than `found`. To align the types,
// `found` gets 3 spaces prepended.
&msg[pos2.0..pos2.1]; // insert what second matched
head = pos2.1;
}
let mut tail = &msg[head..];
let third = tail.find("(values differ")
.or(tail.find("(lifetime"))
.or(tail.find("(cyclic type of infinite size"));
// Insert `\n` before any remaining messages which match.
if let Some(pos) = third {
// The end of the message may just be wrapped in `()` without
// `expected`/`found`. Push this also to a new line and add the
// final tail after.
new_msg = new_msg +
// `(` is usually preceded by a space and should be trimmed.
tail[..pos].trim_right() + // prefix
"\n" + // insert before paren
&tail[pos..]; // append the tail
tail = "";
}
new_msg.push_str(tail);
return Some(new_msg);
}
pub fn build_session(sopts: config::Options,
local_crate_source_file: Option<PathBuf>,
registry: diagnostics::registry::Registry)
-> Session {
// FIXME: This is not general enough to make the warning lint completely override
// normal diagnostic warnings, since the warning lint can also be denied and changed
// later via the source code.
let can_print_warnings = sopts.lint_opts
.iter()
.filter(|&&(ref key, _)| *key == "warnings")
.map(|&(_, ref level)| *level != lint::Allow)
.last()
.unwrap_or(true);
let codemap = codemap::CodeMap::new();
let diagnostic_handler =
diagnostic::Handler::new(sopts.color, Some(registry), can_print_warnings);
let span_diagnostic_handler =
diagnostic::SpanHandler::new(diagnostic_handler, codemap);
build_session_(sopts, local_crate_source_file, span_diagnostic_handler)
}
pub fn build_session_(sopts: config::Options,
local_crate_source_file: Option<PathBuf>,
span_diagnostic: diagnostic::SpanHandler)
-> Session {
let host = match Target::search(config::host_triple()) {
Ok(t) => t,
Err(e) => {
span_diagnostic.handler()
.fatal(&format!("Error loading host specification: {}", e));
}
};
let target_cfg = config::build_target_config(&sopts, &span_diagnostic);
let p_s = parse::ParseSess::with_span_handler(span_diagnostic);
let default_sysroot = match sopts.maybe_sysroot {
Some(_) => None,
None => Some(filesearch::get_or_default_sysroot())
};
// Make the path absolute, if necessary
let local_crate_source_file = local_crate_source_file.map(|path|
if path.is_absolute() {
path.clone()
} else {
env::current_dir().unwrap().join(&path)
}
);
let can_print_warnings = sopts.lint_opts
.iter()
.filter(|&&(ref key, _)| *key == "warnings")
.map(|&(_, ref level)| *level != lint::Allow)
.last()
.unwrap_or(true);
let sess = Session {
target: target_cfg,
host: host,
opts: sopts,
cstore: CStore::new(token::get_ident_interner()),
parse_sess: p_s,
// For a library crate, this is always none
entry_fn: RefCell::new(None),
entry_type: Cell::new(None),
plugin_registrar_fn: Cell::new(None),
default_sysroot: default_sysroot,
local_crate_source_file: local_crate_source_file,
working_dir: env::current_dir().unwrap(),
lint_store: RefCell::new(lint::LintStore::new()),
lints: RefCell::new(NodeMap()),
plugin_llvm_passes: RefCell::new(Vec::new()),
plugin_attributes: RefCell::new(Vec::new()),
crate_types: RefCell::new(Vec::new()),
dependency_formats: RefCell::new(FnvHashMap()),
crate_metadata: RefCell::new(Vec::new()),
delayed_span_bug: RefCell::new(None),
features: RefCell::new(feature_gate::Features::new()),
recursion_limit: Cell::new(64),
can_print_warnings: can_print_warnings,
next_node_id: Cell::new(1),
injected_allocator: Cell::new(None),
};
sess
}
// Seems out of place, but it uses session, so I'm putting it here
pub fn expect<T, M>(sess: &Session, opt: Option<T>, msg: M) -> T where
M: FnOnce() -> String,
{
diagnostic::expect(sess.diagnostic(), opt, msg)
}
pub fn early_error(msg: &str) ->! {
let mut emitter = diagnostic::EmitterWriter::stderr(diagnostic::Auto, None);
emitter.emit(None, msg, None, diagnostic::Fatal);
panic!(diagnostic::FatalError);
}
pub fn early_warn(msg: &str) {
let mut emitter = diagnostic::EmitterWriter::stderr(diagnostic::Auto, None);
emitter.emit(None, msg, None, diagnostic::Warning);
}
|
{
self.diagnostic().handler().bug(msg)
}
|
identifier_body
|
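A minimal standalone sketch of the reflow that split_msg_into_multilines above aims for. The sample message and the hard-coded result are assumptions for illustration, not data from the record: the helper inserts a newline before `expected`, a newline plus three spaces before `found` (so the two type names line up), and a further newline before trailers such as "(values differ ...)".
fn main() {
    // Assumed example input; any "mismatched types" diagnostic has this rough shape.
    let original = "mismatched types: expected `&str`, found `String` (values differ in mutability)";
    // Roughly what the helper produces: `expected`/`found` on their own, aligned lines.
    let reflowed = "mismatched types:\nexpected `&str`,\n   found `String`\n(values differ in mutability)";
    println!("before:\n{}\n\nafter:\n{}", original, reflowed);
}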
processinginstruction.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::codegen::Bindings::ProcessingInstructionBinding;
use crate::dom::bindings::codegen::Bindings::ProcessingInstructionBinding::ProcessingInstructionMethods;
use crate::dom::bindings::root::DomRoot;
use crate::dom::bindings::str::DOMString;
use crate::dom::characterdata::CharacterData;
use crate::dom::document::Document;
use crate::dom::node::Node;
use dom_struct::dom_struct;
/// An HTML processing instruction node.
#[dom_struct]
pub struct ProcessingInstruction {
characterdata: CharacterData,
target: DOMString,
}
impl ProcessingInstruction {
fn
|
(
target: DOMString,
data: DOMString,
document: &Document,
) -> ProcessingInstruction {
ProcessingInstruction {
characterdata: CharacterData::new_inherited(data, document),
target: target,
}
}
pub fn new(
target: DOMString,
data: DOMString,
document: &Document,
) -> DomRoot<ProcessingInstruction> {
Node::reflect_node(
Box::new(ProcessingInstruction::new_inherited(target, data, document)),
document,
ProcessingInstructionBinding::Wrap,
)
}
}
impl ProcessingInstruction {
pub fn target(&self) -> &DOMString {
&self.target
}
}
impl ProcessingInstructionMethods for ProcessingInstruction {
// https://dom.spec.whatwg.org/#dom-processinginstruction-target
fn Target(&self) -> DOMString {
self.target.clone()
}
}
|
new_inherited
|
identifier_name
|
processinginstruction.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::codegen::Bindings::ProcessingInstructionBinding;
|
use crate::dom::bindings::codegen::Bindings::ProcessingInstructionBinding::ProcessingInstructionMethods;
use crate::dom::bindings::root::DomRoot;
use crate::dom::bindings::str::DOMString;
use crate::dom::characterdata::CharacterData;
use crate::dom::document::Document;
use crate::dom::node::Node;
use dom_struct::dom_struct;
/// An HTML processing instruction node.
#[dom_struct]
pub struct ProcessingInstruction {
characterdata: CharacterData,
target: DOMString,
}
impl ProcessingInstruction {
fn new_inherited(
target: DOMString,
data: DOMString,
document: &Document,
) -> ProcessingInstruction {
ProcessingInstruction {
characterdata: CharacterData::new_inherited(data, document),
target: target,
}
}
pub fn new(
target: DOMString,
data: DOMString,
document: &Document,
) -> DomRoot<ProcessingInstruction> {
Node::reflect_node(
Box::new(ProcessingInstruction::new_inherited(target, data, document)),
document,
ProcessingInstructionBinding::Wrap,
)
}
}
impl ProcessingInstruction {
pub fn target(&self) -> &DOMString {
&self.target
}
}
impl ProcessingInstructionMethods for ProcessingInstruction {
// https://dom.spec.whatwg.org/#dom-processinginstruction-target
fn Target(&self) -> DOMString {
self.target.clone()
}
}
|
random_line_split
|
|
processinginstruction.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::codegen::Bindings::ProcessingInstructionBinding;
use crate::dom::bindings::codegen::Bindings::ProcessingInstructionBinding::ProcessingInstructionMethods;
use crate::dom::bindings::root::DomRoot;
use crate::dom::bindings::str::DOMString;
use crate::dom::characterdata::CharacterData;
use crate::dom::document::Document;
use crate::dom::node::Node;
use dom_struct::dom_struct;
/// An HTML processing instruction node.
#[dom_struct]
pub struct ProcessingInstruction {
characterdata: CharacterData,
target: DOMString,
}
impl ProcessingInstruction {
fn new_inherited(
target: DOMString,
data: DOMString,
document: &Document,
) -> ProcessingInstruction {
ProcessingInstruction {
characterdata: CharacterData::new_inherited(data, document),
target: target,
}
}
pub fn new(
target: DOMString,
data: DOMString,
document: &Document,
) -> DomRoot<ProcessingInstruction> {
Node::reflect_node(
Box::new(ProcessingInstruction::new_inherited(target, data, document)),
document,
ProcessingInstructionBinding::Wrap,
)
}
}
impl ProcessingInstruction {
pub fn target(&self) -> &DOMString {
&self.target
}
}
impl ProcessingInstructionMethods for ProcessingInstruction {
// https://dom.spec.whatwg.org/#dom-processinginstruction-target
fn Target(&self) -> DOMString
|
}
|
{
self.target.clone()
}
|
identifier_body
|
mod.rs
|
// Copyright 2016 LambdaStack All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#![allow(unused_imports)]
#![allow(unused_mut)]
#![allow(unused_variables)]
//! This library module provides HTTP/2 parsing and buffer frames used for HTTP/2.
//!
//! NB: This code is changing so please do not depend on it at this time!
pub const FRAME_HEADER_BYTES: usize = 9;
use byteorder::ByteOrder;
use byteorder;
pub mod kind;
pub mod flag;
pub mod payload;
pub mod frame;
use self::kind::*;
use self::flag::*;
use self::frame::*;
use self::payload::*;
/// Errors that can occur during parsing an HTTP/2 frame.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum Error {
/// A full frame header was not passed.
Short,
/// An unsupported value was set for the flag value.
BadFlag(u8),
/// An unsupported value was set for the frame kind.
BadKind(u8),
/// The padding length was larger than the frame-header-specified
/// length of the payload.
TooMuchPadding(u8),
/// The payload length specified by the frame header was shorter than
/// necessary for the parser settings specified and the frame type.
///
/// This happens if, for instance, the priority flag is set and the
/// header length is shorter than a stream dependency.
///
/// `PayloadLengthTooShort` should be treated as a protocol error.
PayloadLengthTooShort,
/// The payload length specified by the frame header of a settings frame
/// was not a round multiple of the size of a single setting.
PartialSettingLength,
/// The payload length specified by the frame header was not the
/// value necessary for the specific frame type.
InvalidPayloadLength
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct ParserSettings {
padding: bool,
priority: bool
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct StreamIdentifier(pub u32);
impl StreamIdentifier {
pub fn
|
(buf: &[u8]) -> StreamIdentifier {
StreamIdentifier(
byteorder::BigEndian::read_u32(buf) & ((1 << 31) - 1)
)
}
pub fn encode(&self, buf: &mut [u8]) -> usize {
encode_u32(buf, self.0)
}
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct ErrorCode(pub u32);
pub enum HttpError {
Protocol,
Internal,
FlowControlError,
SettingsTimeout,
}
impl ErrorCode {
pub fn parse(buf: &[u8]) -> ErrorCode {
ErrorCode(byteorder::BigEndian::read_u32(buf))
}
pub fn encode(&self, buf: &mut [u8]) -> usize {
encode_u32(buf, self.0)
}
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct SizeIncrement(pub u32);
impl SizeIncrement {
pub fn parse(buf: &[u8]) -> SizeIncrement {
SizeIncrement(byteorder::BigEndian::read_u32(buf))
}
pub fn encode(&self, buf: &mut [u8]) -> usize {
encode_u32(buf, self.0)
}
}
#[inline(always)]
pub fn encode_u24(buf: &mut [u8], val: u32) -> usize {
buf[0] = (val >> 16) as u8;
buf[1] = (val >> 8) as u8;
buf[2] = val as u8;
3
}
#[inline(always)]
pub fn encode_u32(buf: &mut [u8], val: u32) -> usize {
byteorder::BigEndian::write_u32(buf, val);
4
}
#[inline(always)]
pub fn encode_u64(buf: &mut [u8], val: u64) -> usize {
byteorder::BigEndian::write_u64(buf, val);
8
}
|
parse
|
identifier_name
|
mod.rs
|
// Copyright 2016 LambdaStack All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#![allow(unused_imports)]
#![allow(unused_mut)]
#![allow(unused_variables)]
//! This library module provides HTTP/2 parsing and buffer frames used for HTTP/2.
//!
//! NB: This code is changing so please do not depend on it at this time!
pub const FRAME_HEADER_BYTES: usize = 9;
use byteorder::ByteOrder;
use byteorder;
pub mod kind;
pub mod flag;
pub mod payload;
pub mod frame;
use self::kind::*;
use self::flag::*;
use self::frame::*;
use self::payload::*;
/// Errors that can occur during parsing an HTTP/2 frame.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum Error {
/// A full frame header was not passed.
Short,
/// An unsupported value was set for the flag value.
BadFlag(u8),
/// An unsupported value was set for the frame kind.
BadKind(u8),
/// The padding length was larger than the frame-header-specified
/// length of the payload.
TooMuchPadding(u8),
/// The payload length specified by the frame header was shorter than
/// necessary for the parser settings specified and the frame type.
///
/// This happens if, for instance, the priority flag is set and the
/// header length is shorter than a stream dependency.
///
/// `PayloadLengthTooShort` should be treated as a protocol error.
PayloadLengthTooShort,
/// The payload length specified by the frame header of a settings frame
/// was not a round multiple of the size of a single setting.
PartialSettingLength,
/// The payload length specified by the frame header was not the
/// value necessary for the specific frame type.
InvalidPayloadLength
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct ParserSettings {
padding: bool,
priority: bool
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct StreamIdentifier(pub u32);
impl StreamIdentifier {
pub fn parse(buf: &[u8]) -> StreamIdentifier {
StreamIdentifier(
byteorder::BigEndian::read_u32(buf) & ((1 << 31) - 1)
)
}
pub fn encode(&self, buf: &mut [u8]) -> usize {
encode_u32(buf, self.0)
}
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
|
pub enum HttpError {
Protocol,
Internal,
FlowControlError,
SettingsTimeout,
}
impl ErrorCode {
pub fn parse(buf: &[u8]) -> ErrorCode {
ErrorCode(byteorder::BigEndian::read_u32(buf))
}
pub fn encode(&self, buf: &mut [u8]) -> usize {
encode_u32(buf, self.0)
}
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct SizeIncrement(pub u32);
impl SizeIncrement {
pub fn parse(buf: &[u8]) -> SizeIncrement {
SizeIncrement(byteorder::BigEndian::read_u32(buf))
}
pub fn encode(&self, buf: &mut [u8]) -> usize {
encode_u32(buf, self.0)
}
}
#[inline(always)]
pub fn encode_u24(buf: &mut [u8], val: u32) -> usize {
buf[0] = (val >> 16) as u8;
buf[1] = (val >> 8) as u8;
buf[2] = val as u8;
3
}
#[inline(always)]
pub fn encode_u32(buf: &mut [u8], val: u32) -> usize {
byteorder::BigEndian::write_u32(buf, val);
4
}
#[inline(always)]
pub fn encode_u64(buf: &mut [u8], val: u64) -> usize {
byteorder::BigEndian::write_u64(buf, val);
8
}
|
pub struct ErrorCode(pub u32);
|
random_line_split
|
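A standalone sketch (not part of the record above) of the two bit-level conventions this frames module relies on: the 24-bit big-endian length written by encode_u24, and the stream identifier being a big-endian u32 with the reserved high bit masked off, as in StreamIdentifier::parse. Plain shifts are used here instead of the byteorder crate so the snippet compiles on its own.
fn encode_u24(buf: &mut [u8], val: u32) -> usize {
    buf[0] = (val >> 16) as u8;
    buf[1] = (val >> 8) as u8;
    buf[2] = val as u8;
    3
}

fn parse_stream_id(buf: &[u8]) -> u32 {
    // Big-endian u32 with the reserved high bit cleared (same mask as above).
    let raw = ((buf[0] as u32) << 24)
        | ((buf[1] as u32) << 16)
        | ((buf[2] as u32) << 8)
        | (buf[3] as u32);
    raw & ((1u32 << 31) - 1)
}

fn main() {
    let mut len = [0u8; 3];
    assert_eq!(encode_u24(&mut len, 0x01_02_03), 3);
    assert_eq!(len, [0x01, 0x02, 0x03]);
    // The reserved bit in a frame header's stream identifier is ignored on parse.
    assert_eq!(parse_stream_id(&[0x80, 0x00, 0x00, 0x05]), 5);
}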
font_template.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use std::fs::File;
use std::io::Read;
use string_cache::Atom;
/// Platform specific font representation for Linux.
/// The identifier is an absolute path, and the bytes
/// field is the loaded data that can be passed to
|
}
impl FontTemplateData {
pub fn new(identifier: Atom, font_data: Option<Vec<u8>>) -> FontTemplateData {
let bytes = match font_data {
Some(bytes) => {
bytes
},
None => {
// TODO: Handle file load failure!
let mut file = File::open(&*identifier).unwrap();
let mut buffer = vec![];
file.read_to_end(&mut buffer).unwrap();
buffer
},
};
FontTemplateData {
bytes: bytes,
identifier: identifier,
}
}
}
|
/// freetype and azure directly.
#[derive(Deserialize, Serialize)]
pub struct FontTemplateData {
pub bytes: Vec<u8>,
pub identifier: Atom,
|
random_line_split
|
font_template.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use std::fs::File;
use std::io::Read;
use string_cache::Atom;
/// Platform specific font representation for Linux.
/// The identifier is an absolute path, and the bytes
/// field is the loaded data that can be passed to
/// freetype and azure directly.
#[derive(Deserialize, Serialize)]
pub struct
|
{
pub bytes: Vec<u8>,
pub identifier: Atom,
}
impl FontTemplateData {
pub fn new(identifier: Atom, font_data: Option<Vec<u8>>) -> FontTemplateData {
let bytes = match font_data {
Some(bytes) => {
bytes
},
None => {
// TODO: Handle file load failure!
let mut file = File::open(&*identifier).unwrap();
let mut buffer = vec![];
file.read_to_end(&mut buffer).unwrap();
buffer
},
};
FontTemplateData {
bytes: bytes,
identifier: identifier,
}
}
}
|
FontTemplateData
|
identifier_name
|
font_template.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use std::fs::File;
use std::io::Read;
use string_cache::Atom;
/// Platform specific font representation for Linux.
/// The identifier is an absolute path, and the bytes
/// field is the loaded data that can be passed to
/// freetype and azure directly.
#[derive(Deserialize, Serialize)]
pub struct FontTemplateData {
pub bytes: Vec<u8>,
pub identifier: Atom,
}
impl FontTemplateData {
pub fn new(identifier: Atom, font_data: Option<Vec<u8>>) -> FontTemplateData
|
}
|
{
let bytes = match font_data {
Some(bytes) => {
bytes
},
None => {
// TODO: Handle file load failure!
let mut file = File::open(&*identifier).unwrap();
let mut buffer = vec![];
file.read_to_end(&mut buffer).unwrap();
buffer
},
};
FontTemplateData {
bytes: bytes,
identifier: identifier,
}
}
|
identifier_body
|
font_template.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use std::fs::File;
use std::io::Read;
use string_cache::Atom;
/// Platform specific font representation for Linux.
/// The identifier is an absolute path, and the bytes
/// field is the loaded data that can be passed to
/// freetype and azure directly.
#[derive(Deserialize, Serialize)]
pub struct FontTemplateData {
pub bytes: Vec<u8>,
pub identifier: Atom,
}
impl FontTemplateData {
pub fn new(identifier: Atom, font_data: Option<Vec<u8>>) -> FontTemplateData {
let bytes = match font_data {
Some(bytes) => {
bytes
},
None =>
|
,
};
FontTemplateData {
bytes: bytes,
identifier: identifier,
}
}
}
|
{
// TODO: Handle file load failure!
let mut file = File::open(&*identifier).unwrap();
let mut buffer = vec![];
file.read_to_end(&mut buffer).unwrap();
buffer
}
|
conditional_block
|
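A hedged sketch of the same "use the provided bytes, otherwise load them from the identifier path" pattern seen in FontTemplateData::new. It is not the Servo implementation: std::fs::read stands in for the manual File::open/read_to_end, and returning io::Result is one possible answer to the TODO about file-load failure rather than unwrapping.
use std::fs;

fn template_bytes(identifier: &str, font_data: Option<Vec<u8>>) -> std::io::Result<Vec<u8>> {
    match font_data {
        // Bytes were supplied in memory; no file access needed.
        Some(bytes) => Ok(bytes),
        // Otherwise read the file named by the identifier, propagating errors.
        None => fs::read(identifier),
    }
}

fn main() -> std::io::Result<()> {
    let bytes = template_bytes("/nonexistent/font.ttf", Some(vec![0, 1, 2]))?;
    assert_eq!(bytes, vec![0, 1, 2]);
    Ok(())
}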
main.rs
|
extern crate env_logger;
extern crate glutin;
extern crate gfx;
extern crate gfx_window_glutin;
extern crate claymore_game as game;
pub fn
|
() {
use gfx::traits::*;
env_logger::init().unwrap();
println!("Initializing the window...");
let window = glutin::WindowBuilder::new()
.with_title("Claymore".to_string())
.with_vsync()
.with_gl(glutin::GlRequest::Specific(glutin::Api::OpenGl, (3, 2)))
.with_srgb(Some(true))
.build().unwrap();
let (mut stream, mut device, mut factory) = gfx_window_glutin::init(window);
let _ = stream.out.set_gamma(gfx::Gamma::Convert);
println!("Loading the game...");
let mut app = game::App::new(&mut factory);
println!("Rendering...");
let (mut mouse_x, mut mouse_y) = (0, 0);
'main: loop {
// quit when Esc is pressed.
for event in stream.out.window.poll_events() {
use glutin::{ElementState, Event, MouseButton, VirtualKeyCode};
match event {
Event::Closed => break 'main,
Event::KeyboardInput(ElementState::Pressed, _, Some(VirtualKeyCode::Escape)) => break 'main,
Event::KeyboardInput(ElementState::Pressed, _, Some(VirtualKeyCode::W)) =>
app.rotate_camera(-90.0),
Event::KeyboardInput(ElementState::Pressed, _, Some(VirtualKeyCode::Q)) =>
app.rotate_camera(90.0),
Event::MouseMoved((x, y)) => { mouse_x = x; mouse_y = y; },
Event::MouseInput(ElementState::Pressed, MouseButton::Left) => {
let (sx, sy) = stream.out.get_size();
app.mouse_click(mouse_x as f32 / sx as f32, mouse_y as f32 / sy as f32);
},
_ => (),
}
}
app.render(&mut stream);
stream.present(&mut device);
}
println!("Done.");
}
|
main
|
identifier_name
|
main.rs
|
extern crate env_logger;
extern crate glutin;
extern crate gfx;
extern crate gfx_window_glutin;
extern crate claymore_game as game;
pub fn main() {
use gfx::traits::*;
env_logger::init().unwrap();
println!("Initializing the window...");
let window = glutin::WindowBuilder::new()
.with_title("Claymore".to_string())
.with_vsync()
.with_gl(glutin::GlRequest::Specific(glutin::Api::OpenGl, (3, 2)))
.with_srgb(Some(true))
.build().unwrap();
let (mut stream, mut device, mut factory) = gfx_window_glutin::init(window);
let _ = stream.out.set_gamma(gfx::Gamma::Convert);
println!("Loading the game...");
let mut app = game::App::new(&mut factory);
println!("Rendering...");
let (mut mouse_x, mut mouse_y) = (0, 0);
'main: loop {
// quit when Esc is pressed.
for event in stream.out.window.poll_events() {
use glutin::{ElementState, Event, MouseButton, VirtualKeyCode};
match event {
Event::Closed => break 'main,
Event::KeyboardInput(ElementState::Pressed, _, Some(VirtualKeyCode::Escape)) => break 'main,
Event::KeyboardInput(ElementState::Pressed, _, Some(VirtualKeyCode::W)) =>
app.rotate_camera(-90.0),
Event::KeyboardInput(ElementState::Pressed, _, Some(VirtualKeyCode::Q)) =>
app.rotate_camera(90.0),
Event::MouseMoved((x, y)) => { mouse_x = x; mouse_y = y; },
Event::MouseInput(ElementState::Pressed, MouseButton::Left) => {
|
}
}
app.render(&mut stream);
stream.present(&mut device);
}
println!("Done.");
}
|
let (sx, sy) = stream.out.get_size();
app.mouse_click(mouse_x as f32 / sx as f32, mouse_y as f32 / sy as f32);
},
_ => (),
|
random_line_split
|
main.rs
|
extern crate env_logger;
extern crate glutin;
extern crate gfx;
extern crate gfx_window_glutin;
extern crate claymore_game as game;
pub fn main()
|
'main: loop {
// quit when Esc is pressed.
for event in stream.out.window.poll_events() {
use glutin::{ElementState, Event, MouseButton, VirtualKeyCode};
match event {
Event::Closed => break 'main,
Event::KeyboardInput(ElementState::Pressed, _, Some(VirtualKeyCode::Escape)) => break 'main,
Event::KeyboardInput(ElementState::Pressed, _, Some(VirtualKeyCode::W)) =>
app.rotate_camera(-90.0),
Event::KeyboardInput(ElementState::Pressed, _, Some(VirtualKeyCode::Q)) =>
app.rotate_camera(90.0),
Event::MouseMoved((x, y)) => { mouse_x = x; mouse_y = y; },
Event::MouseInput(ElementState::Pressed, MouseButton::Left) => {
let (sx, sy) = stream.out.get_size();
app.mouse_click(mouse_x as f32 / sx as f32, mouse_y as f32 / sy as f32);
},
_ => (),
}
}
app.render(&mut stream);
stream.present(&mut device);
}
println!("Done.");
}
|
{
use gfx::traits::*;
env_logger::init().unwrap();
println!("Initializing the window...");
let window = glutin::WindowBuilder::new()
.with_title("Claymore".to_string())
.with_vsync()
.with_gl(glutin::GlRequest::Specific(glutin::Api::OpenGl, (3, 2)))
.with_srgb(Some(true))
.build().unwrap();
let (mut stream, mut device, mut factory) = gfx_window_glutin::init(window);
let _ = stream.out.set_gamma(gfx::Gamma::Convert);
println!("Loading the game...");
let mut app = game::App::new(&mut factory);
println!("Rendering...");
let (mut mouse_x, mut mouse_y) = (0, 0);
|
identifier_body
|
use_suggestion_placement.rs
|
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-cloudabi no std::path support
macro_rules! y {
() => {}
}
mod m {
pub const A: i32 = 0;
}
mod foo {
#[derive(Debug)]
pub struct Foo;
// test whether the use suggestion isn't
// placed into the expansion of `#[derive(Debug)]
type Bar = Path; //~ ERROR cannot find
}
fn main() {
y!();
let _ = A; //~ ERROR cannot find
foo();
}
fn foo() {
type Dict<K, V> = HashMap<K, V>; //~ ERROR cannot find
}
|
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
random_line_split
|
use_suggestion_placement.rs
|
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-cloudabi no std::path support
macro_rules! y {
() => {}
}
mod m {
pub const A: i32 = 0;
}
mod foo {
#[derive(Debug)]
pub struct
|
;
// test whether the use suggestion isn't
// placed into the expansion of `#[derive(Debug)]
type Bar = Path; //~ ERROR cannot find
}
fn main() {
y!();
let _ = A; //~ ERROR cannot find
foo();
}
fn foo() {
type Dict<K, V> = HashMap<K, V>; //~ ERROR cannot find
}
|
Foo
|
identifier_name
|
use_suggestion_placement.rs
|
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-cloudabi no std::path support
macro_rules! y {
() => {}
}
mod m {
pub const A: i32 = 0;
}
mod foo {
#[derive(Debug)]
pub struct Foo;
// test whether the use suggestion isn't
// placed into the expansion of `#[derive(Debug)]
type Bar = Path; //~ ERROR cannot find
}
fn main() {
y!();
let _ = A; //~ ERROR cannot find
foo();
}
fn foo()
|
{
type Dict<K, V> = HashMap<K, V>; //~ ERROR cannot find
}
|
identifier_body
|
|
mpsc_queue.rs
|
/* Copyright (c) 2010-2011 Dmitry Vyukov. All rights reserved.
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
|
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
* SHALL DMITRY VYUKOV OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are
* those of the authors and should not be interpreted as representing official
* policies, either expressed or implied, of Dmitry Vyukov.
*/
//! A mostly lock-free multi-producer, single consumer queue.
//!
//! This module contains an implementation of a concurrent MPSC queue. This
//! queue can be used to share data between tasks, and is also used as the
//! building block of channels in rust.
//!
//! Note that the current implementation of this queue has a caveat of the `pop`
//! method, and see the method for more information about it. Due to this
//! caveat, this queue may not be appropriate for all use-cases.
#![experimental]
// http://www.1024cores.net/home/lock-free-algorithms
// /queues/non-intrusive-mpsc-node-based-queue
pub use self::PopResult::*;
use core::prelude::*;
use alloc::boxed::Box;
use core::mem;
use core::cell::UnsafeCell;
use sync::atomic::{AtomicPtr, Release, Acquire, AcqRel, Relaxed};
/// A result of the `pop` function.
pub enum PopResult<T> {
/// Some data has been popped
Data(T),
/// The queue is empty
Empty,
/// The queue is in an inconsistent state. Popping data should succeed, but
/// some pushers have yet to make enough progress in order allow a pop to
/// succeed. It is recommended that a pop() occur "in the near future" in
/// order to see if the sender has made progress or not
Inconsistent,
}
struct Node<T> {
next: AtomicPtr<Node<T>>,
value: Option<T>,
}
/// The multi-producer single-consumer structure. This is not cloneable, but it
/// may be safely shared so long as it is guaranteed that there is only one
/// popper at a time (many pushers are allowed).
pub struct Queue<T> {
head: AtomicPtr<Node<T>>,
tail: UnsafeCell<*mut Node<T>>,
}
impl<T> Node<T> {
unsafe fn new(v: Option<T>) -> *mut Node<T> {
mem::transmute(box Node {
next: AtomicPtr::new(0 as *mut Node<T>),
value: v,
})
}
}
impl<T: Send> Queue<T> {
/// Creates a new queue that is safe to share among multiple producers and
/// one consumer.
pub fn new() -> Queue<T> {
let stub = unsafe { Node::new(None) };
Queue {
head: AtomicPtr::new(stub),
tail: UnsafeCell::new(stub),
}
}
/// Pushes a new value onto this queue.
pub fn push(&self, t: T) {
unsafe {
let n = Node::new(Some(t));
let prev = self.head.swap(n, AcqRel);
(*prev).next.store(n, Release);
}
}
/// Pops some data from this queue.
///
/// Note that the current implementation means that this function cannot
/// return `Option<T>`. It is possible for this queue to be in an
/// inconsistent state where many pushes have succeeded and completely
/// finished, but pops cannot return `Some(t)`. This inconsistent state
/// happens when a pusher is pre-empted at an inopportune moment.
///
/// This inconsistent state means that this queue does indeed have data, but
/// it does not currently have access to it at this time.
pub fn pop(&self) -> PopResult<T> {
unsafe {
let tail = *self.tail.get();
let next = (*tail).next.load(Acquire);
if !next.is_null() {
*self.tail.get() = next;
assert!((*tail).value.is_none());
assert!((*next).value.is_some());
let ret = (*next).value.take().unwrap();
let _: Box<Node<T>> = mem::transmute(tail);
return Data(ret);
}
if self.head.load(Acquire) == tail {Empty} else {Inconsistent}
}
}
}
#[unsafe_destructor]
impl<T: Send> Drop for Queue<T> {
fn drop(&mut self) {
unsafe {
let mut cur = *self.tail.get();
while !cur.is_null() {
let next = (*cur).next.load(Relaxed);
let _: Box<Node<T>> = mem::transmute(cur);
cur = next;
}
}
}
}
#[cfg(test)]
mod tests {
use prelude::*;
use alloc::arc::Arc;
use super::{Queue, Data, Empty, Inconsistent};
#[test]
fn test_full() {
let q = Queue::new();
q.push(box 1i);
q.push(box 2i);
}
#[test]
fn test() {
let nthreads = 8u;
let nmsgs = 1000u;
let q = Queue::new();
match q.pop() {
Empty => {}
Inconsistent | Data(..) => panic!()
}
let (tx, rx) = channel();
let q = Arc::new(q);
for _ in range(0, nthreads) {
let tx = tx.clone();
let q = q.clone();
spawn(proc() {
for i in range(0, nmsgs) {
q.push(i);
}
tx.send(());
});
}
let mut i = 0u;
while i < nthreads * nmsgs {
match q.pop() {
Empty | Inconsistent => {},
Data(_) => { i += 1 }
}
}
drop(tx);
for _ in range(0, nthreads) {
rx.recv();
}
}
}
|
*
* THIS SOFTWARE IS PROVIDED BY DMITRY VYUKOV "AS IS" AND ANY EXPRESS OR IMPLIED
|
random_line_split
|
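A standalone sketch of the consumer-side pattern the pop documentation above asks for: when the queue reports Inconsistent, a pusher was pre-empted mid-push, so the single consumer should simply try again shortly. The PopResult stub below only mirrors the shape of the enum in the record; it is illustrative and not tied to the Queue type itself.
enum PopResult<T> { Data(T), Empty, Inconsistent }

// Retry while the transient Inconsistent state is reported, yielding between attempts.
fn pop_retrying<T>(mut pop: impl FnMut() -> PopResult<T>) -> Option<T> {
    loop {
        match pop() {
            PopResult::Data(t) => return Some(t),
            PopResult::Empty => return None,
            PopResult::Inconsistent => std::thread::yield_now(),
        }
    }
}

fn main() {
    // Drive the helper with a canned sequence to show the control flow.
    let mut script = vec![PopResult::Inconsistent, PopResult::Data(7)].into_iter();
    let got = pop_retrying(|| script.next().unwrap_or(PopResult::Empty));
    assert_eq!(got, Some(7));
}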
mpsc_queue.rs
|
/* Copyright (c) 2010-2011 Dmitry Vyukov. All rights reserved.
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY DMITRY VYUKOV "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
* SHALL DMITRY VYUKOV OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are
* those of the authors and should not be interpreted as representing official
* policies, either expressed or implied, of Dmitry Vyukov.
*/
//! A mostly lock-free multi-producer, single consumer queue.
//!
//! This module contains an implementation of a concurrent MPSC queue. This
//! queue can be used to share data between tasks, and is also used as the
//! building block of channels in rust.
//!
//! Note that the current implementation of this queue has a caveat of the `pop`
//! method, and see the method for more information about it. Due to this
//! caveat, this queue may not be appropriate for all use-cases.
#![experimental]
// http://www.1024cores.net/home/lock-free-algorithms
// /queues/non-intrusive-mpsc-node-based-queue
pub use self::PopResult::*;
use core::prelude::*;
use alloc::boxed::Box;
use core::mem;
use core::cell::UnsafeCell;
use sync::atomic::{AtomicPtr, Release, Acquire, AcqRel, Relaxed};
/// A result of the `pop` function.
pub enum PopResult<T> {
/// Some data has been popped
Data(T),
/// The queue is empty
Empty,
/// The queue is in an inconsistent state. Popping data should succeed, but
/// some pushers have yet to make enough progress in order allow a pop to
/// succeed. It is recommended that a pop() occur "in the near future" in
/// order to see if the sender has made progress or not
Inconsistent,
}
struct Node<T> {
next: AtomicPtr<Node<T>>,
value: Option<T>,
}
/// The multi-producer single-consumer structure. This is not cloneable, but it
/// may be safely shared so long as it is guaranteed that there is only one
/// popper at a time (many pushers are allowed).
pub struct Queue<T> {
head: AtomicPtr<Node<T>>,
tail: UnsafeCell<*mut Node<T>>,
}
impl<T> Node<T> {
unsafe fn new(v: Option<T>) -> *mut Node<T> {
mem::transmute(box Node {
next: AtomicPtr::new(0 as *mut Node<T>),
value: v,
})
}
}
impl<T: Send> Queue<T> {
/// Creates a new queue that is safe to share among multiple producers and
/// one consumer.
pub fn new() -> Queue<T> {
let stub = unsafe { Node::new(None) };
Queue {
head: AtomicPtr::new(stub),
tail: UnsafeCell::new(stub),
}
}
/// Pushes a new value onto this queue.
pub fn push(&self, t: T) {
unsafe {
let n = Node::new(Some(t));
let prev = self.head.swap(n, AcqRel);
(*prev).next.store(n, Release);
}
}
/// Pops some data from this queue.
///
/// Note that the current implementation means that this function cannot
/// return `Option<T>`. It is possible for this queue to be in an
/// inconsistent state where many pushes have succeeded and completely
/// finished, but pops cannot return `Some(t)`. This inconsistent state
/// happens when a pusher is pre-empted at an inopportune moment.
///
/// This inconsistent state means that this queue does indeed have data, but
/// it does not currently have access to it at this time.
pub fn pop(&self) -> PopResult<T> {
unsafe {
let tail = *self.tail.get();
let next = (*tail).next.load(Acquire);
if !next.is_null() {
*self.tail.get() = next;
assert!((*tail).value.is_none());
assert!((*next).value.is_some());
let ret = (*next).value.take().unwrap();
let _: Box<Node<T>> = mem::transmute(tail);
return Data(ret);
}
if self.head.load(Acquire) == tail {Empty} else {Inconsistent}
}
}
}
#[unsafe_destructor]
impl<T: Send> Drop for Queue<T> {
fn drop(&mut self) {
unsafe {
let mut cur = *self.tail.get();
while !cur.is_null() {
let next = (*cur).next.load(Relaxed);
let _: Box<Node<T>> = mem::transmute(cur);
cur = next;
}
}
}
}
#[cfg(test)]
mod tests {
use prelude::*;
use alloc::arc::Arc;
use super::{Queue, Data, Empty, Inconsistent};
#[test]
fn test_full()
|
#[test]
fn test() {
let nthreads = 8u;
let nmsgs = 1000u;
let q = Queue::new();
match q.pop() {
Empty => {}
Inconsistent | Data(..) => panic!()
}
let (tx, rx) = channel();
let q = Arc::new(q);
for _ in range(0, nthreads) {
let tx = tx.clone();
let q = q.clone();
spawn(proc() {
for i in range(0, nmsgs) {
q.push(i);
}
tx.send(());
});
}
let mut i = 0u;
while i < nthreads * nmsgs {
match q.pop() {
Empty | Inconsistent => {},
Data(_) => { i += 1 }
}
}
drop(tx);
for _ in range(0, nthreads) {
rx.recv();
}
}
}
|
{
let q = Queue::new();
q.push(box 1i);
q.push(box 2i);
}
|
identifier_body
|
mpsc_queue.rs
|
/* Copyright (c) 2010-2011 Dmitry Vyukov. All rights reserved.
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY DMITRY VYUKOV "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
* SHALL DMITRY VYUKOV OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are
* those of the authors and should not be interpreted as representing official
* policies, either expressed or implied, of Dmitry Vyukov.
*/
//! A mostly lock-free multi-producer, single consumer queue.
//!
//! This module contains an implementation of a concurrent MPSC queue. This
//! queue can be used to share data between tasks, and is also used as the
//! building block of channels in rust.
//!
//! Note that the current implementation of this queue has a caveat of the `pop`
//! method, and see the method for more information about it. Due to this
//! caveat, this queue may not be appropriate for all use-cases.
#![experimental]
// http://www.1024cores.net/home/lock-free-algorithms
// /queues/non-intrusive-mpsc-node-based-queue
pub use self::PopResult::*;
use core::prelude::*;
use alloc::boxed::Box;
use core::mem;
use core::cell::UnsafeCell;
use sync::atomic::{AtomicPtr, Release, Acquire, AcqRel, Relaxed};
/// A result of the `pop` function.
pub enum PopResult<T> {
/// Some data has been popped
Data(T),
/// The queue is empty
Empty,
/// The queue is in an inconsistent state. Popping data should succeed, but
/// some pushers have yet to make enough progress in order allow a pop to
/// succeed. It is recommended that a pop() occur "in the near future" in
/// order to see if the sender has made progress or not
Inconsistent,
}
struct Node<T> {
next: AtomicPtr<Node<T>>,
value: Option<T>,
}
/// The multi-producer single-consumer structure. This is not cloneable, but it
/// may be safely shared so long as it is guaranteed that there is only one
/// popper at a time (many pushers are allowed).
pub struct Queue<T> {
head: AtomicPtr<Node<T>>,
tail: UnsafeCell<*mut Node<T>>,
}
impl<T> Node<T> {
unsafe fn new(v: Option<T>) -> *mut Node<T> {
mem::transmute(box Node {
next: AtomicPtr::new(0 as *mut Node<T>),
value: v,
})
}
}
impl<T: Send> Queue<T> {
/// Creates a new queue that is safe to share among multiple producers and
/// one consumer.
pub fn new() -> Queue<T> {
let stub = unsafe { Node::new(None) };
Queue {
head: AtomicPtr::new(stub),
tail: UnsafeCell::new(stub),
}
}
/// Pushes a new value onto this queue.
pub fn push(&self, t: T) {
unsafe {
let n = Node::new(Some(t));
let prev = self.head.swap(n, AcqRel);
(*prev).next.store(n, Release);
}
}
/// Pops some data from this queue.
///
/// Note that the current implementation means that this function cannot
/// return `Option<T>`. It is possible for this queue to be in an
/// inconsistent state where many pushes have succeeded and completely
/// finished, but pops cannot return `Some(t)`. This inconsistent state
/// happens when a pusher is pre-empted at an inopportune moment.
///
/// This inconsistent state means that this queue does indeed have data, but
/// it does not currently have access to it at this time.
pub fn pop(&self) -> PopResult<T> {
unsafe {
let tail = *self.tail.get();
let next = (*tail).next.load(Acquire);
if !next.is_null() {
*self.tail.get() = next;
assert!((*tail).value.is_none());
assert!((*next).value.is_some());
let ret = (*next).value.take().unwrap();
let _: Box<Node<T>> = mem::transmute(tail);
return Data(ret);
}
if self.head.load(Acquire) == tail {Empty} else {Inconsistent}
}
}
}
#[unsafe_destructor]
impl<T: Send> Drop for Queue<T> {
fn drop(&mut self) {
unsafe {
let mut cur = *self.tail.get();
while !cur.is_null() {
let next = (*cur).next.load(Relaxed);
let _: Box<Node<T>> = mem::transmute(cur);
cur = next;
}
}
}
}
#[cfg(test)]
mod tests {
use prelude::*;
use alloc::arc::Arc;
use super::{Queue, Data, Empty, Inconsistent};
#[test]
fn test_full() {
let q = Queue::new();
q.push(box 1i);
q.push(box 2i);
}
#[test]
fn test() {
let nthreads = 8u;
let nmsgs = 1000u;
let q = Queue::new();
match q.pop() {
Empty => {}
Inconsistent | Data(..) => panic!()
}
let (tx, rx) = channel();
let q = Arc::new(q);
for _ in range(0, nthreads) {
let tx = tx.clone();
let q = q.clone();
spawn(proc() {
for i in range(0, nmsgs) {
q.push(i);
}
tx.send(());
});
}
let mut i = 0u;
while i < nthreads * nmsgs {
match q.pop() {
Empty | Inconsistent => {},
Data(_) =>
|
}
}
drop(tx);
for _ in range(0, nthreads) {
rx.recv();
}
}
}
|
{ i += 1 }
|
conditional_block
|
mpsc_queue.rs
|
/* Copyright (c) 2010-2011 Dmitry Vyukov. All rights reserved.
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY DMITRY VYUKOV "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
* SHALL DMITRY VYUKOV OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are
* those of the authors and should not be interpreted as representing official
* policies, either expressed or implied, of Dmitry Vyukov.
*/
//! A mostly lock-free multi-producer, single consumer queue.
//!
//! This module contains an implementation of a concurrent MPSC queue. This
//! queue can be used to share data between tasks, and is also used as the
//! building block of channels in rust.
//!
//! Note that the current implementation of this queue has a caveat of the `pop`
//! method, and see the method for more information about it. Due to this
//! caveat, this queue may not be appropriate for all use-cases.
#![experimental]
// http://www.1024cores.net/home/lock-free-algorithms
// /queues/non-intrusive-mpsc-node-based-queue
pub use self::PopResult::*;
use core::prelude::*;
use alloc::boxed::Box;
use core::mem;
use core::cell::UnsafeCell;
use sync::atomic::{AtomicPtr, Release, Acquire, AcqRel, Relaxed};
/// A result of the `pop` function.
pub enum PopResult<T> {
/// Some data has been popped
Data(T),
/// The queue is empty
Empty,
/// The queue is in an inconsistent state. Popping data should succeed, but
/// some pushers have yet to make enough progress in order allow a pop to
/// succeed. It is recommended that a pop() occur "in the near future" in
/// order to see if the sender has made progress or not
Inconsistent,
}
struct Node<T> {
next: AtomicPtr<Node<T>>,
value: Option<T>,
}
/// The multi-producer single-consumer structure. This is not cloneable, but it
/// may be safely shared so long as it is guaranteed that there is only one
/// popper at a time (many pushers are allowed).
pub struct Queue<T> {
head: AtomicPtr<Node<T>>,
tail: UnsafeCell<*mut Node<T>>,
}
impl<T> Node<T> {
unsafe fn new(v: Option<T>) -> *mut Node<T> {
mem::transmute(box Node {
next: AtomicPtr::new(0 as *mut Node<T>),
value: v,
})
}
}
impl<T: Send> Queue<T> {
/// Creates a new queue that is safe to share among multiple producers and
/// one consumer.
pub fn new() -> Queue<T> {
let stub = unsafe { Node::new(None) };
Queue {
head: AtomicPtr::new(stub),
tail: UnsafeCell::new(stub),
}
}
/// Pushes a new value onto this queue.
pub fn push(&self, t: T) {
unsafe {
let n = Node::new(Some(t));
let prev = self.head.swap(n, AcqRel);
(*prev).next.store(n, Release);
}
}
/// Pops some data from this queue.
///
/// Note that the current implementation means that this function cannot
/// return `Option<T>`. It is possible for this queue to be in an
/// inconsistent state where many pushes have succeeded and completely
/// finished, but pops cannot return `Some(t)`. This inconsistent state
/// happens when a pusher is pre-empted at an inopportune moment.
///
/// This inconsistent state means that this queue does indeed have data, but
/// it does not currently have access to it at this time.
pub fn pop(&self) -> PopResult<T> {
unsafe {
let tail = *self.tail.get();
let next = (*tail).next.load(Acquire);
if !next.is_null() {
*self.tail.get() = next;
assert!((*tail).value.is_none());
assert!((*next).value.is_some());
let ret = (*next).value.take().unwrap();
let _: Box<Node<T>> = mem::transmute(tail);
return Data(ret);
}
if self.head.load(Acquire) == tail {Empty} else {Inconsistent}
}
}
}
#[unsafe_destructor]
impl<T: Send> Drop for Queue<T> {
fn
|
(&mut self) {
unsafe {
let mut cur = *self.tail.get();
while !cur.is_null() {
let next = (*cur).next.load(Relaxed);
let _: Box<Node<T>> = mem::transmute(cur);
cur = next;
}
}
}
}
#[cfg(test)]
mod tests {
use prelude::*;
use alloc::arc::Arc;
use super::{Queue, Data, Empty, Inconsistent};
#[test]
fn test_full() {
let q = Queue::new();
q.push(box 1i);
q.push(box 2i);
}
#[test]
fn test() {
let nthreads = 8u;
let nmsgs = 1000u;
let q = Queue::new();
match q.pop() {
Empty => {}
Inconsistent | Data(..) => panic!()
}
let (tx, rx) = channel();
let q = Arc::new(q);
for _ in range(0, nthreads) {
let tx = tx.clone();
let q = q.clone();
spawn(proc() {
for i in range(0, nmsgs) {
q.push(i);
}
tx.send(());
});
}
let mut i = 0u;
while i < nthreads * nmsgs {
match q.pop() {
Empty | Inconsistent => {},
Data(_) => { i += 1 }
}
}
drop(tx);
for _ in range(0, nthreads) {
rx.recv();
}
}
}
|
drop
|
identifier_name
|
optimize_dead_defs.rs
|
use std::collections::{HashSet};
use grit;
pub fn optimize(mut prog: grit::ProgDef) -> grit::ProgDef {
let info = collect_prog_info(&prog);
prog.fun_defs = prog.fun_defs.into_iter().filter(|def| {
info.used_funs.contains(&def.name)
}).collect();
prog.obj_defs = prog.obj_defs.into_iter().filter(|def| {
info.used_objs.contains(&def.name)
}).collect();
prog
}
struct ProgInfo {
used_funs: HashSet<grit::FunName>,
used_objs: HashSet<grit::ObjName>,
}
fn collect_prog_info(prog: &grit::ProgDef) -> ProgInfo {
let mut info = ProgInfo {
used_funs: HashSet::new(),
used_objs: HashSet::new(),
};
info.used_funs.insert(prog.main_fun.clone());
let mut collected_funs = HashSet::new();
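    // Iterate to a fixed point: each pass scans the bodies of functions that
    // are marked used but not yet collected, which can mark further functions
    // and objects as used. The loop ends once no new names are discovered.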
while info.used_funs.len() > collected_funs.len() {
for fun_def in prog.fun_defs.iter() {
let is_used = info.used_funs.contains(&fun_def.name);
let is_collected = collected_funs.contains(&fun_def.name);
            if is_used && !is_collected {
collect_fun(&mut info, fun_def);
collected_funs.insert(fun_def.name.clone());
}
}
}
info
}
fn collect_fun(info: &mut ProgInfo, fun_def: &grit::FunDef) {
for block in fun_def.blocks.iter() {
for op in block.ops.iter() {
collect_op(info, op);
}
collect_jump(info, &block.jump);
}
}
|
for arg in args.iter() {
collect_val(info, arg);
}
},
grit::Op::ExternCall(_, _, ref args) =>
for arg in args.iter() {
collect_val(info, arg);
},
grit::Op::AllocClos(ref closs) =>
for &(_, ref clos_name, ref captures) in closs.iter() {
info.used_funs.insert(clos_name.clone());
for capture in captures.iter() {
collect_val(info, capture);
}
},
grit::Op::Assign(ref var_vals) =>
for &(_, ref val) in var_vals.iter() {
collect_val(info, val);
},
}
}
fn collect_jump(info: &mut ProgInfo, jump: &grit::Jump) {
match *jump {
grit::Jump::Goto(_) => {},
grit::Jump::TailCall(ref callee, ref args) => {
collect_callee(info, callee);
for arg in args.iter() {
collect_val(info, arg);
}
},
grit::Jump::Return(ref val) |
grit::Jump::Branch(grit::Boolval::IsTrue(ref val), _, _) |
grit::Jump::Branch(grit::Boolval::IsFalse(ref val), _, _) =>
collect_val(info, val),
}
}
fn collect_val(info: &mut ProgInfo, val: &grit::Val) {
match *val {
grit::Val::Combinator(ref fun_name) => {
info.used_funs.insert(fun_name.clone());
},
grit::Val::Obj(ref obj_name) => {
info.used_objs.insert(obj_name.clone());
},
grit::Val::Var(_) |
grit::Val::Arg(_) |
grit::Val::Capture(_) |
grit::Val::Int(_) |
grit::Val::True |
grit::Val::False |
grit::Val::Undefined => (),
}
}
fn collect_callee(info: &mut ProgInfo, callee: &grit::Callee) {
match *callee {
grit::Callee::Combinator(ref fun_name) => {
info.used_funs.insert(fun_name.clone());
},
grit::Callee::KnownClosure(ref fun_name, ref val) => {
info.used_funs.insert(fun_name.clone());
collect_val(info, val);
},
grit::Callee::Unknown(ref val) =>
collect_val(info, val),
}
}
|
fn collect_op(info: &mut ProgInfo, op: &grit::Op) {
match *op {
grit::Op::Call(_, ref callee, ref args) => {
collect_callee(info, callee);
|
random_line_split
|
optimize_dead_defs.rs
|
use std::collections::{HashSet};
use grit;
pub fn optimize(mut prog: grit::ProgDef) -> grit::ProgDef {
let info = collect_prog_info(&prog);
prog.fun_defs = prog.fun_defs.into_iter().filter(|def| {
info.used_funs.contains(&def.name)
}).collect();
prog.obj_defs = prog.obj_defs.into_iter().filter(|def| {
info.used_objs.contains(&def.name)
}).collect();
prog
}
struct ProgInfo {
used_funs: HashSet<grit::FunName>,
used_objs: HashSet<grit::ObjName>,
}
fn collect_prog_info(prog: &grit::ProgDef) -> ProgInfo {
let mut info = ProgInfo {
used_funs: HashSet::new(),
used_objs: HashSet::new(),
};
info.used_funs.insert(prog.main_fun.clone());
let mut collected_funs = HashSet::new();
while info.used_funs.len() > collected_funs.len() {
for fun_def in prog.fun_defs.iter() {
let is_used = info.used_funs.contains(&fun_def.name);
let is_collected = collected_funs.contains(&fun_def.name);
            if is_used && !is_collected {
collect_fun(&mut info, fun_def);
collected_funs.insert(fun_def.name.clone());
}
}
}
info
}
fn collect_fun(info: &mut ProgInfo, fun_def: &grit::FunDef) {
for block in fun_def.blocks.iter() {
for op in block.ops.iter() {
collect_op(info, op);
}
collect_jump(info, &block.jump);
}
}
fn collect_op(info: &mut ProgInfo, op: &grit::Op)
|
for &(_, ref val) in var_vals.iter() {
collect_val(info, val);
},
}
}
fn collect_jump(info: &mut ProgInfo, jump: &grit::Jump) {
match *jump {
grit::Jump::Goto(_) => {},
grit::Jump::TailCall(ref callee, ref args) => {
collect_callee(info, callee);
for arg in args.iter() {
collect_val(info, arg);
}
},
grit::Jump::Return(ref val) |
grit::Jump::Branch(grit::Boolval::IsTrue(ref val), _, _) |
grit::Jump::Branch(grit::Boolval::IsFalse(ref val), _, _) =>
collect_val(info, val),
}
}
fn collect_val(info: &mut ProgInfo, val: &grit::Val) {
match *val {
grit::Val::Combinator(ref fun_name) => {
info.used_funs.insert(fun_name.clone());
},
grit::Val::Obj(ref obj_name) => {
info.used_objs.insert(obj_name.clone());
},
grit::Val::Var(_) |
grit::Val::Arg(_) |
grit::Val::Capture(_) |
grit::Val::Int(_) |
grit::Val::True |
grit::Val::False |
grit::Val::Undefined => (),
}
}
fn collect_callee(info: &mut ProgInfo, callee: &grit::Callee) {
match *callee {
grit::Callee::Combinator(ref fun_name) => {
info.used_funs.insert(fun_name.clone());
},
grit::Callee::KnownClosure(ref fun_name, ref val) => {
info.used_funs.insert(fun_name.clone());
collect_val(info, val);
},
grit::Callee::Unknown(ref val) =>
collect_val(info, val),
}
}
|
{
match *op {
grit::Op::Call(_, ref callee, ref args) => {
collect_callee(info, callee);
for arg in args.iter() {
collect_val(info, arg);
}
},
grit::Op::ExternCall(_, _, ref args) =>
for arg in args.iter() {
collect_val(info, arg);
},
grit::Op::AllocClos(ref closs) =>
for &(_, ref clos_name, ref captures) in closs.iter() {
info.used_funs.insert(clos_name.clone());
for capture in captures.iter() {
collect_val(info, capture);
}
},
grit::Op::Assign(ref var_vals) =>
|
identifier_body
|
optimize_dead_defs.rs
|
use std::collections::{HashSet};
use grit;
pub fn
|
(mut prog: grit::ProgDef) -> grit::ProgDef {
let info = collect_prog_info(&prog);
prog.fun_defs = prog.fun_defs.into_iter().filter(|def| {
info.used_funs.contains(&def.name)
}).collect();
prog.obj_defs = prog.obj_defs.into_iter().filter(|def| {
info.used_objs.contains(&def.name)
}).collect();
prog
}
struct ProgInfo {
used_funs: HashSet<grit::FunName>,
used_objs: HashSet<grit::ObjName>,
}
fn collect_prog_info(prog: &grit::ProgDef) -> ProgInfo {
let mut info = ProgInfo {
used_funs: HashSet::new(),
used_objs: HashSet::new(),
};
info.used_funs.insert(prog.main_fun.clone());
let mut collected_funs = HashSet::new();
while info.used_funs.len() > collected_funs.len() {
for fun_def in prog.fun_defs.iter() {
let is_used = info.used_funs.contains(&fun_def.name);
let is_collected = collected_funs.contains(&fun_def.name);
            if is_used && !is_collected {
collect_fun(&mut info, fun_def);
collected_funs.insert(fun_def.name.clone());
}
}
}
info
}
fn collect_fun(info: &mut ProgInfo, fun_def: &grit::FunDef) {
for block in fun_def.blocks.iter() {
for op in block.ops.iter() {
collect_op(info, op);
}
collect_jump(info, &block.jump);
}
}
fn collect_op(info: &mut ProgInfo, op: &grit::Op) {
match *op {
grit::Op::Call(_, ref callee, ref args) => {
collect_callee(info, callee);
for arg in args.iter() {
collect_val(info, arg);
}
},
grit::Op::ExternCall(_, _, ref args) =>
for arg in args.iter() {
collect_val(info, arg);
},
grit::Op::AllocClos(ref closs) =>
for &(_, ref clos_name, ref captures) in closs.iter() {
info.used_funs.insert(clos_name.clone());
for capture in captures.iter() {
collect_val(info, capture);
}
},
grit::Op::Assign(ref var_vals) =>
for &(_, ref val) in var_vals.iter() {
collect_val(info, val);
},
}
}
fn collect_jump(info: &mut ProgInfo, jump: &grit::Jump) {
match *jump {
grit::Jump::Goto(_) => {},
grit::Jump::TailCall(ref callee, ref args) => {
collect_callee(info, callee);
for arg in args.iter() {
collect_val(info, arg);
}
},
grit::Jump::Return(ref val) |
grit::Jump::Branch(grit::Boolval::IsTrue(ref val), _, _) |
grit::Jump::Branch(grit::Boolval::IsFalse(ref val), _, _) =>
collect_val(info, val),
}
}
fn collect_val(info: &mut ProgInfo, val: &grit::Val) {
match *val {
grit::Val::Combinator(ref fun_name) => {
info.used_funs.insert(fun_name.clone());
},
grit::Val::Obj(ref obj_name) => {
info.used_objs.insert(obj_name.clone());
},
grit::Val::Var(_) |
grit::Val::Arg(_) |
grit::Val::Capture(_) |
grit::Val::Int(_) |
grit::Val::True |
grit::Val::False |
grit::Val::Undefined => (),
}
}
fn collect_callee(info: &mut ProgInfo, callee: &grit::Callee) {
match *callee {
grit::Callee::Combinator(ref fun_name) => {
info.used_funs.insert(fun_name.clone());
},
grit::Callee::KnownClosure(ref fun_name, ref val) => {
info.used_funs.insert(fun_name.clone());
collect_val(info, val);
},
grit::Callee::Unknown(ref val) =>
collect_val(info, val),
}
}
|
optimize
|
identifier_name
|
optimize_dead_defs.rs
|
use std::collections::{HashSet};
use grit;
pub fn optimize(mut prog: grit::ProgDef) -> grit::ProgDef {
let info = collect_prog_info(&prog);
prog.fun_defs = prog.fun_defs.into_iter().filter(|def| {
info.used_funs.contains(&def.name)
}).collect();
prog.obj_defs = prog.obj_defs.into_iter().filter(|def| {
info.used_objs.contains(&def.name)
}).collect();
prog
}
struct ProgInfo {
used_funs: HashSet<grit::FunName>,
used_objs: HashSet<grit::ObjName>,
}
fn collect_prog_info(prog: &grit::ProgDef) -> ProgInfo {
let mut info = ProgInfo {
used_funs: HashSet::new(),
used_objs: HashSet::new(),
};
info.used_funs.insert(prog.main_fun.clone());
let mut collected_funs = HashSet::new();
while info.used_funs.len() > collected_funs.len() {
for fun_def in prog.fun_defs.iter() {
let is_used = info.used_funs.contains(&fun_def.name);
let is_collected = collected_funs.contains(&fun_def.name);
            if is_used && !is_collected {
collect_fun(&mut info, fun_def);
collected_funs.insert(fun_def.name.clone());
}
}
}
info
}
fn collect_fun(info: &mut ProgInfo, fun_def: &grit::FunDef) {
for block in fun_def.blocks.iter() {
for op in block.ops.iter() {
collect_op(info, op);
}
collect_jump(info, &block.jump);
}
}
fn collect_op(info: &mut ProgInfo, op: &grit::Op) {
match *op {
grit::Op::Call(_, ref callee, ref args) =>
|
,
grit::Op::ExternCall(_, _, ref args) =>
for arg in args.iter() {
collect_val(info, arg);
},
grit::Op::AllocClos(ref closs) =>
for &(_, ref clos_name, ref captures) in closs.iter() {
info.used_funs.insert(clos_name.clone());
for capture in captures.iter() {
collect_val(info, capture);
}
},
grit::Op::Assign(ref var_vals) =>
for &(_, ref val) in var_vals.iter() {
collect_val(info, val);
},
}
}
fn collect_jump(info: &mut ProgInfo, jump: &grit::Jump) {
match *jump {
grit::Jump::Goto(_) => {},
grit::Jump::TailCall(ref callee, ref args) => {
collect_callee(info, callee);
for arg in args.iter() {
collect_val(info, arg);
}
},
grit::Jump::Return(ref val) |
grit::Jump::Branch(grit::Boolval::IsTrue(ref val), _, _) |
grit::Jump::Branch(grit::Boolval::IsFalse(ref val), _, _) =>
collect_val(info, val),
}
}
fn collect_val(info: &mut ProgInfo, val: &grit::Val) {
match *val {
grit::Val::Combinator(ref fun_name) => {
info.used_funs.insert(fun_name.clone());
},
grit::Val::Obj(ref obj_name) => {
info.used_objs.insert(obj_name.clone());
},
grit::Val::Var(_) |
grit::Val::Arg(_) |
grit::Val::Capture(_) |
grit::Val::Int(_) |
grit::Val::True |
grit::Val::False |
grit::Val::Undefined => (),
}
}
fn collect_callee(info: &mut ProgInfo, callee: &grit::Callee) {
match *callee {
grit::Callee::Combinator(ref fun_name) => {
info.used_funs.insert(fun_name.clone());
},
grit::Callee::KnownClosure(ref fun_name, ref val) => {
info.used_funs.insert(fun_name.clone());
collect_val(info, val);
},
grit::Callee::Unknown(ref val) =>
collect_val(info, val),
}
}
|
{
collect_callee(info, callee);
for arg in args.iter() {
collect_val(info, arg);
}
}
|
conditional_block
|
util.rs
|
use std::fs::{File, OpenOptions};
use std::hash::Hasher;
use std::io::{Result, Write};
use std::path::Path;
use std::result::Result::Ok;
use std::sync::atomic::{AtomicUsize, Ordering};
use twox_hash::XxHash32 as TwoXhash32;
pub struct XxHash32(TwoXhash32);
impl XxHash32 {
pub fn new() -> XxHash32
|
pub fn update(&mut self, buf: &[u8]) {
self.0.write(buf);
}
pub fn get(&self) -> u32 {
self.0.finish() as u32
}
}
impl Write for XxHash32 {
fn write(&mut self, buf: &[u8]) -> Result<usize> {
self.update(buf);
Ok(buf.len())
}
fn flush(&mut self) -> Result<()> {
Ok(())
}
}
pub fn xxhash32(buf: &[u8]) -> u32 {
let mut hash = TwoXhash32::with_seed(0);
hash.write(buf);
hash.finish() as u32
}
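// Illustrative usage (not part of the original file): hashing a buffer in one
// shot and feeding it incrementally through the `XxHash32` wrapper should
// yield the same 32-bit value, since both paths use seed 0. The module name
// below is chosen for this example only.
#[cfg(test)]
mod xxhash32_usage {
    use super::{xxhash32, XxHash32};

    #[test]
    fn streaming_matches_one_shot() {
        let data = b"hello world";
        let mut hasher = XxHash32::new();
        hasher.update(&data[..5]);
        hasher.update(&data[5..]);
        assert_eq!(hasher.get(), xxhash32(data));
    }
}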
pub fn get_file_handle(path: &Path, write: bool) -> Result<File> {
if write {
OpenOptions::new()
.write(true)
.create(true)
.truncate(true)
.open(path)
} else {
OpenOptions::new().read(true).open(path)
}
}
pub struct Sequence(AtomicUsize);
impl Sequence {
pub fn new(id: u32) -> Sequence {
Sequence(AtomicUsize::new(id as usize))
}
pub fn increment(&self) -> u32 {
self.0.fetch_add(1, Ordering::SeqCst) as u32 + 1
}
}
pub fn human_readable_byte_count(bytes: usize, si: bool) -> String {
let unit = if si { 1000 } else { 1024 };
if bytes < unit {
return format!("{} B", bytes);
}
let exp = ((bytes as f64).ln() / (unit as f64).ln()) as usize;
let units = if si { "kMGTPE" } else { "KMGTPE" };
let pre = format!(
"{}{}",
units.chars().nth(exp - 1).unwrap(),
if si { "" } else { "i" }
);
format!("{:.1} {}B", bytes / unit.pow(exp as u32), pre)
}
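// Example outputs for the function above, assuming the floating-point division
// in the final format! call (illustrative, not part of the original file):
//   human_readable_byte_count(999, true)       => "999 B"
//   human_readable_byte_count(1024, false)     => "1.0 KiB"
//   human_readable_byte_count(1_500_000, true) => "1.5 MB"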
|
{
XxHash32(TwoXhash32::with_seed(0))
}
|
identifier_body
|
util.rs
|
use std::fs::{File, OpenOptions};
use std::hash::Hasher;
use std::io::{Result, Write};
use std::path::Path;
use std::result::Result::Ok;
use std::sync::atomic::{AtomicUsize, Ordering};
use twox_hash::XxHash32 as TwoXhash32;
pub struct XxHash32(TwoXhash32);
impl XxHash32 {
pub fn new() -> XxHash32 {
XxHash32(TwoXhash32::with_seed(0))
}
pub fn update(&mut self, buf: &[u8]) {
self.0.write(buf);
}
pub fn get(&self) -> u32 {
self.0.finish() as u32
}
}
impl Write for XxHash32 {
fn write(&mut self, buf: &[u8]) -> Result<usize> {
self.update(buf);
Ok(buf.len())
}
fn flush(&mut self) -> Result<()> {
Ok(())
}
}
|
pub fn get_file_handle(path: &Path, write: bool) -> Result<File> {
if write {
OpenOptions::new()
.write(true)
.create(true)
.truncate(true)
.open(path)
} else {
OpenOptions::new().read(true).open(path)
}
}
pub struct Sequence(AtomicUsize);
impl Sequence {
pub fn new(id: u32) -> Sequence {
Sequence(AtomicUsize::new(id as usize))
}
pub fn increment(&self) -> u32 {
self.0.fetch_add(1, Ordering::SeqCst) as u32 + 1
}
}
pub fn human_readable_byte_count(bytes: usize, si: bool) -> String {
let unit = if si { 1000 } else { 1024 };
if bytes < unit {
return format!("{} B", bytes);
}
let exp = ((bytes as f64).ln() / (unit as f64).ln()) as usize;
let units = if si { "kMGTPE" } else { "KMGTPE" };
let pre = format!(
"{}{}",
units.chars().nth(exp - 1).unwrap(),
if si { "" } else { "i" }
);
format!("{:.1} {}B", bytes / unit.pow(exp as u32), pre)
}
|
pub fn xxhash32(buf: &[u8]) -> u32 {
let mut hash = TwoXhash32::with_seed(0);
hash.write(buf);
hash.finish() as u32
}
|
random_line_split
|
util.rs
|
use std::fs::{File, OpenOptions};
use std::hash::Hasher;
use std::io::{Result, Write};
use std::path::Path;
use std::result::Result::Ok;
use std::sync::atomic::{AtomicUsize, Ordering};
use twox_hash::XxHash32 as TwoXhash32;
pub struct XxHash32(TwoXhash32);
impl XxHash32 {
pub fn new() -> XxHash32 {
XxHash32(TwoXhash32::with_seed(0))
}
pub fn update(&mut self, buf: &[u8]) {
self.0.write(buf);
}
pub fn get(&self) -> u32 {
self.0.finish() as u32
}
}
impl Write for XxHash32 {
fn write(&mut self, buf: &[u8]) -> Result<usize> {
self.update(buf);
Ok(buf.len())
}
fn flush(&mut self) -> Result<()> {
Ok(())
}
}
pub fn xxhash32(buf: &[u8]) -> u32 {
let mut hash = TwoXhash32::with_seed(0);
hash.write(buf);
hash.finish() as u32
}
pub fn get_file_handle(path: &Path, write: bool) -> Result<File> {
if write {
OpenOptions::new()
.write(true)
.create(true)
.truncate(true)
.open(path)
} else {
OpenOptions::new().read(true).open(path)
}
}
pub struct Sequence(AtomicUsize);
impl Sequence {
pub fn new(id: u32) -> Sequence {
Sequence(AtomicUsize::new(id as usize))
}
pub fn increment(&self) -> u32 {
self.0.fetch_add(1, Ordering::SeqCst) as u32 + 1
}
}
pub fn human_readable_byte_count(bytes: usize, si: bool) -> String {
let unit = if si { 1000 } else { 1024 };
if bytes < unit
|
let exp = ((bytes as f64).ln() / (unit as f64).ln()) as usize;
let units = if si { "kMGTPE" } else { "KMGTPE" };
let pre = format!(
"{}{}",
units.chars().nth(exp - 1).unwrap(),
if si { "" } else { "i" }
);
format!("{:.1} {}B", bytes / unit.pow(exp as u32), pre)
}
|
{
return format!("{} B", bytes);
}
|
conditional_block
|
util.rs
|
use std::fs::{File, OpenOptions};
use std::hash::Hasher;
use std::io::{Result, Write};
use std::path::Path;
use std::result::Result::Ok;
use std::sync::atomic::{AtomicUsize, Ordering};
use twox_hash::XxHash32 as TwoXhash32;
pub struct
|
(TwoXhash32);
impl XxHash32 {
pub fn new() -> XxHash32 {
XxHash32(TwoXhash32::with_seed(0))
}
pub fn update(&mut self, buf: &[u8]) {
self.0.write(buf);
}
pub fn get(&self) -> u32 {
self.0.finish() as u32
}
}
impl Write for XxHash32 {
fn write(&mut self, buf: &[u8]) -> Result<usize> {
self.update(buf);
Ok(buf.len())
}
fn flush(&mut self) -> Result<()> {
Ok(())
}
}
pub fn xxhash32(buf: &[u8]) -> u32 {
let mut hash = TwoXhash32::with_seed(0);
hash.write(buf);
hash.finish() as u32
}
pub fn get_file_handle(path: &Path, write: bool) -> Result<File> {
if write {
OpenOptions::new()
.write(true)
.create(true)
.truncate(true)
.open(path)
} else {
OpenOptions::new().read(true).open(path)
}
}
pub struct Sequence(AtomicUsize);
impl Sequence {
pub fn new(id: u32) -> Sequence {
Sequence(AtomicUsize::new(id as usize))
}
pub fn increment(&self) -> u32 {
self.0.fetch_add(1, Ordering::SeqCst) as u32 + 1
}
}
pub fn human_readable_byte_count(bytes: usize, si: bool) -> String {
let unit = if si { 1000 } else { 1024 };
if bytes < unit {
return format!("{} B", bytes);
}
let exp = ((bytes as f64).ln() / (unit as f64).ln()) as usize;
let units = if si { "kMGTPE" } else { "KMGTPE" };
let pre = format!(
"{}{}",
units.chars().nth(exp - 1).unwrap(),
if si { "" } else { "i" }
);
format!("{:.1} {}B", bytes / unit.pow(exp as u32), pre)
}
|
XxHash32
|
identifier_name
|
fdentry.rs
|
use crate::host;
use std::fs::File;
use std::os::unix::prelude::{FileTypeExt, FromRawFd, IntoRawFd, RawFd};
use std::path::PathBuf;
#[derive(Debug)]
pub struct FdEntry {
pub fd_object: FdObject,
pub rights_base: host::__wasi_rights_t,
pub rights_inheriting: host::__wasi_rights_t,
pub preopen_path: Option<PathBuf>,
}
impl FdEntry {
pub fn from_file(file: File) -> FdEntry {
unsafe { FdEntry::from_raw_fd(file.into_raw_fd()) }
}
}
impl FromRawFd for FdEntry {
// TODO: make this a different function with error handling, rather than using the trait method
unsafe fn
|
(rawfd: RawFd) -> FdEntry {
let (ty, mut rights_base, rights_inheriting) =
determine_type_rights(rawfd).expect("can determine file rights");
use nix::fcntl::{fcntl, OFlag, F_GETFL};
let flags_bits = fcntl(rawfd, F_GETFL).expect("fcntl succeeds");
let flags = OFlag::from_bits_truncate(flags_bits);
let accmode = flags & OFlag::O_ACCMODE;
if accmode == OFlag::O_RDONLY {
            rights_base &= !host::__WASI_RIGHT_FD_WRITE as host::__wasi_rights_t;
        } else if accmode == OFlag::O_WRONLY {
            rights_base &= !host::__WASI_RIGHT_FD_READ as host::__wasi_rights_t;
}
FdEntry {
fd_object: FdObject {
ty: ty as u8,
rawfd,
needs_close: true,
},
rights_base,
rights_inheriting,
preopen_path: None,
}
}
}
// TODO: can probably make this safe by using fcntl directly rather than going through `File`
pub unsafe fn determine_type_rights(
rawfd: RawFd,
) -> Result<
(
host::__wasi_filetype_t,
host::__wasi_rights_t,
host::__wasi_rights_t,
),
host::__wasi_errno_t,
> {
let (ty, rights_base, rights_inheriting) = {
let file = File::from_raw_fd(rawfd);
let ft = file.metadata().unwrap().file_type();
// we just make a `File` here for convenience; we don't want it to close when it drops
std::mem::forget(file);
if ft.is_block_device() {
(
host::__WASI_FILETYPE_BLOCK_DEVICE,
host::RIGHTS_BLOCK_DEVICE_BASE,
host::RIGHTS_BLOCK_DEVICE_INHERITING,
)
} else if ft.is_char_device() {
if nix::unistd::isatty(rawfd).unwrap() {
(
host::__WASI_FILETYPE_CHARACTER_DEVICE,
host::RIGHTS_TTY_BASE,
host::RIGHTS_TTY_BASE,
)
} else {
(
host::__WASI_FILETYPE_CHARACTER_DEVICE,
host::RIGHTS_CHARACTER_DEVICE_BASE,
host::RIGHTS_CHARACTER_DEVICE_INHERITING,
)
}
} else if ft.is_dir() {
(
host::__WASI_FILETYPE_DIRECTORY,
host::RIGHTS_DIRECTORY_BASE,
host::RIGHTS_DIRECTORY_INHERITING,
)
} else if ft.is_file() {
(
host::__WASI_FILETYPE_REGULAR_FILE,
host::RIGHTS_REGULAR_FILE_BASE,
host::RIGHTS_REGULAR_FILE_INHERITING,
)
} else if ft.is_socket() {
use nix::sys::socket;
match socket::getsockopt(rawfd, socket::sockopt::SockType).unwrap() {
socket::SockType::Datagram => (
host::__WASI_FILETYPE_SOCKET_DGRAM,
host::RIGHTS_SOCKET_BASE,
host::RIGHTS_SOCKET_INHERITING,
),
socket::SockType::Stream => (
host::__WASI_FILETYPE_SOCKET_STREAM,
host::RIGHTS_SOCKET_BASE,
host::RIGHTS_SOCKET_INHERITING,
),
_ => return Err(host::__WASI_EINVAL as host::__wasi_errno_t),
}
} else if ft.is_fifo() {
(
host::__WASI_FILETYPE_SOCKET_STREAM,
host::RIGHTS_SOCKET_BASE,
host::RIGHTS_SOCKET_INHERITING,
)
} else {
return Err(host::__WASI_EINVAL as host::__wasi_errno_t);
}
};
Ok((
ty as host::__wasi_filetype_t,
rights_base,
rights_inheriting,
))
}
#[derive(Debug)]
pub struct FdObject {
pub ty: host::__wasi_filetype_t,
pub rawfd: RawFd,
pub needs_close: bool,
// TODO: directories
}
impl Drop for FdObject {
fn drop(&mut self) {
if self.needs_close {
nix::unistd::close(self.rawfd).unwrap_or_else(|e| eprintln!("FdObject::drop(): {}", e));
}
}
}
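// Illustrative sketch (not part of the original file): building an `FdEntry`
// from a read-only `File` should clear the write right, because `from_raw_fd`
// masks `rights_base` according to the fd's access mode. `/dev/null` is used
// here purely as an example of a file that can be opened read-only.
#[cfg(test)]
mod fdentry_usage {
    use super::FdEntry;
    use crate::host;
    use std::fs::File;

    #[test]
    fn read_only_file_loses_write_right() {
        let file = File::open("/dev/null").expect("example file opens");
        let entry = FdEntry::from_file(file);
        assert_eq!(
            entry.rights_base & host::__WASI_RIGHT_FD_WRITE as host::__wasi_rights_t,
            0
        );
    }
}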
|
from_raw_fd
|
identifier_name
|
fdentry.rs
|
use crate::host;
use std::fs::File;
use std::os::unix::prelude::{FileTypeExt, FromRawFd, IntoRawFd, RawFd};
use std::path::PathBuf;
#[derive(Debug)]
pub struct FdEntry {
pub fd_object: FdObject,
pub rights_base: host::__wasi_rights_t,
pub rights_inheriting: host::__wasi_rights_t,
pub preopen_path: Option<PathBuf>,
}
impl FdEntry {
pub fn from_file(file: File) -> FdEntry {
unsafe { FdEntry::from_raw_fd(file.into_raw_fd()) }
}
}
impl FromRawFd for FdEntry {
// TODO: make this a different function with error handling, rather than using the trait method
unsafe fn from_raw_fd(rawfd: RawFd) -> FdEntry {
let (ty, mut rights_base, rights_inheriting) =
determine_type_rights(rawfd).expect("can determine file rights");
use nix::fcntl::{fcntl, OFlag, F_GETFL};
let flags_bits = fcntl(rawfd, F_GETFL).expect("fcntl succeeds");
let flags = OFlag::from_bits_truncate(flags_bits);
let accmode = flags & OFlag::O_ACCMODE;
if accmode == OFlag::O_RDONLY {
            rights_base &= !host::__WASI_RIGHT_FD_WRITE as host::__wasi_rights_t;
        } else if accmode == OFlag::O_WRONLY {
            rights_base &= !host::__WASI_RIGHT_FD_READ as host::__wasi_rights_t;
}
FdEntry {
fd_object: FdObject {
ty: ty as u8,
rawfd,
needs_close: true,
},
rights_base,
rights_inheriting,
preopen_path: None,
}
}
}
// TODO: can probably make this safe by using fcntl directly rather than going through `File`
pub unsafe fn determine_type_rights(
rawfd: RawFd,
) -> Result<
(
host::__wasi_filetype_t,
host::__wasi_rights_t,
host::__wasi_rights_t,
),
host::__wasi_errno_t,
> {
let (ty, rights_base, rights_inheriting) = {
let file = File::from_raw_fd(rawfd);
let ft = file.metadata().unwrap().file_type();
// we just make a `File` here for convenience; we don't want it to close when it drops
std::mem::forget(file);
if ft.is_block_device() {
(
host::__WASI_FILETYPE_BLOCK_DEVICE,
host::RIGHTS_BLOCK_DEVICE_BASE,
host::RIGHTS_BLOCK_DEVICE_INHERITING,
)
} else if ft.is_char_device() {
if nix::unistd::isatty(rawfd).unwrap() {
(
host::__WASI_FILETYPE_CHARACTER_DEVICE,
host::RIGHTS_TTY_BASE,
host::RIGHTS_TTY_BASE,
)
} else {
(
host::__WASI_FILETYPE_CHARACTER_DEVICE,
host::RIGHTS_CHARACTER_DEVICE_BASE,
host::RIGHTS_CHARACTER_DEVICE_INHERITING,
)
}
} else if ft.is_dir() {
(
host::__WASI_FILETYPE_DIRECTORY,
host::RIGHTS_DIRECTORY_BASE,
host::RIGHTS_DIRECTORY_INHERITING,
)
} else if ft.is_file() {
(
host::__WASI_FILETYPE_REGULAR_FILE,
host::RIGHTS_REGULAR_FILE_BASE,
host::RIGHTS_REGULAR_FILE_INHERITING,
)
} else if ft.is_socket() {
use nix::sys::socket;
match socket::getsockopt(rawfd, socket::sockopt::SockType).unwrap() {
socket::SockType::Datagram => (
host::__WASI_FILETYPE_SOCKET_DGRAM,
host::RIGHTS_SOCKET_BASE,
host::RIGHTS_SOCKET_INHERITING,
),
socket::SockType::Stream => (
host::__WASI_FILETYPE_SOCKET_STREAM,
host::RIGHTS_SOCKET_BASE,
host::RIGHTS_SOCKET_INHERITING,
),
_ => return Err(host::__WASI_EINVAL as host::__wasi_errno_t),
}
} else if ft.is_fifo() {
(
host::__WASI_FILETYPE_SOCKET_STREAM,
host::RIGHTS_SOCKET_BASE,
host::RIGHTS_SOCKET_INHERITING,
)
} else {
return Err(host::__WASI_EINVAL as host::__wasi_errno_t);
}
};
Ok((
ty as host::__wasi_filetype_t,
rights_base,
rights_inheriting,
))
}
#[derive(Debug)]
pub struct FdObject {
pub ty: host::__wasi_filetype_t,
pub rawfd: RawFd,
|
impl Drop for FdObject {
fn drop(&mut self) {
if self.needs_close {
nix::unistd::close(self.rawfd).unwrap_or_else(|e| eprintln!("FdObject::drop(): {}", e));
}
}
}
|
pub needs_close: bool,
// TODO: directories
}
|
random_line_split
|
ahoy.rs
|
extern crate curl;
extern crate term_painter;
extern crate rustc_serialize;
extern crate toml;
use std::path::Path;
use std::env;
use turbo::util::{CliResult, Config};
use self::term_painter::Color::*;
use self::term_painter::ToStyle;
use self::rustc_serialize::json;
use meg::util::parse_toml::Configz;
//use self::curl::ffi::err::ErrCode;
pub const USAGE: &'static str = "
Get some help with a meg command.
Usage:
meg ahoy
meg ahoy -h | --help
";
#[derive(RustcDecodable)]
struct Options;
pub fn execute(_: Options, _: &Config) -> CliResult<Option<()>> {
//println!("executing; cmd=meg-version; args={:?}", env::args().collect::<Vec<_>>());
let code = self::ahoy();
// match self::ahoy() {
// Ok(v) => println!("Megam is up and running!"),
// Err(e) => println!("Megam is down! {:?}", e)
// }
if code == "200" {
println!("{}",
Green.paint("PONG! Megam is up."));
} else {
println!("{}",
Red.paint("Megam is down!"));
println!("{}",
Blue.paint("Please check whether your host is setup in /home/.megam/megam.toml"));
}
Ok(None)
}
//#[derive(Debug)]
//pub struct ErrCode {
// desc: String
//}
pub fn ahoy() -> String {
let hme = env::home_dir().unwrap();
let y = hme.to_str().unwrap();
let set_path = format!("{}/.megam/megam.toml", y.to_string());
|
let we = Configz { rand: "sample".to_string()};
let data = we.load(path);
let value: toml::Value = data.unwrap();
let host = value.lookup("api.host").unwrap().as_str().unwrap();
let res = curl::http::handle()
.get(host)
.exec();
let x = res.unwrap().get_code().to_string();
return x
}
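// Example of the assumed layout of ~/.megam/megam.toml that the lookup above
// expects (illustrative; the host value is a placeholder, and only `api.host`
// is actually read by this command):
//
//   [api]
//   host = "http://localhost:9000"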
/*
fn error_return(result: Result<curl::http::response::Response, ErrCode>) -> Result<curl::http::response::Response, ErrCode> {
match result {
Ok(n) => {
return Ok(n)
},
Err(FailOne) => return Err(FailOne),
}
} */
|
let path = Path::new(&set_path).to_str().unwrap();
|
random_line_split
|
ahoy.rs
|
extern crate curl;
extern crate term_painter;
extern crate rustc_serialize;
extern crate toml;
use std::path::Path;
use std::env;
use turbo::util::{CliResult, Config};
use self::term_painter::Color::*;
use self::term_painter::ToStyle;
use self::rustc_serialize::json;
use meg::util::parse_toml::Configz;
//use self::curl::ffi::err::ErrCode;
pub const USAGE: &'static str = "
Get some help with a meg command.
Usage:
meg ahoy
meg ahoy -h | --help
";
#[derive(RustcDecodable)]
struct Options;
pub fn execute(_: Options, _: &Config) -> CliResult<Option<()>> {
//println!("executing; cmd=meg-version; args={:?}", env::args().collect::<Vec<_>>());
let code = self::ahoy();
// match self::ahoy() {
// Ok(v) => println!("Megam is up and running!"),
// Err(e) => println!("Megam is down! {:?}", e)
// }
if code == "200" {
println!("{}",
Green.paint("PONG! Megam is up."));
} else
|
Ok(None)
}
//#[derive(Debug)]
//pub struct ErrCode {
// desc: String
//}
pub fn ahoy() -> String {
let hme = env::home_dir().unwrap();
let y = hme.to_str().unwrap();
let set_path = format!("{}/.megam/megam.toml", y.to_string());
let path = Path::new(&set_path).to_str().unwrap();
let we = Configz { rand: "sample".to_string()};
let data = we.load(path);
let value: toml::Value = data.unwrap();
let host = value.lookup("api.host").unwrap().as_str().unwrap();
let res = curl::http::handle()
.get(host)
.exec();
let x = res.unwrap().get_code().to_string();
return x
}
/*
fn error_return(result: Result<curl::http::response::Response, ErrCode>) -> Result<curl::http::response::Response, ErrCode> {
match result {
Ok(n) => {
return Ok(n)
},
Err(FailOne) => return Err(FailOne),
}
} */
|
{
println!("{}",
Red.paint("Megam is down!"));
println!("{}",
Blue.paint("Please check whether your host is setup in /home/.megam/megam.toml"));
}
|
conditional_block
|
ahoy.rs
|
extern crate curl;
extern crate term_painter;
extern crate rustc_serialize;
extern crate toml;
use std::path::Path;
use std::env;
use turbo::util::{CliResult, Config};
use self::term_painter::Color::*;
use self::term_painter::ToStyle;
use self::rustc_serialize::json;
use meg::util::parse_toml::Configz;
//use self::curl::ffi::err::ErrCode;
pub const USAGE: &'static str = "
Get some help with a meg command.
Usage:
meg ahoy
meg ahoy -h | --help
";
#[derive(RustcDecodable)]
struct
|
;
pub fn execute(_: Options, _: &Config) -> CliResult<Option<()>> {
//println!("executing; cmd=meg-version; args={:?}", env::args().collect::<Vec<_>>());
let code = self::ahoy();
// match self::ahoy() {
// Ok(v) => println!("Megam is up and running!"),
// Err(e) => println!("Megam is down! {:?}", e)
// }
if code == "200" {
println!("{}",
Green.paint("PONG! Megam is up."));
} else {
println!("{}",
Red.paint("Megam is down!"));
println!("{}",
Blue.paint("Please check whether your host is setup in /home/.megam/megam.toml"));
}
Ok(None)
}
//#[derive(Debug)]
//pub struct ErrCode {
// desc: String
//}
pub fn ahoy() -> String {
let hme = env::home_dir().unwrap();
let y = hme.to_str().unwrap();
let set_path = format!("{}/.megam/megam.toml", y.to_string());
let path = Path::new(&set_path).to_str().unwrap();
let we = Configz { rand: "sample".to_string()};
let data = we.load(path);
let value: toml::Value = data.unwrap();
let host = value.lookup("api.host").unwrap().as_str().unwrap();
let res = curl::http::handle()
.get(host)
.exec();
let x = res.unwrap().get_code().to_string();
return x
}
/*
fn error_return(result: Result<curl::http::response::Response, ErrCode>) -> Result<curl::http::response::Response, ErrCode> {
match result {
Ok(n) => {
return Ok(n)
},
Err(FailOne) => return Err(FailOne),
}
} */
|
Options
|
identifier_name
|
ahoy.rs
|
extern crate curl;
extern crate term_painter;
extern crate rustc_serialize;
extern crate toml;
use std::path::Path;
use std::env;
use turbo::util::{CliResult, Config};
use self::term_painter::Color::*;
use self::term_painter::ToStyle;
use self::rustc_serialize::json;
use meg::util::parse_toml::Configz;
//use self::curl::ffi::err::ErrCode;
pub const USAGE: &'static str = "
Get some help with a meg command.
Usage:
meg ahoy
meg ahoy -h | --help
";
#[derive(RustcDecodable)]
struct Options;
pub fn execute(_: Options, _: &Config) -> CliResult<Option<()>> {
//println!("executing; cmd=meg-version; args={:?}", env::args().collect::<Vec<_>>());
let code = self::ahoy();
// match self::ahoy() {
// Ok(v) => println!("Megam is up and running!"),
// Err(e) => println!("Megam is down! {:?}", e)
// }
if code == "200" {
println!("{}",
Green.paint("PONG! Megam is up."));
} else {
println!("{}",
Red.paint("Megam is down!"));
println!("{}",
Blue.paint("Please check whether your host is setup in /home/.megam/megam.toml"));
}
Ok(None)
}
//#[derive(Debug)]
//pub struct ErrCode {
// desc: String
//}
pub fn ahoy() -> String
|
/*
fn error_return(result: Result<curl::http::response::Response, ErrCode>) -> Result<curl::http::response::Response, ErrCode> {
match result {
Ok(n) => {
return Ok(n)
},
Err(FailOne) => return Err(FailOne),
}
} */
|
{
let hme = env::home_dir().unwrap();
let y = hme.to_str().unwrap();
let set_path = format!("{}/.megam/megam.toml", y.to_string());
let path = Path::new(&set_path).to_str().unwrap();
let we = Configz { rand: "sample".to_string()};
let data = we.load(path);
let value: toml::Value = data.unwrap();
let host = value.lookup("api.host").unwrap().as_str().unwrap();
let res = curl::http::handle()
.get(host)
.exec();
let x = res.unwrap().get_code().to_string();
return x
}
|
identifier_body
|
workernavigator.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::codegen::Bindings::WorkerNavigatorBinding;
use crate::dom::bindings::codegen::Bindings::WorkerNavigatorBinding::WorkerNavigatorMethods;
use crate::dom::bindings::reflector::{reflect_dom_object, DomObject, Reflector};
use crate::dom::bindings::root::{DomRoot, MutNullableDom};
use crate::dom::bindings::str::DOMString;
use crate::dom::navigatorinfo;
use crate::dom::permissions::Permissions;
use crate::dom::workerglobalscope::WorkerGlobalScope;
use dom_struct::dom_struct;
// https://html.spec.whatwg.org/multipage/#workernavigator
#[dom_struct]
pub struct WorkerNavigator {
reflector_: Reflector,
permissions: MutNullableDom<Permissions>,
}
impl WorkerNavigator {
fn new_inherited() -> WorkerNavigator {
WorkerNavigator {
reflector_: Reflector::new(),
permissions: Default::default(),
}
}
pub fn new(global: &WorkerGlobalScope) -> DomRoot<WorkerNavigator> {
reflect_dom_object(
Box::new(WorkerNavigator::new_inherited()),
global,
WorkerNavigatorBinding::Wrap,
)
}
}
impl WorkerNavigatorMethods for WorkerNavigator {
// https://html.spec.whatwg.org/multipage/#dom-navigator-product
fn Product(&self) -> DOMString {
navigatorinfo::Product()
}
// https://html.spec.whatwg.org/multipage/#dom-navigator-taintenabled
fn TaintEnabled(&self) -> bool {
navigatorinfo::TaintEnabled()
}
// https://html.spec.whatwg.org/multipage/#dom-navigator-appname
fn AppName(&self) -> DOMString
|
// https://html.spec.whatwg.org/multipage/#dom-navigator-appcodename
fn AppCodeName(&self) -> DOMString {
navigatorinfo::AppCodeName()
}
// https://html.spec.whatwg.org/multipage/#dom-navigator-platform
fn Platform(&self) -> DOMString {
navigatorinfo::Platform()
}
// https://html.spec.whatwg.org/multipage/#dom-navigator-useragent
fn UserAgent(&self) -> DOMString {
navigatorinfo::UserAgent()
}
// https://html.spec.whatwg.org/multipage/#dom-navigator-appversion
fn AppVersion(&self) -> DOMString {
navigatorinfo::AppVersion()
}
// https://html.spec.whatwg.org/multipage/#navigatorlanguage
fn Language(&self) -> DOMString {
navigatorinfo::Language()
}
// https://w3c.github.io/permissions/#navigator-and-workernavigator-extension
fn Permissions(&self) -> DomRoot<Permissions> {
self.permissions
.or_init(|| Permissions::new(&self.global()))
}
}
|
{
navigatorinfo::AppName()
}
|
identifier_body
|
workernavigator.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::codegen::Bindings::WorkerNavigatorBinding;
use crate::dom::bindings::codegen::Bindings::WorkerNavigatorBinding::WorkerNavigatorMethods;
use crate::dom::bindings::reflector::{reflect_dom_object, DomObject, Reflector};
use crate::dom::bindings::root::{DomRoot, MutNullableDom};
use crate::dom::bindings::str::DOMString;
use crate::dom::navigatorinfo;
use crate::dom::permissions::Permissions;
use crate::dom::workerglobalscope::WorkerGlobalScope;
use dom_struct::dom_struct;
// https://html.spec.whatwg.org/multipage/#workernavigator
#[dom_struct]
pub struct WorkerNavigator {
reflector_: Reflector,
permissions: MutNullableDom<Permissions>,
}
impl WorkerNavigator {
fn new_inherited() -> WorkerNavigator {
WorkerNavigator {
reflector_: Reflector::new(),
permissions: Default::default(),
}
}
pub fn new(global: &WorkerGlobalScope) -> DomRoot<WorkerNavigator> {
reflect_dom_object(
Box::new(WorkerNavigator::new_inherited()),
global,
WorkerNavigatorBinding::Wrap,
)
}
}
impl WorkerNavigatorMethods for WorkerNavigator {
// https://html.spec.whatwg.org/multipage/#dom-navigator-product
fn Product(&self) -> DOMString {
navigatorinfo::Product()
}
// https://html.spec.whatwg.org/multipage/#dom-navigator-taintenabled
fn TaintEnabled(&self) -> bool {
navigatorinfo::TaintEnabled()
}
// https://html.spec.whatwg.org/multipage/#dom-navigator-appname
fn AppName(&self) -> DOMString {
navigatorinfo::AppName()
}
// https://html.spec.whatwg.org/multipage/#dom-navigator-appcodename
fn AppCodeName(&self) -> DOMString {
navigatorinfo::AppCodeName()
}
// https://html.spec.whatwg.org/multipage/#dom-navigator-platform
fn Platform(&self) -> DOMString {
navigatorinfo::Platform()
}
// https://html.spec.whatwg.org/multipage/#dom-navigator-useragent
fn UserAgent(&self) -> DOMString {
navigatorinfo::UserAgent()
}
// https://html.spec.whatwg.org/multipage/#dom-navigator-appversion
fn AppVersion(&self) -> DOMString {
navigatorinfo::AppVersion()
}
// https://html.spec.whatwg.org/multipage/#navigatorlanguage
fn Language(&self) -> DOMString {
navigatorinfo::Language()
}
// https://w3c.github.io/permissions/#navigator-and-workernavigator-extension
fn Permissions(&self) -> DomRoot<Permissions> {
self.permissions
|
}
|
.or_init(|| Permissions::new(&self.global()))
}
|
random_line_split
|
workernavigator.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::codegen::Bindings::WorkerNavigatorBinding;
use crate::dom::bindings::codegen::Bindings::WorkerNavigatorBinding::WorkerNavigatorMethods;
use crate::dom::bindings::reflector::{reflect_dom_object, DomObject, Reflector};
use crate::dom::bindings::root::{DomRoot, MutNullableDom};
use crate::dom::bindings::str::DOMString;
use crate::dom::navigatorinfo;
use crate::dom::permissions::Permissions;
use crate::dom::workerglobalscope::WorkerGlobalScope;
use dom_struct::dom_struct;
// https://html.spec.whatwg.org/multipage/#workernavigator
#[dom_struct]
pub struct WorkerNavigator {
reflector_: Reflector,
permissions: MutNullableDom<Permissions>,
}
impl WorkerNavigator {
fn new_inherited() -> WorkerNavigator {
WorkerNavigator {
reflector_: Reflector::new(),
permissions: Default::default(),
}
}
pub fn new(global: &WorkerGlobalScope) -> DomRoot<WorkerNavigator> {
reflect_dom_object(
Box::new(WorkerNavigator::new_inherited()),
global,
WorkerNavigatorBinding::Wrap,
)
}
}
impl WorkerNavigatorMethods for WorkerNavigator {
// https://html.spec.whatwg.org/multipage/#dom-navigator-product
fn Product(&self) -> DOMString {
navigatorinfo::Product()
}
// https://html.spec.whatwg.org/multipage/#dom-navigator-taintenabled
fn TaintEnabled(&self) -> bool {
navigatorinfo::TaintEnabled()
}
// https://html.spec.whatwg.org/multipage/#dom-navigator-appname
fn AppName(&self) -> DOMString {
navigatorinfo::AppName()
}
// https://html.spec.whatwg.org/multipage/#dom-navigator-appcodename
fn AppCodeName(&self) -> DOMString {
navigatorinfo::AppCodeName()
}
// https://html.spec.whatwg.org/multipage/#dom-navigator-platform
fn Platform(&self) -> DOMString {
navigatorinfo::Platform()
}
// https://html.spec.whatwg.org/multipage/#dom-navigator-useragent
fn UserAgent(&self) -> DOMString {
navigatorinfo::UserAgent()
}
// https://html.spec.whatwg.org/multipage/#dom-navigator-appversion
fn AppVersion(&self) -> DOMString {
navigatorinfo::AppVersion()
}
// https://html.spec.whatwg.org/multipage/#navigatorlanguage
fn
|
(&self) -> DOMString {
navigatorinfo::Language()
}
// https://w3c.github.io/permissions/#navigator-and-workernavigator-extension
fn Permissions(&self) -> DomRoot<Permissions> {
self.permissions
.or_init(|| Permissions::new(&self.global()))
}
}
|
Language
|
identifier_name
|
cast.rs
|
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//! Defines helper functions for forcing Array type downcasts
use crate::array::*;
use crate::datatypes::*;
/// Force downcast ArrayRef to PrimitiveArray<T>
pub fn
|
<T>(arr: &ArrayRef) -> &PrimitiveArray<T>
where
T: ArrowPrimitiveType,
{
arr.as_any()
.downcast_ref::<PrimitiveArray<T>>()
.expect("Unable to downcast to primitive array")
}
/// Force downcast ArrayRef to DictionaryArray<T>
pub fn as_dictionary_array<T>(arr: &ArrayRef) -> &DictionaryArray<T>
where
T: ArrowDictionaryKeyType,
{
arr.as_any()
.downcast_ref::<DictionaryArray<T>>()
.expect("Unable to downcast to dictionary array")
}
macro_rules! array_downcast_fn {
($name: ident, $arrty: ty, $arrty_str:expr) => {
#[doc = "Force downcast ArrayRef to "]
#[doc = $arrty_str]
pub fn $name(arr: &ArrayRef) -> &$arrty {
arr.as_any().downcast_ref::<$arrty>().expect(concat!(
"Unable to downcast to typed array through ",
stringify!($name)
))
}
};
// use recursive macro to generate dynamic doc string for a given array type
($name: ident, $arrty: ty) => {
array_downcast_fn!($name, $arrty, stringify!($arrty));
};
}
array_downcast_fn!(as_string_array, StringArray);
array_downcast_fn!(as_boolean_array, BooleanArray);
array_downcast_fn!(as_null_array, NullArray);
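// Illustrative usage (not part of the original file): downcasting a
// dynamically typed `ArrayRef` back to its concrete array type with the
// helpers above. The module name below is chosen for this example only.
#[cfg(test)]
mod downcast_usage {
    use super::as_primitive_array;
    use crate::array::{ArrayRef, Int32Array};
    use crate::datatypes::Int32Type;
    use std::sync::Arc;

    #[test]
    fn downcast_int32() {
        let array: ArrayRef = Arc::new(Int32Array::from(vec![1, 2, 3]));
        let ints = as_primitive_array::<Int32Type>(&array);
        assert_eq!(ints.value(2), 3);
    }
}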
|
as_primitive_array
|
identifier_name
|
cast.rs
|
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//! Defines helper functions for forcing Array type downcasts
use crate::array::*;
use crate::datatypes::*;
/// Force downcast ArrayRef to PrimitiveArray<T>
pub fn as_primitive_array<T>(arr: &ArrayRef) -> &PrimitiveArray<T>
where
T: ArrowPrimitiveType,
|
/// Force downcast ArrayRef to DictionaryArray<T>
pub fn as_dictionary_array<T>(arr: &ArrayRef) -> &DictionaryArray<T>
where
T: ArrowDictionaryKeyType,
{
arr.as_any()
.downcast_ref::<DictionaryArray<T>>()
.expect("Unable to downcast to dictionary array")
}
macro_rules! array_downcast_fn {
($name: ident, $arrty: ty, $arrty_str:expr) => {
#[doc = "Force downcast ArrayRef to "]
#[doc = $arrty_str]
pub fn $name(arr: &ArrayRef) -> &$arrty {
arr.as_any().downcast_ref::<$arrty>().expect(concat!(
"Unable to downcast to typed array through ",
stringify!($name)
))
}
};
// use recursive macro to generate dynamic doc string for a given array type
($name: ident, $arrty: ty) => {
array_downcast_fn!($name, $arrty, stringify!($arrty));
};
}
array_downcast_fn!(as_string_array, StringArray);
array_downcast_fn!(as_boolean_array, BooleanArray);
array_downcast_fn!(as_null_array, NullArray);
|
{
arr.as_any()
.downcast_ref::<PrimitiveArray<T>>()
.expect("Unable to downcast to primitive array")
}
|
identifier_body
|
cast.rs
|
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//! Defines helper functions for forcing Array type downcasts
use crate::array::*;
use crate::datatypes::*;
/// Force downcast ArrayRef to PrimitiveArray<T>
pub fn as_primitive_array<T>(arr: &ArrayRef) -> &PrimitiveArray<T>
where
T: ArrowPrimitiveType,
{
arr.as_any()
.downcast_ref::<PrimitiveArray<T>>()
.expect("Unable to downcast to primitive array")
|
}
/// Force downcast ArrayRef to DictionaryArray<T>
pub fn as_dictionary_array<T>(arr: &ArrayRef) -> &DictionaryArray<T>
where
T: ArrowDictionaryKeyType,
{
arr.as_any()
.downcast_ref::<DictionaryArray<T>>()
.expect("Unable to downcast to dictionary array")
}
macro_rules! array_downcast_fn {
($name: ident, $arrty: ty, $arrty_str:expr) => {
#[doc = "Force downcast ArrayRef to "]
#[doc = $arrty_str]
pub fn $name(arr: &ArrayRef) -> &$arrty {
arr.as_any().downcast_ref::<$arrty>().expect(concat!(
"Unable to downcast to typed array through ",
stringify!($name)
))
}
};
// use recursive macro to generate dynamic doc string for a given array type
($name: ident, $arrty: ty) => {
array_downcast_fn!($name, $arrty, stringify!($arrty));
};
}
array_downcast_fn!(as_string_array, StringArray);
array_downcast_fn!(as_boolean_array, BooleanArray);
array_downcast_fn!(as_null_array, NullArray);
|
random_line_split
|
|
rectangle_wave.rs
|
//! Game Boy sounds 1 and 2 generate a rectangular waveform
//! with an envelope function. Channel 1 can also sweep through a
//! frequency range.
use spu::{Sample, Mode};
use spu::envelope::Envelope;
pub struct RectangleWave {
/// True if the sound is generating samples
running: bool,
/// Signal duty cycle
duty: DutyCycle,
/// Period counter, the period length is configurable and is used
/// to select the desired output frequency. This counter loops 8
/// times per cycle to let us generate the proper duty cycle.
counter: u16,
/// Divider value configured in the registers. The actual divider
/// value used for the counter is 4 x (0x800 - <this value>)
divider: u16,
/// Phase counter, increments 8 times per sound period
phase: u8,
    /// Envelope that will be used at the next start()
start_envelope: Envelope,
/// Active envelope
envelope: Envelope,
/// Play mode (continuous or counter)
mode: Mode,
/// Counter for counter mode
remaining: u32,
/// Sweep function (only available on sound 1)
sweep: Sweep,
}
impl RectangleWave {
pub fn new() -> RectangleWave {
RectangleWave {
running: false,
duty: DutyCycle::from_field(0),
counter: 0,
divider: 0,
phase: 0,
start_envelope: Envelope::from_reg(0),
envelope: Envelope::from_reg(0),
mode: Mode::Continuous,
remaining: 64 * 0x4000,
sweep: Sweep::from_reg(0),
}
}
pub fn step(&mut self) {
// Counter runs even if the channel is disabled
if self.mode == Mode::Counter {
if self.remaining == 0 {
self.running = false;
// Reload counter default value
self.remaining = 64 * 0x4000;
return;
}
self.remaining -= 1;
}
        if !self.running {
return;
}
self.envelope.step();
self.divider =
match self.sweep.step(self.divider) {
Some(div) => div,
None => {
// Sweep function ended, sound is stopped
self.running = false;
return;
}
};
if self.counter == 0 {
// Reset the counter. This weird equation is simply how
// the hardware does it, no tricks here.
self.counter = 4 * (0x800 - self.divider);
// Move on to the next phase.
self.phase = (self.phase + 1) % 8;
}
self.counter -= 1;
}
pub fn sample(&self) -> Sample {
        if !self.running {
return 0;
}
if self.phase < self.duty.active_per_8() {
// Output is high
self.envelope.into_sample()
} else {
0
}
}
pub fn start(&mut self) {
self.envelope = self.start_envelope;
self.running = self.envelope.dac_enabled();
// What do I need to do here exactly? Which counters are
// reset?
}
pub fn running(&self) -> bool {
self.running
}
pub fn divider(&self) -> u16 {
self.divider
}
pub fn set_divider(&mut self, divider: u16) {
if divider >= 0x800 {
panic!("divider out of range: {:04x}", divider);
}
self.divider = divider;
}
pub fn duty(&self) -> DutyCycle {
self.duty
}
pub fn set_duty(&mut self, duty: DutyCycle) {
self.duty = duty;
}
pub fn envelope(&self) -> Envelope {
self.start_envelope
}
pub fn set_envelope(&mut self, envelope: Envelope) {
// New envelope will become active at the next start
self.start_envelope = envelope;
        if !envelope.dac_enabled() {
self.running = false;
}
}
pub fn mode(&self) -> Mode {
self.mode
}
pub fn set_mode(&mut self, mode: Mode) {
self.mode = mode;
}
pub fn set_length(&mut self, len: u8) {
if len >= 64 {
panic!("sound length out of range: {}", len);
}
let len = len as u32;
self.remaining = (64 - len) * 0x4000;
}
pub fn sweep(&self) -> Sweep {
self.sweep
}
pub fn set_sweep(&mut self, sweep: Sweep) {
self.sweep = sweep;
}
}
/// Rectangular wave duty cycle.
#[derive(Clone,Copy)]
pub enum DutyCycle {
/// Duty cycle of 12.5% (1/8)
Duty13 = 1,
/// Duty cycle of 25% (2/8)
Duty25 = 2,
/// Duty cycle of 50% (4/8)
Duty50 = 4,
/// Duty cycle of 75% (6/8)
Duty75 = 6,
}
impl DutyCycle {
/// Construct a DutyCycle from a register field (NR11 and
/// NR21, bits [7:6])
pub fn from_field(field: u8) -> DutyCycle {
match field {
0 => DutyCycle::Duty13,
1 => DutyCycle::Duty25,
2 => DutyCycle::Duty50,
3 => DutyCycle::Duty75,
_ => unreachable!(),
}
}
/// Convert back into NR11/21 field value
pub fn into_field(self) -> u8 {
match self {
DutyCycle::Duty13 => 0,
DutyCycle::Duty25 => 1,
DutyCycle::Duty50 => 2,
DutyCycle::Duty75 => 3,
}
}
/// Return the number of active samples for a frequency whose
/// period is 8 samples
fn active_per_8(self) -> u8 {
self as u8
}
}
#[derive(Clone,Copy)]
pub struct Sweep {
direction: SweepDirection,
shift: u8,
step_duration: u32,
counter: u32,
}
impl Sweep {
// Build Sweep from NR10 register value
pub fn from_reg(val: u8) -> Sweep {
let dir =
            match val & 8 != 0 {
false => SweepDirection::Up,
true => SweepDirection::Down,
};
let shift = val & 7;
let l = ((val & 0x70) >> 4) as u32;
Sweep {
direction: dir,
shift: shift,
step_duration: l * 0x8000,
counter: 0,
}
}
    // Retrieve the value of the NR10 register
pub fn into_reg(&self) -> u8 {
let l = (self.step_duration / 0x8000) as u8;
let dir = self.direction as u8;
// MSB is undefined and always 1
(1 << 7) | (l << 4) | (dir << 3) | self.shift
}
/// Step through the Sweep state machine, returning the updated
/// divider or None if the sound must be stopped
fn step(&mut self, div: u16) -> Option<u16> {
if self.step_duration == 0 {
// Sweep OFF, do nothing
return Some(div);
}
self.counter += 1;
self.counter %= self.step_duration;
        if self.counter != 0 {
|
return Some(div);
}
// Update the frequency
let offset = div >> (self.shift as usize);
match self.direction {
SweepDirection::Up => {
let div = div + offset;
if div > 0x7ff {
// We stop on overflow
None
} else {
Some(div)
}
}
SweepDirection::Down => {
if self.shift == 0 || offset > div {
                // If the subtraction would underflow we do
// nothing
Some(div)
} else {
Some(div - offset)
}
}
}
}
}
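// Worked example (illustrative, not part of the original file): with
// `shift == 1` and a current divider of 0x400, a step that fires computes
// `offset = 0x400 >> 1 = 0x200`. Sweeping up returns Some(0x600), sweeping
// down returns Some(0x200), and an up-sweep that would push the divider past
// 0x7ff returns None so the channel is stopped.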
// Sound envelopes can become louder or quieter
#[derive(Clone,Copy,PartialEq,Eq)]
enum SweepDirection {
// Frequency increases at each step
Up = 0,
// Frequency decreases at each step
Down = 1,
}
|
// Do nothing and wait for the next step
|
random_line_split
|
rectangle_wave.rs
|
//! Game Boy sounds 1 and 2 generate a rectangular waveform
//! with an envelope function. Channel 1 can also sweep through a
//! frequency range.
use spu::{Sample, Mode};
use spu::envelope::Envelope;
pub struct RectangleWave {
/// True if the sound is generating samples
running: bool,
/// Signal duty cycle
duty: DutyCycle,
/// Period counter, the period length is configurable and is used
/// to select the desired output frequency. This counter loops 8
/// times per cycle to let us generate the proper duty cycle.
counter: u16,
/// Divider value configured in the registers. The actual divider
/// value used for the counter is 4 x (0x800 - <this value>)
divider: u16,
/// Phase counter, increments 8 times per sound period
phase: u8,
    /// Envelope that will be used at the next start()
start_envelope: Envelope,
/// Active envelope
envelope: Envelope,
/// Play mode (continuous or counter)
mode: Mode,
/// Counter for counter mode
remaining: u32,
/// Sweep function (only available on sound 1)
sweep: Sweep,
}
impl RectangleWave {
pub fn new() -> RectangleWave {
RectangleWave {
running: false,
duty: DutyCycle::from_field(0),
counter: 0,
divider: 0,
phase: 0,
start_envelope: Envelope::from_reg(0),
envelope: Envelope::from_reg(0),
mode: Mode::Continuous,
remaining: 64 * 0x4000,
sweep: Sweep::from_reg(0),
}
}
pub fn step(&mut self) {
// Counter runs even if the channel is disabled
if self.mode == Mode::Counter {
if self.remaining == 0 {
self.running = false;
// Reload counter default value
self.remaining = 64 * 0x4000;
return;
}
self.remaining -= 1;
}
        if !self.running {
return;
}
self.envelope.step();
self.divider =
match self.sweep.step(self.divider) {
Some(div) => div,
None => {
// Sweep function ended, sound is stopped
self.running = false;
return;
}
};
if self.counter == 0 {
// Reset the counter. This weird equation is simply how
// the hardware does it, no tricks here.
self.counter = 4 * (0x800 - self.divider);
// Move on to the next phase.
self.phase = (self.phase + 1) % 8;
}
self.counter -= 1;
}
pub fn sample(&self) -> Sample {
        if !self.running {
return 0;
}
if self.phase < self.duty.active_per_8() {
// Output is high
self.envelope.into_sample()
} else {
0
}
}
pub fn start(&mut self) {
self.envelope = self.start_envelope;
self.running = self.envelope.dac_enabled();
// What do I need to do here exactly? Which counters are
// reset?
}
pub fn running(&self) -> bool {
self.running
}
pub fn divider(&self) -> u16 {
self.divider
}
pub fn set_divider(&mut self, divider: u16) {
if divider >= 0x800 {
panic!("divider out of range: {:04x}", divider);
}
self.divider = divider;
}
pub fn duty(&self) -> DutyCycle {
self.duty
}
pub fn set_duty(&mut self, duty: DutyCycle) {
self.duty = duty;
}
pub fn envelope(&self) -> Envelope {
self.start_envelope
}
pub fn set_envelope(&mut self, envelope: Envelope)
|
pub fn mode(&self) -> Mode {
self.mode
}
pub fn set_mode(&mut self, mode: Mode) {
self.mode = mode;
}
pub fn set_length(&mut self, len: u8) {
if len >= 64 {
panic!("sound length out of range: {}", len);
}
let len = len as u32;
self.remaining = (64 - len) * 0x4000;
}
pub fn sweep(&self) -> Sweep {
self.sweep
}
pub fn set_sweep(&mut self, sweep: Sweep) {
self.sweep = sweep;
}
}
/// Rectangular wave duty cycle.
#[derive(Clone,Copy)]
pub enum DutyCycle {
/// Duty cycle of 12.5% (1/8)
Duty13 = 1,
/// Duty cycle of 25% (2/8)
Duty25 = 2,
/// Duty cycle of 50% (4/8)
Duty50 = 4,
/// Duty cycle of 75% (6/8)
Duty75 = 6,
}
impl DutyCycle {
/// Construct a DutyCycle from a register field (NR11 and
/// NR21, bits [7:6])
pub fn from_field(field: u8) -> DutyCycle {
match field {
0 => DutyCycle::Duty13,
1 => DutyCycle::Duty25,
2 => DutyCycle::Duty50,
3 => DutyCycle::Duty75,
_ => unreachable!(),
}
}
/// Convert back into NR11/21 field value
pub fn into_field(self) -> u8 {
match self {
DutyCycle::Duty13 => 0,
DutyCycle::Duty25 => 1,
DutyCycle::Duty50 => 2,
DutyCycle::Duty75 => 3,
}
}
/// Return the number of active samples for a frequency whose
/// period is 8 samples
fn active_per_8(self) -> u8 {
self as u8
}
}
#[derive(Clone,Copy)]
pub struct Sweep {
direction: SweepDirection,
shift: u8,
step_duration: u32,
counter: u32,
}
impl Sweep {
// Build Sweep from NR10 register value
pub fn from_reg(val: u8) -> Sweep {
let dir =
            match val & 8 != 0 {
false => SweepDirection::Up,
true => SweepDirection::Down,
};
let shift = val & 7;
let l = ((val & 0x70) >> 4) as u32;
Sweep {
direction: dir,
shift: shift,
step_duration: l * 0x8000,
counter: 0,
}
}
    // Retrieve the value of the NR10 register
pub fn into_reg(&self) -> u8 {
let l = (self.step_duration / 0x8000) as u8;
let dir = self.direction as u8;
// MSB is undefined and always 1
(1 << 7) | (l << 4) | (dir << 3) | self.shift
}
/// Step through the Sweep state machine, returning the updated
/// divider or None if the sound must be stopped
fn step(&mut self, div: u16) -> Option<u16> {
if self.step_duration == 0 {
// Sweep OFF, do nothing
return Some(div);
}
self.counter += 1;
self.counter %= self.step_duration;
        if self.counter != 0 {
// Do nothing and wait for the next step
return Some(div);
}
// Update the frequency
let offset = div >> (self.shift as usize);
match self.direction {
SweepDirection::Up => {
let div = div + offset;
if div > 0x7ff {
// We stop on overflow
None
} else {
Some(div)
}
}
SweepDirection::Down => {
if self.shift == 0 || offset > div {
                    // If the subtraction would underflow we do
// nothing
Some(div)
} else {
Some(div - offset)
}
}
}
}
}
// The frequency sweep can move up or down
#[derive(Clone,Copy,PartialEq,Eq)]
enum SweepDirection {
// Frequency increases at each step
Up = 0,
// Frequency decreases at each step
Down = 1,
}
|
{
// New envelope will become active at the next start
self.start_envelope = envelope;
if !envelope.dac_enabled() {
self.running = false;
}
}
|
identifier_body
|
rectangle_wave.rs
|
//! Game Boy sounds 1 and 2 generate a rectangular waveform
//! with an envelope function. Channel 1 can also sweep through a
//! frequency range.
use spu::{Sample, Mode};
use spu::envelope::Envelope;
pub struct
|
{
/// True if the sound is generating samples
running: bool,
/// Signal duty cycle
duty: DutyCycle,
/// Period counter, the period length is configurable and is used
/// to select the desired output frequency. This counter loops 8
/// times per cycle to let us generate the proper duty cycle.
counter: u16,
/// Divider value configured in the registers. The actual divider
/// value used for the counter is 4 x (0x800 - <this value>)
divider: u16,
/// Phase counter, increments 8 times per sound period
phase: u8,
    /// Envelope that will be used at the next start()
start_envelope: Envelope,
/// Active envelope
envelope: Envelope,
/// Play mode (continuous or counter)
mode: Mode,
/// Counter for counter mode
remaining: u32,
/// Sweep function (only available on sound 1)
sweep: Sweep,
}
impl RectangleWave {
pub fn new() -> RectangleWave {
RectangleWave {
running: false,
duty: DutyCycle::from_field(0),
counter: 0,
divider: 0,
phase: 0,
start_envelope: Envelope::from_reg(0),
envelope: Envelope::from_reg(0),
mode: Mode::Continuous,
remaining: 64 * 0x4000,
sweep: Sweep::from_reg(0),
}
}
pub fn step(&mut self) {
// Counter runs even if the channel is disabled
if self.mode == Mode::Counter {
if self.remaining == 0 {
self.running = false;
// Reload counter default value
self.remaining = 64 * 0x4000;
return;
}
self.remaining -= 1;
}
        if !self.running {
return;
}
self.envelope.step();
self.divider =
match self.sweep.step(self.divider) {
Some(div) => div,
None => {
// Sweep function ended, sound is stopped
self.running = false;
return;
}
};
if self.counter == 0 {
// Reset the counter. This weird equation is simply how
// the hardware does it, no tricks here.
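            // (Assuming step() is called once per ~4.19 MHz system clock tick,
            // which this file does not show, 8 phases of 4 * (0x800 - divider)
            // steps each work out to roughly 131072 / (2048 - divider) Hz.)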
self.counter = 4 * (0x800 - self.divider);
// Move on to the next phase.
self.phase = (self.phase + 1) % 8;
}
self.counter -= 1;
}
pub fn sample(&self) -> Sample {
        if !self.running {
return 0;
}
if self.phase < self.duty.active_per_8() {
// Output is high
self.envelope.into_sample()
} else {
0
}
}
pub fn start(&mut self) {
self.envelope = self.start_envelope;
self.running = self.envelope.dac_enabled();
// What do I need to do here exactly? Which counters are
// reset?
}
pub fn running(&self) -> bool {
self.running
}
pub fn divider(&self) -> u16 {
self.divider
}
pub fn set_divider(&mut self, divider: u16) {
if divider >= 0x800 {
panic!("divider out of range: {:04x}", divider);
}
self.divider = divider;
}
pub fn duty(&self) -> DutyCycle {
self.duty
}
pub fn set_duty(&mut self, duty: DutyCycle) {
self.duty = duty;
}
pub fn envelope(&self) -> Envelope {
self.start_envelope
}
pub fn set_envelope(&mut self, envelope: Envelope) {
// New envelope will become active at the next start
self.start_envelope = envelope;
        if !envelope.dac_enabled() {
self.running = false;
}
}
pub fn mode(&self) -> Mode {
self.mode
}
pub fn set_mode(&mut self, mode: Mode) {
self.mode = mode;
}
pub fn set_length(&mut self, len: u8) {
if len >= 64 {
panic!("sound length out of range: {}", len);
}
let len = len as u32;
self.remaining = (64 - len) * 0x4000;
}
pub fn sweep(&self) -> Sweep {
self.sweep
}
pub fn set_sweep(&mut self, sweep: Sweep) {
self.sweep = sweep;
}
}
/// Rectangular wave duty cycle.
#[derive(Clone,Copy)]
pub enum DutyCycle {
/// Duty cycle of 12.5% (1/8)
Duty13 = 1,
/// Duty cycle of 25% (2/8)
Duty25 = 2,
/// Duty cycle of 50% (4/8)
Duty50 = 4,
/// Duty cycle of 75% (6/8)
Duty75 = 6,
}
impl DutyCycle {
/// Construct a DutyCycle from a register field (NR11 and
/// NR21, bits [7:6])
pub fn from_field(field: u8) -> DutyCycle {
match field {
0 => DutyCycle::Duty13,
1 => DutyCycle::Duty25,
2 => DutyCycle::Duty50,
3 => DutyCycle::Duty75,
_ => unreachable!(),
}
}
/// Convert back into NR11/21 field value
pub fn into_field(self) -> u8 {
match self {
DutyCycle::Duty13 => 0,
DutyCycle::Duty25 => 1,
DutyCycle::Duty50 => 2,
DutyCycle::Duty75 => 3,
}
}
/// Return the number of active samples for a frequency whose
/// period is 8 samples
fn active_per_8(self) -> u8 {
self as u8
}
}
#[derive(Clone,Copy)]
pub struct Sweep {
direction: SweepDirection,
shift: u8,
step_duration: u32,
counter: u32,
}
impl Sweep {
// Build Sweep from NR10 register value
pub fn from_reg(val: u8) -> Sweep {
let dir =
            match val & 8 != 0 {
false => SweepDirection::Up,
true => SweepDirection::Down,
};
let shift = val & 7;
let l = ((val & 0x70) >> 4) as u32;
Sweep {
direction: dir,
shift: shift,
step_duration: l * 0x8000,
counter: 0,
}
}
    // Retrieve the value of the NR10 register
pub fn into_reg(&self) -> u8 {
let l = (self.step_duration / 0x8000) as u8;
let dir = self.direction as u8;
// MSB is undefined and always 1
(1 << 7) | (l << 4) | (dir << 3) | self.shift
}
/// Step through the Sweep state machine, returning the updated
/// divider or None if the sound must be stopped
fn step(&mut self, div: u16) -> Option<u16> {
if self.step_duration == 0 {
// Sweep OFF, do nothing
return Some(div);
}
self.counter += 1;
self.counter %= self.step_duration;
        if self.counter != 0 {
// Do nothing and wait for the next step
return Some(div);
}
// Update the frequency
let offset = div >> (self.shift as usize);
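        // (Illustrative values only: with shift == 3 and div == 0x400 the
        // offset is 0x080, so an upward sweep would yield a divider of 0x480.)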
match self.direction {
SweepDirection::Up => {
let div = div + offset;
if div > 0x7ff {
// We stop on overflow
None
} else {
Some(div)
}
}
SweepDirection::Down => {
if self.shift == 0 || offset > div {
                    // If the subtraction would underflow we do
// nothing
Some(div)
} else {
Some(div - offset)
}
}
}
}
}
// The frequency sweep can move up or down
#[derive(Clone,Copy,PartialEq,Eq)]
enum SweepDirection {
// Frequency increases at each step
Up = 0,
// Frequency decreases at each step
Down = 1,
}
|
RectangleWave
|
identifier_name
|
mtwt.rs
|
), 181-216.
//! DOI=10.1017/S0956796812000093 http://dx.doi.org/10.1017/S0956796812000093
pub use self::SyntaxContext_::*;
use ast::{Ident, Mrk, Name, SyntaxContext};
use std::cell::RefCell;
use std::collections::HashMap;
use std::collections::hash_map::{Occupied, Vacant};
/// The SCTable contains a table of SyntaxContext_'s. It
/// represents a flattened tree structure, to avoid having
/// managed pointers everywhere (that caused an ICE).
/// the mark_memo and rename_memo fields are side-tables
/// that ensure that adding the same mark to the same context
/// gives you back the same context as before. This shouldn't
/// change the semantics--everything here is immutable--but
/// it should cut down on memory use *a lot*; applying a mark
/// to a tree containing 50 identifiers would otherwise generate
/// 50 new contexts
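/// (For example, applying the same mark to the same context twice returns the
/// same table index both times, since the (context, mark) pair is memoized.)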
pub struct SCTable {
table: RefCell<Vec<SyntaxContext_>>,
mark_memo: RefCell<HashMap<(SyntaxContext,Mrk),SyntaxContext>>,
rename_memo: RefCell<HashMap<(SyntaxContext,Ident,Name),SyntaxContext>>,
}
#[deriving(PartialEq, Encodable, Decodable, Hash, Show)]
pub enum SyntaxContext_ {
EmptyCtxt,
Mark (Mrk,SyntaxContext),
/// flattening the name and syntaxcontext into the rename...
/// HIDDEN INVARIANTS:
/// 1) the first name in a Rename node
/// can only be a programmer-supplied name.
/// 2) Every Rename node with a given Name in the
/// "to" slot must have the same name and context
/// in the "from" slot. In essence, they're all
/// pointers to a single "rename" event node.
Rename (Ident,Name,SyntaxContext),
/// actually, IllegalCtxt may not be necessary.
IllegalCtxt
}
/// A list of ident->name renamings
pub type RenameList = Vec<(Ident, Name)>;
/// Extend a syntax context with a given mark
pub fn apply_mark(m: Mrk, ctxt: SyntaxContext) -> SyntaxContext {
with_sctable(|table| apply_mark_internal(m, ctxt, table))
}
/// Extend a syntax context with a given mark and sctable (explicit memoization)
fn apply_mark_internal(m: Mrk, ctxt: SyntaxContext, table: &SCTable) -> SyntaxContext {
let key = (ctxt, m);
* match table.mark_memo.borrow_mut().entry(key) {
Vacant(entry) => entry.set(idx_push(&mut *table.table.borrow_mut(), Mark(m, ctxt))),
Occupied(entry) => entry.into_mut(),
}
}
/// Extend a syntax context with a given rename
pub fn apply_rename(id: Ident, to:Name,
ctxt: SyntaxContext) -> SyntaxContext {
with_sctable(|table| apply_rename_internal(id, to, ctxt, table))
}
/// Extend a syntax context with a given rename and sctable (explicit memoization)
fn apply_rename_internal(id: Ident,
to: Name,
ctxt: SyntaxContext,
table: &SCTable) -> SyntaxContext {
let key = (ctxt, id, to);
* match table.rename_memo.borrow_mut().entry(key) {
Vacant(entry) => entry.set(idx_push(&mut *table.table.borrow_mut(), Rename(id, to, ctxt))),
Occupied(entry) => entry.into_mut(),
}
}
/// Apply a list of renamings to a context
// if these rename lists get long, it would make sense
// to consider memoizing this fold. This may come up
// when we add hygiene to item names.
pub fn apply_renames(renames: &RenameList, ctxt: SyntaxContext) -> SyntaxContext {
renames.iter().fold(ctxt, |ctxt, &(from, to)| {
apply_rename(from, to, ctxt)
})
}
/// Fetch the SCTable from TLS, create one if it doesn't yet exist.
pub fn with_sctable<T>(op: |&SCTable| -> T) -> T {
thread_local!(static SCTABLE_KEY: SCTable = new_sctable_internal())
SCTABLE_KEY.with(|slot| op(slot))
}
// Make a fresh syntax context table with EmptyCtxt in slot zero
// and IllegalCtxt in slot one.
fn new_sctable_internal() -> SCTable {
SCTable {
table: RefCell::new(vec!(EmptyCtxt, IllegalCtxt)),
mark_memo: RefCell::new(HashMap::new()),
rename_memo: RefCell::new(HashMap::new()),
}
}
/// Print out an SCTable for debugging
pub fn display_sctable(table: &SCTable) {
error!("SC table:");
for (idx,val) in table.table.borrow().iter().enumerate() {
error!("{:4} : {}",idx,val);
}
}
/// Clear the tables from TLD to reclaim memory.
pub fn clear_tables() {
with_sctable(|table| {
*table.table.borrow_mut() = Vec::new();
*table.mark_memo.borrow_mut() = HashMap::new();
*table.rename_memo.borrow_mut() = HashMap::new();
});
with_resolve_table_mut(|table| *table = HashMap::new());
}
/// Reset the tables to their initial state
pub fn reset_tables() {
with_sctable(|table| {
*table.table.borrow_mut() = vec!(EmptyCtxt, IllegalCtxt);
*table.mark_memo.borrow_mut() = HashMap::new();
*table.rename_memo.borrow_mut() = HashMap::new();
});
with_resolve_table_mut(|table| *table = HashMap::new());
}
/// Add a value to the end of a vec, return its index
fn idx_push<T>(vec: &mut Vec<T>, val: T) -> u32 {
vec.push(val);
(vec.len() - 1) as u32
}
/// Resolve a syntax object to a name, per MTWT.
pub fn resolve(id: Ident) -> Name {
with_sctable(|sctable| {
with_resolve_table_mut(|resolve_table| {
resolve_internal(id, sctable, resolve_table)
})
})
}
type ResolveTable = HashMap<(Name,SyntaxContext),Name>;
// okay, I admit, putting this in TLS is not so nice:
// fetch the SCTable from TLS, create one if it doesn't yet exist.
fn with_resolve_table_mut<T>(op: |&mut ResolveTable| -> T) -> T {
thread_local!(static RESOLVE_TABLE_KEY: RefCell<ResolveTable> = {
RefCell::new(HashMap::new())
})
RESOLVE_TABLE_KEY.with(|slot| op(&mut *slot.borrow_mut()))
}
/// Resolve a syntax object to a name, per MTWT.
/// adding memoization to resolve 500+ seconds in resolve for librustc (!)
fn resolve_internal(id: Ident,
table: &SCTable,
resolve_table: &mut ResolveTable) -> Name {
let key = (id.name, id.ctxt);
match resolve_table.get(&key) {
Some(&name) => return name,
None => {}
}
let resolved = {
let result = (*table.table.borrow())[id.ctxt as uint];
match result {
EmptyCtxt => id.name,
// ignore marks here:
Mark(_,subctxt) =>
resolve_internal(Ident{name:id.name, ctxt: subctxt},
table, resolve_table),
// do the rename if necessary:
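            // (The rename is applied only when this identifier and the binder
            // recorded in the Rename node resolve to the same name and carry
            // the same marks; otherwise the occurrence keeps resolvedthis.)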
Rename(Ident{name, ctxt}, toname, subctxt) => {
let resolvedfrom =
resolve_internal(Ident{name:name, ctxt:ctxt},
|
let resolvedthis =
resolve_internal(Ident{name:id.name, ctxt:subctxt},
table, resolve_table);
if (resolvedthis == resolvedfrom)
&& (marksof_internal(ctxt, resolvedthis, table)
== marksof_internal(subctxt, resolvedthis, table)) {
toname
} else {
resolvedthis
}
}
IllegalCtxt => panic!("expected resolvable context, got IllegalCtxt")
}
};
resolve_table.insert(key, resolved);
resolved
}
/// Compute the marks associated with a syntax context.
pub fn marksof(ctxt: SyntaxContext, stopname: Name) -> Vec<Mrk> {
with_sctable(|table| marksof_internal(ctxt, stopname, table))
}
// the internal function for computing marks
// it's not clear to me whether it's better to use a [] mutable
// vector or a cons-list for this.
fn marksof_internal(ctxt: SyntaxContext,
stopname: Name,
table: &SCTable) -> Vec<Mrk> {
let mut result = Vec::new();
let mut loopvar = ctxt;
loop {
let table_entry = (*table.table.borrow())[loopvar as uint];
match table_entry {
EmptyCtxt => {
return result;
},
Mark(mark, tl) => {
xor_push(&mut result, mark);
loopvar = tl;
},
Rename(_,name,tl) => {
// see MTWT for details on the purpose of the stopname.
// short version: it prevents duplication of effort.
if name == stopname {
return result;
} else {
loopvar = tl;
}
}
IllegalCtxt => panic!("expected resolvable context, got IllegalCtxt")
}
}
}
/// Return the outer mark for a context with a mark at the outside.
/// FAILS when outside is not a mark.
pub fn outer_mark(ctxt: SyntaxContext) -> Mrk {
with_sctable(|sctable| {
match (*sctable.table.borrow())[ctxt as uint] {
Mark(mrk, _) => mrk,
_ => panic!("can't retrieve outer mark when outside is not a mark")
}
})
}
/// Push a name... unless it matches the one on top, in which
/// case pop and discard (so two of the same marks cancel)
fn xor_push(marks: &mut Vec<Mrk>, mark: Mrk) {
if (marks.len() > 0) && (*marks.last().unwrap() == mark) {
marks.pop().unwrap();
} else {
marks.push(mark);
}
}
#[cfg(test)]
mod tests {
use self::TestSC::*;
use ast::{EMPTY_CTXT, Ident, Mrk, Name, SyntaxContext};
use super::{resolve, xor_push, apply_mark_internal, new_sctable_internal};
use super::{apply_rename_internal, apply_renames, marksof_internal, resolve_internal};
use super::{SCTable, EmptyCtxt, Mark, Rename, IllegalCtxt};
use std::collections::HashMap;
#[test]
fn xorpush_test () {
let mut s = Vec::new();
xor_push(&mut s, 14);
assert_eq!(s.clone(), vec!(14));
xor_push(&mut s, 14);
assert_eq!(s.clone(), Vec::new());
xor_push(&mut s, 14);
assert_eq!(s.clone(), vec!(14));
xor_push(&mut s, 15);
assert_eq!(s.clone(), vec!(14, 15));
xor_push(&mut s, 16);
assert_eq!(s.clone(), vec!(14, 15, 16));
xor_push(&mut s, 16);
assert_eq!(s.clone(), vec!(14, 15));
xor_push(&mut s, 15);
assert_eq!(s.clone(), vec!(14));
}
fn id(n: u32, s: SyntaxContext) -> Ident {
Ident {name: Name(n), ctxt: s}
}
// because of the SCTable, I now need a tidy way of
// creating syntax objects. Sigh.
#[deriving(Clone, PartialEq, Show)]
enum TestSC {
M(Mrk),
R(Ident,Name)
}
// unfold a vector of TestSC values into a SCTable,
// returning the resulting index
fn unfold_test_sc(tscs : Vec<TestSC>, tail: SyntaxContext, table: &SCTable)
-> SyntaxContext {
tscs.iter().rev().fold(tail, |tail : SyntaxContext, tsc : &TestSC|
{match *tsc {
M(mrk) => apply_mark_internal(mrk,tail,table),
R(ident,name) => apply_rename_internal(ident,name,tail,table)}})
}
// gather a SyntaxContext back into a vector of TestSCs
fn refold_test_sc(mut sc: SyntaxContext, table : &SCTable) -> Vec<TestSC> {
let mut result = Vec::new();
loop {
let table = table.table.borrow();
match (*table)[sc as uint] {
EmptyCtxt => {return result;},
Mark(mrk,tail) => {
result.push(M(mrk));
sc = tail;
continue;
},
Rename(id,name,tail) => {
result.push(R(id,name));
sc = tail;
continue;
}
IllegalCtxt => panic!("expected resolvable context, got IllegalCtxt")
}
}
}
#[test]
fn test_unfold_refold(){
let mut t = new_sctable_internal();
let test_sc = vec!(M(3),R(id(101,0),Name(14)),M(9));
assert_eq!(unfold_test_sc(test_sc.clone(),EMPTY_CTXT,&mut t),4);
{
let table = t.table.borrow();
assert!((*table)[2] == Mark(9,0));
assert!((*table)[3] == Rename(id(101,0),Name(14),2));
assert!((*table)[4] == Mark(3,3));
}
assert_eq!(refold_test_sc(4,&t),test_sc);
}
// extend a syntax context with a sequence of marks given
// in a vector. v[0] will be the outermost mark.
fn unfold_marks(mrks: Vec<Mrk>, tail: SyntaxContext, table: &SCTable)
-> SyntaxContext {
mrks.iter().rev().fold(tail, |tail:SyntaxContext, mrk:&Mrk|
{apply_mark_internal(*mrk,tail,table)})
}
#[test] fn unfold_marks_test() {
let mut t = new_sctable_internal();
assert_eq!(unfold_marks(vec!(3,7),EMPTY_CTXT,&mut t),3);
{
let table = t.table.borrow();
assert!((*table)[2] == Mark(7,0));
assert!((*table)[3] == Mark(3,2));
}
}
#[test]
fn test_marksof () {
let stopname = Name(242);
let name1 = Name(243);
let mut t = new_sctable_internal();
assert_eq!(marksof_internal (EMPTY_CTXT,stopname,&t),Vec::new());
// FIXME #5074: ANF'd to dodge nested calls
{ let ans = unfold_marks(vec!(4,98),EMPTY_CTXT,&mut t);
assert_eq! (marksof_internal (ans,stopname,&t),vec!(4,98));}
// does xoring work?
{ let ans = unfold_marks(vec!(5,5,16),EMPTY_CTXT,&mut t);
assert_eq! (marksof_internal (ans,stopname,&t), vec!(16));}
// does nested xoring work?
{ let ans = unfold_marks(vec!(5,10,10,5,16),EMPTY_CTXT,&mut t);
assert_eq! (marksof_internal (ans, stopname,&t), vec!(16));}
// rename where stop doesn't match:
{ let chain = vec!(M(9),
R(id(name1.uint() as u32,
apply_mark_internal (4, EMPTY_CTXT,&mut t)),
Name(100101102)),
M(14));
let ans = unfold_test_sc(chain,EMPTY_CTXT,&mut t);
assert_eq! (marksof_internal (ans, stopname, &t), vec!(9,14));}
// rename where stop does match
{ let name1sc = apply_mark_internal(4, EMPTY_CTXT, &mut t);
let chain = vec!(M(9),
R(id(name1.uint() as u32, name1sc),
stopname),
M(14));
let ans = unfold_test_sc(chain,EMPTY_CTXT,&mut t);
assert_eq! (marksof_internal (ans, stopname, &t), vec!(9)); }
}
#[test]
fn resolve_tests () {
let a = 40;
let mut t = new_sctable_internal();
let mut rt = HashMap::new();
// - ctxt is MT
assert_eq!(resolve_internal(id(a,EMPTY_CTXT),&mut t, &mut rt),Name(a));
// - simple ignored marks
{ let sc = unfold_marks(vec!(1,2,3),EMPTY_CTXT,&mut t);
assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt),Name(a));}
// - orthogonal rename where names don't match
{ let sc = unfold_test_sc(vec!(R(id(50,EMPTY_CTXT),Name(51)),M(12)),EMPTY_CTXT,&mut t);
assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt),Name(a));}
// - rename where names do match, but marks don't
{ let sc1 = apply_mark_internal(1,EMPTY_CTXT,&mut t);
let sc = unfold_test_sc(vec!(R(id(a,sc1),Name(50)),
M(1),
M(2)),
EMPTY_CTXT,&mut t);
assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt), Name(a));}
// - rename where names and marks match
{ let sc1 = unfold_test_sc(vec!(M(1),M(2)),EMPTY_CTXT,&mut t);
let sc = unfold_test_sc(vec!(R(id(a,sc1),Name(50)),M(1),M(2)),EMPTY_CTXT,&mut t);
assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt), Name(50)); }
// - rename where names and marks match by literal sharing
{ let sc1 = unfold_test_sc(vec!(M(1),M(2)),EMPTY_CTXT,&mut t);
let sc = unfold_test_sc(vec!(R(id(a,sc1),Name(50))),sc1,&mut t);
assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt), Name(50)); }
// - two renames of the same var.. can only happen if you use
// local-expand to prevent the inner binding from being renamed
// during the rename-pass caused by the first:
println!("about to run bad test");
{ let sc = unfold_test_sc(vec!(R(id(a,EMPTY_CTXT),Name(50)),
R(id(a,EMPTY_CTXT),Name(51))),
EMPTY_CTXT,&mut t);
assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt), Name(51)); }
// the simplest double-rename:
{ let a_to_a50 = apply_rename_internal(id(a,EMPTY_CTXT),Name(50),EMPTY_CTXT,&mut t);
let a50_to_a51 = apply_rename_internal(id(a,a_to_a50),Name(51),a_to_a50,&mut t);
assert_eq!(resolve_internal(id(a,a50_to_a51),&mut t, &mut rt),Name(51));
// mark on the outside doesn't stop rename:
let sc = apply_mark_internal(9,a50_to_a51,&mut t);
assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt),Name(51));
// but mark on the inside does:
let a50_to_a51_b = unfold_test_sc(vec!(R(id(a,a_to_a50),Name(51)),
M(9)),
a_to_a50,
&mut t);
assert_eq!(resolve_internal(id(a,a50_to_a51_b),&mut t, &mut rt),Name(50));}
}
#[test]
fn mtwt_resolve_test(){
let a = 40;
assert_eq!(resolve(id(a,EMPTY_CTXT)),Name(a));
}
#[test]
fn hashing_tests () {
let mut t = new_sctable_internal();
assert_eq!(apply_mark_internal(12,EMPTY_CTXT,&mut t),2);
assert_eq!(apply_mark_internal(13,EMPTY_CTXT,&mut t),3);
// using the same one again should result in the same index:
assert
|
table, resolve_table);
|
random_line_split
|
mtwt.rs
|
-216.
//! DOI=10.1017/S0956796812000093 http://dx.doi.org/10.1017/S0956796812000093
pub use self::SyntaxContext_::*;
use ast::{Ident, Mrk, Name, SyntaxContext};
use std::cell::RefCell;
use std::collections::HashMap;
use std::collections::hash_map::{Occupied, Vacant};
/// The SCTable contains a table of SyntaxContext_'s. It
/// represents a flattened tree structure, to avoid having
/// managed pointers everywhere (that caused an ICE).
/// the mark_memo and rename_memo fields are side-tables
/// that ensure that adding the same mark to the same context
/// gives you back the same context as before. This shouldn't
/// change the semantics--everything here is immutable--but
/// it should cut down on memory use *a lot*; applying a mark
/// to a tree containing 50 identifiers would otherwise generate
/// 50 new contexts
pub struct SCTable {
table: RefCell<Vec<SyntaxContext_>>,
mark_memo: RefCell<HashMap<(SyntaxContext,Mrk),SyntaxContext>>,
rename_memo: RefCell<HashMap<(SyntaxContext,Ident,Name),SyntaxContext>>,
}
#[deriving(PartialEq, Encodable, Decodable, Hash, Show)]
pub enum SyntaxContext_ {
EmptyCtxt,
Mark (Mrk,SyntaxContext),
/// flattening the name and syntaxcontext into the rename...
/// HIDDEN INVARIANTS:
/// 1) the first name in a Rename node
/// can only be a programmer-supplied name.
/// 2) Every Rename node with a given Name in the
/// "to" slot must have the same name and context
/// in the "from" slot. In essence, they're all
/// pointers to a single "rename" event node.
Rename (Ident,Name,SyntaxContext),
/// actually, IllegalCtxt may not be necessary.
IllegalCtxt
}
/// A list of ident->name renamings
pub type RenameList = Vec<(Ident, Name)>;
/// Extend a syntax context with a given mark
pub fn apply_mark(m: Mrk, ctxt: SyntaxContext) -> SyntaxContext {
with_sctable(|table| apply_mark_internal(m, ctxt, table))
}
/// Extend a syntax context with a given mark and sctable (explicit memoization)
fn apply_mark_internal(m: Mrk, ctxt: SyntaxContext, table: &SCTable) -> SyntaxContext {
let key = (ctxt, m);
* match table.mark_memo.borrow_mut().entry(key) {
Vacant(entry) => entry.set(idx_push(&mut *table.table.borrow_mut(), Mark(m, ctxt))),
Occupied(entry) => entry.into_mut(),
}
}
/// Extend a syntax context with a given rename
pub fn apply_rename(id: Ident, to:Name,
ctxt: SyntaxContext) -> SyntaxContext {
with_sctable(|table| apply_rename_internal(id, to, ctxt, table))
}
/// Extend a syntax context with a given rename and sctable (explicit memoization)
fn apply_rename_internal(id: Ident,
to: Name,
ctxt: SyntaxContext,
table: &SCTable) -> SyntaxContext {
let key = (ctxt, id, to);
* match table.rename_memo.borrow_mut().entry(key) {
Vacant(entry) => entry.set(idx_push(&mut *table.table.borrow_mut(), Rename(id, to, ctxt))),
Occupied(entry) => entry.into_mut(),
}
}
/// Apply a list of renamings to a context
// if these rename lists get long, it would make sense
// to consider memoizing this fold. This may come up
// when we add hygiene to item names.
pub fn apply_renames(renames: &RenameList, ctxt: SyntaxContext) -> SyntaxContext {
renames.iter().fold(ctxt, |ctxt, &(from, to)| {
apply_rename(from, to, ctxt)
})
}
/// Fetch the SCTable from TLS, create one if it doesn't yet exist.
pub fn with_sctable<T>(op: |&SCTable| -> T) -> T {
thread_local!(static SCTABLE_KEY: SCTable = new_sctable_internal())
SCTABLE_KEY.with(|slot| op(slot))
}
// Make a fresh syntax context table with EmptyCtxt in slot zero
// and IllegalCtxt in slot one.
fn new_sctable_internal() -> SCTable {
SCTable {
table: RefCell::new(vec!(EmptyCtxt, IllegalCtxt)),
mark_memo: RefCell::new(HashMap::new()),
rename_memo: RefCell::new(HashMap::new()),
}
}
/// Print out an SCTable for debugging
pub fn display_sctable(table: &SCTable) {
error!("SC table:");
for (idx,val) in table.table.borrow().iter().enumerate() {
error!("{:4} : {}",idx,val);
}
}
/// Clear the tables from TLD to reclaim memory.
pub fn clear_tables() {
with_sctable(|table| {
*table.table.borrow_mut() = Vec::new();
*table.mark_memo.borrow_mut() = HashMap::new();
*table.rename_memo.borrow_mut() = HashMap::new();
});
with_resolve_table_mut(|table| *table = HashMap::new());
}
/// Reset the tables to their initial state
pub fn reset_tables() {
with_sctable(|table| {
*table.table.borrow_mut() = vec!(EmptyCtxt, IllegalCtxt);
*table.mark_memo.borrow_mut() = HashMap::new();
*table.rename_memo.borrow_mut() = HashMap::new();
});
with_resolve_table_mut(|table| *table = HashMap::new());
}
/// Add a value to the end of a vec, return its index
fn idx_push<T>(vec: &mut Vec<T>, val: T) -> u32 {
vec.push(val);
(vec.len() - 1) as u32
}
/// Resolve a syntax object to a name, per MTWT.
pub fn resolve(id: Ident) -> Name {
with_sctable(|sctable| {
with_resolve_table_mut(|resolve_table| {
resolve_internal(id, sctable, resolve_table)
})
})
}
type ResolveTable = HashMap<(Name,SyntaxContext),Name>;
// okay, I admit, putting this in TLS is not so nice:
// fetch the SCTable from TLS, create one if it doesn't yet exist.
fn with_resolve_table_mut<T>(op: |&mut ResolveTable| -> T) -> T {
thread_local!(static RESOLVE_TABLE_KEY: RefCell<ResolveTable> = {
RefCell::new(HashMap::new())
})
RESOLVE_TABLE_KEY.with(|slot| op(&mut *slot.borrow_mut()))
}
/// Resolve a syntax object to a name, per MTWT.
/// adding memoization to resolve 500+ seconds in resolve for librustc (!)
fn resolve_internal(id: Ident,
table: &SCTable,
resolve_table: &mut ResolveTable) -> Name {
let key = (id.name, id.ctxt);
match resolve_table.get(&key) {
Some(&name) => return name,
None => {}
}
let resolved = {
let result = (*table.table.borrow())[id.ctxt as uint];
match result {
EmptyCtxt => id.name,
// ignore marks here:
Mark(_,subctxt) =>
resolve_internal(Ident{name:id.name, ctxt: subctxt},
table, resolve_table),
// do the rename if necessary:
Rename(Ident{name, ctxt}, toname, subctxt) => {
let resolvedfrom =
resolve_internal(Ident{name:name, ctxt:ctxt},
table, resolve_table);
let resolvedthis =
resolve_internal(Ident{name:id.name, ctxt:subctxt},
table, resolve_table);
if (resolvedthis == resolvedfrom)
&& (marksof_internal(ctxt, resolvedthis, table)
== marksof_internal(subctxt, resolvedthis, table)) {
toname
} else {
resolvedthis
}
}
IllegalCtxt => panic!("expected resolvable context, got IllegalCtxt")
}
};
resolve_table.insert(key, resolved);
resolved
}
/// Compute the marks associated with a syntax context.
pub fn marksof(ctxt: SyntaxContext, stopname: Name) -> Vec<Mrk> {
with_sctable(|table| marksof_internal(ctxt, stopname, table))
}
// the internal function for computing marks
// it's not clear to me whether it's better to use a [] mutable
// vector or a cons-list for this.
fn marksof_internal(ctxt: SyntaxContext,
stopname: Name,
table: &SCTable) -> Vec<Mrk> {
let mut result = Vec::new();
let mut loopvar = ctxt;
loop {
let table_entry = (*table.table.borrow())[loopvar as uint];
match table_entry {
EmptyCtxt =>
|
,
Mark(mark, tl) => {
xor_push(&mut result, mark);
loopvar = tl;
},
Rename(_,name,tl) => {
// see MTWT for details on the purpose of the stopname.
// short version: it prevents duplication of effort.
if name == stopname {
return result;
} else {
loopvar = tl;
}
}
IllegalCtxt => panic!("expected resolvable context, got IllegalCtxt")
}
}
}
/// Return the outer mark for a context with a mark at the outside.
/// FAILS when outside is not a mark.
pub fn outer_mark(ctxt: SyntaxContext) -> Mrk {
with_sctable(|sctable| {
match (*sctable.table.borrow())[ctxt as uint] {
Mark(mrk, _) => mrk,
_ => panic!("can't retrieve outer mark when outside is not a mark")
}
})
}
/// Push a name... unless it matches the one on top, in which
/// case pop and discard (so two of the same marks cancel)
fn xor_push(marks: &mut Vec<Mrk>, mark: Mrk) {
if (marks.len() > 0) && (*marks.last().unwrap() == mark) {
marks.pop().unwrap();
} else {
marks.push(mark);
}
}
#[cfg(test)]
mod tests {
use self::TestSC::*;
use ast::{EMPTY_CTXT, Ident, Mrk, Name, SyntaxContext};
use super::{resolve, xor_push, apply_mark_internal, new_sctable_internal};
use super::{apply_rename_internal, apply_renames, marksof_internal, resolve_internal};
use super::{SCTable, EmptyCtxt, Mark, Rename, IllegalCtxt};
use std::collections::HashMap;
#[test]
fn xorpush_test () {
let mut s = Vec::new();
xor_push(&mut s, 14);
assert_eq!(s.clone(), vec!(14));
xor_push(&mut s, 14);
assert_eq!(s.clone(), Vec::new());
xor_push(&mut s, 14);
assert_eq!(s.clone(), vec!(14));
xor_push(&mut s, 15);
assert_eq!(s.clone(), vec!(14, 15));
xor_push(&mut s, 16);
assert_eq!(s.clone(), vec!(14, 15, 16));
xor_push(&mut s, 16);
assert_eq!(s.clone(), vec!(14, 15));
xor_push(&mut s, 15);
assert_eq!(s.clone(), vec!(14));
}
fn id(n: u32, s: SyntaxContext) -> Ident {
Ident {name: Name(n), ctxt: s}
}
// because of the SCTable, I now need a tidy way of
// creating syntax objects. Sigh.
#[deriving(Clone, PartialEq, Show)]
enum TestSC {
M(Mrk),
R(Ident,Name)
}
// unfold a vector of TestSC values into a SCTable,
// returning the resulting index
fn unfold_test_sc(tscs : Vec<TestSC>, tail: SyntaxContext, table: &SCTable)
-> SyntaxContext {
tscs.iter().rev().fold(tail, |tail : SyntaxContext, tsc : &TestSC|
{match *tsc {
M(mrk) => apply_mark_internal(mrk,tail,table),
R(ident,name) => apply_rename_internal(ident,name,tail,table)}})
}
// gather a SyntaxContext back into a vector of TestSCs
fn refold_test_sc(mut sc: SyntaxContext, table : &SCTable) -> Vec<TestSC> {
let mut result = Vec::new();
loop {
let table = table.table.borrow();
match (*table)[sc as uint] {
EmptyCtxt => {return result;},
Mark(mrk,tail) => {
result.push(M(mrk));
sc = tail;
continue;
},
Rename(id,name,tail) => {
result.push(R(id,name));
sc = tail;
continue;
}
IllegalCtxt => panic!("expected resolvable context, got IllegalCtxt")
}
}
}
#[test]
fn test_unfold_refold(){
let mut t = new_sctable_internal();
let test_sc = vec!(M(3),R(id(101,0),Name(14)),M(9));
assert_eq!(unfold_test_sc(test_sc.clone(),EMPTY_CTXT,&mut t),4);
{
let table = t.table.borrow();
assert!((*table)[2] == Mark(9,0));
assert!((*table)[3] == Rename(id(101,0),Name(14),2));
assert!((*table)[4] == Mark(3,3));
}
assert_eq!(refold_test_sc(4,&t),test_sc);
}
// extend a syntax context with a sequence of marks given
// in a vector. v[0] will be the outermost mark.
fn unfold_marks(mrks: Vec<Mrk>, tail: SyntaxContext, table: &SCTable)
-> SyntaxContext {
mrks.iter().rev().fold(tail, |tail:SyntaxContext, mrk:&Mrk|
{apply_mark_internal(*mrk,tail,table)})
}
#[test] fn unfold_marks_test() {
let mut t = new_sctable_internal();
assert_eq!(unfold_marks(vec!(3,7),EMPTY_CTXT,&mut t),3);
{
let table = t.table.borrow();
assert!((*table)[2] == Mark(7,0));
assert!((*table)[3] == Mark(3,2));
}
}
#[test]
fn test_marksof () {
let stopname = Name(242);
let name1 = Name(243);
let mut t = new_sctable_internal();
assert_eq!(marksof_internal (EMPTY_CTXT,stopname,&t),Vec::new());
// FIXME #5074: ANF'd to dodge nested calls
{ let ans = unfold_marks(vec!(4,98),EMPTY_CTXT,&mut t);
assert_eq! (marksof_internal (ans,stopname,&t),vec!(4,98));}
// does xoring work?
{ let ans = unfold_marks(vec!(5,5,16),EMPTY_CTXT,&mut t);
assert_eq! (marksof_internal (ans,stopname,&t), vec!(16));}
// does nested xoring work?
{ let ans = unfold_marks(vec!(5,10,10,5,16),EMPTY_CTXT,&mut t);
assert_eq! (marksof_internal (ans, stopname,&t), vec!(16));}
// rename where stop doesn't match:
{ let chain = vec!(M(9),
R(id(name1.uint() as u32,
apply_mark_internal (4, EMPTY_CTXT,&mut t)),
Name(100101102)),
M(14));
let ans = unfold_test_sc(chain,EMPTY_CTXT,&mut t);
assert_eq! (marksof_internal (ans, stopname, &t), vec!(9,14));}
// rename where stop does match
{ let name1sc = apply_mark_internal(4, EMPTY_CTXT, &mut t);
let chain = vec!(M(9),
R(id(name1.uint() as u32, name1sc),
stopname),
M(14));
let ans = unfold_test_sc(chain,EMPTY_CTXT,&mut t);
assert_eq! (marksof_internal (ans, stopname, &t), vec!(9)); }
}
#[test]
fn resolve_tests () {
let a = 40;
let mut t = new_sctable_internal();
let mut rt = HashMap::new();
// - ctxt is MT
assert_eq!(resolve_internal(id(a,EMPTY_CTXT),&mut t, &mut rt),Name(a));
// - simple ignored marks
{ let sc = unfold_marks(vec!(1,2,3),EMPTY_CTXT,&mut t);
assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt),Name(a));}
// - orthogonal rename where names don't match
{ let sc = unfold_test_sc(vec!(R(id(50,EMPTY_CTXT),Name(51)),M(12)),EMPTY_CTXT,&mut t);
assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt),Name(a));}
// - rename where names do match, but marks don't
{ let sc1 = apply_mark_internal(1,EMPTY_CTXT,&mut t);
let sc = unfold_test_sc(vec!(R(id(a,sc1),Name(50)),
M(1),
M(2)),
EMPTY_CTXT,&mut t);
assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt), Name(a));}
// - rename where names and marks match
{ let sc1 = unfold_test_sc(vec!(M(1),M(2)),EMPTY_CTXT,&mut t);
let sc = unfold_test_sc(vec!(R(id(a,sc1),Name(50)),M(1),M(2)),EMPTY_CTXT,&mut t);
assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt), Name(50)); }
// - rename where names and marks match by literal sharing
{ let sc1 = unfold_test_sc(vec!(M(1),M(2)),EMPTY_CTXT,&mut t);
let sc = unfold_test_sc(vec!(R(id(a,sc1),Name(50))),sc1,&mut t);
assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt), Name(50)); }
// - two renames of the same var.. can only happen if you use
// local-expand to prevent the inner binding from being renamed
// during the rename-pass caused by the first:
println!("about to run bad test");
{ let sc = unfold_test_sc(vec!(R(id(a,EMPTY_CTXT),Name(50)),
R(id(a,EMPTY_CTXT),Name(51))),
EMPTY_CTXT,&mut t);
assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt), Name(51)); }
// the simplest double-rename:
{ let a_to_a50 = apply_rename_internal(id(a,EMPTY_CTXT),Name(50),EMPTY_CTXT,&mut t);
let a50_to_a51 = apply_rename_internal(id(a,a_to_a50),Name(51),a_to_a50,&mut t);
assert_eq!(resolve_internal(id(a,a50_to_a51),&mut t, &mut rt),Name(51));
// mark on the outside doesn't stop rename:
let sc = apply_mark_internal(9,a50_to_a51,&mut t);
assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt),Name(51));
// but mark on the inside does:
let a50_to_a51_b = unfold_test_sc(vec!(R(id(a,a_to_a50),Name(51)),
M(9)),
a_to_a50,
&mut t);
assert_eq!(resolve_internal(id(a,a50_to_a51_b),&mut t, &mut rt),Name(50));}
}
#[test]
fn mtwt_resolve_test(){
let a = 40;
assert_eq!(resolve(id(a,EMPTY_CTXT)),Name(a));
}
#[test]
fn hashing_tests () {
let mut t = new_sctable_internal();
assert_eq!(apply_mark_internal(12,EMPTY_CTXT,&mut t),2);
assert_eq!(apply_mark_internal(13,EMPTY_CTXT,&mut t),3);
// using the same one again should result in the same index:
|
{
return result;
}
|
conditional_block
|
mtwt.rs
|
181-216.
//! DOI=10.1017/S0956796812000093 http://dx.doi.org/10.1017/S0956796812000093
pub use self::SyntaxContext_::*;
use ast::{Ident, Mrk, Name, SyntaxContext};
use std::cell::RefCell;
use std::collections::HashMap;
use std::collections::hash_map::{Occupied, Vacant};
/// The SCTable contains a table of SyntaxContext_'s. It
/// represents a flattened tree structure, to avoid having
/// managed pointers everywhere (that caused an ICE).
/// the mark_memo and rename_memo fields are side-tables
/// that ensure that adding the same mark to the same context
/// gives you back the same context as before. This shouldn't
/// change the semantics--everything here is immutable--but
/// it should cut down on memory use *a lot*; applying a mark
/// to a tree containing 50 identifiers would otherwise generate
/// 50 new contexts
pub struct SCTable {
table: RefCell<Vec<SyntaxContext_>>,
mark_memo: RefCell<HashMap<(SyntaxContext,Mrk),SyntaxContext>>,
rename_memo: RefCell<HashMap<(SyntaxContext,Ident,Name),SyntaxContext>>,
}
#[deriving(PartialEq, Encodable, Decodable, Hash, Show)]
pub enum SyntaxContext_ {
EmptyCtxt,
Mark (Mrk,SyntaxContext),
/// flattening the name and syntaxcontext into the rename...
/// HIDDEN INVARIANTS:
/// 1) the first name in a Rename node
/// can only be a programmer-supplied name.
/// 2) Every Rename node with a given Name in the
/// "to" slot must have the same name and context
/// in the "from" slot. In essence, they're all
/// pointers to a single "rename" event node.
Rename (Ident,Name,SyntaxContext),
/// actually, IllegalCtxt may not be necessary.
IllegalCtxt
}
/// A list of ident->name renamings
pub type RenameList = Vec<(Ident, Name)>;
/// Extend a syntax context with a given mark
pub fn apply_mark(m: Mrk, ctxt: SyntaxContext) -> SyntaxContext {
with_sctable(|table| apply_mark_internal(m, ctxt, table))
}
/// Extend a syntax context with a given mark and sctable (explicit memoization)
fn apply_mark_internal(m: Mrk, ctxt: SyntaxContext, table: &SCTable) -> SyntaxContext {
let key = (ctxt, m);
* match table.mark_memo.borrow_mut().entry(key) {
Vacant(entry) => entry.set(idx_push(&mut *table.table.borrow_mut(), Mark(m, ctxt))),
Occupied(entry) => entry.into_mut(),
}
}
/// Extend a syntax context with a given rename
pub fn apply_rename(id: Ident, to:Name,
ctxt: SyntaxContext) -> SyntaxContext {
with_sctable(|table| apply_rename_internal(id, to, ctxt, table))
}
/// Extend a syntax context with a given rename and sctable (explicit memoization)
fn apply_rename_internal(id: Ident,
to: Name,
ctxt: SyntaxContext,
table: &SCTable) -> SyntaxContext {
let key = (ctxt, id, to);
* match table.rename_memo.borrow_mut().entry(key) {
Vacant(entry) => entry.set(idx_push(&mut *table.table.borrow_mut(), Rename(id, to, ctxt))),
Occupied(entry) => entry.into_mut(),
}
}
/// Apply a list of renamings to a context
// if these rename lists get long, it would make sense
// to consider memoizing this fold. This may come up
// when we add hygiene to item names.
pub fn apply_renames(renames: &RenameList, ctxt: SyntaxContext) -> SyntaxContext {
renames.iter().fold(ctxt, |ctxt, &(from, to)| {
apply_rename(from, to, ctxt)
})
}
/// Fetch the SCTable from TLS, create one if it doesn't yet exist.
pub fn with_sctable<T>(op: |&SCTable| -> T) -> T {
thread_local!(static SCTABLE_KEY: SCTable = new_sctable_internal())
SCTABLE_KEY.with(|slot| op(slot))
}
// Make a fresh syntax context table with EmptyCtxt in slot zero
// and IllegalCtxt in slot one.
fn new_sctable_internal() -> SCTable {
SCTable {
table: RefCell::new(vec!(EmptyCtxt, IllegalCtxt)),
mark_memo: RefCell::new(HashMap::new()),
rename_memo: RefCell::new(HashMap::new()),
}
}
/// Print out an SCTable for debugging
pub fn display_sctable(table: &SCTable) {
error!("SC table:");
for (idx,val) in table.table.borrow().iter().enumerate() {
error!("{:4} : {}",idx,val);
}
}
/// Clear the tables from TLD to reclaim memory.
pub fn clear_tables() {
with_sctable(|table| {
*table.table.borrow_mut() = Vec::new();
*table.mark_memo.borrow_mut() = HashMap::new();
*table.rename_memo.borrow_mut() = HashMap::new();
});
with_resolve_table_mut(|table| *table = HashMap::new());
}
/// Reset the tables to their initial state
pub fn reset_tables() {
with_sctable(|table| {
*table.table.borrow_mut() = vec!(EmptyCtxt, IllegalCtxt);
*table.mark_memo.borrow_mut() = HashMap::new();
*table.rename_memo.borrow_mut() = HashMap::new();
});
with_resolve_table_mut(|table| *table = HashMap::new());
}
/// Add a value to the end of a vec, return its index
fn idx_push<T>(vec: &mut Vec<T>, val: T) -> u32 {
vec.push(val);
(vec.len() - 1) as u32
}
/// Resolve a syntax object to a name, per MTWT.
pub fn resolve(id: Ident) -> Name {
with_sctable(|sctable| {
with_resolve_table_mut(|resolve_table| {
resolve_internal(id, sctable, resolve_table)
})
})
}
type ResolveTable = HashMap<(Name,SyntaxContext),Name>;
// okay, I admit, putting this in TLS is not so nice:
// fetch the SCTable from TLS, create one if it doesn't yet exist.
fn with_resolve_table_mut<T>(op: |&mut ResolveTable| -> T) -> T {
thread_local!(static RESOLVE_TABLE_KEY: RefCell<ResolveTable> = {
RefCell::new(HashMap::new())
})
RESOLVE_TABLE_KEY.with(|slot| op(&mut *slot.borrow_mut()))
}
/// Resolve a syntax object to a name, per MTWT.
/// adding memoization to resolve 500+ seconds in resolve for librustc (!)
fn resolve_internal(id: Ident,
table: &SCTable,
resolve_table: &mut ResolveTable) -> Name {
let key = (id.name, id.ctxt);
match resolve_table.get(&key) {
Some(&name) => return name,
None => {}
}
let resolved = {
let result = (*table.table.borrow())[id.ctxt as uint];
match result {
EmptyCtxt => id.name,
// ignore marks here:
Mark(_,subctxt) =>
resolve_internal(Ident{name:id.name, ctxt: subctxt},
table, resolve_table),
// do the rename if necessary:
Rename(Ident{name, ctxt}, toname, subctxt) => {
let resolvedfrom =
resolve_internal(Ident{name:name, ctxt:ctxt},
table, resolve_table);
let resolvedthis =
resolve_internal(Ident{name:id.name, ctxt:subctxt},
table, resolve_table);
if (resolvedthis == resolvedfrom)
&& (marksof_internal(ctxt, resolvedthis, table)
== marksof_internal(subctxt, resolvedthis, table)) {
toname
} else {
resolvedthis
}
}
IllegalCtxt => panic!("expected resolvable context, got IllegalCtxt")
}
};
resolve_table.insert(key, resolved);
resolved
}
/// Compute the marks associated with a syntax context.
pub fn marksof(ctxt: SyntaxContext, stopname: Name) -> Vec<Mrk> {
with_sctable(|table| marksof_internal(ctxt, stopname, table))
}
// the internal function for computing marks
// it's not clear to me whether it's better to use a [] mutable
// vector or a cons-list for this.
fn marksof_internal(ctxt: SyntaxContext,
stopname: Name,
table: &SCTable) -> Vec<Mrk> {
let mut result = Vec::new();
let mut loopvar = ctxt;
loop {
let table_entry = (*table.table.borrow())[loopvar as uint];
match table_entry {
EmptyCtxt => {
return result;
},
Mark(mark, tl) => {
xor_push(&mut result, mark);
loopvar = tl;
},
Rename(_,name,tl) => {
// see MTWT for details on the purpose of the stopname.
// short version: it prevents duplication of effort.
if name == stopname {
return result;
} else {
loopvar = tl;
}
}
IllegalCtxt => panic!("expected resolvable context, got IllegalCtxt")
}
}
}
/// Return the outer mark for a context with a mark at the outside.
/// FAILS when outside is not a mark.
pub fn outer_mark(ctxt: SyntaxContext) -> Mrk {
with_sctable(|sctable| {
match (*sctable.table.borrow())[ctxt as uint] {
Mark(mrk, _) => mrk,
_ => panic!("can't retrieve outer mark when outside is not a mark")
}
})
}
/// Push a name... unless it matches the one on top, in which
/// case pop and discard (so two of the same marks cancel)
fn xor_push(marks: &mut Vec<Mrk>, mark: Mrk) {
if (marks.len() > 0) && (*marks.last().unwrap() == mark) {
marks.pop().unwrap();
} else {
marks.push(mark);
}
}
#[cfg(test)]
mod tests {
use self::TestSC::*;
use ast::{EMPTY_CTXT, Ident, Mrk, Name, SyntaxContext};
use super::{resolve, xor_push, apply_mark_internal, new_sctable_internal};
use super::{apply_rename_internal, apply_renames, marksof_internal, resolve_internal};
use super::{SCTable, EmptyCtxt, Mark, Rename, IllegalCtxt};
use std::collections::HashMap;
#[test]
fn xorpush_test () {
let mut s = Vec::new();
xor_push(&mut s, 14);
assert_eq!(s.clone(), vec!(14));
xor_push(&mut s, 14);
assert_eq!(s.clone(), Vec::new());
xor_push(&mut s, 14);
assert_eq!(s.clone(), vec!(14));
xor_push(&mut s, 15);
assert_eq!(s.clone(), vec!(14, 15));
xor_push(&mut s, 16);
assert_eq!(s.clone(), vec!(14, 15, 16));
xor_push(&mut s, 16);
assert_eq!(s.clone(), vec!(14, 15));
xor_push(&mut s, 15);
assert_eq!(s.clone(), vec!(14));
}
fn id(n: u32, s: SyntaxContext) -> Ident {
Ident {name: Name(n), ctxt: s}
}
// because of the SCTable, I now need a tidy way of
// creating syntax objects. Sigh.
#[deriving(Clone, PartialEq, Show)]
enum TestSC {
M(Mrk),
R(Ident,Name)
}
// unfold a vector of TestSC values into a SCTable,
// returning the resulting index
fn unfold_test_sc(tscs : Vec<TestSC>, tail: SyntaxContext, table: &SCTable)
-> SyntaxContext {
tscs.iter().rev().fold(tail, |tail : SyntaxContext, tsc : &TestSC|
{match *tsc {
M(mrk) => apply_mark_internal(mrk,tail,table),
R(ident,name) => apply_rename_internal(ident,name,tail,table)}})
}
// gather a SyntaxContext back into a vector of TestSCs
fn refold_test_sc(mut sc: SyntaxContext, table : &SCTable) -> Vec<TestSC> {
let mut result = Vec::new();
loop {
let table = table.table.borrow();
match (*table)[sc as uint] {
EmptyCtxt => {return result;},
Mark(mrk,tail) => {
result.push(M(mrk));
sc = tail;
continue;
},
Rename(id,name,tail) => {
result.push(R(id,name));
sc = tail;
continue;
}
IllegalCtxt => panic!("expected resolvable context, got IllegalCtxt")
}
}
}
#[test]
fn test_unfold_refold(){
let mut t = new_sctable_internal();
let test_sc = vec!(M(3),R(id(101,0),Name(14)),M(9));
assert_eq!(unfold_test_sc(test_sc.clone(),EMPTY_CTXT,&mut t),4);
{
let table = t.table.borrow();
assert!((*table)[2] == Mark(9,0));
assert!((*table)[3] == Rename(id(101,0),Name(14),2));
assert!((*table)[4] == Mark(3,3));
}
assert_eq!(refold_test_sc(4,&t),test_sc);
}
// extend a syntax context with a sequence of marks given
// in a vector. v[0] will be the outermost mark.
fn unfold_marks(mrks: Vec<Mrk>, tail: SyntaxContext, table: &SCTable)
-> SyntaxContext {
mrks.iter().rev().fold(tail, |tail:SyntaxContext, mrk:&Mrk|
{apply_mark_internal(*mrk,tail,table)})
}
#[test] fn unfold_marks_test() {
let mut t = new_sctable_internal();
assert_eq!(unfold_marks(vec!(3,7),EMPTY_CTXT,&mut t),3);
{
let table = t.table.borrow();
assert!((*table)[2] == Mark(7,0));
assert!((*table)[3] == Mark(3,2));
}
}
#[test]
fn
|
() {
let stopname = Name(242);
let name1 = Name(243);
let mut t = new_sctable_internal();
assert_eq!(marksof_internal (EMPTY_CTXT,stopname,&t),Vec::new());
// FIXME #5074: ANF'd to dodge nested calls
{ let ans = unfold_marks(vec!(4,98),EMPTY_CTXT,&mut t);
assert_eq! (marksof_internal (ans,stopname,&t),vec!(4,98));}
// does xoring work?
{ let ans = unfold_marks(vec!(5,5,16),EMPTY_CTXT,&mut t);
assert_eq! (marksof_internal (ans,stopname,&t), vec!(16));}
// does nested xoring work?
{ let ans = unfold_marks(vec!(5,10,10,5,16),EMPTY_CTXT,&mut t);
assert_eq! (marksof_internal (ans, stopname,&t), vec!(16));}
// rename where stop doesn't match:
{ let chain = vec!(M(9),
R(id(name1.uint() as u32,
apply_mark_internal (4, EMPTY_CTXT,&mut t)),
Name(100101102)),
M(14));
let ans = unfold_test_sc(chain,EMPTY_CTXT,&mut t);
assert_eq! (marksof_internal (ans, stopname, &t), vec!(9,14));}
// rename where stop does match
{ let name1sc = apply_mark_internal(4, EMPTY_CTXT, &mut t);
let chain = vec!(M(9),
R(id(name1.uint() as u32, name1sc),
stopname),
M(14));
let ans = unfold_test_sc(chain,EMPTY_CTXT,&mut t);
assert_eq! (marksof_internal (ans, stopname, &t), vec!(9)); }
}
#[test]
fn resolve_tests () {
let a = 40;
let mut t = new_sctable_internal();
let mut rt = HashMap::new();
// - ctxt is MT
assert_eq!(resolve_internal(id(a,EMPTY_CTXT),&mut t, &mut rt),Name(a));
// - simple ignored marks
{ let sc = unfold_marks(vec!(1,2,3),EMPTY_CTXT,&mut t);
assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt),Name(a));}
// - orthogonal rename where names don't match
{ let sc = unfold_test_sc(vec!(R(id(50,EMPTY_CTXT),Name(51)),M(12)),EMPTY_CTXT,&mut t);
assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt),Name(a));}
// - rename where names do match, but marks don't
{ let sc1 = apply_mark_internal(1,EMPTY_CTXT,&mut t);
let sc = unfold_test_sc(vec!(R(id(a,sc1),Name(50)),
M(1),
M(2)),
EMPTY_CTXT,&mut t);
assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt), Name(a));}
// - rename where names and marks match
{ let sc1 = unfold_test_sc(vec!(M(1),M(2)),EMPTY_CTXT,&mut t);
let sc = unfold_test_sc(vec!(R(id(a,sc1),Name(50)),M(1),M(2)),EMPTY_CTXT,&mut t);
assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt), Name(50)); }
// - rename where names and marks match by literal sharing
{ let sc1 = unfold_test_sc(vec!(M(1),M(2)),EMPTY_CTXT,&mut t);
let sc = unfold_test_sc(vec!(R(id(a,sc1),Name(50))),sc1,&mut t);
assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt), Name(50)); }
// - two renames of the same var.. can only happen if you use
// local-expand to prevent the inner binding from being renamed
// during the rename-pass caused by the first:
println!("about to run bad test");
{ let sc = unfold_test_sc(vec!(R(id(a,EMPTY_CTXT),Name(50)),
R(id(a,EMPTY_CTXT),Name(51))),
EMPTY_CTXT,&mut t);
assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt), Name(51)); }
// the simplest double-rename:
{ let a_to_a50 = apply_rename_internal(id(a,EMPTY_CTXT),Name(50),EMPTY_CTXT,&mut t);
let a50_to_a51 = apply_rename_internal(id(a,a_to_a50),Name(51),a_to_a50,&mut t);
assert_eq!(resolve_internal(id(a,a50_to_a51),&mut t, &mut rt),Name(51));
// mark on the outside doesn't stop rename:
let sc = apply_mark_internal(9,a50_to_a51,&mut t);
assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt),Name(51));
// but mark on the inside does:
let a50_to_a51_b = unfold_test_sc(vec!(R(id(a,a_to_a50),Name(51)),
M(9)),
a_to_a50,
&mut t);
assert_eq!(resolve_internal(id(a,a50_to_a51_b),&mut t, &mut rt),Name(50));}
}
#[test]
fn mtwt_resolve_test(){
let a = 40;
assert_eq!(resolve(id(a,EMPTY_CTXT)),Name(a));
}
#[test]
fn hashing_tests () {
let mut t = new_sctable_internal();
assert_eq!(apply_mark_internal(12,EMPTY_CTXT,&mut t),2);
assert_eq!(apply_mark_internal(13,EMPTY_CTXT,&mut t),3);
// using the same one again should result in the same index:
|
test_marksof
|
identifier_name
|
mtwt.rs
|
181-216.
//! DOI=10.1017/S0956796812000093 http://dx.doi.org/10.1017/S0956796812000093
pub use self::SyntaxContext_::*;
use ast::{Ident, Mrk, Name, SyntaxContext};
use std::cell::RefCell;
use std::collections::HashMap;
use std::collections::hash_map::{Occupied, Vacant};
/// The SCTable contains a table of SyntaxContext_'s. It
/// represents a flattened tree structure, to avoid having
/// managed pointers everywhere (that caused an ICE).
/// the mark_memo and rename_memo fields are side-tables
/// that ensure that adding the same mark to the same context
/// gives you back the same context as before. This shouldn't
/// change the semantics--everything here is immutable--but
/// it should cut down on memory use *a lot*; applying a mark
/// to a tree containing 50 identifiers would otherwise generate
/// 50 new contexts
pub struct SCTable {
table: RefCell<Vec<SyntaxContext_>>,
mark_memo: RefCell<HashMap<(SyntaxContext,Mrk),SyntaxContext>>,
rename_memo: RefCell<HashMap<(SyntaxContext,Ident,Name),SyntaxContext>>,
}
#[deriving(PartialEq, Encodable, Decodable, Hash, Show)]
pub enum SyntaxContext_ {
EmptyCtxt,
Mark (Mrk,SyntaxContext),
/// flattening the name and syntaxcontext into the rename...
/// HIDDEN INVARIANTS:
/// 1) the first name in a Rename node
/// can only be a programmer-supplied name.
/// 2) Every Rename node with a given Name in the
/// "to" slot must have the same name and context
/// in the "from" slot. In essence, they're all
/// pointers to a single "rename" event node.
Rename (Ident,Name,SyntaxContext),
/// actually, IllegalCtxt may not be necessary.
IllegalCtxt
}
/// A list of ident->name renamings
pub type RenameList = Vec<(Ident, Name)>;
/// Extend a syntax context with a given mark
pub fn apply_mark(m: Mrk, ctxt: SyntaxContext) -> SyntaxContext {
with_sctable(|table| apply_mark_internal(m, ctxt, table))
}
/// Extend a syntax context with a given mark and sctable (explicit memoization)
fn apply_mark_internal(m: Mrk, ctxt: SyntaxContext, table: &SCTable) -> SyntaxContext {
let key = (ctxt, m);
* match table.mark_memo.borrow_mut().entry(key) {
Vacant(entry) => entry.set(idx_push(&mut *table.table.borrow_mut(), Mark(m, ctxt))),
Occupied(entry) => entry.into_mut(),
}
}
/// Extend a syntax context with a given rename
pub fn apply_rename(id: Ident, to:Name,
ctxt: SyntaxContext) -> SyntaxContext {
with_sctable(|table| apply_rename_internal(id, to, ctxt, table))
}
/// Extend a syntax context with a given rename and sctable (explicit memoization)
fn apply_rename_internal(id: Ident,
to: Name,
ctxt: SyntaxContext,
table: &SCTable) -> SyntaxContext {
let key = (ctxt, id, to);
* match table.rename_memo.borrow_mut().entry(key) {
Vacant(entry) => entry.set(idx_push(&mut *table.table.borrow_mut(), Rename(id, to, ctxt))),
Occupied(entry) => entry.into_mut(),
}
}
/// Apply a list of renamings to a context
// if these rename lists get long, it would make sense
// to consider memoizing this fold. This may come up
// when we add hygiene to item names.
pub fn apply_renames(renames: &RenameList, ctxt: SyntaxContext) -> SyntaxContext {
renames.iter().fold(ctxt, |ctxt, &(from, to)| {
apply_rename(from, to, ctxt)
})
}
/// Fetch the SCTable from TLS, create one if it doesn't yet exist.
pub fn with_sctable<T>(op: |&SCTable| -> T) -> T {
thread_local!(static SCTABLE_KEY: SCTable = new_sctable_internal())
SCTABLE_KEY.with(|slot| op(slot))
}
// Make a fresh syntax context table with EmptyCtxt in slot zero
// and IllegalCtxt in slot one.
fn new_sctable_internal() -> SCTable {
SCTable {
table: RefCell::new(vec!(EmptyCtxt, IllegalCtxt)),
mark_memo: RefCell::new(HashMap::new()),
rename_memo: RefCell::new(HashMap::new()),
}
}
/// Print out an SCTable for debugging
pub fn display_sctable(table: &SCTable) {
error!("SC table:");
for (idx,val) in table.table.borrow().iter().enumerate() {
error!("{:4} : {}",idx,val);
}
}
/// Clear the tables from TLD to reclaim memory.
pub fn clear_tables() {
with_sctable(|table| {
*table.table.borrow_mut() = Vec::new();
*table.mark_memo.borrow_mut() = HashMap::new();
*table.rename_memo.borrow_mut() = HashMap::new();
});
with_resolve_table_mut(|table| *table = HashMap::new());
}
/// Reset the tables to their initial state
pub fn reset_tables() {
with_sctable(|table| {
*table.table.borrow_mut() = vec!(EmptyCtxt, IllegalCtxt);
*table.mark_memo.borrow_mut() = HashMap::new();
*table.rename_memo.borrow_mut() = HashMap::new();
});
with_resolve_table_mut(|table| *table = HashMap::new());
}
/// Add a value to the end of a vec, return its index
fn idx_push<T>(vec: &mut Vec<T>, val: T) -> u32 {
vec.push(val);
(vec.len() - 1) as u32
}
/// Resolve a syntax object to a name, per MTWT.
pub fn resolve(id: Ident) -> Name {
with_sctable(|sctable| {
with_resolve_table_mut(|resolve_table| {
resolve_internal(id, sctable, resolve_table)
})
})
}
type ResolveTable = HashMap<(Name,SyntaxContext),Name>;
// okay, I admit, putting this in TLS is not so nice:
// fetch the SCTable from TLS, create one if it doesn't yet exist.
fn with_resolve_table_mut<T>(op: |&mut ResolveTable| -> T) -> T {
thread_local!(static RESOLVE_TABLE_KEY: RefCell<ResolveTable> = {
RefCell::new(HashMap::new())
})
RESOLVE_TABLE_KEY.with(|slot| op(&mut *slot.borrow_mut()))
}
/// Resolve a syntax object to a name, per MTWT.
/// adding memoization to resolve 500+ seconds in resolve for librustc (!)
fn resolve_internal(id: Ident,
table: &SCTable,
resolve_table: &mut ResolveTable) -> Name {
let key = (id.name, id.ctxt);
match resolve_table.get(&key) {
Some(&name) => return name,
None => {}
}
let resolved = {
let result = (*table.table.borrow())[id.ctxt as uint];
match result {
EmptyCtxt => id.name,
// ignore marks here:
Mark(_,subctxt) =>
resolve_internal(Ident{name:id.name, ctxt: subctxt},
table, resolve_table),
// do the rename if necessary:
Rename(Ident{name, ctxt}, toname, subctxt) => {
let resolvedfrom =
resolve_internal(Ident{name:name, ctxt:ctxt},
table, resolve_table);
let resolvedthis =
resolve_internal(Ident{name:id.name, ctxt:subctxt},
table, resolve_table);
if (resolvedthis == resolvedfrom)
&& (marksof_internal(ctxt, resolvedthis, table)
== marksof_internal(subctxt, resolvedthis, table)) {
toname
} else {
resolvedthis
}
}
IllegalCtxt => panic!("expected resolvable context, got IllegalCtxt")
}
};
resolve_table.insert(key, resolved);
resolved
}
/// Compute the marks associated with a syntax context.
pub fn marksof(ctxt: SyntaxContext, stopname: Name) -> Vec<Mrk> {
with_sctable(|table| marksof_internal(ctxt, stopname, table))
}
// the internal function for computing marks
// it's not clear to me whether it's better to use a [] mutable
// vector or a cons-list for this.
fn marksof_internal(ctxt: SyntaxContext,
stopname: Name,
table: &SCTable) -> Vec<Mrk> {
let mut result = Vec::new();
let mut loopvar = ctxt;
loop {
let table_entry = (*table.table.borrow())[loopvar as uint];
match table_entry {
EmptyCtxt => {
return result;
},
Mark(mark, tl) => {
xor_push(&mut result, mark);
loopvar = tl;
},
Rename(_,name,tl) => {
// see MTWT for details on the purpose of the stopname.
// short version: it prevents duplication of effort.
if name == stopname {
return result;
} else {
loopvar = tl;
}
}
IllegalCtxt => panic!("expected resolvable context, got IllegalCtxt")
}
}
}
/// Return the outer mark for a context with a mark at the outside.
/// FAILS when outside is not a mark.
pub fn outer_mark(ctxt: SyntaxContext) -> Mrk {
with_sctable(|sctable| {
match (*sctable.table.borrow())[ctxt as uint] {
Mark(mrk, _) => mrk,
_ => panic!("can't retrieve outer mark when outside is not a mark")
}
})
}
/// Push a name... unless it matches the one on top, in which
/// case pop and discard (so two of the same marks cancel)
fn xor_push(marks: &mut Vec<Mrk>, mark: Mrk) {
if (marks.len() > 0) && (*marks.last().unwrap() == mark) {
marks.pop().unwrap();
} else {
marks.push(mark);
}
}
#[cfg(test)]
mod tests {
use self::TestSC::*;
use ast::{EMPTY_CTXT, Ident, Mrk, Name, SyntaxContext};
use super::{resolve, xor_push, apply_mark_internal, new_sctable_internal};
use super::{apply_rename_internal, apply_renames, marksof_internal, resolve_internal};
use super::{SCTable, EmptyCtxt, Mark, Rename, IllegalCtxt};
use std::collections::HashMap;
#[test]
fn xorpush_test () {
let mut s = Vec::new();
xor_push(&mut s, 14);
assert_eq!(s.clone(), vec!(14));
xor_push(&mut s, 14);
assert_eq!(s.clone(), Vec::new());
xor_push(&mut s, 14);
assert_eq!(s.clone(), vec!(14));
xor_push(&mut s, 15);
assert_eq!(s.clone(), vec!(14, 15));
xor_push(&mut s, 16);
assert_eq!(s.clone(), vec!(14, 15, 16));
xor_push(&mut s, 16);
assert_eq!(s.clone(), vec!(14, 15));
xor_push(&mut s, 15);
assert_eq!(s.clone(), vec!(14));
}
fn id(n: u32, s: SyntaxContext) -> Ident {
Ident {name: Name(n), ctxt: s}
}
// because of the SCTable, I now need a tidy way of
// creating syntax objects. Sigh.
#[deriving(Clone, PartialEq, Show)]
enum TestSC {
M(Mrk),
R(Ident,Name)
}
// unfold a vector of TestSC values into a SCTable,
// returning the resulting index
fn unfold_test_sc(tscs : Vec<TestSC>, tail: SyntaxContext, table: &SCTable)
-> SyntaxContext {
tscs.iter().rev().fold(tail, |tail : SyntaxContext, tsc : &TestSC|
{match *tsc {
M(mrk) => apply_mark_internal(mrk,tail,table),
R(ident,name) => apply_rename_internal(ident,name,tail,table)}})
}
// gather a SyntaxContext back into a vector of TestSCs
fn refold_test_sc(mut sc: SyntaxContext, table : &SCTable) -> Vec<TestSC>
|
#[test]
fn test_unfold_refold(){
let mut t = new_sctable_internal();
let test_sc = vec!(M(3),R(id(101,0),Name(14)),M(9));
assert_eq!(unfold_test_sc(test_sc.clone(),EMPTY_CTXT,&mut t),4);
{
let table = t.table.borrow();
assert!((*table)[2] == Mark(9,0));
assert!((*table)[3] == Rename(id(101,0),Name(14),2));
assert!((*table)[4] == Mark(3,3));
}
assert_eq!(refold_test_sc(4,&t),test_sc);
}
// extend a syntax context with a sequence of marks given
// in a vector. v[0] will be the outermost mark.
fn unfold_marks(mrks: Vec<Mrk>, tail: SyntaxContext, table: &SCTable)
-> SyntaxContext {
mrks.iter().rev().fold(tail, |tail:SyntaxContext, mrk:&Mrk|
{apply_mark_internal(*mrk,tail,table)})
}
#[test] fn unfold_marks_test() {
let mut t = new_sctable_internal();
assert_eq!(unfold_marks(vec!(3,7),EMPTY_CTXT,&mut t),3);
{
let table = t.table.borrow();
assert!((*table)[2] == Mark(7,0));
assert!((*table)[3] == Mark(3,2));
}
}
#[test]
fn test_marksof () {
let stopname = Name(242);
let name1 = Name(243);
let mut t = new_sctable_internal();
assert_eq!(marksof_internal (EMPTY_CTXT,stopname,&t),Vec::new());
// FIXME #5074: ANF'd to dodge nested calls
{ let ans = unfold_marks(vec!(4,98),EMPTY_CTXT,&mut t);
assert_eq! (marksof_internal (ans,stopname,&t),vec!(4,98));}
// does xoring work?
{ let ans = unfold_marks(vec!(5,5,16),EMPTY_CTXT,&mut t);
assert_eq! (marksof_internal (ans,stopname,&t), vec!(16));}
// does nested xoring work?
{ let ans = unfold_marks(vec!(5,10,10,5,16),EMPTY_CTXT,&mut t);
assert_eq! (marksof_internal (ans, stopname,&t), vec!(16));}
// rename where stop doesn't match:
{ let chain = vec!(M(9),
R(id(name1.uint() as u32,
apply_mark_internal (4, EMPTY_CTXT,&mut t)),
Name(100101102)),
M(14));
let ans = unfold_test_sc(chain,EMPTY_CTXT,&mut t);
assert_eq! (marksof_internal (ans, stopname, &t), vec!(9,14));}
// rename where stop does match
{ let name1sc = apply_mark_internal(4, EMPTY_CTXT, &mut t);
let chain = vec!(M(9),
R(id(name1.uint() as u32, name1sc),
stopname),
M(14));
let ans = unfold_test_sc(chain,EMPTY_CTXT,&mut t);
assert_eq! (marksof_internal (ans, stopname, &t), vec!(9)); }
}
#[test]
fn resolve_tests () {
let a = 40;
let mut t = new_sctable_internal();
let mut rt = HashMap::new();
// - ctxt is MT
assert_eq!(resolve_internal(id(a,EMPTY_CTXT),&mut t, &mut rt),Name(a));
// - simple ignored marks
{ let sc = unfold_marks(vec!(1,2,3),EMPTY_CTXT,&mut t);
assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt),Name(a));}
// - orthogonal rename where names don't match
{ let sc = unfold_test_sc(vec!(R(id(50,EMPTY_CTXT),Name(51)),M(12)),EMPTY_CTXT,&mut t);
assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt),Name(a));}
// - rename where names do match, but marks don't
{ let sc1 = apply_mark_internal(1,EMPTY_CTXT,&mut t);
let sc = unfold_test_sc(vec!(R(id(a,sc1),Name(50)),
M(1),
M(2)),
EMPTY_CTXT,&mut t);
assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt), Name(a));}
// - rename where names and marks match
{ let sc1 = unfold_test_sc(vec!(M(1),M(2)),EMPTY_CTXT,&mut t);
let sc = unfold_test_sc(vec!(R(id(a,sc1),Name(50)),M(1),M(2)),EMPTY_CTXT,&mut t);
assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt), Name(50)); }
// - rename where names and marks match by literal sharing
{ let sc1 = unfold_test_sc(vec!(M(1),M(2)),EMPTY_CTXT,&mut t);
let sc = unfold_test_sc(vec!(R(id(a,sc1),Name(50))),sc1,&mut t);
assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt), Name(50)); }
// - two renames of the same var.. can only happen if you use
// local-expand to prevent the inner binding from being renamed
// during the rename-pass caused by the first:
println!("about to run bad test");
{ let sc = unfold_test_sc(vec!(R(id(a,EMPTY_CTXT),Name(50)),
R(id(a,EMPTY_CTXT),Name(51))),
EMPTY_CTXT,&mut t);
assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt), Name(51)); }
// the simplest double-rename:
{ let a_to_a50 = apply_rename_internal(id(a,EMPTY_CTXT),Name(50),EMPTY_CTXT,&mut t);
let a50_to_a51 = apply_rename_internal(id(a,a_to_a50),Name(51),a_to_a50,&mut t);
assert_eq!(resolve_internal(id(a,a50_to_a51),&mut t, &mut rt),Name(51));
// mark on the outside doesn't stop rename:
let sc = apply_mark_internal(9,a50_to_a51,&mut t);
assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt),Name(51));
// but mark on the inside does:
let a50_to_a51_b = unfold_test_sc(vec!(R(id(a,a_to_a50),Name(51)),
M(9)),
a_to_a50,
&mut t);
assert_eq!(resolve_internal(id(a,a50_to_a51_b),&mut t, &mut rt),Name(50));}
}
#[test]
fn mtwt_resolve_test(){
let a = 40;
assert_eq!(resolve(id(a,EMPTY_CTXT)),Name(a));
}
#[test]
fn hashing_tests () {
let mut t = new_sctable_internal();
assert_eq!(apply_mark_internal(12,EMPTY_CTXT,&mut t),2);
assert_eq!(apply_mark_internal(13,EMPTY_CTXT,&mut t),3);
// using the same one again should result in the same index:
|
{
let mut result = Vec::new();
loop {
let table = table.table.borrow();
match (*table)[sc as uint] {
EmptyCtxt => {return result;},
Mark(mrk,tail) => {
result.push(M(mrk));
sc = tail;
continue;
},
Rename(id,name,tail) => {
result.push(R(id,name));
sc = tail;
continue;
}
IllegalCtxt => panic!("expected resolvable context, got IllegalCtxt")
}
}
}
|
identifier_body
|
multiwindow.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
extern crate app_units;
extern crate euclid;
extern crate gleam;
extern crate glutin;
extern crate webrender;
use app_units::Au;
use gleam::gl;
use glutin::GlContext;
use std::fs::File;
use std::io::Read;
use webrender::api::*;
struct
|
{
events_proxy: glutin::EventsLoopProxy,
}
impl Notifier {
fn new(events_proxy: glutin::EventsLoopProxy) -> Notifier {
Notifier { events_proxy }
}
}
impl RenderNotifier for Notifier {
fn clone(&self) -> Box<RenderNotifier> {
Box::new(Notifier {
events_proxy: self.events_proxy.clone(),
})
}
fn wake_up(&self) {
#[cfg(not(target_os = "android"))]
let _ = self.events_proxy.wakeup();
}
fn new_frame_ready(&self, _: DocumentId, _scrolled: bool, _composite_needed: bool) {
self.wake_up();
}
}
struct Window {
events_loop: glutin::EventsLoop, //TODO: share events loop?
window: glutin::GlWindow,
renderer: webrender::Renderer,
name: &'static str,
pipeline_id: PipelineId,
document_id: DocumentId,
epoch: Epoch,
api: RenderApi,
font_instance_key: FontInstanceKey,
}
impl Window {
fn new(name: &'static str, clear_color: ColorF) -> Self {
let events_loop = glutin::EventsLoop::new();
let context_builder = glutin::ContextBuilder::new()
.with_gl(glutin::GlRequest::GlThenGles {
opengl_version: (3, 2),
opengles_version: (3, 0),
});
let window_builder = glutin::WindowBuilder::new()
.with_title(name)
.with_multitouch()
.with_dimensions(800, 600);
let window = glutin::GlWindow::new(window_builder, context_builder, &events_loop)
.unwrap();
unsafe {
window.make_current().ok();
}
let gl = match window.get_api() {
glutin::Api::OpenGl => unsafe {
gl::GlFns::load_with(|symbol| window.get_proc_address(symbol) as *const _)
},
glutin::Api::OpenGlEs => unsafe {
gl::GlesFns::load_with(|symbol| window.get_proc_address(symbol) as *const _)
},
glutin::Api::WebGl => unimplemented!(),
};
let device_pixel_ratio = window.hidpi_factor();
let opts = webrender::RendererOptions {
device_pixel_ratio,
clear_color: Some(clear_color),
..webrender::RendererOptions::default()
};
let framebuffer_size = {
let (width, height) = window.get_inner_size().unwrap();
DeviceUintSize::new(width, height)
};
let notifier = Box::new(Notifier::new(events_loop.create_proxy()));
let (renderer, sender) = webrender::Renderer::new(gl.clone(), notifier, opts).unwrap();
let api = sender.create_api();
let document_id = api.add_document(framebuffer_size, 0);
let epoch = Epoch(0);
let pipeline_id = PipelineId(0, 0);
let mut resources = ResourceUpdates::new();
let font_key = api.generate_font_key();
let font_bytes = load_file("../wrench/reftests/text/FreeSans.ttf");
resources.add_raw_font(font_key, font_bytes, 0);
let font_instance_key = api.generate_font_instance_key();
resources.add_font_instance(font_instance_key, font_key, Au::from_px(32), None, None, Vec::new());
let mut txn = Transaction::new();
txn.update_resources(resources);
api.send_transaction(document_id, txn);
Window {
events_loop,
window,
renderer,
name,
epoch,
pipeline_id,
document_id,
api,
font_instance_key,
}
}
fn tick(&mut self) -> bool {
unsafe {
self.window.make_current().ok();
}
let mut do_exit = false;
let my_name = &self.name;
let renderer = &mut self.renderer;
self.events_loop.poll_events(|global_event| match global_event {
glutin::Event::WindowEvent { event, .. } => match event {
glutin::WindowEvent::Closed |
glutin::WindowEvent::KeyboardInput {
input: glutin::KeyboardInput {
virtual_keycode: Some(glutin::VirtualKeyCode::Escape),
..
},
..
} => {
do_exit = true
}
glutin::WindowEvent::KeyboardInput {
input: glutin::KeyboardInput {
state: glutin::ElementState::Pressed,
virtual_keycode: Some(glutin::VirtualKeyCode::P),
..
},
..
} => {
println!("toggle flags {}", my_name);
renderer.toggle_debug_flags(webrender::DebugFlags::PROFILER_DBG);
}
_ => {}
}
_ => {}
});
if do_exit {
return true
}
let framebuffer_size = {
let (width, height) = self.window.get_inner_size().unwrap();
DeviceUintSize::new(width, height)
};
let device_pixel_ratio = self.window.hidpi_factor();
let layout_size = framebuffer_size.to_f32() / euclid::TypedScale::new(device_pixel_ratio);
let mut txn = Transaction::new();
let mut builder = DisplayListBuilder::new(self.pipeline_id, layout_size);
let bounds = LayoutRect::new(LayoutPoint::zero(), builder.content_size());
let info = LayoutPrimitiveInfo::new(bounds);
builder.push_stacking_context(
&info,
None,
None,
TransformStyle::Flat,
None,
MixBlendMode::Normal,
Vec::new(),
GlyphRasterSpace::Screen,
);
let info = LayoutPrimitiveInfo::new(LayoutRect::new(
LayoutPoint::new(100.0, 100.0),
LayoutSize::new(100.0, 200.0)
));
builder.push_rect(&info, ColorF::new(0.0, 1.0, 0.0, 1.0));
let text_bounds = LayoutRect::new(
LayoutPoint::new(100.0, 50.0),
LayoutSize::new(700.0, 200.0)
);
let glyphs = vec![
GlyphInstance {
index: 48,
point: LayoutPoint::new(100.0, 100.0),
},
GlyphInstance {
index: 68,
point: LayoutPoint::new(150.0, 100.0),
},
GlyphInstance {
index: 80,
point: LayoutPoint::new(200.0, 100.0),
},
GlyphInstance {
index: 82,
point: LayoutPoint::new(250.0, 100.0),
},
GlyphInstance {
index: 81,
point: LayoutPoint::new(300.0, 100.0),
},
GlyphInstance {
index: 3,
point: LayoutPoint::new(350.0, 100.0),
},
GlyphInstance {
index: 86,
point: LayoutPoint::new(400.0, 100.0),
},
GlyphInstance {
index: 79,
point: LayoutPoint::new(450.0, 100.0),
},
GlyphInstance {
index: 72,
point: LayoutPoint::new(500.0, 100.0),
},
GlyphInstance {
index: 83,
point: LayoutPoint::new(550.0, 100.0),
},
GlyphInstance {
index: 87,
point: LayoutPoint::new(600.0, 100.0),
},
GlyphInstance {
index: 17,
point: LayoutPoint::new(650.0, 100.0),
},
];
let info = LayoutPrimitiveInfo::new(text_bounds);
builder.push_text(
&info,
&glyphs,
self.font_instance_key,
ColorF::new(1.0, 1.0, 0.0, 1.0),
None,
);
builder.pop_stacking_context();
txn.set_display_list(
self.epoch,
None,
layout_size,
builder.finalize(),
true,
);
txn.set_root_pipeline(self.pipeline_id);
txn.generate_frame();
self.api.send_transaction(self.document_id, txn);
renderer.update();
renderer.render(framebuffer_size).unwrap();
self.window.swap_buffers().ok();
false
}
fn deinit(self) {
self.renderer.deinit();
}
}
fn main() {
let mut win1 = Window::new("window1", ColorF::new(0.3, 0.0, 0.0, 1.0));
let mut win2 = Window::new("window2", ColorF::new(0.0, 0.3, 0.0, 1.0));
loop {
if win1.tick() {
break;
}
if win2.tick() {
break;
}
}
win1.deinit();
win2.deinit();
}
fn load_file(name: &str) -> Vec<u8> {
let mut file = File::open(name).unwrap();
let mut buffer = vec![];
file.read_to_end(&mut buffer).unwrap();
buffer
}
|
Notifier
|
identifier_name
|
multiwindow.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
extern crate app_units;
extern crate euclid;
extern crate gleam;
extern crate glutin;
extern crate webrender;
use app_units::Au;
use gleam::gl;
use glutin::GlContext;
use std::fs::File;
use std::io::Read;
use webrender::api::*;
struct Notifier {
events_proxy: glutin::EventsLoopProxy,
}
impl Notifier {
fn new(events_proxy: glutin::EventsLoopProxy) -> Notifier {
Notifier { events_proxy }
}
}
impl RenderNotifier for Notifier {
fn clone(&self) -> Box<RenderNotifier> {
Box::new(Notifier {
events_proxy: self.events_proxy.clone(),
})
}
fn wake_up(&self) {
#[cfg(not(target_os = "android"))]
let _ = self.events_proxy.wakeup();
}
fn new_frame_ready(&self, _: DocumentId, _scrolled: bool, _composite_needed: bool) {
self.wake_up();
}
}
struct Window {
events_loop: glutin::EventsLoop, //TODO: share events loop?
window: glutin::GlWindow,
renderer: webrender::Renderer,
name: &'static str,
pipeline_id: PipelineId,
document_id: DocumentId,
epoch: Epoch,
api: RenderApi,
font_instance_key: FontInstanceKey,
}
impl Window {
fn new(name: &'static str, clear_color: ColorF) -> Self {
let events_loop = glutin::EventsLoop::new();
let context_builder = glutin::ContextBuilder::new()
.with_gl(glutin::GlRequest::GlThenGles {
opengl_version: (3, 2),
opengles_version: (3, 0),
});
let window_builder = glutin::WindowBuilder::new()
.with_title(name)
.with_multitouch()
.with_dimensions(800, 600);
let window = glutin::GlWindow::new(window_builder, context_builder, &events_loop)
.unwrap();
unsafe {
window.make_current().ok();
}
let gl = match window.get_api() {
glutin::Api::OpenGl => unsafe {
gl::GlFns::load_with(|symbol| window.get_proc_address(symbol) as *const _)
},
glutin::Api::OpenGlEs => unsafe {
gl::GlesFns::load_with(|symbol| window.get_proc_address(symbol) as *const _)
},
glutin::Api::WebGl => unimplemented!(),
};
let device_pixel_ratio = window.hidpi_factor();
let opts = webrender::RendererOptions {
device_pixel_ratio,
clear_color: Some(clear_color),
..webrender::RendererOptions::default()
};
let framebuffer_size = {
let (width, height) = window.get_inner_size().unwrap();
DeviceUintSize::new(width, height)
};
let notifier = Box::new(Notifier::new(events_loop.create_proxy()));
let (renderer, sender) = webrender::Renderer::new(gl.clone(), notifier, opts).unwrap();
let api = sender.create_api();
let document_id = api.add_document(framebuffer_size, 0);
let epoch = Epoch(0);
let pipeline_id = PipelineId(0, 0);
let mut resources = ResourceUpdates::new();
let font_key = api.generate_font_key();
let font_bytes = load_file("../wrench/reftests/text/FreeSans.ttf");
resources.add_raw_font(font_key, font_bytes, 0);
let font_instance_key = api.generate_font_instance_key();
resources.add_font_instance(font_instance_key, font_key, Au::from_px(32), None, None, Vec::new());
let mut txn = Transaction::new();
txn.update_resources(resources);
api.send_transaction(document_id, txn);
Window {
events_loop,
window,
renderer,
name,
epoch,
pipeline_id,
document_id,
api,
font_instance_key,
}
}
fn tick(&mut self) -> bool {
unsafe {
self.window.make_current().ok();
}
let mut do_exit = false;
let my_name = &self.name;
let renderer = &mut self.renderer;
self.events_loop.poll_events(|global_event| match global_event {
glutin::Event::WindowEvent { event, .. } => match event {
glutin::WindowEvent::Closed |
glutin::WindowEvent::KeyboardInput {
input: glutin::KeyboardInput {
virtual_keycode: Some(glutin::VirtualKeyCode::Escape),
..
},
..
} => {
do_exit = true
}
glutin::WindowEvent::KeyboardInput {
input: glutin::KeyboardInput {
state: glutin::ElementState::Pressed,
virtual_keycode: Some(glutin::VirtualKeyCode::P),
..
},
..
} =>
|
_ => {}
}
_ => {}
});
if do_exit {
return true
}
let framebuffer_size = {
let (width, height) = self.window.get_inner_size().unwrap();
DeviceUintSize::new(width, height)
};
let device_pixel_ratio = self.window.hidpi_factor();
let layout_size = framebuffer_size.to_f32() / euclid::TypedScale::new(device_pixel_ratio);
let mut txn = Transaction::new();
let mut builder = DisplayListBuilder::new(self.pipeline_id, layout_size);
let bounds = LayoutRect::new(LayoutPoint::zero(), builder.content_size());
let info = LayoutPrimitiveInfo::new(bounds);
builder.push_stacking_context(
&info,
None,
None,
TransformStyle::Flat,
None,
MixBlendMode::Normal,
Vec::new(),
GlyphRasterSpace::Screen,
);
let info = LayoutPrimitiveInfo::new(LayoutRect::new(
LayoutPoint::new(100.0, 100.0),
LayoutSize::new(100.0, 200.0)
));
builder.push_rect(&info, ColorF::new(0.0, 1.0, 0.0, 1.0));
let text_bounds = LayoutRect::new(
LayoutPoint::new(100.0, 50.0),
LayoutSize::new(700.0, 200.0)
);
let glyphs = vec![
GlyphInstance {
index: 48,
point: LayoutPoint::new(100.0, 100.0),
},
GlyphInstance {
index: 68,
point: LayoutPoint::new(150.0, 100.0),
},
GlyphInstance {
index: 80,
point: LayoutPoint::new(200.0, 100.0),
},
GlyphInstance {
index: 82,
point: LayoutPoint::new(250.0, 100.0),
},
GlyphInstance {
index: 81,
point: LayoutPoint::new(300.0, 100.0),
},
GlyphInstance {
index: 3,
point: LayoutPoint::new(350.0, 100.0),
},
GlyphInstance {
index: 86,
point: LayoutPoint::new(400.0, 100.0),
},
GlyphInstance {
index: 79,
point: LayoutPoint::new(450.0, 100.0),
},
GlyphInstance {
index: 72,
point: LayoutPoint::new(500.0, 100.0),
},
GlyphInstance {
index: 83,
point: LayoutPoint::new(550.0, 100.0),
},
GlyphInstance {
index: 87,
point: LayoutPoint::new(600.0, 100.0),
},
GlyphInstance {
index: 17,
point: LayoutPoint::new(650.0, 100.0),
},
];
let info = LayoutPrimitiveInfo::new(text_bounds);
builder.push_text(
&info,
&glyphs,
self.font_instance_key,
ColorF::new(1.0, 1.0, 0.0, 1.0),
None,
);
builder.pop_stacking_context();
txn.set_display_list(
self.epoch,
None,
layout_size,
builder.finalize(),
true,
);
txn.set_root_pipeline(self.pipeline_id);
txn.generate_frame();
self.api.send_transaction(self.document_id, txn);
renderer.update();
renderer.render(framebuffer_size).unwrap();
self.window.swap_buffers().ok();
false
}
fn deinit(self) {
self.renderer.deinit();
}
}
fn main() {
let mut win1 = Window::new("window1", ColorF::new(0.3, 0.0, 0.0, 1.0));
let mut win2 = Window::new("window2", ColorF::new(0.0, 0.3, 0.0, 1.0));
loop {
if win1.tick() {
break;
}
if win2.tick() {
break;
}
}
win1.deinit();
win2.deinit();
}
fn load_file(name: &str) -> Vec<u8> {
let mut file = File::open(name).unwrap();
let mut buffer = vec![];
file.read_to_end(&mut buffer).unwrap();
buffer
}
|
{
println!("toggle flags {}", my_name);
renderer.toggle_debug_flags(webrender::DebugFlags::PROFILER_DBG);
}
|
conditional_block
|
multiwindow.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
extern crate app_units;
extern crate euclid;
extern crate gleam;
extern crate glutin;
extern crate webrender;
use app_units::Au;
use gleam::gl;
use glutin::GlContext;
use std::fs::File;
use std::io::Read;
use webrender::api::*;
struct Notifier {
events_proxy: glutin::EventsLoopProxy,
}
impl Notifier {
fn new(events_proxy: glutin::EventsLoopProxy) -> Notifier {
Notifier { events_proxy }
}
}
impl RenderNotifier for Notifier {
fn clone(&self) -> Box<RenderNotifier> {
Box::new(Notifier {
events_proxy: self.events_proxy.clone(),
})
}
fn wake_up(&self) {
#[cfg(not(target_os = "android"))]
let _ = self.events_proxy.wakeup();
}
fn new_frame_ready(&self, _: DocumentId, _scrolled: bool, _composite_needed: bool) {
self.wake_up();
}
}
struct Window {
events_loop: glutin::EventsLoop, //TODO: share events loop?
window: glutin::GlWindow,
renderer: webrender::Renderer,
name: &'static str,
pipeline_id: PipelineId,
document_id: DocumentId,
epoch: Epoch,
api: RenderApi,
font_instance_key: FontInstanceKey,
}
impl Window {
fn new(name: &'static str, clear_color: ColorF) -> Self {
let events_loop = glutin::EventsLoop::new();
let context_builder = glutin::ContextBuilder::new()
.with_gl(glutin::GlRequest::GlThenGles {
opengl_version: (3, 2),
opengles_version: (3, 0),
});
|
.with_multitouch()
.with_dimensions(800, 600);
let window = glutin::GlWindow::new(window_builder, context_builder, &events_loop)
.unwrap();
unsafe {
window.make_current().ok();
}
let gl = match window.get_api() {
glutin::Api::OpenGl => unsafe {
gl::GlFns::load_with(|symbol| window.get_proc_address(symbol) as *const _)
},
glutin::Api::OpenGlEs => unsafe {
gl::GlesFns::load_with(|symbol| window.get_proc_address(symbol) as *const _)
},
glutin::Api::WebGl => unimplemented!(),
};
let device_pixel_ratio = window.hidpi_factor();
let opts = webrender::RendererOptions {
device_pixel_ratio,
clear_color: Some(clear_color),
..webrender::RendererOptions::default()
};
let framebuffer_size = {
let (width, height) = window.get_inner_size().unwrap();
DeviceUintSize::new(width, height)
};
let notifier = Box::new(Notifier::new(events_loop.create_proxy()));
let (renderer, sender) = webrender::Renderer::new(gl.clone(), notifier, opts).unwrap();
let api = sender.create_api();
let document_id = api.add_document(framebuffer_size, 0);
let epoch = Epoch(0);
let pipeline_id = PipelineId(0, 0);
let mut resources = ResourceUpdates::new();
let font_key = api.generate_font_key();
let font_bytes = load_file("../wrench/reftests/text/FreeSans.ttf");
resources.add_raw_font(font_key, font_bytes, 0);
let font_instance_key = api.generate_font_instance_key();
resources.add_font_instance(font_instance_key, font_key, Au::from_px(32), None, None, Vec::new());
let mut txn = Transaction::new();
txn.update_resources(resources);
api.send_transaction(document_id, txn);
Window {
events_loop,
window,
renderer,
name,
epoch,
pipeline_id,
document_id,
api,
font_instance_key,
}
}
fn tick(&mut self) -> bool {
unsafe {
self.window.make_current().ok();
}
let mut do_exit = false;
let my_name = &self.name;
let renderer = &mut self.renderer;
self.events_loop.poll_events(|global_event| match global_event {
glutin::Event::WindowEvent { event, .. } => match event {
glutin::WindowEvent::Closed |
glutin::WindowEvent::KeyboardInput {
input: glutin::KeyboardInput {
virtual_keycode: Some(glutin::VirtualKeyCode::Escape),
..
},
..
} => {
do_exit = true
}
glutin::WindowEvent::KeyboardInput {
input: glutin::KeyboardInput {
state: glutin::ElementState::Pressed,
virtual_keycode: Some(glutin::VirtualKeyCode::P),
..
},
..
} => {
println!("toggle flags {}", my_name);
renderer.toggle_debug_flags(webrender::DebugFlags::PROFILER_DBG);
}
_ => {}
}
_ => {}
});
if do_exit {
return true
}
let framebuffer_size = {
let (width, height) = self.window.get_inner_size().unwrap();
DeviceUintSize::new(width, height)
};
let device_pixel_ratio = self.window.hidpi_factor();
let layout_size = framebuffer_size.to_f32() / euclid::TypedScale::new(device_pixel_ratio);
let mut txn = Transaction::new();
let mut builder = DisplayListBuilder::new(self.pipeline_id, layout_size);
let bounds = LayoutRect::new(LayoutPoint::zero(), builder.content_size());
let info = LayoutPrimitiveInfo::new(bounds);
builder.push_stacking_context(
&info,
None,
None,
TransformStyle::Flat,
None,
MixBlendMode::Normal,
Vec::new(),
GlyphRasterSpace::Screen,
);
let info = LayoutPrimitiveInfo::new(LayoutRect::new(
LayoutPoint::new(100.0, 100.0),
LayoutSize::new(100.0, 200.0)
));
builder.push_rect(&info, ColorF::new(0.0, 1.0, 0.0, 1.0));
let text_bounds = LayoutRect::new(
LayoutPoint::new(100.0, 50.0),
LayoutSize::new(700.0, 200.0)
);
let glyphs = vec![
GlyphInstance {
index: 48,
point: LayoutPoint::new(100.0, 100.0),
},
GlyphInstance {
index: 68,
point: LayoutPoint::new(150.0, 100.0),
},
GlyphInstance {
index: 80,
point: LayoutPoint::new(200.0, 100.0),
},
GlyphInstance {
index: 82,
point: LayoutPoint::new(250.0, 100.0),
},
GlyphInstance {
index: 81,
point: LayoutPoint::new(300.0, 100.0),
},
GlyphInstance {
index: 3,
point: LayoutPoint::new(350.0, 100.0),
},
GlyphInstance {
index: 86,
point: LayoutPoint::new(400.0, 100.0),
},
GlyphInstance {
index: 79,
point: LayoutPoint::new(450.0, 100.0),
},
GlyphInstance {
index: 72,
point: LayoutPoint::new(500.0, 100.0),
},
GlyphInstance {
index: 83,
point: LayoutPoint::new(550.0, 100.0),
},
GlyphInstance {
index: 87,
point: LayoutPoint::new(600.0, 100.0),
},
GlyphInstance {
index: 17,
point: LayoutPoint::new(650.0, 100.0),
},
];
let info = LayoutPrimitiveInfo::new(text_bounds);
builder.push_text(
&info,
&glyphs,
self.font_instance_key,
ColorF::new(1.0, 1.0, 0.0, 1.0),
None,
);
builder.pop_stacking_context();
txn.set_display_list(
self.epoch,
None,
layout_size,
builder.finalize(),
true,
);
txn.set_root_pipeline(self.pipeline_id);
txn.generate_frame();
self.api.send_transaction(self.document_id, txn);
renderer.update();
renderer.render(framebuffer_size).unwrap();
self.window.swap_buffers().ok();
false
}
fn deinit(self) {
self.renderer.deinit();
}
}
fn main() {
let mut win1 = Window::new("window1", ColorF::new(0.3, 0.0, 0.0, 1.0));
let mut win2 = Window::new("window2", ColorF::new(0.0, 0.3, 0.0, 1.0));
loop {
if win1.tick() {
break;
}
if win2.tick() {
break;
}
}
win1.deinit();
win2.deinit();
}
fn load_file(name: &str) -> Vec<u8> {
let mut file = File::open(name).unwrap();
let mut buffer = vec![];
file.read_to_end(&mut buffer).unwrap();
buffer
}
|
let window_builder = glutin::WindowBuilder::new()
.with_title(name)
|
random_line_split
|
multiwindow.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
extern crate app_units;
extern crate euclid;
extern crate gleam;
extern crate glutin;
extern crate webrender;
use app_units::Au;
use gleam::gl;
use glutin::GlContext;
use std::fs::File;
use std::io::Read;
use webrender::api::*;
struct Notifier {
events_proxy: glutin::EventsLoopProxy,
}
impl Notifier {
fn new(events_proxy: glutin::EventsLoopProxy) -> Notifier {
Notifier { events_proxy }
}
}
impl RenderNotifier for Notifier {
fn clone(&self) -> Box<RenderNotifier> {
Box::new(Notifier {
events_proxy: self.events_proxy.clone(),
})
}
fn wake_up(&self) {
#[cfg(not(target_os = "android"))]
let _ = self.events_proxy.wakeup();
}
fn new_frame_ready(&self, _: DocumentId, _scrolled: bool, _composite_needed: bool) {
self.wake_up();
}
}
struct Window {
events_loop: glutin::EventsLoop, //TODO: share events loop?
window: glutin::GlWindow,
renderer: webrender::Renderer,
name: &'static str,
pipeline_id: PipelineId,
document_id: DocumentId,
epoch: Epoch,
api: RenderApi,
font_instance_key: FontInstanceKey,
}
impl Window {
fn new(name: &'static str, clear_color: ColorF) -> Self {
let events_loop = glutin::EventsLoop::new();
let context_builder = glutin::ContextBuilder::new()
.with_gl(glutin::GlRequest::GlThenGles {
opengl_version: (3, 2),
opengles_version: (3, 0),
});
let window_builder = glutin::WindowBuilder::new()
.with_title(name)
.with_multitouch()
.with_dimensions(800, 600);
let window = glutin::GlWindow::new(window_builder, context_builder, &events_loop)
.unwrap();
unsafe {
window.make_current().ok();
}
let gl = match window.get_api() {
glutin::Api::OpenGl => unsafe {
gl::GlFns::load_with(|symbol| window.get_proc_address(symbol) as *const _)
},
glutin::Api::OpenGlEs => unsafe {
gl::GlesFns::load_with(|symbol| window.get_proc_address(symbol) as *const _)
},
glutin::Api::WebGl => unimplemented!(),
};
let device_pixel_ratio = window.hidpi_factor();
let opts = webrender::RendererOptions {
device_pixel_ratio,
clear_color: Some(clear_color),
..webrender::RendererOptions::default()
};
let framebuffer_size = {
let (width, height) = window.get_inner_size().unwrap();
DeviceUintSize::new(width, height)
};
let notifier = Box::new(Notifier::new(events_loop.create_proxy()));
let (renderer, sender) = webrender::Renderer::new(gl.clone(), notifier, opts).unwrap();
let api = sender.create_api();
let document_id = api.add_document(framebuffer_size, 0);
let epoch = Epoch(0);
let pipeline_id = PipelineId(0, 0);
let mut resources = ResourceUpdates::new();
let font_key = api.generate_font_key();
let font_bytes = load_file("../wrench/reftests/text/FreeSans.ttf");
resources.add_raw_font(font_key, font_bytes, 0);
let font_instance_key = api.generate_font_instance_key();
resources.add_font_instance(font_instance_key, font_key, Au::from_px(32), None, None, Vec::new());
let mut txn = Transaction::new();
txn.update_resources(resources);
api.send_transaction(document_id, txn);
Window {
events_loop,
window,
renderer,
name,
epoch,
pipeline_id,
document_id,
api,
font_instance_key,
}
}
fn tick(&mut self) -> bool {
unsafe {
self.window.make_current().ok();
}
let mut do_exit = false;
let my_name = &self.name;
let renderer = &mut self.renderer;
self.events_loop.poll_events(|global_event| match global_event {
glutin::Event::WindowEvent { event, .. } => match event {
glutin::WindowEvent::Closed |
glutin::WindowEvent::KeyboardInput {
input: glutin::KeyboardInput {
virtual_keycode: Some(glutin::VirtualKeyCode::Escape),
..
},
..
} => {
do_exit = true
}
glutin::WindowEvent::KeyboardInput {
input: glutin::KeyboardInput {
state: glutin::ElementState::Pressed,
virtual_keycode: Some(glutin::VirtualKeyCode::P),
..
},
..
} => {
println!("toggle flags {}", my_name);
renderer.toggle_debug_flags(webrender::DebugFlags::PROFILER_DBG);
}
_ => {}
}
_ => {}
});
if do_exit {
return true
}
let framebuffer_size = {
let (width, height) = self.window.get_inner_size().unwrap();
DeviceUintSize::new(width, height)
};
let device_pixel_ratio = self.window.hidpi_factor();
let layout_size = framebuffer_size.to_f32() / euclid::TypedScale::new(device_pixel_ratio);
let mut txn = Transaction::new();
let mut builder = DisplayListBuilder::new(self.pipeline_id, layout_size);
let bounds = LayoutRect::new(LayoutPoint::zero(), builder.content_size());
let info = LayoutPrimitiveInfo::new(bounds);
builder.push_stacking_context(
&info,
None,
None,
TransformStyle::Flat,
None,
MixBlendMode::Normal,
Vec::new(),
GlyphRasterSpace::Screen,
);
let info = LayoutPrimitiveInfo::new(LayoutRect::new(
LayoutPoint::new(100.0, 100.0),
LayoutSize::new(100.0, 200.0)
));
builder.push_rect(&info, ColorF::new(0.0, 1.0, 0.0, 1.0));
let text_bounds = LayoutRect::new(
LayoutPoint::new(100.0, 50.0),
LayoutSize::new(700.0, 200.0)
);
let glyphs = vec![
GlyphInstance {
index: 48,
point: LayoutPoint::new(100.0, 100.0),
},
GlyphInstance {
index: 68,
point: LayoutPoint::new(150.0, 100.0),
},
GlyphInstance {
index: 80,
point: LayoutPoint::new(200.0, 100.0),
},
GlyphInstance {
index: 82,
point: LayoutPoint::new(250.0, 100.0),
},
GlyphInstance {
index: 81,
point: LayoutPoint::new(300.0, 100.0),
},
GlyphInstance {
index: 3,
point: LayoutPoint::new(350.0, 100.0),
},
GlyphInstance {
index: 86,
point: LayoutPoint::new(400.0, 100.0),
},
GlyphInstance {
index: 79,
point: LayoutPoint::new(450.0, 100.0),
},
GlyphInstance {
index: 72,
point: LayoutPoint::new(500.0, 100.0),
},
GlyphInstance {
index: 83,
point: LayoutPoint::new(550.0, 100.0),
},
GlyphInstance {
index: 87,
point: LayoutPoint::new(600.0, 100.0),
},
GlyphInstance {
index: 17,
point: LayoutPoint::new(650.0, 100.0),
},
];
let info = LayoutPrimitiveInfo::new(text_bounds);
builder.push_text(
&info,
&glyphs,
self.font_instance_key,
ColorF::new(1.0, 1.0, 0.0, 1.0),
None,
);
builder.pop_stacking_context();
txn.set_display_list(
self.epoch,
None,
layout_size,
builder.finalize(),
true,
);
txn.set_root_pipeline(self.pipeline_id);
txn.generate_frame();
self.api.send_transaction(self.document_id, txn);
renderer.update();
renderer.render(framebuffer_size).unwrap();
self.window.swap_buffers().ok();
false
}
fn deinit(self)
|
}
fn main() {
let mut win1 = Window::new("window1", ColorF::new(0.3, 0.0, 0.0, 1.0));
let mut win2 = Window::new("window2", ColorF::new(0.0, 0.3, 0.0, 1.0));
loop {
if win1.tick() {
break;
}
if win2.tick() {
break;
}
}
win1.deinit();
win2.deinit();
}
fn load_file(name: &str) -> Vec<u8> {
let mut file = File::open(name).unwrap();
let mut buffer = vec![];
file.read_to_end(&mut buffer).unwrap();
buffer
}
|
{
self.renderer.deinit();
}
|
identifier_body
|
main.rs
|
use crossbeam::crossbeam_channel::unbounded;
use log::{debug, error, info};
use serialport::prelude::*;
#[cfg(feature = "mqtt")]
use standaertha_gateway::mqtt;
#[cfg(feature = "webthing")]
use standaertha_gateway::webthing;
use standaertha_gateway::{
append_crc16, config, slip_encode, Command, CommandType, Package, PackageInputStream, Service,
};
use std::io::Read;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::thread;
use std::time::{Duration, Instant};
const MAX_COMMANDS: usize = 64;
fn
|
() -> Result<(), Box<dyn std::error::Error + 'static>> {
pretty_env_logger::init();
let config = config::read_config()?;
let mut services: Vec<Box<dyn Service>> = vec![];
let (sender, recv) = unbounded();
#[cfg(feature = "mqtt")]
{
let mqtt = mqtt::init(&config, &sender)?;
if mqtt.is_some() {
services.push(mqtt.unwrap());
}
}
#[cfg(feature = "webthing")]
{
let thing = webthing::init(&config, &sender)?;
if thing.is_some() {
services.push(thing.unwrap());
}
}
/*
thread::spawn(move || {
loop {
let mut commands = vec![];
let notification = notifications.recv().expect("Error receiving notification");
commands.push(notification);
if let Ok(notification) = notifications.recv_timeout(Duration::from_millis(10)) {
commands.push(notification);
}
while commands.len() < MAX_COMMANDS {
if let Ok(notification) = notifications.try_recv() {
commands.push(notification);
} else {
break;
}
}
commands.into_iter().map(|notification| {
if let Notification::Publish(command) = notification {
// TODO: handle topic name/payload
}
});
}
});
*/
let s = SerialPortSettings {
baud_rate: config.serial.baud_rate,
data_bits: DataBits::Eight,
flow_control: FlowControl::None,
parity: Parity::None,
stop_bits: StopBits::One,
timeout: config.serial.timeout,
};
debug!("Using serial port: {}", config.serial.port);
let mut serial = serialport::open_with_settings(&config.serial.port, &s).unwrap();
let running = Arc::new(AtomicBool::new(true));
let r = running.clone();
ctrlc::set_handler(move || {
info!("Received interrupt, stopping...");
r.store(false, Ordering::SeqCst);
})
.expect("Error setting Ctrl-C handler");
let input = serial.try_clone().unwrap().bytes();
let r = running.clone();
let cmd_thread = thread::spawn(move || loop {
if !r.load(Ordering::SeqCst) {
break;
}
if let Ok(c) = recv.recv_timeout(Duration::from_millis(1000)) {
let mut cmds = vec![c.raw()];
let now = Instant::now();
let timeout_duration = Duration::from_millis(100);
let timeout = now + timeout_duration;
let mut remaining = timeout_duration;
while let Ok(c) = recv.recv_timeout(remaining) {
cmds.push(c.raw());
if cmds.len() >= MAX_COMMANDS {
break;
}
let now = Instant::now();
if now > timeout {
break;
}
remaining = timeout - now;
}
cmds = append_crc16(cmds);
serial.write_all(&slip_encode(&cmds)).unwrap();
}
});
sender.send(Command::new(CommandType::Refresh, 0)).unwrap();
let mut last_package = None;
for p in PackageInputStream::new(input) {
match p {
Ok(p) => {
if p.len() == 36 {
let pkg = Package::from_buf(&p[0..36]);
debug!("Package: {:?}", pkg);
last_package = Some(Instant::now());
for service in &mut services {
service.handle_package(&pkg);
}
} else {
info!("Discarding package of length!= 36, was {}", p.len());
}
}
Err(e) => {
if e.kind() == std::io::ErrorKind::TimedOut {
debug!("Routine timeout on serial input");
} else {
error!("Error on input stream: {:?}", e);
}
}
}
if last_package.is_some()
&& Instant::now() - last_package.unwrap() > Duration::from_secs(10)
{
sender.send(Command::new(CommandType::Refresh, 0)).unwrap();
}
if !running.load(Ordering::SeqCst) {
break;
}
}
for service in &mut services {
service.join();
}
cmd_thread.join().unwrap();
Ok(())
}
|
main
|
identifier_name
|
main.rs
|
use crossbeam::crossbeam_channel::unbounded;
use log::{debug, error, info};
use serialport::prelude::*;
#[cfg(feature = "mqtt")]
use standaertha_gateway::mqtt;
#[cfg(feature = "webthing")]
use standaertha_gateway::webthing;
use standaertha_gateway::{
append_crc16, config, slip_encode, Command, CommandType, Package, PackageInputStream, Service,
};
use std::io::Read;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::thread;
use std::time::{Duration, Instant};
const MAX_COMMANDS: usize = 64;
fn main() -> Result<(), Box<dyn std::error::Error + 'static>>
|
if thing.is_some() {
services.push(thing.unwrap());
}
}
/*
thread::spawn(move || {
loop {
let mut commands = vec![];
let notification = notifications.recv().expect("Error receiving notification");
commands.push(notification);
if let Ok(notification) = notifications.recv_timeout(Duration::from_millis(10)) {
commands.push(notification);
}
while commands.len() < MAX_COMMANDS {
if let Ok(notification) = notifications.try_recv() {
commands.push(notification);
} else {
break;
}
}
commands.into_iter().map(|notification| {
if let Notification::Publish(command) = notification {
// TODO: handle topic name/payload
}
});
}
});
*/
let s = SerialPortSettings {
baud_rate: config.serial.baud_rate,
data_bits: DataBits::Eight,
flow_control: FlowControl::None,
parity: Parity::None,
stop_bits: StopBits::One,
timeout: config.serial.timeout,
};
debug!("Using serial port: {}", config.serial.port);
let mut serial = serialport::open_with_settings(&config.serial.port, &s).unwrap();
let running = Arc::new(AtomicBool::new(true));
let r = running.clone();
ctrlc::set_handler(move || {
info!("Received interrupt, stopping...");
r.store(false, Ordering::SeqCst);
})
.expect("Error setting Ctrl-C handler");
let input = serial.try_clone().unwrap().bytes();
let r = running.clone();
let cmd_thread = thread::spawn(move || loop {
if !r.load(Ordering::SeqCst) {
break;
}
if let Ok(c) = recv.recv_timeout(Duration::from_millis(1000)) {
let mut cmds = vec![c.raw()];
let now = Instant::now();
let timeout_duration = Duration::from_millis(100);
let timeout = now + timeout_duration;
let mut remaining = timeout_duration;
while let Ok(c) = recv.recv_timeout(remaining) {
cmds.push(c.raw());
if cmds.len() >= MAX_COMMANDS {
break;
}
let now = Instant::now();
if now > timeout {
break;
}
remaining = timeout - now;
}
cmds = append_crc16(cmds);
serial.write_all(&slip_encode(&cmds)).unwrap();
}
});
sender.send(Command::new(CommandType::Refresh, 0)).unwrap();
let mut last_package = None;
for p in PackageInputStream::new(input) {
match p {
Ok(p) => {
if p.len() == 36 {
let pkg = Package::from_buf(&p[0..36]);
debug!("Package: {:?}", pkg);
last_package = Some(Instant::now());
for service in &mut services {
service.handle_package(&pkg);
}
} else {
info!("Discarding package of length!= 36, was {}", p.len());
}
}
Err(e) => {
if e.kind() == std::io::ErrorKind::TimedOut {
debug!("Routine timeout on serial input");
} else {
error!("Error on input stream: {:?}", e);
}
}
}
if last_package.is_some()
&& Instant::now() - last_package.unwrap() > Duration::from_secs(10)
{
sender.send(Command::new(CommandType::Refresh, 0)).unwrap();
}
if !running.load(Ordering::SeqCst) {
break;
}
}
for service in &mut services {
service.join();
}
cmd_thread.join().unwrap();
Ok(())
}
|
{
pretty_env_logger::init();
let config = config::read_config()?;
let mut services: Vec<Box<dyn Service>> = vec![];
let (sender, recv) = unbounded();
#[cfg(feature = "mqtt")]
{
let mqtt = mqtt::init(&config, &sender)?;
if mqtt.is_some() {
services.push(mqtt.unwrap());
}
}
#[cfg(feature = "webthing")]
{
let thing = webthing::init(&config, &sender)?;
|
identifier_body
|
main.rs
|
use crossbeam::crossbeam_channel::unbounded;
|
use standaertha_gateway::mqtt;
#[cfg(feature = "webthing")]
use standaertha_gateway::webthing;
use standaertha_gateway::{
append_crc16, config, slip_encode, Command, CommandType, Package, PackageInputStream, Service,
};
use std::io::Read;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::thread;
use std::time::{Duration, Instant};
const MAX_COMMANDS: usize = 64;
fn main() -> Result<(), Box<dyn std::error::Error + 'static>> {
pretty_env_logger::init();
let config = config::read_config()?;
let mut services: Vec<Box<dyn Service>> = vec![];
let (sender, recv) = unbounded();
#[cfg(feature = "mqtt")]
{
let mqtt = mqtt::init(&config, &sender)?;
if mqtt.is_some() {
services.push(mqtt.unwrap());
}
}
#[cfg(feature = "webthing")]
{
let thing = webthing::init(&config, &sender)?;
if thing.is_some() {
services.push(thing.unwrap());
}
}
/*
thread::spawn(move || {
loop {
let mut commands = vec![];
let notification = notifications.recv().expect("Error receiving notification");
commands.push(notification);
if let Ok(notification) = notifications.recv_timeout(Duration::from_millis(10)) {
commands.push(notification);
}
while commands.len() < MAX_COMMANDS {
if let Ok(notification) = notifications.try_recv() {
commands.push(notification);
} else {
break;
}
}
commands.into_iter().map(|notification| {
if let Notification::Publish(command) = notification {
// TODO: handle topic name/payload
}
});
}
});
*/
let s = SerialPortSettings {
baud_rate: config.serial.baud_rate,
data_bits: DataBits::Eight,
flow_control: FlowControl::None,
parity: Parity::None,
stop_bits: StopBits::One,
timeout: config.serial.timeout,
};
debug!("Using serial port: {}", config.serial.port);
let mut serial = serialport::open_with_settings(&config.serial.port, &s).unwrap();
let running = Arc::new(AtomicBool::new(true));
let r = running.clone();
ctrlc::set_handler(move || {
info!("Received interrupt, stopping...");
r.store(false, Ordering::SeqCst);
})
.expect("Error setting Ctrl-C handler");
let input = serial.try_clone().unwrap().bytes();
let r = running.clone();
let cmd_thread = thread::spawn(move || loop {
if !r.load(Ordering::SeqCst) {
break;
}
if let Ok(c) = recv.recv_timeout(Duration::from_millis(1000)) {
let mut cmds = vec![c.raw()];
let now = Instant::now();
let timeout_duration = Duration::from_millis(100);
let timeout = now + timeout_duration;
let mut remaining = timeout_duration;
while let Ok(c) = recv.recv_timeout(remaining) {
cmds.push(c.raw());
if cmds.len() >= MAX_COMMANDS {
break;
}
let now = Instant::now();
if now > timeout {
break;
}
remaining = timeout - now;
}
cmds = append_crc16(cmds);
serial.write_all(&slip_encode(&cmds)).unwrap();
}
});
sender.send(Command::new(CommandType::Refresh, 0)).unwrap();
let mut last_package = None;
for p in PackageInputStream::new(input) {
match p {
Ok(p) => {
if p.len() == 36 {
let pkg = Package::from_buf(&p[0..36]);
debug!("Package: {:?}", pkg);
last_package = Some(Instant::now());
for service in &mut services {
service.handle_package(&pkg);
}
} else {
info!("Discarding package of length!= 36, was {}", p.len());
}
}
Err(e) => {
if e.kind() == std::io::ErrorKind::TimedOut {
debug!("Routine timeout on serial input");
} else {
error!("Error on input stream: {:?}", e);
}
}
}
if last_package.is_some()
&& Instant::now() - last_package.unwrap() > Duration::from_secs(10)
{
sender.send(Command::new(CommandType::Refresh, 0)).unwrap();
}
if !running.load(Ordering::SeqCst) {
break;
}
}
for service in &mut services {
service.join();
}
cmd_thread.join().unwrap();
Ok(())
}
|
use log::{debug, error, info};
use serialport::prelude::*;
#[cfg(feature = "mqtt")]
|
random_line_split
|
main.rs
|
use crossbeam::crossbeam_channel::unbounded;
use log::{debug, error, info};
use serialport::prelude::*;
#[cfg(feature = "mqtt")]
use standaertha_gateway::mqtt;
#[cfg(feature = "webthing")]
use standaertha_gateway::webthing;
use standaertha_gateway::{
append_crc16, config, slip_encode, Command, CommandType, Package, PackageInputStream, Service,
};
use std::io::Read;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::thread;
use std::time::{Duration, Instant};
const MAX_COMMANDS: usize = 64;
fn main() -> Result<(), Box<dyn std::error::Error + 'static>> {
pretty_env_logger::init();
let config = config::read_config()?;
let mut services: Vec<Box<dyn Service>> = vec![];
let (sender, recv) = unbounded();
#[cfg(feature = "mqtt")]
{
let mqtt = mqtt::init(&config, &sender)?;
if mqtt.is_some() {
services.push(mqtt.unwrap());
}
}
#[cfg(feature = "webthing")]
{
let thing = webthing::init(&config, &sender)?;
if thing.is_some() {
services.push(thing.unwrap());
}
}
/*
thread::spawn(move || {
loop {
let mut commands = vec![];
let notification = notifications.recv().expect("Error receiving notification");
commands.push(notification);
if let Ok(notification) = notifications.recv_timeout(Duration::from_millis(10)) {
commands.push(notification);
}
while commands.len() < MAX_COMMANDS {
if let Ok(notification) = notifications.try_recv() {
commands.push(notification);
} else {
break;
}
}
commands.into_iter().map(|notification| {
if let Notification::Publish(command) = notification {
// TODO: handle topic name/payload
}
});
}
});
*/
let s = SerialPortSettings {
baud_rate: config.serial.baud_rate,
data_bits: DataBits::Eight,
flow_control: FlowControl::None,
parity: Parity::None,
stop_bits: StopBits::One,
timeout: config.serial.timeout,
};
debug!("Using serial port: {}", config.serial.port);
let mut serial = serialport::open_with_settings(&config.serial.port, &s).unwrap();
let running = Arc::new(AtomicBool::new(true));
let r = running.clone();
ctrlc::set_handler(move || {
info!("Received interrupt, stopping...");
r.store(false, Ordering::SeqCst);
})
.expect("Error setting Ctrl-C handler");
let input = serial.try_clone().unwrap().bytes();
let r = running.clone();
let cmd_thread = thread::spawn(move || loop {
if !r.load(Ordering::SeqCst) {
break;
}
if let Ok(c) = recv.recv_timeout(Duration::from_millis(1000))
|
});
sender.send(Command::new(CommandType::Refresh, 0)).unwrap();
let mut last_package = None;
for p in PackageInputStream::new(input) {
match p {
Ok(p) => {
if p.len() == 36 {
let pkg = Package::from_buf(&p[0..36]);
debug!("Package: {:?}", pkg);
last_package = Some(Instant::now());
for service in &mut services {
service.handle_package(&pkg);
}
} else {
info!("Discarding package of length!= 36, was {}", p.len());
}
}
Err(e) => {
if e.kind() == std::io::ErrorKind::TimedOut {
debug!("Routine timeout on serial input");
} else {
error!("Error on input stream: {:?}", e);
}
}
}
if last_package.is_some()
&& Instant::now() - last_package.unwrap() > Duration::from_secs(10)
{
sender.send(Command::new(CommandType::Refresh, 0)).unwrap();
}
if !running.load(Ordering::SeqCst) {
break;
}
}
for service in &mut services {
service.join();
}
cmd_thread.join().unwrap();
Ok(())
}
|
{
let mut cmds = vec![c.raw()];
let now = Instant::now();
let timeout_duration = Duration::from_millis(100);
let timeout = now + timeout_duration;
let mut remaining = timeout_duration;
while let Ok(c) = recv.recv_timeout(remaining) {
cmds.push(c.raw());
if cmds.len() >= MAX_COMMANDS {
break;
}
let now = Instant::now();
if now > timeout {
break;
}
remaining = timeout - now;
}
cmds = append_crc16(cmds);
serial.write_all(&slip_encode(&cmds)).unwrap();
}
|
conditional_block
|
coherence-pair-covered-uncovered-1.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that the same coverage rules apply even if the local type appears in the
// list of type parameters, not the self type.
// aux-build:coherence-lib.rs
extern crate "coherence-lib" as lib;
use lib::{Remote1, Pair};
pub struct
|
<T>(T);
impl<T, U> Remote1<Pair<T, Local<U>>> for i32 { }
//~^ ERROR type parameter `T` must be used as the type parameter for some local type
fn main() { }
|
Local
|
identifier_name
|
coherence-pair-covered-uncovered-1.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that the same coverage rules apply even if the local type appears in the
// list of type parameters, not the self type.
// aux-build:coherence-lib.rs
extern crate "coherence-lib" as lib;
use lib::{Remote1, Pair};
pub struct Local<T>(T);
impl<T, U> Remote1<Pair<T, Local<U>>> for i32 { }
//~^ ERROR type parameter `T` must be used as the type parameter for some local type
|
fn main() { }
|
random_line_split
|
|
coherence-pair-covered-uncovered-1.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that the same coverage rules apply even if the local type appears in the
// list of type parameters, not the self type.
// aux-build:coherence-lib.rs
extern crate "coherence-lib" as lib;
use lib::{Remote1, Pair};
pub struct Local<T>(T);
impl<T, U> Remote1<Pair<T, Local<U>>> for i32 { }
//~^ ERROR type parameter `T` must be used as the type parameter for some local type
fn main()
|
{ }
|
identifier_body
|
|
static_lifetime.rs
|
// Make a constant with `'static` lifetime.
static NUM: i32 = 18;
// Returns a reference to `NUM` where its `'static`
// lifetime is coerced to that of the input argument.
fn coerce_static<'a>(_: &'a i32) -> &'a i32 {
&NUM
}
fn main() {
{
// Make a `string` literal and print it:
let static_string = "I'm in read-only memory";
println!("static_string: {}", static_string);
// When `static_string` goes out of scope, the reference
// can no longer be used, but the data remains in the binary.
}
{
// Make an integer to use for `coerce_static`:
|
let coerced_static = coerce_static(&lifetime_num);
println!("coerced_static: {}", coerced_static);
}
println!("NUM: {} stays accessible!", NUM);
}
|
let lifetime_num = 9;
// Coerce `NUM` to lifetime of `lifetime_num`:
|
random_line_split
|
static_lifetime.rs
|
// Make a constant with `'static` lifetime.
static NUM: i32 = 18;
// Returns a reference to `NUM` where its `'static`
// lifetime is coerced to that of the input argument.
fn coerce_static<'a>(_: &'a i32) -> &'a i32 {
&NUM
}
fn
|
() {
{
// Make a `string` literal and print it:
let static_string = "I'm in read-only memory";
println!("static_string: {}", static_string);
// When `static_string` goes out of scope, the reference
// can no longer be used, but the data remains in the binary.
}
{
// Make an integer to use for `coerce_static`:
let lifetime_num = 9;
// Coerce `NUM` to lifetime of `lifetime_num`:
let coerced_static = coerce_static(&lifetime_num);
println!("coerced_static: {}", coerced_static);
}
println!("NUM: {} stays accessible!", NUM);
}
|
main
|
identifier_name
|
static_lifetime.rs
|
// Make a constant with `'static` lifetime.
static NUM: i32 = 18;
// Returns a reference to `NUM` where its `'static`
// lifetime is coerced to that of the input argument.
fn coerce_static<'a>(_: &'a i32) -> &'a i32
|
fn main() {
{
// Make a `string` literal and print it:
let static_string = "I'm in read-only memory";
println!("static_string: {}", static_string);
// When `static_string` goes out of scope, the reference
// can no longer be used, but the data remains in the binary.
}
{
// Make an integer to use for `coerce_static`:
let lifetime_num = 9;
// Coerce `NUM` to lifetime of `lifetime_num`:
let coerced_static = coerce_static(&lifetime_num);
println!("coerced_static: {}", coerced_static);
}
println!("NUM: {} stays accessible!", NUM);
}
|
{
&NUM
}
|
identifier_body
|
transform.rs
|
use std::fmt;
use Result;
/// A scale and an offset that transforms xyz coordinates.
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct Transform {
/// The scale.
pub scale: f64,
/// The offset.
pub offset: f64,
}
impl Transform {
/// Applies this transform to an i32, returning a float.
///
/// # Examples
///
/// ```
/// use las::Transform;
/// let transform = Transform { scale: 2., offset: 1. };
/// assert_eq!(3., transform.direct(1));
/// ```
pub fn direct(&self, n: i32) -> f64 {
self.scale * f64::from(n) + self.offset
}
/// Applies the inverse transform, and rounds the result.
///
/// Returns an error if the resultant value can't be represented as an i32.
///
/// # Examples
///
/// ```
/// use las::Transform;
/// let transform = Transform { scale: 2., offset: 1. };
/// assert_eq!(1, transform.inverse(2.9).unwrap());
/// ```
pub fn inverse(&self, n: f64) -> Result<i32> {
use std::i32;
use Error;
let n = ((n - self.offset) / self.scale).round();
if n > f64::from(i32::MAX) || n < f64::from(i32::MIN)
|
else {
Ok(n as i32)
}
}
}
impl Default for Transform {
fn default() -> Transform {
Transform {
scale: 0.001,
offset: 0.,
}
}
}
impl fmt::Display for Transform {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "`{} * x + {}`", self.scale, self.offset)
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::i32;
#[test]
fn too_large() {
let transform = Transform::default();
let n = i32::MAX as f64 * transform.scale + 1.;
assert!(transform.inverse(n).is_err());
}
#[test]
fn too_small() {
let transform = Transform::default();
let n = i32::MIN as f64 * transform.scale - 1.;
assert!(transform.inverse(n).is_err());
}
}
|
{
Err(Error::InverseTransform {
n,
transform: *self,
})
}
|
conditional_block
|
transform.rs
|
use std::fmt;
use Result;
/// A scale and an offset that transforms xyz coordinates.
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct Transform {
/// The scale.
pub scale: f64,
/// The offset.
pub offset: f64,
}
impl Transform {
/// Applies this transform to an i32, returning a float.
///
/// # Examples
///
/// ```
/// use las::Transform;
/// let transform = Transform { scale: 2., offset: 1. };
/// assert_eq!(3., transform.direct(1));
/// ```
pub fn direct(&self, n: i32) -> f64 {
self.scale * f64::from(n) + self.offset
}
/// Applies the inverse transform, and rounds the result.
///
/// Returns an error if the resultant value can't be represented as an i32.
///
/// # Examples
///
/// ```
/// use las::Transform;
/// let transform = Transform { scale: 2., offset: 1. };
/// assert_eq!(1, transform.inverse(2.9).unwrap());
/// ```
pub fn inverse(&self, n: f64) -> Result<i32> {
use std::i32;
use Error;
let n = ((n - self.offset) / self.scale).round();
if n > f64::from(i32::MAX) || n < f64::from(i32::MIN) {
Err(Error::InverseTransform {
n,
transform: *self,
})
} else {
Ok(n as i32)
}
}
}
impl Default for Transform {
fn default() -> Transform {
Transform {
scale: 0.001,
offset: 0.,
}
}
}
impl fmt::Display for Transform {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "`{} * x + {}`", self.scale, self.offset)
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::i32;
#[test]
fn
|
() {
let transform = Transform::default();
let n = i32::MAX as f64 * transform.scale + 1.;
assert!(transform.inverse(n).is_err());
}
#[test]
fn too_small() {
let transform = Transform::default();
let n = i32::MIN as f64 * transform.scale - 1.;
assert!(transform.inverse(n).is_err());
}
}
|
too_large
|
identifier_name
|
transform.rs
|
use std::fmt;
use Result;
/// A scale and an offset that transforms xyz coordinates.
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct Transform {
/// The scale.
pub scale: f64,
/// The offset.
pub offset: f64,
}
impl Transform {
/// Applies this transform to an i32, returning a float.
///
/// # Examples
///
/// ```
/// use las::Transform;
/// let transform = Transform { scale: 2., offset: 1. };
/// assert_eq!(3., transform.direct(1));
/// ```
pub fn direct(&self, n: i32) -> f64 {
self.scale * f64::from(n) + self.offset
}
/// Applies the inverse transform, and rounds the result.
///
/// Returns an error if the resultant value can't be represented as an i32.
///
/// # Examples
///
/// ```
/// use las::Transform;
/// let transform = Transform { scale: 2., offset: 1. };
/// assert_eq!(1, transform.inverse(2.9).unwrap());
/// ```
pub fn inverse(&self, n: f64) -> Result<i32> {
use std::i32;
use Error;
let n = ((n - self.offset) / self.scale).round();
if n > f64::from(i32::MAX) || n < f64::from(i32::MIN) {
Err(Error::InverseTransform {
n,
transform: *self,
})
} else {
Ok(n as i32)
}
}
}
impl Default for Transform {
fn default() -> Transform {
Transform {
scale: 0.001,
offset: 0.,
}
}
}
impl fmt::Display for Transform {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "`{} * x + {}`", self.scale, self.offset)
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::i32;
#[test]
fn too_large()
|
#[test]
fn too_small() {
let transform = Transform::default();
let n = i32::MIN as f64 * transform.scale - 1.;
assert!(transform.inverse(n).is_err());
}
}
|
{
let transform = Transform::default();
let n = i32::MAX as f64 * transform.scale + 1.;
assert!(transform.inverse(n).is_err());
}
|
identifier_body
|
transform.rs
|
use std::fmt;
use Result;
/// A scale and an offset that transforms xyz coordinates.
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct Transform {
/// The scale.
pub scale: f64,
/// The offset.
pub offset: f64,
|
///
/// # Examples
///
/// ```
/// use las::Transform;
/// let transform = Transform { scale: 2., offset: 1. };
/// assert_eq!(3., transform.direct(1));
/// ```
pub fn direct(&self, n: i32) -> f64 {
self.scale * f64::from(n) + self.offset
}
/// Applies the inverse transform, and rounds the result.
///
/// Returns an error if the resultant value can't be represented as an i32.
///
/// # Examples
///
/// ```
/// use las::Transform;
/// let transform = Transform { scale: 2., offset: 1. };
/// assert_eq!(1, transform.inverse(2.9).unwrap());
/// ```
pub fn inverse(&self, n: f64) -> Result<i32> {
use std::i32;
use Error;
let n = ((n - self.offset) / self.scale).round();
if n > f64::from(i32::MAX) || n < f64::from(i32::MIN) {
Err(Error::InverseTransform {
n,
transform: *self,
})
} else {
Ok(n as i32)
}
}
}
impl Default for Transform {
fn default() -> Transform {
Transform {
scale: 0.001,
offset: 0.,
}
}
}
impl fmt::Display for Transform {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "`{} * x + {}`", self.scale, self.offset)
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::i32;
#[test]
fn too_large() {
let transform = Transform::default();
let n = i32::MAX as f64 * transform.scale + 1.;
assert!(transform.inverse(n).is_err());
}
#[test]
fn too_small() {
let transform = Transform::default();
let n = i32::MIN as f64 * transform.scale - 1.;
assert!(transform.inverse(n).is_err());
}
}
|
}
impl Transform {
/// Applies this transform to an i32, returning a float.
|
random_line_split
|
http_headers.rs
|
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use log::trace;
use proxy_wasm::traits::*;
use proxy_wasm::types::*;
#[no_mangle]
pub fn _start() {
proxy_wasm::set_log_level(LogLevel::Trace);
proxy_wasm::set_root_context(|_| -> Box<dyn RootContext> { Box::new(HttpHeadersRoot) });
}
struct HttpHeadersRoot;
impl Context for HttpHeadersRoot {}
impl RootContext for HttpHeadersRoot {
fn
|
(&self) -> Option<ContextType> {
Some(ContextType::HttpContext)
}
fn create_http_context(&self, context_id: u32) -> Option<Box<dyn HttpContext>> {
Some(Box::new(HttpHeaders { context_id }))
}
}
struct HttpHeaders {
context_id: u32,
}
impl Context for HttpHeaders {}
impl HttpContext for HttpHeaders {
fn on_http_request_headers(&mut self, _: usize, _: bool) -> Action {
for (name, value) in &self.get_http_request_headers() {
trace!("#{} -> {}: {}", self.context_id, name, value);
}
match self.get_http_request_header(":path") {
Some(path) if path == "/hello" => {
self.send_http_response(
200,
vec![("Hello", "World"), ("Powered-By", "proxy-wasm")],
Some(b"Hello, World!\n"),
);
Action::Pause
}
_ => Action::Continue,
}
}
fn on_http_response_headers(&mut self, _: usize, _: bool) -> Action {
for (name, value) in &self.get_http_response_headers() {
trace!("#{} <- {}: {}", self.context_id, name, value);
}
Action::Continue
}
fn on_log(&mut self) {
trace!("#{} completed.", self.context_id);
}
}
|
get_type
|
identifier_name
|
http_headers.rs
|
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use log::trace;
use proxy_wasm::traits::*;
use proxy_wasm::types::*;
#[no_mangle]
pub fn _start() {
proxy_wasm::set_log_level(LogLevel::Trace);
proxy_wasm::set_root_context(|_| -> Box<dyn RootContext> { Box::new(HttpHeadersRoot) });
}
struct HttpHeadersRoot;
impl Context for HttpHeadersRoot {}
impl RootContext for HttpHeadersRoot {
fn get_type(&self) -> Option<ContextType> {
Some(ContextType::HttpContext)
}
fn create_http_context(&self, context_id: u32) -> Option<Box<dyn HttpContext>> {
Some(Box::new(HttpHeaders { context_id }))
}
}
struct HttpHeaders {
context_id: u32,
}
impl Context for HttpHeaders {}
impl HttpContext for HttpHeaders {
fn on_http_request_headers(&mut self, _: usize, _: bool) -> Action {
for (name, value) in &self.get_http_request_headers() {
trace!("#{} -> {}: {}", self.context_id, name, value);
}
match self.get_http_request_header(":path") {
Some(path) if path == "/hello" => {
self.send_http_response(
200,
vec![("Hello", "World"), ("Powered-By", "proxy-wasm")],
Some(b"Hello, World!\n"),
);
Action::Pause
}
_ => Action::Continue,
}
}
|
}
Action::Continue
}
fn on_log(&mut self) {
trace!("#{} completed.", self.context_id);
}
}
|
fn on_http_response_headers(&mut self, _: usize, _: bool) -> Action {
for (name, value) in &self.get_http_response_headers() {
trace!("#{} <- {}: {}", self.context_id, name, value);
|
random_line_split
|
status.rs
|
#![feature(use_extern_macros)]
extern crate tapioca_testutil;
tapioca_testutil::infer_test_api!(httpbin);
use httpbin::redirect_to;
use httpbin::redirect_to::get::QueryParams;
#[test]
fn ok_err_matching()
|
}
#[test]
fn status_body_matching() {
let auth = httpbin::ServerAuth::new();
let query200 = QueryParams {
url: "http://httpbin.org/status/200".into(),
};
let query400 = QueryParams {
url: "http://httpbin.org/status/400".into(),
};
match redirect_to::get(&query200, auth) {
Ok(response) => match response.body() {
redirect_to::get::OkBody::Status200(_) => assert!(true),
_ => assert!(false),
},
Err(_) => assert!(false),
}
match redirect_to::get(&query400, auth) {
Ok(_) => assert!(false),
Err(response) => match response.body() {
redirect_to::get::ErrBody::Status400(_) => assert!(true),
_ => assert!(false),
},
}
}
|
{
let auth = httpbin::ServerAuth::new();
let query200 = QueryParams {
url: "http://httpbin.org/status/200".into(),
};
let query400 = QueryParams {
url: "http://httpbin.org/status/400".into(),
};
match redirect_to::get(&query200, auth) {
Ok(_) => assert!(true),
Err(_) => assert!(false),
}
match redirect_to::get(&query400, auth) {
Ok(_) => assert!(false),
Err(_) => assert!(true),
}
|
identifier_body
|
status.rs
|
#![feature(use_extern_macros)]
extern crate tapioca_testutil;
tapioca_testutil::infer_test_api!(httpbin);
use httpbin::redirect_to;
use httpbin::redirect_to::get::QueryParams;
#[test]
fn ok_err_matching() {
let auth = httpbin::ServerAuth::new();
let query200 = QueryParams {
url: "http://httpbin.org/status/200".into(),
};
let query400 = QueryParams {
url: "http://httpbin.org/status/400".into(),
};
match redirect_to::get(&query200, auth) {
Ok(_) => assert!(true),
Err(_) => assert!(false),
}
match redirect_to::get(&query400, auth) {
Ok(_) => assert!(false),
Err(_) => assert!(true),
}
}
#[test]
fn
|
() {
let auth = httpbin::ServerAuth::new();
let query200 = QueryParams {
url: "http://httpbin.org/status/200".into(),
};
let query400 = QueryParams {
url: "http://httpbin.org/status/400".into(),
};
match redirect_to::get(&query200, auth) {
Ok(response) => match response.body() {
redirect_to::get::OkBody::Status200(_) => assert!(true),
_ => assert!(false),
},
Err(_) => assert!(false),
}
match redirect_to::get(&query400, auth) {
Ok(_) => assert!(false),
Err(response) => match response.body() {
redirect_to::get::ErrBody::Status400(_) => assert!(true),
_ => assert!(false),
},
}
}
|
status_body_matching
|
identifier_name
|
status.rs
|
#![feature(use_extern_macros)]
extern crate tapioca_testutil;
tapioca_testutil::infer_test_api!(httpbin);
use httpbin::redirect_to;
use httpbin::redirect_to::get::QueryParams;
#[test]
fn ok_err_matching() {
let auth = httpbin::ServerAuth::new();
let query200 = QueryParams {
url: "http://httpbin.org/status/200".into(),
};
let query400 = QueryParams {
url: "http://httpbin.org/status/400".into(),
};
match redirect_to::get(&query200, auth) {
Ok(_) => assert!(true),
Err(_) => assert!(false),
}
match redirect_to::get(&query400, auth) {
Ok(_) => assert!(false),
Err(_) => assert!(true),
}
}
#[test]
fn status_body_matching() {
let auth = httpbin::ServerAuth::new();
let query200 = QueryParams {
url: "http://httpbin.org/status/200".into(),
};
let query400 = QueryParams {
|
Ok(response) => match response.body() {
redirect_to::get::OkBody::Status200(_) => assert!(true),
_ => assert!(false),
},
Err(_) => assert!(false),
}
match redirect_to::get(&query400, auth) {
Ok(_) => assert!(false),
Err(response) => match response.body() {
redirect_to::get::ErrBody::Status400(_) => assert!(true),
_ => assert!(false),
},
}
}
|
url: "http://httpbin.org/status/400".into(),
};
match redirect_to::get(&query200, auth) {
|
random_line_split
|