file_name (large_string, 4-140 chars) | prefix (large_string, 0-39k chars) | suffix (large_string, 0-36.1k chars) | middle (large_string, 0-29.4k chars) | fim_type (large_string, 4 classes: random_line_split, conditional_block, identifier_name, identifier_body)
---|---|---|---|---|
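Each row below is one fill-in-the-middle (FIM) training example: a source file split into `prefix`, `middle`, and `suffix` at a point chosen by the strategy named in `fim_type`. A minimal sketch of how a row reassembles into the original file; only the column names come from the header above, the example values are illustrative, not taken from the dataset:

```python
# Sketch: assumes each row is available as a dict keyed by the column
# names in the header above; everything else here is illustrative.
def reconstruct_file(row: dict) -> str:
    # Concatenating the three pieces restores the original source file.
    return row["prefix"] + row["middle"] + row["suffix"]

example = {
    "file_name": "lib.rs",
    "prefix": "fn main() {",
    "middle": " println!(\"hi\");",
    "suffix": " }",
    "fim_type": "random_line_split",
}
assert reconstruct_file(example) == 'fn main() { println!("hi"); }'
```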
lib.rs | //! # Mime
//!
//! Mime is now Media Type, technically, but `Mime` is more immediately
//! understandable, so the main type here is `Mime`.
//!
//! ## What is Mime?
//!
//! Example mime string: `text/plain;charset=utf-8`
//!
//! ```rust
//! # #[macro_use] extern crate mime;
//! # fn main() {
//! let plain_text: mime::Mime = "text/plain;charset=utf-8".parse().unwrap();
//! assert_eq!(plain_text, mime!(Text/Plain; Charset=Utf8));
//! # }
//! ```
#![doc(html_root_url = "https://hyperium.github.io/mime.rs")]
#![cfg_attr(test, deny(warnings))]
#![cfg_attr(all(feature = "nightly", test), feature(test))]
#[macro_use]
extern crate log;
#[cfg(feature = "nightly")]
#[cfg(test)]
extern crate test;
#[cfg(feature = "serde")]
extern crate serde;
#[cfg(feature = "serde")]
#[cfg(test)]
extern crate serde_json;
#[cfg(feature = "heapsize")]
extern crate heapsize;
use std::ascii::AsciiExt;
use std::fmt;
use std::iter::Enumerate;
use std::str::{FromStr, Chars};
/// Mime, or Media Type. Encapsulates common registered types.
///
/// Consider that a traditional mime type contains a "top level type",
/// a "sub level type", and 0-N "parameters". And they're all strings.
/// Strings everywhere. Strings mean typos. Rust has type safety. We should
/// use types!
///
/// So, Mime bundles together this data into types so the compiler can catch
/// your typos.
///
/// This means you can use `match` without Strings:
///
/// ```rust
/// use mime::{Mime, TopLevel, SubLevel};
///
/// let mime: Mime = "application/json".parse().unwrap();
///
/// match mime {
/// Mime(TopLevel::Application, SubLevel::Json, _) => println!("matched json!"),
/// _ => ()
/// }
/// ```
#[derive(Clone, Debug, Eq, Hash, Ord, PartialOrd)]
pub struct Mime<T: AsRef<[Param]> = Vec<Param>>(pub TopLevel, pub SubLevel, pub T);
#[cfg(feature = "heapsize")]
impl<T: AsRef<[Param]> + heapsize::HeapSizeOf> heapsize::HeapSizeOf for Mime<T> {
fn heap_size_of_children(&self) -> usize {
self.0.heap_size_of_children() +
self.1.heap_size_of_children() +
self.2.heap_size_of_children()
}
}
impl<LHS: AsRef<[Param]>, RHS: AsRef<[Param]>> PartialEq<Mime<RHS>> for Mime<LHS> {
#[inline]
fn eq(&self, other: &Mime<RHS>) -> bool {
self.0 == other.0 && self.1 == other.1 && self.2.as_ref() == other.2.as_ref()
}
}
/// Easily create a Mime without having to import so many enums.
///
/// # Example
///
/// ```
/// # #[macro_use] extern crate mime;
///
/// # fn main() {
/// let json = mime!(Application/Json);
/// let plain = mime!(Text/Plain; Charset=Utf8);
/// let text = mime!(Text/Html; Charset=("bar"), ("baz")=("quux"));
/// let img = mime!(Image/_);
/// # }
/// ```
#[macro_export]
macro_rules! mime {
($top:tt / $sub:tt) => (
mime!($top / $sub;)
);
($top:tt / $sub:tt ; $($attr:tt = $val:tt),*) => (
$crate::Mime(
__mime__ident_or_ext!(TopLevel::$top),
__mime__ident_or_ext!(SubLevel::$sub),
vec![ $((__mime__ident_or_ext!(Attr::$attr), __mime__ident_or_ext!(Value::$val))),* ]
)
);
}
#[doc(hidden)]
#[macro_export]
macro_rules! __mime__ident_or_ext {
($enoom:ident::_) => (
$crate::$enoom::Star
);
($enoom:ident::($inner:expr)) => (
$crate::$enoom::Ext($inner.to_string())
);
($enoom:ident::$var:ident) => (
$crate::$enoom::$var
)
}
macro_rules! enoom {
(pub enum $en:ident; $ext:ident; $($ty:ident, $text:expr;)*) => (
#[derive(Clone, Debug, Eq, Hash, Ord, PartialOrd)]
pub enum $en {
$($ty),*,
$ext(String)
}
impl $en {
pub fn as_str(&self) -> &str {
match *self {
$($en::$ty => $text),*,
$en::$ext(ref s) => &s
}
}
}
impl ::std::ops::Deref for $en {
type Target = str;
fn deref(&self) -> &str {
self.as_str()
}
}
impl PartialEq for $en {
#[inline]
fn eq(&self, other: &$en) -> bool {
match (self, other) {
$( (&$en::$ty, &$en::$ty) => true ),*,
(&$en::$ext(ref a), &$en::$ext(ref b)) => a == b,
(_, _) => self.as_str() == other.as_str(),
}
}
}
impl PartialEq<String> for $en {
fn eq(&self, other: &String) -> bool {
self.as_str() == other
}
}
impl PartialEq<str> for $en {
fn eq(&self, other: &str) -> bool {
self.as_str() == other
}
}
impl<'a> PartialEq<&'a str> for $en {
fn eq(&self, other: &&'a str) -> bool {
self.as_str() == *other
}
}
impl PartialEq<$en> for String {
fn eq(&self, other: &$en) -> bool {
self == other.as_str()
}
}
impl PartialEq<$en> for str {
fn eq(&self, other: &$en) -> bool {
self == other.as_str()
}
}
impl<'a> PartialEq<$en> for &'a str {
fn eq(&self, other: &$en) -> bool {
*self == other.as_str()
}
}
impl fmt::Display for $en {
#[inline]
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt.write_str(match *self {
$($en::$ty => $text),*,
$en::$ext(ref s) => s
})
}
}
impl FromStr for $en {
type Err = ();
fn from_str(s: &str) -> Result<$en, ()> {
Ok(match s {
$(_s if _s == $text => $en::$ty),*,
s => $en::$ext(s.to_string())
})
}
}
#[cfg(feature = "heapsize")]
impl heapsize::HeapSizeOf for $en {
fn heap_size_of_children(&self) -> usize {
match *self {
$en::$ext(ref ext) => ext.heap_size_of_children(),
_ => 0,
}
}
}
)
}
enoom! {
pub enum TopLevel;
Ext;
Star, "*";
Text, "text";
Image, "image";
Audio, "audio";
Video, "video";
Application, "application";
Multipart, "multipart";
Message, "message";
Model, "model";
}
enoom! {
pub enum SubLevel;
Ext;
Star, "*";
// common text/*
Plain, "plain";
Html, "html";
Xml, "xml";
Javascript, "javascript";
Css, "css";
EventStream, "event-stream";
// common application/*
Json, "json";
WwwFormUrlEncoded, "x-www-form-urlencoded";
Msgpack, "msgpack";
OctetStream, "octet-stream";
// multipart/*
FormData, "form-data";
// common image/*
Png, "png";
Gif, "gif";
Bmp, "bmp";
Jpeg, "jpeg";
// audio/*
Mpeg, "mpeg";
Mp4, "mp4";
Ogg, "ogg";
}
enoom! {
pub enum Attr;
Ext;
Charset, "charset";
Boundary, "boundary";
Q, "q";
}
enoom! {
pub enum Value;
Ext;
Utf8, "utf-8";
}
pub type Param = (Attr, Value);
impl<T: AsRef<[Param]>> fmt::Display for Mime<T> {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// It's much faster to write a single string, as opposed to push
// several parts through f.write_str(). So, check for the most common
// mime types, and fast track them.
if let TopLevel::Text = self.0 {
if let SubLevel::Plain = self.1 {
let attrs = self.2.as_ref();
if attrs.len() == 0 {
return f.write_str("text/plain");
} else if &[(Attr::Charset, Value::Utf8)] == attrs {
return f.write_str("text/plain; charset=utf-8");
}
}
} else if let TopLevel::Application = self.0 {
if let SubLevel::Json = self.1 {
let attrs = self.2.as_ref();
if attrs.len() == 0 {
return f.write_str("application/json");
}
}
} else if let TopLevel::Star = self.0 {
if let SubLevel::Star = self.1 {
let attrs = self.2.as_ref();
if attrs.len() == 0 {
return f.write_str("*/*");
}
}
}
// slower general purpose fmt
try!(fmt::Display::fmt(&self.0, f));
try!(f.write_str("/"));
try!(fmt::Display::fmt(&self.1, f));
for param in self.2.as_ref() {
try!(f.write_str("; "));
try!(fmt::Display::fmt(&param.0, f));
try!(f.write_str("="));
try!(fmt::Display::fmt(&param.1, f));
}
Ok(())
}
}
impl<P: AsRef<[Param]>> Mime<P> {
pub fn get_param<A: PartialEq<Attr>>(&self, attr: A) -> Option<&Value> {
self.2.as_ref().iter().find(|&&(ref name, _)| attr == *name).map(|&(_, ref value)| value)
}
}
impl FromStr for Mime {
type Err = ();
fn from_str(raw: &str) -> Result<Mime, ()> {
if raw == "*/*" {
return Ok(mime!(Star/Star));
}
let ascii = raw.to_ascii_lowercase(); // lifetimes :(
let len = ascii.len();
let mut iter = ascii.chars().enumerate();
let mut params = vec![];
// toplevel
let mut start;
let top;
loop {
match iter.next() {
Some((0, c)) if is_restricted_name_first_char(c) => (),
Some((i, c)) if i > 0 && is_restricted_name_char(c) => (),
Some((i, '/')) if i > 0 => match FromStr::from_str(&ascii[..i]) {
Ok(t) => {
top = t;
start = i + 1;
break;
}
Err(_) => return Err(())
},
_ => return Err(()) // EOF and no toplevel is no Mime
};
}
// sublevel
let sub;
let mut sub_star = false;
loop {
match iter.next() {
Some((i, '*')) if i == start => {
sub_star = true;
},
Some((i, c)) if i == start && is_restricted_name_first_char(c) => (),
Some((i, c)) if !sub_star && i > start && is_restricted_name_char(c) => (),
Some((i, ';')) if i > start => match FromStr::from_str(&ascii[start..i]) {
Ok(s) => {
sub = s;
start = i + 1;
break;
}
Err(_) => return Err(())
},
None => match FromStr::from_str(&ascii[start..]) {
Ok(s) => return Ok(Mime(top, s, params)),
Err(_) => return Err(())
},
_ => return Err(())
};
}
// params
debug!("starting params, len={}", len);
loop {
match param_from_str(raw, &ascii, &mut iter, start) {
Some((p, end)) => {
params.push(p);
start = end;
if start >= len {
break;
}
}
None => break
}
}
Ok(Mime(top, sub, params))
}
}
#[cfg(feature = "serde")]
impl serde::ser::Serialize for Mime {
fn serialize<S>(&self, serializer: &mut S) -> Result<(), S::Error>
where S: serde::ser::Serializer
{
serializer.serialize_str(&*format!("{}",self))
}
}
#[cfg(feature = "serde")]
impl serde::de::Deserialize for Mime {
fn deserialize<D>(deserializer: &mut D) -> Result<Self, D::Error>
where D: serde::de::Deserializer
{
let string: String = try!(serde::Deserialize::deserialize(deserializer));
let mime: Mime = match FromStr::from_str(&*string) {
Ok(mime) => mime,
Err(_) => return Err(serde::de::Error::custom("Invalid serialized mime")),
};
Ok(mime)
}
}
fn param_from_str(raw: &str, ascii: &str, iter: &mut Enumerate<Chars>, mut start: usize) -> Option<(Param, usize)> {
let attr;
debug!("param_from_str, start={}", start);
loop {
match iter.next() {
Some((i, ' ')) if i == start => start = i + 1,
Some((i, c)) if i == start && is_restricted_name_first_char(c) => (),
Some((i, c)) if i > start && is_restricted_name_char(c) => (),
Some((i, '=')) if i > start => match FromStr::from_str(&ascii[start..i]) {
Ok(a) => {
attr = a;
start = i + 1;
break;
},
Err(_) => return None
},
_ => return None
}
}
let value;
// values must be restricted-name-chars or, when quoted, "anything goes"
let mut is_quoted = false;
{
let substr = |a,b| { if attr==Attr::Charset { &ascii[a..b] } else { &raw[a..b] } };
let endstr = |a| { if attr==Attr::Charset { &ascii[a..] } else { &raw[a..] } };
loop {
match iter.next() {
Some((i, '"')) if i == start => {
debug!("quoted");
is_quoted = true;
start = i + 1;
},
Some((i, c)) if i == start && is_restricted_name_first_char(c) => (),
Some((i, '"')) if i > start && is_quoted => match FromStr::from_str(substr(start,i)) {
Ok(v) => {
value = v;
start = i + 1;
break;
},
Err(_) => return None
},
Some((i, c)) if i > start && is_quoted || is_restricted_name_char(c) => (), // NB: parses as (i > start && is_quoted) || is_restricted_name_char(c)
Some((i, ';')) if i > start => match FromStr::from_str(substr(start,i)) {
Ok(v) => {
value = v;
start = i + 1;
break;
},
Err(_) => return None
},
None => match FromStr::from_str(endstr(start)) {
Ok(v) => {
value = v;
start = raw.len();
break;
},
Err(_) => return None
},
_ => return None
}
}
}
Some(((attr, value), start))
}
// From [RFC6838](http://tools.ietf.org/html/rfc6838#section-4.2):
//
// > All registered media types MUST be assigned top-level type and
// > subtype names. The combination of these names serves to uniquely
// > identify the media type, and the subtype name facet (or the absence
// > of one) identifies the registration tree. Both top-level type and
// > subtype names are case-insensitive.
// >
// > Type and subtype names MUST conform to the following ABNF:
// >
// > type-name = restricted-name
// > subtype-name = restricted-name
// >
// > restricted-name = restricted-name-first *126restricted-name-chars
// > restricted-name-first = ALPHA / DIGIT
// > restricted-name-chars = ALPHA / DIGIT / "!" / "#" /
// > "$" / "&" / "-" / "^" / "_"
// > restricted-name-chars =/ "." ; Characters before first dot always
// > ; specify a facet name
// > restricted-name-chars =/ "+" ; Characters after last plus always
// > ; specify a structured syntax suffix
//
fn is_restricted_name_first_char(c: char) -> bool {
match c {
'a'...'z' |
'0'...'9' => true,
_ => false
}
}
fn is_restricted_name_char(c: char) -> bool {
if is_restricted_name_first_char(c) {
true
} else {
match c {
'!' |
'#' |
'$' |
'&' |
'-' |
'^' |
'.' |
'+' |
'_' => true,
_ => false
}
}
}
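// Illustrative sanity checks for the RFC 6838 grammar above. This is a
// sketch added for clarity, not part of the original crate. Input is
// lowercased before parsing, so the first-char test only needs 'a'...'z'
// and digits.
#[test]
fn restricted_name_char_examples() {
    assert!(is_restricted_name_first_char('m'));
    assert!(is_restricted_name_first_char('3'));
    assert!(!is_restricted_name_first_char('*'));
    assert!(is_restricted_name_char('+'));
    assert!(is_restricted_name_char('.'));
    assert!(!is_restricted_name_char(';'));
}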
#[cfg(test)]
mod tests {
use std::str::FromStr;
#[cfg(feature = "nightly")]
use test::Bencher;
use super::{Mime, Value, Attr};
#[test]
fn test_mime_show() {
let mime = mime!(Text/Plain);
assert_eq!(mime.to_string(), "text/plain".to_string());
let mime = mime!(Text/Plain; Charset=Utf8);
assert_eq!(mime.to_string(), "text/plain; charset=utf-8".to_string());
}
#[test]
fn test_mime_from_str() {
assert_eq!(Mime::from_str("text/plain").unwrap(), mime!(Text/Plain));
assert_eq!(Mime::from_str("TEXT/PLAIN").unwrap(), mime!(Text/Plain));
assert_eq!(Mime::from_str("text/plain; charset=utf-8").unwrap(), mime!(Text/Plain; Charset=Utf8));
assert_eq!(Mime::from_str("text/plain;charset=\"utf-8\"").unwrap(), mime!(Text/Plain; Charset=Utf8));
assert_eq!(Mime::from_str("text/plain; charset=utf-8; foo=bar").unwrap(),
mime!(Text/Plain; Charset=Utf8, ("foo")=("bar")));
assert_eq!("*/*".parse::<Mime>().unwrap(), mime!(Star/Star));
assert_eq!("image/*".parse::<Mime>().unwrap(), mime!(Image/Star));
assert_eq!("text/*; charset=utf-8".parse::<Mime>().unwrap(), mime!(Text/Star; Charset=Utf8));
assert!("*/png".parse::<Mime>().is_err());
assert!("*image/png".parse::<Mime>().is_err());
assert!("text/*plain".parse::<Mime>().is_err());
}
#[test]
fn test_case_sensitive_values() {
assert_eq!(Mime::from_str("multipart/form-data; boundary=ABCDEFG").unwrap(),
mime!(Multipart/FormData; Boundary=("ABCDEFG")));
assert_eq!(Mime::from_str("multipart/form-data; charset=BASE64; boundary=ABCDEFG").unwrap(),
mime!(Multipart/FormData; Charset=("base64"), Boundary=("ABCDEFG")));
}
#[test]
fn test_get_param() {
let mime = Mime::from_str("text/plain; charset=utf-8; foo=bar").unwrap();
assert_eq!(mime.get_param(Attr::Charset), Some(&Value::Utf8));
assert_eq!(mime.get_param("charset"), Some(&Value::Utf8));
assert_eq!(mime.get_param("foo").unwrap(), "bar");
assert_eq!(mime.get_param("baz"), None);
}
#[test]
fn test_value_as_str() {
assert_eq!(Value::Utf8.as_str(), "utf-8");
}
#[test]
fn test_value_eq_str() {
assert_eq!(Value::Utf8, "utf-8");
assert_eq!("utf-8", Value::Utf8);
}
#[cfg(feature = "serde")]
#[test]
fn test_serialize_deserialize() {
use serde_json;
let mime = Mime::from_str("text/plain; charset=utf-8; foo=bar").unwrap();
let serialized = serde_json::to_string(&mime).unwrap();
let deserialized: Mime = serde_json::from_str(&serialized).unwrap();
assert_eq!(mime, deserialized);
}
#[cfg(feature = "nightly")]
#[bench]
fn bench_fmt(b: &mut Bencher) {
use std::fmt::Write;
let mime = mime!(Text/Plain; Charset=Utf8);
b.bytes = mime.to_string().as_bytes().len() as u64;
let mut s = String::with_capacity(64);
b.iter(|| {
let _ = write!(s, "{}", mime);
::test::black_box(&s);
unsafe { s.as_mut_vec().set_len(0); }
})
}
#[cfg(feature = "nightly")]
#[bench]
fn bench_from_str(b: &mut Bencher) {
let s = "text/plain; charset=utf-8; foo=bar";
b.bytes = s.as_bytes().len() as u64;
b.iter(|| s.parse::<Mime>())
}
}
utils.py | from __future__ import division
from time import sleep
import numpy as np
import pandas as pd
import numpy.ma as ma
import os
import h5py
"""
Module to define some useful util functions
"""
def removeIncompleteSamples(data):
""" Method to remove samples with missing views
PARAMETERS
----------
data: list
"""
print("Removing incomplete samples...")
M = len(data)
N = data[0].shape[0]
samples_to_remove = []
for n in range(N):
for m in range(M):
if pd.isnull(data[m].iloc[n][0]):
samples_to_remove.append(n)
break
if len(samples_to_remove) > 0:
print("A total of " + str(len(samples_to_remove)) + " sample(s) have at least a missing view and will be removed")
data_filt = [None]*M
samples_to_keep = np.setdiff1d(range(N),samples_to_remove)
for m in range(M):
data_filt[m] = data[m].iloc[samples_to_keep]
return data_filt
def maskData(data, data_opts):
""" Method to mask values of the data,
It is mainly to test missing values and to evaluate imputation
PARAMETERS
----------
data_opts: dic
"""
print("Masking data with the following options:")
print("at random:")
print(data_opts['maskAtRandom'])
print("full cases:")
print(data_opts['maskNSamples'])
for m in range(len(data)):
# Mask values at random
D = data[m].shape[1]
N = data[m].shape[0]
p2Mask = data_opts['maskAtRandom'][m]
if p2Mask != 0:
idxMask = np.zeros(N*D)
idxMask[:int(round(N*D*p2Mask))] = 1
np.random.shuffle(idxMask)
idxMask = np.reshape(idxMask, [N, D])
data[m] = data[m].mask(idxMask==1)
# Mask samples in a complete view
Nsamples2Mask = data_opts['maskNSamples'][m]
if Nsamples2Mask != 0:
idxMask = np.random.choice(N, size=Nsamples2Mask, replace = False)
# idxMask = np.arange(Nsamples2Mask)
# print idxMask
tmp = data[m].copy()
tmp.ix[idxMask,:] = pd.np.nan
data[m] = tmp
return data
# Function to load the data
def loadData(data_opts, verbose=True):
""" Method to load the data
PARAMETERS
----------
data_opts: dic
verbose: boolean
"""
print ("\n")
print ("#"*18)
print ("## Loading data ##")
print ("#"*18)
print ("\n")
sleep(1)
M = len(data_opts['input_files'])
Y = [None]*M
for m in range(M):
# Read file
file = data_opts['input_files'][m]
Y[m] = pd.read_csv(file, delimiter=data_opts["delimiter"], header=data_opts["colnames"], index_col=data_opts["rownames"]).astype(pd.np.float32)
# Y[m] = pd.read_csv(file, delimiter=data_opts["delimiter"])
print("Loaded %s with %d samples and %d features..." % (file, Y[m].shape[0], Y[m].shape[1]))
# Checking missing values on features
# print max(np.isnan(Y[m]).mean(axis=1))
# exit()
# Checking missing values on samples
# print np.isnan(Y[m]).mean(axis=1)
# exit()
# Check that the dimensions match
if len(set([Y[m].shape[0] for m in range(M)])) != 1:
if all([Y[m].shape[1] for m in range(M)]):
print("\nColumns seem to be the shared axis, transposing the data...")
for m in range(M): Y[m] = Y[m].T
else:
print("\nDimensionalities do not match, aborting. Make sure that either columns or rows are shared!")
exit()
# TO-DO: CHECK IF ANY SAMPLE HAS MISSING VALUES IN ALL VIEWS
# Sanity checks on the data
print ("\n" +"#"*46)
print("## Doing sanity checks and parsing the data ##")
print ("#"*46 + "\n")
for m in range(M):
# Removing features with complete missing values
nas = np.isnan(Y[m]).mean(axis=0)
if np.any(nas==1.):
print("Warning: %d features(s) on view %d have missing values in all samples, removing them..." % ( (nas==1.).sum(), m) )
Y[m].drop(Y[m].columns[np.where(nas==1.)], axis=1, inplace=True)
# Warning if there are features with no variance
var = Y[m].std(axis=0)
if np.any(var==0.):
print("Warning: %d features(s) on view %d have zero variance, consider removing them..." % ( (var==0.).sum(),m) )
# Y[m].drop(Y[m].columns[np.where(var==0.)], axis=1, inplace=True)
# Center the features
if data_opts['center_features'][m]:
print("Centering features for view " + str(m) + "...")
Y[m] = (Y[m] - Y[m].mean(axis=0))
# Scale the views to unit variance
if data_opts['scale_views'][m]:
print("Scaling view " + str(m) + " to unit variance...")
Y[m] = Y[m] / np.nanstd(Y[m].as_matrix())
# Scale the features to unit variance
if data_opts['scale_features'][m]:
print("Scaling features for view " + str(m) + " to unit variance...")
Y[m] = Y[m] / np.std(Y[m], axis=0, )
print("\nAfter data processing:")
for m in range(M): print("view %d has %d samples and %d features..." % (m, Y[m].shape[0], Y[m].shape[1]))
return Y
def dotd(A, B, out=None):
"""Diagonal of :math:`\mathrm A\mathrm B^\intercal`.
If ``A`` is :math:`n\times p` and ``B`` is :math:`p\times n`, it is done in :math:`O(pn)`.
Args:
A (array_like): Left matrix.
B (array_like): Right matrix.
out (:class:`numpy.ndarray`, optional): copy result to.
Returns:
:class:`numpy.ndarray`: Resulting diagonal.
"""
A = ma.asarray(A, float)
B = ma.asarray(B, float)
if A.ndim == 1 and B.ndim == 1:
if out is None:
return ma.dot(A, B)
return ma.dot(A, B, out)
if out is None:
out = ma.empty((A.shape[0], ), float)
out[:] = ma.sum(A * B.T, axis=1)
return out
def nans(shape, dtype=float):
""" Method to create an array filled with missing values """
a = np.empty(shape, dtype)
a.fill(np.nan)
return a
def corr(A,B):
""" Method to efficiently compute correlation coefficients between two matrices
PARMETERS
---------
A: np array
B: np array
"""
# Rowwise mean of input arrays & subtract from input arrays themeselves
A_mA = A - A.mean(1)[:,None]
B_mB = B - B.mean(1)[:,None]
# Sum of squares across rows
ssA = (A_mA**2).sum(1);
ssB = (B_mB**2).sum(1);
# Finally get corr coeff
return np.dot(A_mA,B_mB.T)/np.sqrt(np.dot(ssA[:,None],ssB[None]))
# NOT HERE
def logdet(X):
return np.log(np.linalg.det(X))
# UC = np.linalg.cholesky(X)
# return 2*sum(np.log(np.diag(UC)))
# NOT HERE
def ddot(d, mtx, left=True):
"""Multiply a full matrix by a diagonal matrix.
This function should always be faster than dot.
Input:
d -- 1D (N,) array (contains the diagonal elements)
mtx -- 2D (N,N) array
left: is the diagonal matrix on the left or on the right of the product?
Output:
ddot(d, mts, left=True) == dot(diag(d), mtx)
ddot(d, mts, left=False) == dot(mtx, diag(d))
"""
if left:
return (d*mtx.T).T
else:
return d*mtx
def saveParameters(model, hdf5, view_names=None):
""" Method to save the parameters of the model in an hdf5 file
PARAMETERS
----------
model: a BayesNet instance
hdf5:
view_names
"""
# Get nodes from the model
nodes = model.getNodes()
# Create groups
param_grp = hdf5.create_group("parameters")
# Iterate over nodes
for node in nodes:
# Collect node parameters
parameters = nodes[node].getParameters()
# Create node subgroup
node_subgrp = param_grp.create_group(node)
# Multi-view nodes
if type(parameters) == list:
# Loop through the views
for m in range(len(parameters)):
if view_names is not None:
tmp = view_names[m]
else:
tmp = "%d" % m
# Create subsubgroup for the view
view_subgrp = node_subgrp.create_group(tmp)
# Loop through the parameters of the view
if parameters[m] is not None:
# Variational nodes
if type(parameters[m]) == dict:
for param_name in parameters[m].keys():
if parameters[m][param_name] is not None:
view_subgrp.create_dataset(param_name, data=parameters[m][param_name].T)
# Non-variational nodes (no distributions)
elif type(parameters[m]) == np.ndarray:
view_subgrp.create_dataset("value", data=parameters[m].T)
# Single-view nodes
else:
for param_name in parameters.keys():
node_subgrp.create_dataset("%s" % (param_name), data=parameters[param_name].T)
pass
def saveExpectations(model, hdf5, view_names=None):
""" Method to save the expectations of the model in an hdf5 file
PARAMETERS
----------
model: a BayesNet instance
hdf5:
view_names:
"""
# Get nodes from the model
nodes = model.getNodes()
exp_grp = hdf5.create_group("expectations")
# Iterate over nodes
for node in nodes:
# Collect node expectations
expectations = nodes[node].getExpectations()
# Multi-view nodes
if type(expectations) == list:
# Create subgroup for the node
node_subgrp = exp_grp.create_group(node)
# Iterate over views
for m in range(len(expectations)):
if view_names is not None:
view = view_names[m]
else:
view = "%d" % m
# Collect expectations
exp = expectations[m]["E"]
if exp is not None:
if type(exp) == ma.core.MaskedArray:
tmp = ma.filled(exp, fill_value=np.nan)
node_subgrp.create_dataset(view, data=tmp.T)
else:
node_subgrp.create_dataset(view, data=exp.T)
# Single-view nodes
else:
exp_grp.create_dataset(node, data=expectations["E"].T)
def saveTrainingStats(model, hdf5):
""" Method to save the training statistics in an hdf5 file
PARAMETERS
----------
model: a BayesNet instance
hdf5:
"""
stats = model.getTrainingStats()
stats_grp = hdf5.create_group("training_stats")
stats_grp.create_dataset("activeK", data=stats["activeK"])
stats_grp.create_dataset("elbo", data=stats["elbo"])
stats_grp.create_dataset("elbo_terms", data=stats["elbo_terms"].T)
stats_grp['elbo_terms'].attrs['colnames'] = [a.encode('utf8') for a in stats["elbo_terms"].columns.values]
def saveTrainingOpts(opts, hdf5):
""" Method to save the training options in an hdf5 file
PARAMETERS
----------
opts:
hdf5:
"""
# Remove dictionaries from the options
for k,v in opts.copy().items():
if type(v)==dict:
for k1,v1 in v.items():
opts[str(k)+"_"+str(k1)] = v1
opts.pop(k)
# Create HDF5 data set
hdf5.create_dataset("training_opts", data=np.array(list(opts.values()), dtype=np.float))
hdf5['training_opts'].attrs['names'] = np.asarray(list(opts.keys())).astype('S')
def saveModelOpts(opts, hdf5):
""" Method to save the model options in an hdf5 file
PARAMETERS
----------
opts:
hdf5:
"""
opts_interest = ["learnIntercept","schedule","likelihood","sparsity"]
opts = dict((k, opts[k]) for k in opts_interest)
grp = hdf5.create_group('model_opts')
for k,v in opts.items():
grp.create_dataset(k, data=np.asarray(v).astype('S'))
grp[k].attrs['names'] = np.asarray(list(opts.keys())).astype('S')
def saveTrainingData(model, hdf5, view_names=None, sample_names=None, feature_names=None, likelihoods=None):
""" Method to save the training data in an hdf5 file
PARAMETERS
----------
model: a BayesNet instance
hdf5:
view_names
sample_names
feature_names
"""
data = model.getTrainingData()
data_grp = hdf5.create_group("data")
featuredata_grp = hdf5.create_group("features")
hdf5.create_dataset("samples", data=np.array(sample_names, dtype='S50'))
if likelihoods is not None:
data_grp.attrs['likelihood'] = np.array(likelihoods, dtype='S50')
for m in range(len(data)):
view = view_names[m] if view_names is not None else str(m)
data_grp.create_dataset(view, data=data[m].data.T)
if feature_names is not None:
# data_grp.attrs['features'] = np.array(feature_names[m], dtype='S')
featuredata_grp.create_dataset(view, data=np.array(feature_names[m], dtype='S50'))
def saveModel(model, outfile, train_opts, model_opts, view_names=None, sample_names=None, feature_names=None):
""" Method to save the model in an hdf5 file
PARAMETERS
----------
TO-FILL.... | # QC checks
assert model.trained == True, "Model is not trained yet"
assert len(np.unique(view_names)) == len(view_names), 'View names must be unique'
assert len(np.unique(sample_names)) == len(sample_names), 'Sample names must be unique'
# Create output directory
if not os.path.isdir(os.path.dirname(outfile)):
print("Output directory does not exist, creating it...")
os.makedirs(os.path.dirname(outfile))
# For some reason h5py orders the datasets alphabetically, so we have to sort the likelihoods accordingly
idx = sorted(range(len(view_names)), key=lambda k: view_names[k])
tmp = [model_opts["likelihood"][idx[m]] for m in range(len(model_opts["likelihood"]))]
model_opts["likelihood"] = tmp
# Open HDF5 handler
hdf5 = h5py.File(outfile,'w')
# Save expectations
saveExpectations(model,hdf5,view_names)
# Save parameters
# saveParameters(model,hdf5,view_names)
# Save training statistics
saveTrainingStats(model,hdf5)
# Save training options
saveTrainingOpts(train_opts,hdf5)
# Save model options
saveModelOpts(model_opts,hdf5)
# Save training data
saveTrainingData(model, hdf5, view_names, sample_names, feature_names, model_opts["likelihood"])
# Close HDF5 file
hdf5.close() | """
| random_line_split |
utils.py | from __future__ import division
from time import sleep
import numpy as np
import pandas as pd
import numpy.ma as ma
import os
import h5py
"""
Module to define some useful util functions
"""
def removeIncompleteSamples(data):
""" Method to remove samples with missing views
PARAMETERS
----------
data: list
"""
print("Removing incomplete samples...")
M = len(data)
N = data[0].shape[0]
samples_to_remove = []
for n in range(N):
for m in range(M):
if pd.isnull(data[m].iloc[n][0]):
samples_to_remove.append(n)
break
if len(samples_to_remove) > 0:
print("A total of " + str(len(samples_to_remove)) + " sample(s) have at least a missing view and will be removed")
data_filt = [None]*M
samples_to_keep = np.setdiff1d(range(N),samples_to_remove)
for m in range(M):
data_filt[m] = data[m].iloc[samples_to_keep]
return data_filt
def maskData(data, data_opts):
""" Method to mask values of the data,
It is mainly to test missing values and to evaluate imputation
PARAMETERS
----------
data_opts: dic
"""
print("Masking data with the following options:")
print("at random:")
print(data_opts['maskAtRandom'])
print("full cases:")
print(data_opts['maskNSamples'])
for m in range(len(data)):
# Mask values at random
D = data[m].shape[1]
N = data[m].shape[0]
p2Mask = data_opts['maskAtRandom'][m]
if p2Mask != 0:
idxMask = np.zeros(N*D)
idxMask[:int(round(N*D*p2Mask))] = 1
np.random.shuffle(idxMask)
idxMask = np.reshape(idxMask, [N, D])
data[m] = data[m].mask(idxMask==1)
# Mask samples in a complete view
Nsamples2Mask = data_opts['maskNSamples'][m]
if Nsamples2Mask != 0:
idxMask = np.random.choice(N, size=Nsamples2Mask, replace = False)
# idxMask = np.arange(Nsamples2Mask)
# print idxMask
tmp = data[m].copy()
tmp.ix[idxMask,:] = pd.np.nan
data[m] = tmp
return data
# Function to load the data
def loadData(data_opts, verbose=True):
""" Method to load the data
PARAMETERS
----------
data_opts: dic
verbose: boolean
"""
print ("\n")
print ("#"*18)
print ("## Loading data ##")
print ("#"*18)
print ("\n")
sleep(1)
M = len(data_opts['input_files'])
Y = [None]*M
for m in range(M):
# Read file
file = data_opts['input_files'][m]
Y[m] = pd.read_csv(file, delimiter=data_opts["delimiter"], header=data_opts["colnames"], index_col=data_opts["rownames"]).astype(pd.np.float32)
# Y[m] = pd.read_csv(file, delimiter=data_opts["delimiter"])
print("Loaded %s with %d samples and %d features..." % (file, Y[m].shape[0], Y[m].shape[1]))
# Checking missing values on features
# print max(np.isnan(Y[m]).mean(axis=1))
# exit()
# Checking missing values on samples
# print np.isnan(Y[m]).mean(axis=1)
# exit()
# Check that the dimensions match
if len(set([Y[m].shape[0] for m in range(M)])) != 1:
if all([Y[m].shape[1] for m in range(M)]):
print("\nColumns seem to be the shared axis, transposing the data...")
for m in range(M): Y[m] = Y[m].T
else:
print("\nDimensionalities do not match, aborting. Make sure that either columns or rows are shared!")
exit()
# TO-DO: CHECK IF ANY SAMPLE HAS MISSING VALUES IN ALL VIEWS
# Sanity checks on the data
print ("\n" +"#"*46)
print("## Doing sanity checks and parsing the data ##")
print ("#"*46 + "\n")
for m in range(M):
# Removing features with complete missing values
nas = np.isnan(Y[m]).mean(axis=0)
if np.any(nas==1.):
print("Warning: %d features(s) on view %d have missing values in all samples, removing them..." % ( (nas==1.).sum(), m) )
Y[m].drop(Y[m].columns[np.where(nas==1.)], axis=1, inplace=True)
# Warning if there are features with no variance
var = Y[m].std(axis=0)
if np.any(var==0.):
print("Warning: %d features(s) on view %d have zero variance, consider removing them..." % ( (var==0.).sum(),m) )
# Y[m].drop(Y[m].columns[np.where(var==0.)], axis=1, inplace=True)
# Center the features
if data_opts['center_features'][m]:
print("Centering features for view " + str(m) + "...")
Y[m] = (Y[m] - Y[m].mean(axis=0))
# Scale the views to unit variance
if data_opts['scale_views'][m]:
print("Scaling view " + str(m) + " to unit variance...")
Y[m] = Y[m] / np.nanstd(Y[m].as_matrix())
# Scale the features to unit variance
if data_opts['scale_features'][m]:
print("Scaling features for view " + str(m) + " to unit variance...")
Y[m] = Y[m] / np.std(Y[m], axis=0, )
print("\nAfter data processing:")
for m in range(M): print("view %d has %d samples and %d features..." % (m, Y[m].shape[0], Y[m].shape[1]))
return Y
def dotd(A, B, out=None):
"""Diagonal of :math:`\mathrm A\mathrm B^\intercal`.
If ``A`` is :math:`n\times p` and ``B`` is :math:`p\times n`, it is done in :math:`O(pn)`.
Args:
A (array_like): Left matrix.
B (array_like): Right matrix.
out (:class:`numpy.ndarray`, optional): copy result to.
Returns:
:class:`numpy.ndarray`: Resulting diagonal.
"""
A = ma.asarray(A, float)
B = ma.asarray(B, float)
if A.ndim == 1 and B.ndim == 1:
if out is None:
return ma.dot(A, B)
return ma.dot(A, B, out)
if out is None:
out = ma.empty((A.shape[0], ), float)
out[:] = ma.sum(A * B.T, axis=1)
return out
def nans(shape, dtype=float):
""" Method to create an array filled with missing values """
a = np.empty(shape, dtype)
a.fill(np.nan)
return a
def | (A,B):
""" Method to efficiently compute correlation coefficients between two matrices
PARMETERS
---------
A: np array
B: np array
"""
# Rowwise mean of input arrays & subtract from input arrays themeselves
A_mA = A - A.mean(1)[:,None]
B_mB = B - B.mean(1)[:,None]
# Sum of squares across rows
ssA = (A_mA**2).sum(1);
ssB = (B_mB**2).sum(1);
# Finally get corr coeff
return np.dot(A_mA,B_mB.T)/np.sqrt(np.dot(ssA[:,None],ssB[None]))
# NOT HERE
def logdet(X):
return np.log(np.linalg.det(X))
# UC = np.linalg.cholesky(X)
# return 2*sum(np.log(np.diag(UC)))
# NOT HERE
def ddot(d, mtx, left=True):
"""Multiply a full matrix by a diagonal matrix.
This function should always be faster than dot.
Input:
d -- 1D (N,) array (contains the diagonal elements)
mtx -- 2D (N,N) array
left: is the diagonal matrix on the left or on the right of the product?
Output:
ddot(d, mts, left=True) == dot(diag(d), mtx)
ddot(d, mts, left=False) == dot(mtx, diag(d))
"""
if left:
return (d*mtx.T).T
else:
return d*mtx
def saveParameters(model, hdf5, view_names=None):
""" Method to save the parameters of the model in an hdf5 file
PARAMETERS
----------
model: a BayesNet instance
hdf5:
view_names
"""
# Get nodes from the model
nodes = model.getNodes()
# Create groups
param_grp = hdf5.create_group("parameters")
# Iterate over nodes
for node in nodes:
# Collect node parameters
parameters = nodes[node].getParameters()
# Create node subgroup
node_subgrp = param_grp.create_group(node)
# Multi-view nodes
if type(parameters) == list:
# Loop through the views
for m in range(len(parameters)):
if view_names is not None:
tmp = view_names[m]
else:
tmp = "%d" % m
# Create subsubgroup for the view
view_subgrp = node_subgrp.create_group(tmp)
# Loop through the parameters of the view
if parameters[m] is not None:
# Variational nodes
if type(parameters[m]) == dict:
for param_name in parameters[m].keys():
if parameters[m][param_name] is not None:
view_subgrp.create_dataset(param_name, data=parameters[m][param_name].T)
# Non-variational nodes (no distributions)
elif type(parameters[m]) == np.ndarray:
view_subgrp.create_dataset("value", data=parameters[m].T)
# Single-view nodes
else:
for param_name in parameters.keys():
node_subgrp.create_dataset("%s" % (param_name), data=parameters[param_name].T)
pass
def saveExpectations(model, hdf5, view_names=None):
""" Method to save the expectations of the model in an hdf5 file
PARAMETERS
----------
model: a BayesNet instance
hdf5:
view_names:
"""
# Get nodes from the model
nodes = model.getNodes()
exp_grp = hdf5.create_group("expectations")
# Iterate over nodes
for node in nodes:
# Collect node expectations
expectations = nodes[node].getExpectations()
# Multi-view nodes
if type(expectations) == list:
# Create subgroup for the node
node_subgrp = exp_grp.create_group(node)
# Iterate over views
for m in range(len(expectations)):
if view_names is not None:
view = view_names[m]
else:
view = "%d" % m
# Collect expectations
exp = expectations[m]["E"]
if exp is not None:
if type(exp) == ma.core.MaskedArray:
tmp = ma.filled(exp, fill_value=np.nan)
node_subgrp.create_dataset(view, data=tmp.T)
else:
node_subgrp.create_dataset(view, data=exp.T)
# Single-view nodes
else:
exp_grp.create_dataset(node, data=expectations["E"].T)
def saveTrainingStats(model, hdf5):
""" Method to save the training statistics in an hdf5 file
PARAMETERS
----------
model: a BayesNet instance
hdf5:
"""
stats = model.getTrainingStats()
stats_grp = hdf5.create_group("training_stats")
stats_grp.create_dataset("activeK", data=stats["activeK"])
stats_grp.create_dataset("elbo", data=stats["elbo"])
stats_grp.create_dataset("elbo_terms", data=stats["elbo_terms"].T)
stats_grp['elbo_terms'].attrs['colnames'] = [a.encode('utf8') for a in stats["elbo_terms"].columns.values]
def saveTrainingOpts(opts, hdf5):
""" Method to save the training options in an hdf5 file
PARAMETERS
----------
opts:
hdf5:
"""
# Remove dictionaries from the options
for k,v in opts.copy().items():
if type(v)==dict:
for k1,v1 in v.items():
opts[str(k)+"_"+str(k1)] = v1
opts.pop(k)
# Create HDF5 data set
hdf5.create_dataset("training_opts", data=np.array(list(opts.values()), dtype=np.float))
hdf5['training_opts'].attrs['names'] = np.asarray(list(opts.keys())).astype('S')
def saveModelOpts(opts, hdf5):
""" Method to save the model options in an hdf5 file
PARAMETERS
----------
opts:
hdf5:
"""
opts_interest = ["learnIntercept","schedule","likelihood","sparsity"]
opts = dict((k, opts[k]) for k in opts_interest)
grp = hdf5.create_group('model_opts')
for k,v in opts.items():
grp.create_dataset(k, data=np.asarray(v).astype('S'))
grp[k].attrs['names'] = np.asarray(list(opts.keys())).astype('S')
def saveTrainingData(model, hdf5, view_names=None, sample_names=None, feature_names=None, likelihoods=None):
""" Method to save the training data in an hdf5 file
PARAMETERS
----------
model: a BayesNet instance
hdf5:
view_names
sample_names
feature_names
"""
data = model.getTrainingData()
data_grp = hdf5.create_group("data")
featuredata_grp = hdf5.create_group("features")
hdf5.create_dataset("samples", data=np.array(sample_names, dtype='S50'))
if likelihoods is not None:
data_grp.attrs['likelihood'] = np.array(likelihoods, dtype='S50')
for m in range(len(data)):
view = view_names[m] if view_names is not None else str(m)
data_grp.create_dataset(view, data=data[m].data.T)
if feature_names is not None:
# data_grp.attrs['features'] = np.array(feature_names[m], dtype='S')
featuredata_grp.create_dataset(view, data=np.array(feature_names[m], dtype='S50'))
def saveModel(model, outfile, train_opts, model_opts, view_names=None, sample_names=None, feature_names=None):
""" Method to save the model in an hdf5 file
PARAMETERS
----------
TO-FILL....
"""
# QC checks
assert model.trained == True, "Model is not trained yet"
assert len(np.unique(view_names)) == len(view_names), 'View names must be unique'
assert len(np.unique(sample_names)) == len(sample_names), 'Sample names must be unique'
# Create output directory
if not os.path.isdir(os.path.dirname(outfile)):
print("Output directory does not exist, creating it...")
os.makedirs(os.path.dirname(outfile))
# For some reason h5py orders the datasets alphabetically, so we have to sort the likelihoods accordingly
idx = sorted(range(len(view_names)), key=lambda k: view_names[k])
tmp = [model_opts["likelihood"][idx[m]] for m in range(len(model_opts["likelihood"]))]
model_opts["likelihood"] = tmp
# Open HDF5 handler
hdf5 = h5py.File(outfile,'w')
# Save expectations
saveExpectations(model,hdf5,view_names)
# Save parameters
# saveParameters(model,hdf5,view_names)
# Save training statistics
saveTrainingStats(model,hdf5)
# Save training options
saveTrainingOpts(train_opts,hdf5)
# Save model options
saveModelOpts(model_opts,hdf5)
# Save training data
saveTrainingData(model, hdf5, view_names, sample_names, feature_names, model_opts["likelihood"])
# Close HDF5 file
hdf5.close()
| corr | identifier_name |
utils.py | from __future__ import division
from time import sleep
import numpy as np
import pandas as pd
import numpy.ma as ma
import os
import h5py
"""
Module to define some useful util functions
"""
def removeIncompleteSamples(data):
""" Method to remove samples with missing views
PARAMETERS
----------
data: list
"""
print("Removing incomplete samples...")
M = len(data)
N = data[0].shape[0]
samples_to_remove = []
for n in range(N):
for m in range(M):
if pd.isnull(data[m].iloc[n][0]):
samples_to_remove.append(n)
break
if len(samples_to_remove) > 0:
print("A total of " + str(len(samples_to_remove)) + " sample(s) have at least a missing view and will be removed")
data_filt = [None]*M
samples_to_keep = np.setdiff1d(range(N),samples_to_remove)
for m in range(M):
data_filt[m] = data[m].iloc[samples_to_keep]
return data_filt
def maskData(data, data_opts):
""" Method to mask values of the data,
It is mainly to test missing values and to evaluate imputation
PARAMETERS
----------
data_opts: dic
"""
print("Masking data with the following options:")
print("at random:")
print(data_opts['maskAtRandom'])
print("full cases:")
print(data_opts['maskNSamples'])
for m in range(len(data)):
# Mask values at random
D = data[m].shape[1]
N = data[m].shape[0]
p2Mask = data_opts['maskAtRandom'][m]
if p2Mask != 0:
idxMask = np.zeros(N*D)
idxMask[:int(round(N*D*p2Mask))] = 1
np.random.shuffle(idxMask)
idxMask = np.reshape(idxMask, [N, D])
data[m] = data[m].mask(idxMask==1)
# Mask samples in a complete view
Nsamples2Mask = data_opts['maskNSamples'][m]
if Nsamples2Mask != 0:
idxMask = np.random.choice(N, size=Nsamples2Mask, replace = False)
# idxMask = np.arange(Nsamples2Mask)
# print idxMask
tmp = data[m].copy()
tmp.ix[idxMask,:] = pd.np.nan
data[m] = tmp
return data
# Function to load the data
def loadData(data_opts, verbose=True):
|
def dotd(A, B, out=None):
"""Diagonal of :math:`\mathrm A\mathrm B^\intercal`.
If ``A`` is :math:`n\times p` and ``B`` is :math:`p\times n`, it is done in :math:`O(pn)`.
Args:
A (array_like): Left matrix.
B (array_like): Right matrix.
out (:class:`numpy.ndarray`, optional): copy result to.
Returns:
:class:`numpy.ndarray`: Resulting diagonal.
"""
A = ma.asarray(A, float)
B = ma.asarray(B, float)
if A.ndim == 1 and B.ndim == 1:
if out is None:
return ma.dot(A, B)
return ma.dot(A, B, out)
if out is None:
out = ma.empty((A.shape[0], ), float)
out[:] = ma.sum(A * B.T, axis=1)
return out
def nans(shape, dtype=float):
""" Method to create an array filled with missing values """
a = np.empty(shape, dtype)
a.fill(np.nan)
return a
def corr(A,B):
""" Method to efficiently compute correlation coefficients between two matrices
PARMETERS
---------
A: np array
B: np array
"""
# Rowwise mean of input arrays & subtract from input arrays themeselves
A_mA = A - A.mean(1)[:,None]
B_mB = B - B.mean(1)[:,None]
# Sum of squares across rows
ssA = (A_mA**2).sum(1);
ssB = (B_mB**2).sum(1);
# Finally get corr coeff
return np.dot(A_mA,B_mB.T)/np.sqrt(np.dot(ssA[:,None],ssB[None]))
# NOT HERE
def logdet(X):
return np.log(np.linalg.det(X))
# UC = np.linalg.cholesky(X)
# return 2*sum(np.log(np.diag(UC)))
# NOT HERE
def ddot(d, mtx, left=True):
"""Multiply a full matrix by a diagonal matrix.
This function should always be faster than dot.
Input:
d -- 1D (N,) array (contains the diagonal elements)
mtx -- 2D (N,N) array
left: is the diagonal matrix on the left or on the right of the product?
Output:
ddot(d, mts, left=True) == dot(diag(d), mtx)
ddot(d, mts, left=False) == dot(mtx, diag(d))
"""
if left:
return (d*mtx.T).T
else:
return d*mtx
def saveParameters(model, hdf5, view_names=None):
""" Method to save the parameters of the model in an hdf5 file
PARAMETERS
----------
model: a BayesNet instance
hdf5:
view_names
"""
# Get nodes from the model
nodes = model.getNodes()
# Create groups
param_grp = hdf5.create_group("parameters")
# Iterate over nodes
for node in nodes:
# Collect node parameters
parameters = nodes[node].getParameters()
# Create node subgroup
node_subgrp = param_grp.create_group(node)
# Multi-view nodes
if type(parameters) == list:
# Loop through the views
for m in range(len(parameters)):
if view_names is not None:
tmp = view_names[m]
else:
tmp = "%d" % m
# Create subsubgroup for the view
view_subgrp = node_subgrp.create_group(tmp)
# Loop through the parameters of the view
if parameters[m] is not None:
# Variational nodes
if type(parameters[m]) == dict:
for param_name in parameters[m].keys():
if parameters[m][param_name] is not None:
view_subgrp.create_dataset(param_name, data=parameters[m][param_name].T)
# Non-variational nodes (no distributions)
elif type(parameters[m]) == np.ndarray:
view_subgrp.create_dataset("value", data=parameters[m].T)
# Single-view nodes
else:
for param_name in parameters.keys():
node_subgrp.create_dataset("%s" % (param_name), data=parameters[param_name].T)
pass
def saveExpectations(model, hdf5, view_names=None):
""" Method to save the expectations of the model in an hdf5 file
PARAMETERS
----------
model: a BayesNet instance
hdf5:
view_names:
"""
# Get nodes from the model
nodes = model.getNodes()
exp_grp = hdf5.create_group("expectations")
# Iterate over nodes
for node in nodes:
# Collect node expectations
expectations = nodes[node].getExpectations()
# Multi-view nodes
if type(expectations) == list:
# Create subgroup for the node
node_subgrp = exp_grp.create_group(node)
# Iterate over views
for m in range(len(expectations)):
if view_names is not None:
view = view_names[m]
else:
view = "%d" % m
# Collect expectations
exp = expectations[m]["E"]
if exp is not None:
if type(exp) == ma.core.MaskedArray:
tmp = ma.filled(exp, fill_value=np.nan)
node_subgrp.create_dataset(view, data=tmp.T)
else:
node_subgrp.create_dataset(view, data=exp.T)
# Single-view nodes
else:
exp_grp.create_dataset(node, data=expectations["E"].T)
def saveTrainingStats(model, hdf5):
""" Method to save the training statistics in an hdf5 file
PARAMETERS
----------
model: a BayesNet instance
hdf5:
"""
stats = model.getTrainingStats()
stats_grp = hdf5.create_group("training_stats")
stats_grp.create_dataset("activeK", data=stats["activeK"])
stats_grp.create_dataset("elbo", data=stats["elbo"])
stats_grp.create_dataset("elbo_terms", data=stats["elbo_terms"].T)
stats_grp['elbo_terms'].attrs['colnames'] = [a.encode('utf8') for a in stats["elbo_terms"].columns.values]
def saveTrainingOpts(opts, hdf5):
""" Method to save the training options in an hdf5 file
PARAMETERS
----------
opts:
hdf5:
"""
# Remove dictionaries from the options
for k,v in opts.copy().items():
if type(v)==dict:
for k1,v1 in v.items():
opts[str(k)+"_"+str(k1)] = v1
opts.pop(k)
# Create HDF5 data set
hdf5.create_dataset("training_opts", data=np.array(list(opts.values()), dtype=np.float))
hdf5['training_opts'].attrs['names'] = np.asarray(list(opts.keys())).astype('S')
def saveModelOpts(opts, hdf5):
""" Method to save the model options in an hdf5 file
PARAMETERS
----------
opts:
hdf5:
"""
opts_interest = ["learnIntercept","schedule","likelihood","sparsity"]
opts = dict((k, opts[k]) for k in opts_interest)
grp = hdf5.create_group('model_opts')
for k,v in opts.items():
grp.create_dataset(k, data=np.asarray(v).astype('S'))
grp[k].attrs['names'] = np.asarray(list(opts.keys())).astype('S')
def saveTrainingData(model, hdf5, view_names=None, sample_names=None, feature_names=None, likelihoods=None):
""" Method to save the training data in an hdf5 file
PARAMETERS
----------
model: a BayesNet instance
hdf5:
view_names
sample_names
feature_names
"""
data = model.getTrainingData()
data_grp = hdf5.create_group("data")
featuredata_grp = hdf5.create_group("features")
hdf5.create_dataset("samples", data=np.array(sample_names, dtype='S50'))
if likelihoods is not None:
data_grp.attrs['likelihood'] = np.array(likelihoods, dtype='S50')
for m in range(len(data)):
view = view_names[m] if view_names is not None else str(m)
data_grp.create_dataset(view, data=data[m].data.T)
if feature_names is not None:
# data_grp.attrs['features'] = np.array(feature_names[m], dtype='S')
featuredata_grp.create_dataset(view, data=np.array(feature_names[m], dtype='S50'))
def saveModel(model, outfile, train_opts, model_opts, view_names=None, sample_names=None, feature_names=None):
""" Method to save the model in an hdf5 file
PARAMETERS
----------
TO-FILL....
"""
# QC checks
assert model.trained == True, "Model is not trained yet"
assert len(np.unique(view_names)) == len(view_names), 'View names must be unique'
assert len(np.unique(sample_names)) == len(sample_names), 'Sample names must be unique'
# Create output directory
if not os.path.isdir(os.path.dirname(outfile)):
print("Output directory does not exist, creating it...")
os.makedirs(os.path.dirname(outfile))
# For some reason h5py orders the datasets alphabetically, so we have to sort the likelihoods accordingly
idx = sorted(range(len(view_names)), key=lambda k: view_names[k])
tmp = [model_opts["likelihood"][idx[m]] for m in range(len(model_opts["likelihood"]))]
model_opts["likelihood"] = tmp
# Open HDF5 handler
hdf5 = h5py.File(outfile,'w')
# Save expectations
saveExpectations(model,hdf5,view_names)
# Save parameters
# saveParameters(model,hdf5,view_names)
# Save training statistics
saveTrainingStats(model,hdf5)
# Save training options
saveTrainingOpts(train_opts,hdf5)
# Save model options
saveModelOpts(model_opts,hdf5)
# Save training data
saveTrainingData(model, hdf5, view_names, sample_names, feature_names, model_opts["likelihood"])
# Close HDF5 file
hdf5.close()
| """ Method to load the data
PARAMETERS
----------
data_opts: dic
verbose: boolean
"""
print ("\n")
print ("#"*18)
print ("## Loading data ##")
print ("#"*18)
print ("\n")
sleep(1)
M = len(data_opts['input_files'])
Y = [None]*M
for m in range(M):
# Read file
file = data_opts['input_files'][m]
Y[m] = pd.read_csv(file, delimiter=data_opts["delimiter"], header=data_opts["colnames"], index_col=data_opts["rownames"]).astype(pd.np.float32)
# Y[m] = pd.read_csv(file, delimiter=data_opts["delimiter"])
print("Loaded %s with %d samples and %d features..." % (file, Y[m].shape[0], Y[m].shape[1]))
# Checking missing values on features
# print max(np.isnan(Y[m]).mean(axis=1))
# exit()
# Checking missing values on samples
# print np.isnan(Y[m]).mean(axis=1)
# exit()
# Check that the dimensions match
if len(set([Y[m].shape[0] for m in range(M)])) != 1:
if all([Y[m].shape[1] for m in range(M)]):
print("\nColumns seem to be the shared axis, transposing the data...")
for m in range(M): Y[m] = Y[m].T
else:
print("\nDimensionalities do not match, aborting. Make sure that either columns or rows are shared!")
exit()
# TO-DO: CHECK IF ANY SAMPLE HAS MISSING VALUES IN ALL VIEWS
# Sanity checks on the data
print ("\n" +"#"*46)
print("## Doing sanity checks and parsing the data ##")
print ("#"*46 + "\n")
for m in range(M):
# Removing features with complete missing values
nas = np.isnan(Y[m]).mean(axis=0)
if np.any(nas==1.):
print("Warning: %d features(s) on view %d have missing values in all samples, removing them..." % ( (nas==1.).sum(), m) )
Y[m].drop(Y[m].columns[np.where(nas==1.)], axis=1, inplace=True)
# Warning if there are features with no variance
var = Y[m].std(axis=0)
if np.any(var==0.):
print("Warning: %d features(s) on view %d have zero variance, consider removing them..." % ( (var==0.).sum(),m) )
# Y[m].drop(Y[m].columns[np.where(var==0.)], axis=1, inplace=True)
# Center the features
if data_opts['center_features'][m]:
print("Centering features for view " + str(m) + "...")
Y[m] = (Y[m] - Y[m].mean(axis=0))
# Scale the views to unit variance
if data_opts['scale_views'][m]:
print("Scaling view " + str(m) + " to unit variance...")
Y[m] = Y[m] / np.nanstd(Y[m].as_matrix())
# Scale the features to unit variance
if data_opts['scale_features'][m]:
print("Scaling features for view " + str(m) + " to unit variance...")
Y[m] = Y[m] / np.std(Y[m], axis=0, )
print("\nAfter data processing:")
for m in range(M): print("view %d has %d samples and %d features..." % (m, Y[m].shape[0], Y[m].shape[1]))
return Y | identifier_body |
utils.py | from __future__ import division
from time import sleep
import numpy as np
import pandas as pd
import numpy.ma as ma
import os
import h5py
"""
Module to define some useful util functions
"""
def removeIncompleteSamples(data):
""" Method to remove samples with missing views
PARAMETERS
----------
data: list
"""
print("Removing incomplete samples...")
M = len(data)
N = data[0].shape[0]
samples_to_remove = []
for n in range(N):
for m in range(M):
if pd.isnull(data[m].iloc[n][0]):
samples_to_remove.append(n)
break
if len(samples_to_remove) > 0:
|
data_filt = [None]*M
samples_to_keep = np.setdiff1d(range(N),samples_to_remove)
for m in range(M):
data_filt[m] = data[m].iloc[samples_to_keep]
return data_filt
def maskData(data, data_opts):
""" Method to mask values of the data,
It is mainly to test missing values and to evaluate imputation
PARAMETERS
----------
data_opts: dic
"""
print("Masking data with the following options:")
print("at random:")
print(data_opts['maskAtRandom'])
print("full cases:")
print(data_opts['maskNSamples'])
for m in range(len(data)):
# Mask values at random
D = data[m].shape[1]
N = data[m].shape[0]
p2Mask = data_opts['maskAtRandom'][m]
if p2Mask != 0:
idxMask = np.zeros(N*D)
idxMask[:int(round(N*D*p2Mask))] = 1
np.random.shuffle(idxMask)
idxMask = np.reshape(idxMask, [N, D])
data[m] = data[m].mask(idxMask==1)
# Mask samples in a complete view
Nsamples2Mask = data_opts['maskNSamples'][m]
if Nsamples2Mask != 0:
idxMask = np.random.choice(N, size=Nsamples2Mask, replace = False)
# idxMask = np.arange(Nsamples2Mask)
# print idxMask
tmp = data[m].copy()
tmp.ix[idxMask,:] = pd.np.nan
data[m] = tmp
return data
# Function to load the data
def loadData(data_opts, verbose=True):
""" Method to load the data
PARAMETERS
----------
data_opts: dic
verbose: boolean
"""
print ("\n")
print ("#"*18)
print ("## Loading data ##")
print ("#"*18)
print ("\n")
sleep(1)
M = len(data_opts['input_files'])
Y = [None]*M
for m in range(M):
# Read file
file = data_opts['input_files'][m]
Y[m] = pd.read_csv(file, delimiter=data_opts["delimiter"], header=data_opts["colnames"], index_col=data_opts["rownames"]).astype(pd.np.float32)
# Y[m] = pd.read_csv(file, delimiter=data_opts["delimiter"])
print("Loaded %s with %d samples and %d features..." % (file, Y[m].shape[0], Y[m].shape[1]))
# Checking missing values on features
# print max(np.isnan(Y[m]).mean(axis=1))
# exit()
# Checking missing values on samples
# print np.isnan(Y[m]).mean(axis=1)
# exit()
# Check that the dimensions match
if len(set([Y[m].shape[0] for m in range(M)])) != 1:
if all([Y[m].shape[1] for m in range(M)]):
print("\nColumns seem to be the shared axis, transposing the data...")
for m in range(M): Y[m] = Y[m].T
else:
print("\nDimensionalities do not match, aborting. Make sure that either columns or rows are shared!")
exit()
# TO-DO: CHECK IF ANY SAMPLE HAS MISSING VALUES IN ALL VIEWS
# Sanity checks on the data
print ("\n" +"#"*46)
print("## Doing sanity checks and parsing the data ##")
print ("#"*46 + "\n")
for m in range(M):
# Removing features with complete missing values
nas = np.isnan(Y[m]).mean(axis=0)
if np.any(nas==1.):
print("Warning: %d features(s) on view %d have missing values in all samples, removing them..." % ( (nas==1.).sum(), m) )
Y[m].drop(Y[m].columns[np.where(nas==1.)], axis=1, inplace=True)
# Warning if there are features with no variance
var = Y[m].std(axis=0)
if np.any(var==0.):
print("Warning: %d features(s) on view %d have zero variance, consider removing them..." % ( (var==0.).sum(),m) )
# Y[m].drop(Y[m].columns[np.where(var==0.)], axis=1, inplace=True)
# Center the features
if data_opts['center_features'][m]:
print("Centering features for view " + str(m) + "...")
Y[m] = (Y[m] - Y[m].mean(axis=0))
# Scale the views to unit variance
if data_opts['scale_views'][m]:
print("Scaling view " + str(m) + " to unit variance...")
Y[m] = Y[m] / np.nanstd(Y[m].as_matrix())
# Scale the features to unit variance
if data_opts['scale_features'][m]:
print("Scaling features for view " + str(m) + " to unit variance...")
Y[m] = Y[m] / np.std(Y[m], axis=0, )
print("\nAfter data processing:")
for m in range(M): print("view %d has %d samples and %d features..." % (m, Y[m].shape[0], Y[m].shape[1]))
return Y
def dotd(A, B, out=None):
"""Diagonal of :math:`\mathrm A\mathrm B^\intercal`.
If ``A`` is :math:`n\times p` and ``B`` is :math:`p\times n`, it is done in :math:`O(pn)`.
Args:
A (array_like): Left matrix.
B (array_like): Right matrix.
out (:class:`numpy.ndarray`, optional): copy result to.
Returns:
:class:`numpy.ndarray`: Resulting diagonal.
"""
A = ma.asarray(A, float)
B = ma.asarray(B, float)
if A.ndim == 1 and B.ndim == 1:
if out is None:
return ma.dot(A, B)
return ma.dot(A, B, out)
if out is None:
out = ma.empty((A.shape[0], ), float)
out[:] = ma.sum(A * B.T, axis=1)
return out
def nans(shape, dtype=float):
""" Method to create an array filled with missing values """
a = np.empty(shape, dtype)
a.fill(np.nan)
return a
def corr(A,B):
""" Method to efficiently compute correlation coefficients between two matrices
PARMETERS
---------
A: np array
B: np array
"""
# Rowwise mean of input arrays & subtract from input arrays themeselves
A_mA = A - A.mean(1)[:,None]
B_mB = B - B.mean(1)[:,None]
# Sum of squares across rows
ssA = (A_mA**2).sum(1);
ssB = (B_mB**2).sum(1);
# Finally get corr coeff
return np.dot(A_mA,B_mB.T)/np.sqrt(np.dot(ssA[:,None],ssB[None]))
# NOT HERE
def logdet(X):
return np.log(np.linalg.det(X))
# UC = np.linalg.cholesky(X)
# return 2*sum(np.log(np.diag(UC)))
# NOT HERE
def ddot(d, mtx, left=True):
"""Multiply a full matrix by a diagonal matrix.
This function should always be faster than dot.
Input:
d -- 1D (N,) array (contains the diagonal elements)
mtx -- 2D (N,N) array
left: is the diagonal matrix on the left or on the right of the product?
Output:
ddot(d, mts, left=True) == dot(diag(d), mtx)
ddot(d, mts, left=False) == dot(mtx, diag(d))
"""
if left:
return (d*mtx.T).T
else:
return d*mtx
def saveParameters(model, hdf5, view_names=None):
""" Method to save the parameters of the model in an hdf5 file
PARAMETERS
----------
model: a BayesNet instance
hdf5:
view_names
"""
# Get nodes from the model
nodes = model.getNodes()
# Create groups
param_grp = hdf5.create_group("parameters")
# Iterate over nodes
for node in nodes:
# Collect node parameters
parameters = nodes[node].getParameters()
# Create node subgroup
node_subgrp = param_grp.create_group(node)
# Multi-view nodes
if type(parameters) == list:
# Loop through the views
for m in range(len(parameters)):
if view_names is not None:
tmp = view_names[m]
else:
tmp = "%d" % m
# Create subsubgroup for the view
view_subgrp = node_subgrp.create_group(tmp)
# Loop through the parameters of the view
if parameters[m] is not None:
# Variational nodes
if type(parameters[m]) == dict:
for param_name in parameters[m].keys():
if parameters[m][param_name] is not None:
view_subgrp.create_dataset(param_name, data=parameters[m][param_name].T)
# Non-variational nodes (no distributions)
elif type(parameters[m]) == np.ndarray:
view_subgrp.create_dataset("value", data=parameters[m].T)
# Single-view nodes
else:
for param_name in parameters.keys():
node_subgrp.create_dataset("%s" % (param_name), data=parameters[param_name].T)
pass
def saveExpectations(model, hdf5, view_names=None):
""" Method to save the expectations of the model in an hdf5 file
PARAMETERS
----------
model: a BayesNet instance
hdf5:
view_names:
"""
# Get nodes from the model
nodes = model.getNodes()
exp_grp = hdf5.create_group("expectations")
# Iterate over nodes
for node in nodes:
# Collect node expectations
expectations = nodes[node].getExpectations()
# Multi-view nodes
if type(expectations) == list:
# Create subgroup for the node
node_subgrp = exp_grp.create_group(node)
# Iterate over views
for m in range(len(expectations)):
if view_names is not None:
view = view_names[m]
else:
view = "%d" % m
# Collect expectations
exp = expectations[m]["E"]
if exp is not None:
if type(exp) == ma.core.MaskedArray:
tmp = ma.filled(exp, fill_value=np.nan)
node_subgrp.create_dataset(view, data=tmp.T)
else:
node_subgrp.create_dataset(view, data=exp.T)
# Single-view nodes
else:
exp_grp.create_dataset(node, data=expectations["E"].T)
def saveTrainingStats(model, hdf5):
""" Method to save the training statistics in an hdf5 file
PARAMETERS
----------
model: a BayesNet instance
hdf5:
"""
stats = model.getTrainingStats()
stats_grp = hdf5.create_group("training_stats")
stats_grp.create_dataset("activeK", data=stats["activeK"])
stats_grp.create_dataset("elbo", data=stats["elbo"])
stats_grp.create_dataset("elbo_terms", data=stats["elbo_terms"].T)
stats_grp['elbo_terms'].attrs['colnames'] = [a.encode('utf8') for a in stats["elbo_terms"].columns.values]
def saveTrainingOpts(opts, hdf5):
""" Method to save the training options in an hdf5 file
PARAMETERS
----------
opts:
hdf5:
"""
# Remove dictionaries from the options
for k,v in opts.copy().items():
if type(v)==dict:
for k1,v1 in v.items():
opts[str(k)+"_"+str(k1)] = v1
opts.pop(k)
# Create HDF5 data set
hdf5.create_dataset("training_opts", data=np.array(list(opts.values()), dtype=np.float))
hdf5['training_opts'].attrs['names'] = np.asarray(list(opts.keys())).astype('S')
def saveModelOpts(opts, hdf5):
""" Method to save the model options in an hdf5 file
PARAMETERS
----------
opts:
hdf5:
"""
opts_interest = ["learnIntercept","schedule","likelihood","sparsity"]
opts = dict((k, opts[k]) for k in opts_interest)
grp = hdf5.create_group('model_opts')
for k,v in opts.items():
grp.create_dataset(k, data=np.asarray(v).astype('S'))
grp[k].attrs['names'] = np.asarray(list(opts.keys())).astype('S')
def saveTrainingData(model, hdf5, view_names=None, sample_names=None, feature_names=None, likelihoods=None):
""" Method to save the training data in an hdf5 file
PARAMETERS
----------
model: a BayesNet instance
hdf5:
view_names
sample_names
feature_names
"""
data = model.getTrainingData()
data_grp = hdf5.create_group("data")
featuredata_grp = hdf5.create_group("features")
hdf5.create_dataset("samples", data=np.array(sample_names, dtype='S50'))
if likelihoods is not None:
data_grp.attrs['likelihood'] = np.array(likelihoods, dtype='S50')
for m in range(len(data)):
view = view_names[m] if view_names is not None else str(m)
data_grp.create_dataset(view, data=data[m].data.T)
if feature_names is not None:
# data_grp.attrs['features'] = np.array(feature_names[m], dtype='S')
featuredata_grp.create_dataset(view, data=np.array(feature_names[m], dtype='S50'))
def saveModel(model, outfile, train_opts, model_opts, view_names=None, sample_names=None, feature_names=None):
""" Method to save the model in an hdf5 file
PARAMETERS
----------
TO-FILL....
"""
# QC checks
assert model.trained == True, "Model is not trained yet"
assert len(np.unique(view_names)) == len(view_names), 'View names must be unique'
assert len(np.unique(sample_names)) == len(sample_names), 'Sample names must be unique'
# Create output directory
if not os.path.isdir(os.path.dirname(outfile)):
print("Output directory does not exist, creating it...")
os.makedirs(os.path.dirname(outfile))
# For some reason h5py orders the datasets alphabetically, so we have to sort the likelihoods accordingly
idx = sorted(range(len(view_names)), key=lambda k: view_names[k])
tmp = [model_opts["likelihood"][idx[m]] for m in range(len(model_opts["likelihood"]))]
model_opts["likelihood"] = tmp
# Open HDF5 handler
hdf5 = h5py.File(outfile,'w')
# Save expectations
saveExpectations(model,hdf5,view_names)
# Save parameters
# saveParameters(model,hdf5,view_names)
# Save training statistics
saveTrainingStats(model,hdf5)
# Save training options
saveTrainingOpts(train_opts,hdf5)
# Save model options
saveModelOpts(model_opts,hdf5)
# Save training data
saveTrainingData(model, hdf5, view_names, sample_names, feature_names, model_opts["likelihood"])
# Close HDF5 file
hdf5.close()
| print("A total of " + str(len(samples_to_remove)) + " sample(s) have at least a missing view and will be removed") | conditional_block |
list.js | //https://developers.google.com/places/javascript/
//https://developers.google.com/maps/documentation/javascript/places#placeid
// Link to google api-key for the MapP5 project
// https://console.developers.google.com/apis/credentials?project=mapp5-1232
// https://maps.googleapis.com/maps/api/place/radarsearch/json?location=48.859294,2.347589&radius=5000&type=cafe&keyword=vegetarian&key=YOUR_API_KEY
// Fallback map center: San Francisco. Note the signs: latitude is positive and
// longitude is negative in the western hemisphere.
var sfo = {
    lat: 37.7833,
    lng: -122.4167
};
$('#googleMapError').hide();
var map, infowindow;
function handleErrors(event, source, lineno, colno, error) {
$('#googleMapError').text("There was an error loading resources. Please correct and try again");
$('#googleMapError').show();
}
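/*
A minimal sketch (an assumption, since index.html is not part of this file) of how
loadMap and handleErrors are expected to be wired to the asynchronously loaded
Google Maps script; YOUR_API_KEY is a placeholder, not a real credential:

    <script async defer
        src="https://maps.googleapis.com/maps/api/js?libraries=places&key=YOUR_API_KEY&callback=loadMap"
        onerror="handleErrors()">
    </script>

handleErrors could also be registered globally with `window.onerror = handleErrors;`,
since its parameter list lines up with the window.onerror callback.
*/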
// Thanks to the following link for help with this function.
//http://stackoverflow.com/questions/14184956/async-google-maps-api-v3-undefined-is-not-a-function
// This function is the callback invoked when the Google Maps script finishes loading asynchronously.
/*
Uses geocoder to obtain lat and lng values for the location of interest.
Passes the location info to getBookstores, which queries Foursquare.
If getBookstores succeeds then processFrsqrBooks is called with the response from FourSquare.
*/
function loadMap() {
infowindow = new google.maps.InfoWindow({});
infowindow.isOpen = false; // property of global infowindow variable that holds the open state of the infowindow.
// Request latitide and longitude of Fisherman's wharf, SanFrancisco, CA.
var geocoder = new google.maps.Geocoder();
var address = 'fisherman\'s wharf, sanfrancisco, CA, USA';
geocoder.geocode({
'address': address
}, function(results, status) {
        if (status === google.maps.GeocoderStatus.OK) {
            map = new google.maps.Map(document.getElementById('map'), {
                center: sfo,
                zoom: 15,
                scrollwheel: false
            });
            var geocoderSearchResult = results[0];
            map.setCenter(geocoderSearchResult.geometry.location);
            getBookstores(geocoderSearchResult).then(function(response) {
                processFrsqrBooks(response);
            }, function(error) {
                $('#googleMapError').text("Could not load FourSquare data: " + error);
                $('#googleMapError').show();
                console.log(error);
            });
        } else {
            // Covers ZERO_RESULTS, OVER_QUERY_LIMIT, REQUEST_DENIED,
            // INVALID_REQUEST and UNKNOWN_ERROR, which are all surfaced the same way.
            $('#googleMapError').text(status);
            $('#googleMapError').show();
        }
});
}
/*
Called when the Foursquare request resolves successfully (the promise returned by
getBookstores only resolves on a 200 response, so no status check is needed here).
Parses the JSON payload and populates the Knockout observable array with one Venue
per result, creating a map marker for each.
*/
function processFrsqrBooks(response) {
    var jsonResponse = JSON.parse(response);
    if (jsonResponse.response.groups.length > 0) {
        var bookVenues = jsonResponse.response.groups[0];
        var items = bookVenues.items;
        for (var i = 0; i < items.length; i++) {
            // Object that holds the data for an individual location from the Foursquare response.
            var frsqrItem = new Venue();
            // Populate the object with data from the response.
            frsqrItem.tips = items[i].tips;
            frsqrItem.name = items[i].venue.name;
            frsqrItem.venueUrl = items[i].venue.url;
            frsqrItem.lat = items[i].venue.location.lat;
            frsqrItem.lng = items[i].venue.location.lng;
            frsqrItem.index = i;
            // Some Foursquare venues do not have photos, so check before building the photo URL.
            if (items[i].venue.photos.count > 0) {
                var groups = items[i].venue.photos.groups;
                if (groups.length > 0) {
                    var photoItems = groups[0].items;
                    if (photoItems.length > 0) {
                        frsqrItem.venuePhotoUrl = photoItems[0].prefix + '50x50' + photoItems[0].suffix;
                    }
                }
            }
            frsqrItem.rating = items[i].venue.rating;
            frsqrItem.marker = createMarker(frsqrItem);
            bookstoreViewModel.venues.push(frsqrItem);
        }
    }
}
// This function sets up markes for points of interest and adds click handlers to all the markers.
function createMarker(frsqrItem) {
var content = "";
// The marker object ,
// - animation property set to DROP.
// - icon property is set to an icon from Templatic
var marker = new google.maps.Marker({
map: map,
animation: google.maps.Animation.DROP,
title: frsqrItem.name,
icon: './templatic/books-media.png',
position: {
lat: frsqrItem.lat,
lng: frsqrItem.lng
}
});
content = content + "</br>";
content = content + "<p> " + marker.title + "</p>";
content = content + "<img src=\"" + frsqrItem.venuePhotoUrl + "\"/>";
marker.content = content;
content = '';
// add click handler to every marker.
// When a marker is clicked, the name of the location and photo is displayed.
// The animation property is set to bounce, so the marker bounces when you click on it
google.maps.event.addListener(marker, 'click', function() {
var self = this;
if (self.getAnimation() !== null) {
self.setAnimation(null);
} else {
            self.setAnimation(google.maps.Animation.BOUNCE);
stopAnimation(marker);
}
        // Reuse the single global infowindow: closing it first is a no-op when it is
        // not open, so the "already open" and "not yet open" cases collapse into one path.
        infowindow.close();
        infowindow.setContent(self.content);
        infowindow.open(self.map, this);
        infowindow.isOpen = true;
});
return marker;
}
var stopAnimation = function(marker) {
setTimeout(function() {
marker.setAnimation(null);
}, 1400);
};
// code attribution: https://github.com/mdn/promises-test/blob/gh-pages/index.html
/*
This function takes the result from the geocoder request and subits a request to the Foursquare API.
*/
function | (geocoderSearchResult) {
return new Promise(function(resolve, reject) {
if (geocoderSearchResult.geometry.location) {
map.setCenter(geocoderSearchResult.geometry.location);
// Create a list and display all the results.
var cll = geocoderSearchResult.geometry.location.lat() + "," + geocoderSearchResult.geometry.location.lng();
var foursquareQuery = "https://api.foursquare.com/v2/venues/explore?client_id=F0XYIB113FEQQVQFFK5DGZ4V5PJBZA2DRNAXHFUW1G3UBE3N&client_secret=ZYY5PZ15D02DLZ0D3RGBADODPBC1KMKX4ZIQ4XNDNLUKBKEB&v=20140701&ll=" + cll + "&radius=1000&query=books&venuePhotos=1&limit=50";
getBooksRequest = new XMLHttpRequest();
if (!getBooksRequest) {
alert('Giving up getBookstores:( Cannot create an XMLHTTP instance');
return false;
}
getBooksRequest.open('GET', foursquareQuery);
getBooksRequest.responseType = 'text';
getBooksRequest.onload = function() {
if (getBooksRequest.status === 200) {
resolve(getBooksRequest.response);
} else {
reject(Error('Request did not load successfully' + getBooksRequest.statusText));
}
};
getBooksRequest.onerror = function() {
reject(Error('There was a network error'));
};
getBooksRequest.send();
} // if ends
});
}
var Venue = function() {
this.tips = '';
this.name = '';
this.venueUrl = '';
this.venuePhotoUrl = '';
this.rating = 0.0;
this.lat = 0;
this.lng = 0;
this.index = '';
this.marker = {};
this.displaySelection = function() {
if(!infowindow.isOpen){
//The infowindow is not already open.
infowindow.setContent(this.marker.content);
infowindow.open(map, this.marker);
infowindow.isOpen = true;
}
else{
infowindow.close();
infowindow.setContent(this.marker.content);
infowindow.open(map, this.marker);
infowindow.isOpen = true;
}
if (this.marker.getAnimation() !== null) {
this.marker.setAnimation(null);
} else {
this.marker.setAnimation(google.maps.Animation.BOUNCE, 1400);
stopAnimation(this.marker);
}
};
};
/*
***** Knockout *********
*/
// View Model for bookstore.
function BookstoreViewModel() {
var self = this;
self.searchText = ko.observable('');
self.visibleMarkers = ko.observableArray([]);
self.markers = ko.observableArray([]);
self.venues = ko.observableArray();
self.showMarkers = function(element, index, array){
if(!element.marker.getVisible()){
element.marker.setVisible(true);
}
};
self.visibleVenues = ko.computed(function() {
var searchInput = self.searchText().toLowerCase();
if (searchInput === '') {
self.venues().forEach(self.showMarkers);
return self.venues();
} else {
return ko.utils.arrayFilter(self.venues(), function(venue) {
//return ko.utils.stringStartsWith(item.name().toLowerCase(), filter);
if (venue.name.toLowerCase().indexOf(searchInput) > -1) {
// make sure venue marker is turned on.
if(!venue.marker.getVisible()){
venue.marker.setVisible(true);
}
return venue;
} else {
venue.marker.setVisible(false);
}
});
}
}, self.venues());
}
var bookstoreViewModel = new BookstoreViewModel();
// Attribution/thanks!: http://stackoverflow.com/questions/20857594/knockout-filtering-on-observable-array
ko.applyBindings(bookstoreViewModel);
var getBooksRequest;
var getBakeriesRequest;
var bookPhotosRequest;
var bakeryPhotosRequest;
| getBookstores | identifier_name |
list.js | //https://developers.google.com/places/javascript/
//https://developers.google.com/maps/documentation/javascript/places#placeid
// Link to google api-key for the MapP5 project
// https://console.developers.google.com/apis/credentials?project=mapp5-1232
// https://maps.googleapis.com/maps/api/place/radarsearch/json?location=48.859294,2.347589&radius=5000&type=cafe&keyword=vegetarian&key=YOUR_API_KEY
var sfo = {
lat: -37.7833,
lng: 122.4167
};
$('#googleMapError').hide();
var map, infowindow;
function handleErrors(event, source, lineno, colno, error) {
$('#googleMapError').text("There was an error loading resources. Please correct and try again");
$('#googleMapError').show();
}
// Thanks to the following link for help with this function.
//http://stackoverflow.com/questions/14184956/async-google-maps-api-v3-undefined-is-not-a-function
// This function is called when google maps is ready to be loaded. (this is the callback when the map is loaded asynchronously.
/*
Uses geocoder to obtain lat and lng values for the location of interest.
Passes the location info to getBookstores, which queries Foursquare.
If getBookstores succeeds then processFrsqrBooks is called with the response from FourSquare.
*/
function loadMap() {
infowindow = new google.maps.InfoWindow({});
infowindow.isOpen = false; // property of global infowindow variable that holds the open state of the infowindow.
// Request latitide and longitude of Fisherman's wharf, SanFrancisco, CA.
var geocoder = new google.maps.Geocoder();
var address = 'fisherman\'s wharf, sanfrancisco, CA, USA';
geocoder.geocode({
'address': address
}, function(results, status) {
switch (status) {
case 'OK':
if (status == google.maps.GeocoderStatus.OK) {
map = new google.maps.Map(document.getElementById('map'), {
center: sfo,
zoom: 15,
scrollwheel: false
});
var service = new google.maps.places.PlacesService(map);
var geocoderSearchResult = results[0];
map.setCenter(geocoderSearchResult.geometry.location);
getBookstores(geocoderSearchResult).then(function(response) {
processFrsqrBooks(response);
}, function(error) {
$('#googleMapError').text("Could not load FourSquare data" + error);
$('#googleMapError').show();
console.log(error);
});
}
break;
case 'ZERO_RESULTS':
$('#googleMapError').text(status);
$('#googleMapError').show();
break;
case 'OVER_QUERY_LIMIT':
$('#googleMapError').text(status);
$('#googleMapError').show();
break;
case 'REQUEST_DENIED':
$('#googleMapError').text(status);
$('#googleMapError').show();
break;
case 'INVALID_REQUEST':
$('#googleMapError').text(status);
$('#googleMapError').show();
break;
case 'UNKNOWN_ERROR':
$('#googleMapError').text(status);
$('#googleMapError').show();
break;
}
});
}
/*
This function is called when the request to Foursquare succeeds i.e. comes back with any result.
If the status code of the response is 200, then the knockout observable arrays are populated with the
result data.
*/
function processFrsqrBooks(response) {
if (getBooksRequest.readyState === XMLHttpRequest.DONE) {
if (getBooksRequest.status === 200) {
var jsonResponse = JSON.parse(getBooksRequest.responseText);
var bkstr = []; // array, holds the frsqrItem object literal that is defined inside the loop below.
var frsqrBookItems = [];
if (jsonResponse.response.groups.length > 0) {
bookVenues = jsonResponse.response.groups[0];
items = bookVenues.items;
for (var i = 0; i < items.length; i++) {
// object that holds data for individual locations from the Foursquare response.
var frsqrItem = new Venue();
// populate the object literal with data from the response.
frsqrItem.tips = items[i].tips;
frsqrItem.name = items[i].venue.name;
frsqrItem.venueUrl = items[i].venue.url;
frsqrItem.lat = items[i].venue.location.lat;
frsqrItem.lng = items[i].venue.location.lng;
frsqrItem.index = i;
// Photos for the locations,
if (items[i].venue.photos.count > 0) {
// there is at least one photo - so construct photo url.
var groups = items[i].venue.photos.groups;
// Some Foursquare 'venues' do not have photos, so check if the location has any photos
if (groups.length > 0) {
var photoItems = groups[0].items;
if (photoItems.length > 0) {
frsqrItem.venuePhotoUrl = photoItems[0].prefix + '50x50' + photoItems[0].suffix;
}
}
}
frsqrItem.rating = items[i].venue.rating;
frsqrItem.marker = createMarker(frsqrItem);
bookstoreViewModel.venues.push(frsqrItem);
}
}
} else {
alert('There was a problem with the request.');
}
}
}
// This function sets up markes for points of interest and adds click handlers to all the markers.
function createMarker(frsqrItem) {
var content = "";
// The marker object ,
// - animation property set to DROP.
// - icon property is set to an icon from Templatic
var marker = new google.maps.Marker({
map: map,
animation: google.maps.Animation.DROP,
title: frsqrItem.name,
icon: './templatic/books-media.png',
//infowindow: new google.maps.InfoWindow(),
position: {
lat: frsqrItem.lat,
lng: frsqrItem.lng
}
});
content = content + "</br>";
content = content + "<p> " + marker.title + "</p>";
content = content + "<img src=\"" + frsqrItem.venuePhotoUrl + "\"/>";
marker.content = content;
content = '';
// add click handler to every marker.
// When a marker is clicked, the name of the location and photo is displayed.
// The animation property is set to bounce, so the marker bounces when you click on it
google.maps.event.addListener(marker, 'click', function() {
var self = this;
if (self.getAnimation() !== null) {
self.setAnimation(null);
} else {
self.setAnimation(google.maps.Animation.BOUNCE, 1400);
stopAnimation(marker);
}
// TODO: Open the infowindow only if it is not already open.
if(!infowindow.isOpen){
//The infowindow is not already open.
infowindow.setContent(self.content);
infowindow.open(self.map, this);
infowindow.isOpen = true;
}
else{
infowindow.close();
infowindow.setContent(self.content);
infowindow.open(self.map, this);
infowindow.isOpen = true;
}
});
return marker;
}
var stopAnimation = function(marker) {
setTimeout(function() {
marker.setAnimation(null);
}, 1400);
};
// code attribution: https://github.com/mdn/promises-test/blob/gh-pages/index.html
/*
This function takes the result from the geocoder request and subits a request to the Foursquare API.
*/
function getBookstores(geocoderSearchResult) {
return new Promise(function(resolve, reject) {
if (geocoderSearchResult.geometry.location) {
map.setCenter(geocoderSearchResult.geometry.location);
// Create a list and display all the results.
var cll = geocoderSearchResult.geometry.location.lat() + "," + geocoderSearchResult.geometry.location.lng();
var foursquareQuery = "https://api.foursquare.com/v2/venues/explore?client_id=F0XYIB113FEQQVQFFK5DGZ4V5PJBZA2DRNAXHFUW1G3UBE3N&client_secret=ZYY5PZ15D02DLZ0D3RGBADODPBC1KMKX4ZIQ4XNDNLUKBKEB&v=20140701&ll=" + cll + "&radius=1000&query=books&venuePhotos=1&limit=50";
getBooksRequest = new XMLHttpRequest();
if (!getBooksRequest) {
alert('Giving up getBookstores:( Cannot create an XMLHTTP instance');
return false;
}
getBooksRequest.open('GET', foursquareQuery);
getBooksRequest.responseType = 'text';
getBooksRequest.onload = function() {
if (getBooksRequest.status === 200) {
resolve(getBooksRequest.response);
} else {
reject(Error('Request did not load successfully' + getBooksRequest.statusText));
}
};
getBooksRequest.onerror = function() {
reject(Error('There was a network error'));
};
getBooksRequest.send();
} // if ends
});
}
|
var Venue = function() {
this.tips = '';
this.name = '';
this.venueUrl = '';
this.venuePhotoUrl = '';
this.rating = 0.0;
this.lat = 0;
this.lng = 0;
this.index = '';
this.marker = {};
this.displaySelection = function() {
if(!infowindow.isOpen){
//The infowindow is not already open.
infowindow.setContent(this.marker.content);
infowindow.open(map, this.marker);
infowindow.isOpen = true;
}
else{
infowindow.close();
infowindow.setContent(this.marker.content);
infowindow.open(map, this.marker);
infowindow.isOpen = true;
}
if (this.marker.getAnimation() !== null) {
this.marker.setAnimation(null);
} else {
this.marker.setAnimation(google.maps.Animation.BOUNCE, 1400);
stopAnimation(this.marker);
}
};
};
/*
***** Knockout *********
*/
// View Model for bookstore.
function BookstoreViewModel() {
var self = this;
self.searchText = ko.observable('');
self.visibleMarkers = ko.observableArray([]);
self.markers = ko.observableArray([]);
self.venues = ko.observableArray();
self.showMarkers = function(element, index, array){
if(!element.marker.getVisible()){
element.marker.setVisible(true);
}
};
self.visibleVenues = ko.computed(function() {
var searchInput = self.searchText().toLowerCase();
if (searchInput === '') {
self.venues().forEach(self.showMarkers);
return self.venues();
} else {
return ko.utils.arrayFilter(self.venues(), function(venue) {
//return ko.utils.stringStartsWith(item.name().toLowerCase(), filter);
if (venue.name.toLowerCase().indexOf(searchInput) > -1) {
// make sure venue marker is turned on.
if(!venue.marker.getVisible()){
venue.marker.setVisible(true);
}
return venue;
} else {
venue.marker.setVisible(false);
}
});
}
}, self.venues());
}
var bookstoreViewModel = new BookstoreViewModel();
// Attribution/thanks!: http://stackoverflow.com/questions/20857594/knockout-filtering-on-observable-array
ko.applyBindings(bookstoreViewModel);
var getBooksRequest;
var getBakeriesRequest;
var bookPhotosRequest;
var bakeryPhotosRequest; | random_line_split |
|
list.js | //https://developers.google.com/places/javascript/
//https://developers.google.com/maps/documentation/javascript/places#placeid
// Link to google api-key for the MapP5 project
// https://console.developers.google.com/apis/credentials?project=mapp5-1232
// https://maps.googleapis.com/maps/api/place/radarsearch/json?location=48.859294,2.347589&radius=5000&type=cafe&keyword=vegetarian&key=YOUR_API_KEY
var sfo = {
lat: -37.7833,
lng: 122.4167
};
$('#googleMapError').hide();
var map, infowindow;
function handleErrors(event, source, lineno, colno, error) {
$('#googleMapError').text("There was an error loading resources. Please correct and try again");
$('#googleMapError').show();
}
// Thanks to the following link for help with this function.
//http://stackoverflow.com/questions/14184956/async-google-maps-api-v3-undefined-is-not-a-function
// This function is called when google maps is ready to be loaded. (this is the callback when the map is loaded asynchronously.
/*
Uses geocoder to obtain lat and lng values for the location of interest.
Passes the location info to getBookstores, which queries Foursquare.
If getBookstores succeeds then processFrsqrBooks is called with the response from FourSquare.
*/
function loadMap() {
infowindow = new google.maps.InfoWindow({});
infowindow.isOpen = false; // property of global infowindow variable that holds the open state of the infowindow.
// Request latitide and longitude of Fisherman's wharf, SanFrancisco, CA.
var geocoder = new google.maps.Geocoder();
var address = 'fisherman\'s wharf, sanfrancisco, CA, USA';
geocoder.geocode({
'address': address
}, function(results, status) {
switch (status) {
case 'OK':
if (status == google.maps.GeocoderStatus.OK) {
map = new google.maps.Map(document.getElementById('map'), {
center: sfo,
zoom: 15,
scrollwheel: false
});
var service = new google.maps.places.PlacesService(map);
var geocoderSearchResult = results[0];
map.setCenter(geocoderSearchResult.geometry.location);
getBookstores(geocoderSearchResult).then(function(response) {
processFrsqrBooks(response);
}, function(error) {
$('#googleMapError').text("Could not load FourSquare data" + error);
$('#googleMapError').show();
console.log(error);
});
}
break;
case 'ZERO_RESULTS':
$('#googleMapError').text(status);
$('#googleMapError').show();
break;
case 'OVER_QUERY_LIMIT':
$('#googleMapError').text(status);
$('#googleMapError').show();
break;
case 'REQUEST_DENIED':
$('#googleMapError').text(status);
$('#googleMapError').show();
break;
case 'INVALID_REQUEST':
$('#googleMapError').text(status);
$('#googleMapError').show();
break;
case 'UNKNOWN_ERROR':
$('#googleMapError').text(status);
$('#googleMapError').show();
break;
}
});
}
/*
This function is called when the request to Foursquare succeeds i.e. comes back with any result.
If the status code of the response is 200, then the knockout observable arrays are populated with the
result data.
*/
function processFrsqrBooks(response) {
if (getBooksRequest.readyState === XMLHttpRequest.DONE) {
if (getBooksRequest.status === 200) {
var jsonResponse = JSON.parse(getBooksRequest.responseText);
var bkstr = []; // array, holds the frsqrItem object literal that is defined inside the loop below.
var frsqrBookItems = [];
if (jsonResponse.response.groups.length > 0) {
bookVenues = jsonResponse.response.groups[0];
items = bookVenues.items;
for (var i = 0; i < items.length; i++) {
// object that holds data for individual locations from the Foursquare response.
var frsqrItem = new Venue();
// populate the object literal with data from the response.
frsqrItem.tips = items[i].tips;
frsqrItem.name = items[i].venue.name;
frsqrItem.venueUrl = items[i].venue.url;
frsqrItem.lat = items[i].venue.location.lat;
frsqrItem.lng = items[i].venue.location.lng;
frsqrItem.index = i;
// Photos for the locations,
if (items[i].venue.photos.count > 0) {
// there is at least one photo - so construct photo url.
var groups = items[i].venue.photos.groups;
// Some Foursquare 'venues' do not have photos, so check if the location has any photos
if (groups.length > 0) {
var photoItems = groups[0].items;
if (photoItems.length > 0) {
frsqrItem.venuePhotoUrl = photoItems[0].prefix + '50x50' + photoItems[0].suffix;
}
}
}
frsqrItem.rating = items[i].venue.rating;
frsqrItem.marker = createMarker(frsqrItem);
bookstoreViewModel.venues.push(frsqrItem);
}
}
} else {
alert('There was a problem with the request.');
}
}
}
// This function sets up markes for points of interest and adds click handlers to all the markers.
function createMarker(frsqrItem) {
var content = "";
// The marker object ,
// - animation property set to DROP.
// - icon property is set to an icon from Templatic
var marker = new google.maps.Marker({
map: map,
animation: google.maps.Animation.DROP,
title: frsqrItem.name,
icon: './templatic/books-media.png',
//infowindow: new google.maps.InfoWindow(),
position: {
lat: frsqrItem.lat,
lng: frsqrItem.lng
}
});
content = content + "</br>";
content = content + "<p> " + marker.title + "</p>";
content = content + "<img src=\"" + frsqrItem.venuePhotoUrl + "\"/>";
marker.content = content;
content = '';
// add click handler to every marker.
// When a marker is clicked, the name of the location and photo is displayed.
// The animation property is set to bounce, so the marker bounces when you click on it
google.maps.event.addListener(marker, 'click', function() {
var self = this;
if (self.getAnimation() !== null) {
self.setAnimation(null);
} else {
self.setAnimation(google.maps.Animation.BOUNCE, 1400);
stopAnimation(marker);
}
// TODO: Open the infowindow only if it is not already open.
if(!infowindow.isOpen){
//The infowindow is not already open.
infowindow.setContent(self.content);
infowindow.open(self.map, this);
infowindow.isOpen = true;
}
else |
});
return marker;
}
var stopAnimation = function(marker) {
setTimeout(function() {
marker.setAnimation(null);
}, 1400);
};
// code attribution: https://github.com/mdn/promises-test/blob/gh-pages/index.html
/*
This function takes the result from the geocoder request and subits a request to the Foursquare API.
*/
function getBookstores(geocoderSearchResult) {
return new Promise(function(resolve, reject) {
if (geocoderSearchResult.geometry.location) {
map.setCenter(geocoderSearchResult.geometry.location);
// Create a list and display all the results.
var cll = geocoderSearchResult.geometry.location.lat() + "," + geocoderSearchResult.geometry.location.lng();
var foursquareQuery = "https://api.foursquare.com/v2/venues/explore?client_id=F0XYIB113FEQQVQFFK5DGZ4V5PJBZA2DRNAXHFUW1G3UBE3N&client_secret=ZYY5PZ15D02DLZ0D3RGBADODPBC1KMKX4ZIQ4XNDNLUKBKEB&v=20140701&ll=" + cll + "&radius=1000&query=books&venuePhotos=1&limit=50";
getBooksRequest = new XMLHttpRequest();
if (!getBooksRequest) {
alert('Giving up getBookstores:( Cannot create an XMLHTTP instance');
return false;
}
getBooksRequest.open('GET', foursquareQuery);
getBooksRequest.responseType = 'text';
getBooksRequest.onload = function() {
if (getBooksRequest.status === 200) {
resolve(getBooksRequest.response);
} else {
reject(Error('Request did not load successfully' + getBooksRequest.statusText));
}
};
getBooksRequest.onerror = function() {
reject(Error('There was a network error'));
};
getBooksRequest.send();
} // if ends
});
}
var Venue = function() {
this.tips = '';
this.name = '';
this.venueUrl = '';
this.venuePhotoUrl = '';
this.rating = 0.0;
this.lat = 0;
this.lng = 0;
this.index = '';
this.marker = {};
this.displaySelection = function() {
if(!infowindow.isOpen){
//The infowindow is not already open.
infowindow.setContent(this.marker.content);
infowindow.open(map, this.marker);
infowindow.isOpen = true;
}
else{
infowindow.close();
infowindow.setContent(this.marker.content);
infowindow.open(map, this.marker);
infowindow.isOpen = true;
}
if (this.marker.getAnimation() !== null) {
this.marker.setAnimation(null);
} else {
this.marker.setAnimation(google.maps.Animation.BOUNCE, 1400);
stopAnimation(this.marker);
}
};
};
/*
***** Knockout *********
*/
// View Model for bookstore.
function BookstoreViewModel() {
var self = this;
self.searchText = ko.observable('');
self.visibleMarkers = ko.observableArray([]);
self.markers = ko.observableArray([]);
self.venues = ko.observableArray();
self.showMarkers = function(element, index, array){
if(!element.marker.getVisible()){
element.marker.setVisible(true);
}
};
self.visibleVenues = ko.computed(function() {
var searchInput = self.searchText().toLowerCase();
if (searchInput === '') {
self.venues().forEach(self.showMarkers);
return self.venues();
} else {
return ko.utils.arrayFilter(self.venues(), function(venue) {
//return ko.utils.stringStartsWith(item.name().toLowerCase(), filter);
if (venue.name.toLowerCase().indexOf(searchInput) > -1) {
// make sure venue marker is turned on.
if(!venue.marker.getVisible()){
venue.marker.setVisible(true);
}
return venue;
} else {
venue.marker.setVisible(false);
}
});
}
}, self.venues());
}
var bookstoreViewModel = new BookstoreViewModel();
// Attribution/thanks!: http://stackoverflow.com/questions/20857594/knockout-filtering-on-observable-array
ko.applyBindings(bookstoreViewModel);
var getBooksRequest;
var getBakeriesRequest;
var bookPhotosRequest;
var bakeryPhotosRequest;
| {
infowindow.close();
infowindow.setContent(self.content);
infowindow.open(self.map, this);
infowindow.isOpen = true;
} | conditional_block |
list.js | //https://developers.google.com/places/javascript/
//https://developers.google.com/maps/documentation/javascript/places#placeid
// Link to google api-key for the MapP5 project
// https://console.developers.google.com/apis/credentials?project=mapp5-1232
// https://maps.googleapis.com/maps/api/place/radarsearch/json?location=48.859294,2.347589&radius=5000&type=cafe&keyword=vegetarian&key=YOUR_API_KEY
var sfo = {
lat: -37.7833,
lng: 122.4167
};
$('#googleMapError').hide();
var map, infowindow;
function handleErrors(event, source, lineno, colno, error) {
$('#googleMapError').text("There was an error loading resources. Please correct and try again");
$('#googleMapError').show();
}
// Thanks to the following link for help with this function.
//http://stackoverflow.com/questions/14184956/async-google-maps-api-v3-undefined-is-not-a-function
// This function is called when google maps is ready to be loaded. (this is the callback when the map is loaded asynchronously.
/*
Uses geocoder to obtain lat and lng values for the location of interest.
Passes the location info to getBookstores, which queries Foursquare.
If getBookstores succeeds then processFrsqrBooks is called with the response from FourSquare.
*/
function loadMap() {
infowindow = new google.maps.InfoWindow({});
infowindow.isOpen = false; // property of global infowindow variable that holds the open state of the infowindow.
// Request latitide and longitude of Fisherman's wharf, SanFrancisco, CA.
var geocoder = new google.maps.Geocoder();
var address = 'fisherman\'s wharf, sanfrancisco, CA, USA';
geocoder.geocode({
'address': address
}, function(results, status) {
switch (status) {
case 'OK':
if (status == google.maps.GeocoderStatus.OK) {
map = new google.maps.Map(document.getElementById('map'), {
center: sfo,
zoom: 15,
scrollwheel: false
});
var service = new google.maps.places.PlacesService(map);
var geocoderSearchResult = results[0];
map.setCenter(geocoderSearchResult.geometry.location);
getBookstores(geocoderSearchResult).then(function(response) {
processFrsqrBooks(response);
}, function(error) {
$('#googleMapError').text("Could not load FourSquare data" + error);
$('#googleMapError').show();
console.log(error);
});
}
break;
case 'ZERO_RESULTS':
$('#googleMapError').text(status);
$('#googleMapError').show();
break;
case 'OVER_QUERY_LIMIT':
$('#googleMapError').text(status);
$('#googleMapError').show();
break;
case 'REQUEST_DENIED':
$('#googleMapError').text(status);
$('#googleMapError').show();
break;
case 'INVALID_REQUEST':
$('#googleMapError').text(status);
$('#googleMapError').show();
break;
case 'UNKNOWN_ERROR':
$('#googleMapError').text(status);
$('#googleMapError').show();
break;
}
});
}
/*
This function is called when the request to Foursquare succeeds i.e. comes back with any result.
If the status code of the response is 200, then the knockout observable arrays are populated with the
result data.
*/
function processFrsqrBooks(response) |
// This function sets up markes for points of interest and adds click handlers to all the markers.
function createMarker(frsqrItem) {
var content = "";
// The marker object ,
// - animation property set to DROP.
// - icon property is set to an icon from Templatic
var marker = new google.maps.Marker({
map: map,
animation: google.maps.Animation.DROP,
title: frsqrItem.name,
icon: './templatic/books-media.png',
//infowindow: new google.maps.InfoWindow(),
position: {
lat: frsqrItem.lat,
lng: frsqrItem.lng
}
});
content = content + "</br>";
content = content + "<p> " + marker.title + "</p>";
content = content + "<img src=\"" + frsqrItem.venuePhotoUrl + "\"/>";
marker.content = content;
content = '';
// add click handler to every marker.
// When a marker is clicked, the name of the location and photo is displayed.
// The animation property is set to bounce, so the marker bounces when you click on it
google.maps.event.addListener(marker, 'click', function() {
var self = this;
if (self.getAnimation() !== null) {
self.setAnimation(null);
} else {
self.setAnimation(google.maps.Animation.BOUNCE, 1400);
stopAnimation(marker);
}
// TODO: Open the infowindow only if it is not already open.
if(!infowindow.isOpen){
//The infowindow is not already open.
infowindow.setContent(self.content);
infowindow.open(self.map, this);
infowindow.isOpen = true;
}
else{
infowindow.close();
infowindow.setContent(self.content);
infowindow.open(self.map, this);
infowindow.isOpen = true;
}
});
return marker;
}
var stopAnimation = function(marker) {
setTimeout(function() {
marker.setAnimation(null);
}, 1400);
};
// code attribution: https://github.com/mdn/promises-test/blob/gh-pages/index.html
/*
This function takes the result from the geocoder request and subits a request to the Foursquare API.
*/
function getBookstores(geocoderSearchResult) {
return new Promise(function(resolve, reject) {
if (geocoderSearchResult.geometry.location) {
map.setCenter(geocoderSearchResult.geometry.location);
// Create a list and display all the results.
var cll = geocoderSearchResult.geometry.location.lat() + "," + geocoderSearchResult.geometry.location.lng();
var foursquareQuery = "https://api.foursquare.com/v2/venues/explore?client_id=F0XYIB113FEQQVQFFK5DGZ4V5PJBZA2DRNAXHFUW1G3UBE3N&client_secret=ZYY5PZ15D02DLZ0D3RGBADODPBC1KMKX4ZIQ4XNDNLUKBKEB&v=20140701&ll=" + cll + "&radius=1000&query=books&venuePhotos=1&limit=50";
getBooksRequest = new XMLHttpRequest();
if (!getBooksRequest) {
alert('Giving up getBookstores:( Cannot create an XMLHTTP instance');
return false;
}
getBooksRequest.open('GET', foursquareQuery);
getBooksRequest.responseType = 'text';
getBooksRequest.onload = function() {
if (getBooksRequest.status === 200) {
resolve(getBooksRequest.response);
} else {
reject(Error('Request did not load successfully' + getBooksRequest.statusText));
}
};
getBooksRequest.onerror = function() {
reject(Error('There was a network error'));
};
getBooksRequest.send();
} // if ends
});
}
var Venue = function() {
this.tips = '';
this.name = '';
this.venueUrl = '';
this.venuePhotoUrl = '';
this.rating = 0.0;
this.lat = 0;
this.lng = 0;
this.index = '';
this.marker = {};
this.displaySelection = function() {
if(!infowindow.isOpen){
//The infowindow is not already open.
infowindow.setContent(this.marker.content);
infowindow.open(map, this.marker);
infowindow.isOpen = true;
}
else{
infowindow.close();
infowindow.setContent(this.marker.content);
infowindow.open(map, this.marker);
infowindow.isOpen = true;
}
if (this.marker.getAnimation() !== null) {
this.marker.setAnimation(null);
} else {
this.marker.setAnimation(google.maps.Animation.BOUNCE, 1400);
stopAnimation(this.marker);
}
};
};
/*
***** Knockout *********
*/
// View Model for bookstore.
function BookstoreViewModel() {
var self = this;
self.searchText = ko.observable('');
self.visibleMarkers = ko.observableArray([]);
self.markers = ko.observableArray([]);
self.venues = ko.observableArray();
self.showMarkers = function(element, index, array){
if(!element.marker.getVisible()){
element.marker.setVisible(true);
}
};
self.visibleVenues = ko.computed(function() {
var searchInput = self.searchText().toLowerCase();
if (searchInput === '') {
self.venues().forEach(self.showMarkers);
return self.venues();
} else {
return ko.utils.arrayFilter(self.venues(), function(venue) {
//return ko.utils.stringStartsWith(item.name().toLowerCase(), filter);
if (venue.name.toLowerCase().indexOf(searchInput) > -1) {
// make sure venue marker is turned on.
if(!venue.marker.getVisible()){
venue.marker.setVisible(true);
}
return venue;
} else {
venue.marker.setVisible(false);
}
});
}
}, self.venues());
}
var bookstoreViewModel = new BookstoreViewModel();
// Attribution/thanks!: http://stackoverflow.com/questions/20857594/knockout-filtering-on-observable-array
ko.applyBindings(bookstoreViewModel);
var getBooksRequest;
var getBakeriesRequest;
var bookPhotosRequest;
var bakeryPhotosRequest;
| {
if (getBooksRequest.readyState === XMLHttpRequest.DONE) {
if (getBooksRequest.status === 200) {
var jsonResponse = JSON.parse(getBooksRequest.responseText);
var bkstr = []; // array, holds the frsqrItem object literal that is defined inside the loop below.
var frsqrBookItems = [];
if (jsonResponse.response.groups.length > 0) {
bookVenues = jsonResponse.response.groups[0];
items = bookVenues.items;
for (var i = 0; i < items.length; i++) {
// object that holds data for individual locations from the Foursquare response.
var frsqrItem = new Venue();
// populate the object literal with data from the response.
frsqrItem.tips = items[i].tips;
frsqrItem.name = items[i].venue.name;
frsqrItem.venueUrl = items[i].venue.url;
frsqrItem.lat = items[i].venue.location.lat;
frsqrItem.lng = items[i].venue.location.lng;
frsqrItem.index = i;
// Photos for the locations,
if (items[i].venue.photos.count > 0) {
// there is at least one photo - so construct photo url.
var groups = items[i].venue.photos.groups;
// Some Foursquare 'venues' do not have photos, so check if the location has any photos
if (groups.length > 0) {
var photoItems = groups[0].items;
if (photoItems.length > 0) {
frsqrItem.venuePhotoUrl = photoItems[0].prefix + '50x50' + photoItems[0].suffix;
}
}
}
frsqrItem.rating = items[i].venue.rating;
frsqrItem.marker = createMarker(frsqrItem);
bookstoreViewModel.venues.push(frsqrItem);
}
}
} else {
alert('There was a problem with the request.');
}
}
} | identifier_body |
shuf.rs | // This file is part of the uutils coreutils package.
//
// For the full copyright and license information, please view the LICENSE
// file that was distributed with this source code.
// spell-checker:ignore (ToDO) cmdline evec seps rvec fdata
use clap::{crate_version, Arg, ArgAction, Command};
use memchr::memchr_iter;
use rand::prelude::SliceRandom;
use rand::RngCore;
use std::fs::File;
use std::io::{stdin, stdout, BufReader, BufWriter, Read, Write};
use uucore::display::Quotable;
use uucore::error::{FromIo, UResult, USimpleError};
use uucore::{format_usage, help_about, help_usage};
mod rand_read_adapter;
enum Mode {
Default(String),
Echo(Vec<String>),
InputRange((usize, usize)),
}
static USAGE: &str = help_usage!("shuf.md");
static ABOUT: &str = help_about!("shuf.md");
struct Options {
head_count: usize,
output: Option<String>,
random_source: Option<String>,
repeat: bool,
sep: u8,
}
mod options {
pub static ECHO: &str = "echo";
pub static INPUT_RANGE: &str = "input-range";
pub static HEAD_COUNT: &str = "head-count";
pub static OUTPUT: &str = "output";
pub static RANDOM_SOURCE: &str = "random-source";
pub static REPEAT: &str = "repeat";
pub static ZERO_TERMINATED: &str = "zero-terminated";
pub static FILE: &str = "file";
}
#[uucore::main]
pub fn uumain(args: impl uucore::Args) -> UResult<()> |
pub fn uu_app() -> Command {
Command::new(uucore::util_name())
.about(ABOUT)
.version(crate_version!())
.override_usage(format_usage(USAGE))
.infer_long_args(true)
.args_override_self(true)
.arg(
Arg::new(options::ECHO)
.short('e')
.long(options::ECHO)
.value_name("ARG")
.help("treat each ARG as an input line")
.use_value_delimiter(false)
.num_args(0..)
.conflicts_with(options::INPUT_RANGE),
)
.arg(
Arg::new(options::INPUT_RANGE)
.short('i')
.long(options::INPUT_RANGE)
.value_name("LO-HI")
.help("treat each number LO through HI as an input line")
.conflicts_with(options::FILE),
)
.arg(
Arg::new(options::HEAD_COUNT)
.short('n')
.long(options::HEAD_COUNT)
.value_name("COUNT")
.help("output at most COUNT lines"),
)
.arg(
Arg::new(options::OUTPUT)
.short('o')
.long(options::OUTPUT)
.value_name("FILE")
.help("write result to FILE instead of standard output")
.value_hint(clap::ValueHint::FilePath),
)
.arg(
Arg::new(options::RANDOM_SOURCE)
.long(options::RANDOM_SOURCE)
.value_name("FILE")
.help("get random bytes from FILE")
.value_hint(clap::ValueHint::FilePath),
)
.arg(
Arg::new(options::REPEAT)
.short('r')
.long(options::REPEAT)
.help("output lines can be repeated")
.action(ArgAction::SetTrue),
)
.arg(
Arg::new(options::ZERO_TERMINATED)
.short('z')
.long(options::ZERO_TERMINATED)
.help("line delimiter is NUL, not newline")
.action(ArgAction::SetTrue),
)
.arg(Arg::new(options::FILE).value_hint(clap::ValueHint::FilePath))
}
fn read_input_file(filename: &str) -> UResult<Vec<u8>> {
let mut file = BufReader::new(if filename == "-" {
Box::new(stdin()) as Box<dyn Read>
} else {
let file = File::open(filename)
.map_err_context(|| format!("failed to open {}", filename.quote()))?;
Box::new(file) as Box<dyn Read>
});
let mut data = Vec::new();
file.read_to_end(&mut data)
.map_err_context(|| format!("failed reading {}", filename.quote()))?;
Ok(data)
}
fn find_seps(data: &mut Vec<&[u8]>, sep: u8) {
// need to use for loop so we don't borrow the vector as we modify it in place
// basic idea:
// * We don't care about the order of the result. This lets us slice the slices
// without making a new vector.
// * Starting from the end of the vector, we examine each element.
// * If that element contains the separator, we remove it from the vector,
// and then sub-slice it into slices that do not contain the separator.
// * We maintain the invariant throughout that each element in the vector past
// the ith element does not have any separators remaining.
for i in (0..data.len()).rev() {
if data[i].contains(&sep) {
let this = data.swap_remove(i);
let mut p = 0;
for i in memchr_iter(sep, this) {
data.push(&this[p..i]);
p = i + 1;
}
if p < this.len() {
data.push(&this[p..]);
}
}
}
}
fn shuf_bytes(input: &mut Vec<&[u8]>, opts: Options) -> UResult<()> {
let mut output = BufWriter::new(match opts.output {
None => Box::new(stdout()) as Box<dyn Write>,
Some(s) => {
let file = File::create(&s[..])
.map_err_context(|| format!("failed to open {} for writing", s.quote()))?;
Box::new(file) as Box<dyn Write>
}
});
let mut rng = match opts.random_source {
Some(r) => {
let file = File::open(&r[..])
.map_err_context(|| format!("failed to open random source {}", r.quote()))?;
WrappedRng::RngFile(rand_read_adapter::ReadRng::new(file))
}
None => WrappedRng::RngDefault(rand::thread_rng()),
};
if input.is_empty() {
return Ok(());
}
if opts.repeat {
for _ in 0..opts.head_count {
// Returns None is the slice is empty. We checked this before, so
// this is safe.
let r = input.choose(&mut rng).unwrap();
output
.write_all(r)
.map_err_context(|| "write failed".to_string())?;
output
.write_all(&[opts.sep])
.map_err_context(|| "write failed".to_string())?;
}
} else {
let (shuffled, _) = input.partial_shuffle(&mut rng, opts.head_count);
for r in shuffled {
output
.write_all(r)
.map_err_context(|| "write failed".to_string())?;
output
.write_all(&[opts.sep])
.map_err_context(|| "write failed".to_string())?;
}
}
Ok(())
}
fn parse_range(input_range: &str) -> Result<(usize, usize), String> {
if let Some((from, to)) = input_range.split_once('-') {
let begin = from
.parse::<usize>()
.map_err(|_| format!("invalid input range: {}", from.quote()))?;
let end = to
.parse::<usize>()
.map_err(|_| format!("invalid input range: {}", to.quote()))?;
Ok((begin, end + 1))
} else {
Err(format!("invalid input range: {}", input_range.quote()))
}
}
fn parse_head_count(headcounts: Vec<String>) -> Result<usize, String> {
let mut result = std::usize::MAX;
for count in headcounts {
match count.parse::<usize>() {
Ok(pv) => result = std::cmp::min(result, pv),
Err(_) => return Err(format!("invalid line count: {}", count.quote())),
}
}
Ok(result)
}
enum WrappedRng {
RngFile(rand_read_adapter::ReadRng<File>),
RngDefault(rand::rngs::ThreadRng),
}
impl RngCore for WrappedRng {
fn next_u32(&mut self) -> u32 {
match self {
Self::RngFile(r) => r.next_u32(),
Self::RngDefault(r) => r.next_u32(),
}
}
fn next_u64(&mut self) -> u64 {
match self {
Self::RngFile(r) => r.next_u64(),
Self::RngDefault(r) => r.next_u64(),
}
}
fn fill_bytes(&mut self, dest: &mut [u8]) {
match self {
Self::RngFile(r) => r.fill_bytes(dest),
Self::RngDefault(r) => r.fill_bytes(dest),
}
}
fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), rand::Error> {
match self {
Self::RngFile(r) => r.try_fill_bytes(dest),
Self::RngDefault(r) => r.try_fill_bytes(dest),
}
}
}
| {
let args = args.collect_lossy();
let matches = uu_app().try_get_matches_from(args)?;
let mode = if let Some(args) = matches.get_many::<String>(options::ECHO) {
Mode::Echo(args.map(String::from).collect())
} else if let Some(range) = matches.get_one::<String>(options::INPUT_RANGE) {
match parse_range(range) {
Ok(m) => Mode::InputRange(m),
Err(msg) => {
return Err(USimpleError::new(1, msg));
}
}
} else {
Mode::Default(
matches
.get_one::<String>(options::FILE)
.map(|s| s.as_str())
.unwrap_or("-")
.to_string(),
)
};
let options = Options {
head_count: {
let headcounts = matches
.get_many::<String>(options::HEAD_COUNT)
.unwrap_or_default()
.map(|s| s.to_owned())
.collect();
match parse_head_count(headcounts) {
Ok(val) => val,
Err(msg) => return Err(USimpleError::new(1, msg)),
}
},
output: matches.get_one::<String>(options::OUTPUT).map(String::from),
random_source: matches
.get_one::<String>(options::RANDOM_SOURCE)
.map(String::from),
repeat: matches.get_flag(options::REPEAT),
sep: if matches.get_flag(options::ZERO_TERMINATED) {
0x00_u8
} else {
0x0a_u8
},
};
match mode {
Mode::Echo(args) => {
let mut evec = args.iter().map(String::as_bytes).collect::<Vec<_>>();
find_seps(&mut evec, options.sep);
shuf_bytes(&mut evec, options)?;
}
Mode::InputRange((b, e)) => {
let rvec = (b..e).map(|x| format!("{x}")).collect::<Vec<String>>();
let mut rvec = rvec.iter().map(String::as_bytes).collect::<Vec<&[u8]>>();
shuf_bytes(&mut rvec, options)?;
}
Mode::Default(filename) => {
let fdata = read_input_file(&filename)?;
let mut fdata = vec![&fdata[..]];
find_seps(&mut fdata, options.sep);
shuf_bytes(&mut fdata, options)?;
}
}
Ok(())
} | identifier_body |
shuf.rs | // This file is part of the uutils coreutils package.
//
// For the full copyright and license information, please view the LICENSE
// file that was distributed with this source code.
// spell-checker:ignore (ToDO) cmdline evec seps rvec fdata
use clap::{crate_version, Arg, ArgAction, Command};
use memchr::memchr_iter;
use rand::prelude::SliceRandom;
use rand::RngCore;
use std::fs::File;
use std::io::{stdin, stdout, BufReader, BufWriter, Read, Write};
use uucore::display::Quotable;
use uucore::error::{FromIo, UResult, USimpleError};
use uucore::{format_usage, help_about, help_usage};
mod rand_read_adapter;
enum Mode {
Default(String),
Echo(Vec<String>),
InputRange((usize, usize)),
}
static USAGE: &str = help_usage!("shuf.md");
static ABOUT: &str = help_about!("shuf.md");
struct Options {
head_count: usize,
output: Option<String>,
random_source: Option<String>,
repeat: bool,
sep: u8,
}
mod options {
pub static ECHO: &str = "echo";
pub static INPUT_RANGE: &str = "input-range";
pub static HEAD_COUNT: &str = "head-count";
pub static OUTPUT: &str = "output";
pub static RANDOM_SOURCE: &str = "random-source";
pub static REPEAT: &str = "repeat";
pub static ZERO_TERMINATED: &str = "zero-terminated";
pub static FILE: &str = "file";
}
#[uucore::main]
pub fn | (args: impl uucore::Args) -> UResult<()> {
let args = args.collect_lossy();
let matches = uu_app().try_get_matches_from(args)?;
let mode = if let Some(args) = matches.get_many::<String>(options::ECHO) {
Mode::Echo(args.map(String::from).collect())
} else if let Some(range) = matches.get_one::<String>(options::INPUT_RANGE) {
match parse_range(range) {
Ok(m) => Mode::InputRange(m),
Err(msg) => {
return Err(USimpleError::new(1, msg));
}
}
} else {
Mode::Default(
matches
.get_one::<String>(options::FILE)
.map(|s| s.as_str())
.unwrap_or("-")
.to_string(),
)
};
let options = Options {
head_count: {
let headcounts = matches
.get_many::<String>(options::HEAD_COUNT)
.unwrap_or_default()
.map(|s| s.to_owned())
.collect();
match parse_head_count(headcounts) {
Ok(val) => val,
Err(msg) => return Err(USimpleError::new(1, msg)),
}
},
output: matches.get_one::<String>(options::OUTPUT).map(String::from),
random_source: matches
.get_one::<String>(options::RANDOM_SOURCE)
.map(String::from),
repeat: matches.get_flag(options::REPEAT),
sep: if matches.get_flag(options::ZERO_TERMINATED) {
0x00_u8
} else {
0x0a_u8
},
};
match mode {
Mode::Echo(args) => {
let mut evec = args.iter().map(String::as_bytes).collect::<Vec<_>>();
find_seps(&mut evec, options.sep);
shuf_bytes(&mut evec, options)?;
}
Mode::InputRange((b, e)) => {
let rvec = (b..e).map(|x| format!("{x}")).collect::<Vec<String>>();
let mut rvec = rvec.iter().map(String::as_bytes).collect::<Vec<&[u8]>>();
shuf_bytes(&mut rvec, options)?;
}
Mode::Default(filename) => {
let fdata = read_input_file(&filename)?;
let mut fdata = vec![&fdata[..]];
find_seps(&mut fdata, options.sep);
shuf_bytes(&mut fdata, options)?;
}
}
Ok(())
}
pub fn uu_app() -> Command {
Command::new(uucore::util_name())
.about(ABOUT)
.version(crate_version!())
.override_usage(format_usage(USAGE))
.infer_long_args(true)
.args_override_self(true)
.arg(
Arg::new(options::ECHO)
.short('e')
.long(options::ECHO)
.value_name("ARG")
.help("treat each ARG as an input line")
.use_value_delimiter(false)
.num_args(0..)
.conflicts_with(options::INPUT_RANGE),
)
.arg(
Arg::new(options::INPUT_RANGE)
.short('i')
.long(options::INPUT_RANGE)
.value_name("LO-HI")
.help("treat each number LO through HI as an input line")
.conflicts_with(options::FILE),
)
.arg(
Arg::new(options::HEAD_COUNT)
.short('n')
.long(options::HEAD_COUNT)
.value_name("COUNT")
.help("output at most COUNT lines"),
)
.arg(
Arg::new(options::OUTPUT)
.short('o')
.long(options::OUTPUT)
.value_name("FILE")
.help("write result to FILE instead of standard output")
.value_hint(clap::ValueHint::FilePath),
)
.arg(
Arg::new(options::RANDOM_SOURCE)
.long(options::RANDOM_SOURCE)
.value_name("FILE")
.help("get random bytes from FILE")
.value_hint(clap::ValueHint::FilePath),
)
.arg(
Arg::new(options::REPEAT)
.short('r')
.long(options::REPEAT)
.help("output lines can be repeated")
.action(ArgAction::SetTrue),
)
.arg(
Arg::new(options::ZERO_TERMINATED)
.short('z')
.long(options::ZERO_TERMINATED)
.help("line delimiter is NUL, not newline")
.action(ArgAction::SetTrue),
)
.arg(Arg::new(options::FILE).value_hint(clap::ValueHint::FilePath))
}
fn read_input_file(filename: &str) -> UResult<Vec<u8>> {
let mut file = BufReader::new(if filename == "-" {
Box::new(stdin()) as Box<dyn Read>
} else {
let file = File::open(filename)
.map_err_context(|| format!("failed to open {}", filename.quote()))?;
Box::new(file) as Box<dyn Read>
});
let mut data = Vec::new();
file.read_to_end(&mut data)
.map_err_context(|| format!("failed reading {}", filename.quote()))?;
Ok(data)
}
fn find_seps(data: &mut Vec<&[u8]>, sep: u8) {
// need to use for loop so we don't borrow the vector as we modify it in place
// basic idea:
// * We don't care about the order of the result. This lets us slice the slices
// without making a new vector.
// * Starting from the end of the vector, we examine each element.
// * If that element contains the separator, we remove it from the vector,
// and then sub-slice it into slices that do not contain the separator.
// * We maintain the invariant throughout that each element in the vector past
// the ith element does not have any separators remaining.
for i in (0..data.len()).rev() {
if data[i].contains(&sep) {
let this = data.swap_remove(i);
let mut p = 0;
for i in memchr_iter(sep, this) {
data.push(&this[p..i]);
p = i + 1;
}
if p < this.len() {
data.push(&this[p..]);
}
}
}
}
fn shuf_bytes(input: &mut Vec<&[u8]>, opts: Options) -> UResult<()> {
let mut output = BufWriter::new(match opts.output {
None => Box::new(stdout()) as Box<dyn Write>,
Some(s) => {
let file = File::create(&s[..])
.map_err_context(|| format!("failed to open {} for writing", s.quote()))?;
Box::new(file) as Box<dyn Write>
}
});
let mut rng = match opts.random_source {
Some(r) => {
let file = File::open(&r[..])
.map_err_context(|| format!("failed to open random source {}", r.quote()))?;
WrappedRng::RngFile(rand_read_adapter::ReadRng::new(file))
}
None => WrappedRng::RngDefault(rand::thread_rng()),
};
if input.is_empty() {
return Ok(());
}
if opts.repeat {
for _ in 0..opts.head_count {
// Returns None is the slice is empty. We checked this before, so
// this is safe.
let r = input.choose(&mut rng).unwrap();
output
.write_all(r)
.map_err_context(|| "write failed".to_string())?;
output
.write_all(&[opts.sep])
.map_err_context(|| "write failed".to_string())?;
}
} else {
let (shuffled, _) = input.partial_shuffle(&mut rng, opts.head_count);
for r in shuffled {
output
.write_all(r)
.map_err_context(|| "write failed".to_string())?;
output
.write_all(&[opts.sep])
.map_err_context(|| "write failed".to_string())?;
}
}
Ok(())
}
fn parse_range(input_range: &str) -> Result<(usize, usize), String> {
if let Some((from, to)) = input_range.split_once('-') {
let begin = from
.parse::<usize>()
.map_err(|_| format!("invalid input range: {}", from.quote()))?;
let end = to
.parse::<usize>()
.map_err(|_| format!("invalid input range: {}", to.quote()))?;
Ok((begin, end + 1))
} else {
Err(format!("invalid input range: {}", input_range.quote()))
}
}
fn parse_head_count(headcounts: Vec<String>) -> Result<usize, String> {
let mut result = std::usize::MAX;
for count in headcounts {
match count.parse::<usize>() {
Ok(pv) => result = std::cmp::min(result, pv),
Err(_) => return Err(format!("invalid line count: {}", count.quote())),
}
}
Ok(result)
}
enum WrappedRng {
RngFile(rand_read_adapter::ReadRng<File>),
RngDefault(rand::rngs::ThreadRng),
}
impl RngCore for WrappedRng {
fn next_u32(&mut self) -> u32 {
match self {
Self::RngFile(r) => r.next_u32(),
Self::RngDefault(r) => r.next_u32(),
}
}
fn next_u64(&mut self) -> u64 {
match self {
Self::RngFile(r) => r.next_u64(),
Self::RngDefault(r) => r.next_u64(),
}
}
fn fill_bytes(&mut self, dest: &mut [u8]) {
match self {
Self::RngFile(r) => r.fill_bytes(dest),
Self::RngDefault(r) => r.fill_bytes(dest),
}
}
fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), rand::Error> {
match self {
Self::RngFile(r) => r.try_fill_bytes(dest),
Self::RngDefault(r) => r.try_fill_bytes(dest),
}
}
}
| uumain | identifier_name |
shuf.rs | // This file is part of the uutils coreutils package.
//
// For the full copyright and license information, please view the LICENSE
// file that was distributed with this source code.
// spell-checker:ignore (ToDO) cmdline evec seps rvec fdata
use clap::{crate_version, Arg, ArgAction, Command};
use memchr::memchr_iter;
use rand::prelude::SliceRandom;
use rand::RngCore;
use std::fs::File;
use std::io::{stdin, stdout, BufReader, BufWriter, Read, Write};
use uucore::display::Quotable;
use uucore::error::{FromIo, UResult, USimpleError};
use uucore::{format_usage, help_about, help_usage};
mod rand_read_adapter;
enum Mode {
Default(String),
Echo(Vec<String>),
InputRange((usize, usize)),
}
static USAGE: &str = help_usage!("shuf.md");
static ABOUT: &str = help_about!("shuf.md");
struct Options {
head_count: usize,
output: Option<String>,
random_source: Option<String>,
repeat: bool,
sep: u8,
}
mod options {
pub static ECHO: &str = "echo";
pub static INPUT_RANGE: &str = "input-range";
pub static HEAD_COUNT: &str = "head-count";
pub static OUTPUT: &str = "output";
pub static RANDOM_SOURCE: &str = "random-source";
pub static REPEAT: &str = "repeat";
pub static ZERO_TERMINATED: &str = "zero-terminated";
pub static FILE: &str = "file";
}
#[uucore::main]
pub fn uumain(args: impl uucore::Args) -> UResult<()> {
let args = args.collect_lossy();
let matches = uu_app().try_get_matches_from(args)?;
let mode = if let Some(args) = matches.get_many::<String>(options::ECHO) {
Mode::Echo(args.map(String::from).collect())
} else if let Some(range) = matches.get_one::<String>(options::INPUT_RANGE) {
match parse_range(range) {
Ok(m) => Mode::InputRange(m),
Err(msg) => {
return Err(USimpleError::new(1, msg));
}
}
} else {
Mode::Default(
matches
.get_one::<String>(options::FILE)
.map(|s| s.as_str())
.unwrap_or("-")
.to_string(),
)
};
let options = Options {
head_count: {
let headcounts = matches
.get_many::<String>(options::HEAD_COUNT)
.unwrap_or_default()
.map(|s| s.to_owned())
.collect();
match parse_head_count(headcounts) {
Ok(val) => val,
Err(msg) => return Err(USimpleError::new(1, msg)),
}
},
output: matches.get_one::<String>(options::OUTPUT).map(String::from),
random_source: matches
.get_one::<String>(options::RANDOM_SOURCE)
.map(String::from),
repeat: matches.get_flag(options::REPEAT),
sep: if matches.get_flag(options::ZERO_TERMINATED) {
0x00_u8
} else {
0x0a_u8
},
};
match mode {
Mode::Echo(args) => {
let mut evec = args.iter().map(String::as_bytes).collect::<Vec<_>>();
find_seps(&mut evec, options.sep);
shuf_bytes(&mut evec, options)?;
}
Mode::InputRange((b, e)) => {
let rvec = (b..e).map(|x| format!("{x}")).collect::<Vec<String>>();
let mut rvec = rvec.iter().map(String::as_bytes).collect::<Vec<&[u8]>>();
shuf_bytes(&mut rvec, options)?;
}
Mode::Default(filename) => {
let fdata = read_input_file(&filename)?;
let mut fdata = vec![&fdata[..]];
find_seps(&mut fdata, options.sep);
shuf_bytes(&mut fdata, options)?;
}
}
Ok(())
}
pub fn uu_app() -> Command {
Command::new(uucore::util_name())
.about(ABOUT) | .override_usage(format_usage(USAGE))
.infer_long_args(true)
.args_override_self(true)
.arg(
Arg::new(options::ECHO)
.short('e')
.long(options::ECHO)
.value_name("ARG")
.help("treat each ARG as an input line")
.use_value_delimiter(false)
.num_args(0..)
.conflicts_with(options::INPUT_RANGE),
)
.arg(
Arg::new(options::INPUT_RANGE)
.short('i')
.long(options::INPUT_RANGE)
.value_name("LO-HI")
.help("treat each number LO through HI as an input line")
.conflicts_with(options::FILE),
)
.arg(
Arg::new(options::HEAD_COUNT)
.short('n')
.long(options::HEAD_COUNT)
.value_name("COUNT")
.help("output at most COUNT lines"),
)
.arg(
Arg::new(options::OUTPUT)
.short('o')
.long(options::OUTPUT)
.value_name("FILE")
.help("write result to FILE instead of standard output")
.value_hint(clap::ValueHint::FilePath),
)
.arg(
Arg::new(options::RANDOM_SOURCE)
.long(options::RANDOM_SOURCE)
.value_name("FILE")
.help("get random bytes from FILE")
.value_hint(clap::ValueHint::FilePath),
)
.arg(
Arg::new(options::REPEAT)
.short('r')
.long(options::REPEAT)
.help("output lines can be repeated")
.action(ArgAction::SetTrue),
)
.arg(
Arg::new(options::ZERO_TERMINATED)
.short('z')
.long(options::ZERO_TERMINATED)
.help("line delimiter is NUL, not newline")
.action(ArgAction::SetTrue),
)
.arg(Arg::new(options::FILE).value_hint(clap::ValueHint::FilePath))
}
fn read_input_file(filename: &str) -> UResult<Vec<u8>> {
let mut file = BufReader::new(if filename == "-" {
Box::new(stdin()) as Box<dyn Read>
} else {
let file = File::open(filename)
.map_err_context(|| format!("failed to open {}", filename.quote()))?;
Box::new(file) as Box<dyn Read>
});
let mut data = Vec::new();
file.read_to_end(&mut data)
.map_err_context(|| format!("failed reading {}", filename.quote()))?;
Ok(data)
}
fn find_seps(data: &mut Vec<&[u8]>, sep: u8) {
// need to use for loop so we don't borrow the vector as we modify it in place
// basic idea:
// * We don't care about the order of the result. This lets us slice the slices
// without making a new vector.
// * Starting from the end of the vector, we examine each element.
// * If that element contains the separator, we remove it from the vector,
// and then sub-slice it into slices that do not contain the separator.
// * We maintain the invariant throughout that each element in the vector past
// the ith element does not have any separators remaining.
for i in (0..data.len()).rev() {
if data[i].contains(&sep) {
let this = data.swap_remove(i);
let mut p = 0;
for i in memchr_iter(sep, this) {
data.push(&this[p..i]);
p = i + 1;
}
if p < this.len() {
data.push(&this[p..]);
}
}
}
}
fn shuf_bytes(input: &mut Vec<&[u8]>, opts: Options) -> UResult<()> {
let mut output = BufWriter::new(match opts.output {
None => Box::new(stdout()) as Box<dyn Write>,
Some(s) => {
let file = File::create(&s[..])
.map_err_context(|| format!("failed to open {} for writing", s.quote()))?;
Box::new(file) as Box<dyn Write>
}
});
let mut rng = match opts.random_source {
Some(r) => {
let file = File::open(&r[..])
.map_err_context(|| format!("failed to open random source {}", r.quote()))?;
WrappedRng::RngFile(rand_read_adapter::ReadRng::new(file))
}
None => WrappedRng::RngDefault(rand::thread_rng()),
};
if input.is_empty() {
return Ok(());
}
if opts.repeat {
for _ in 0..opts.head_count {
// Returns None is the slice is empty. We checked this before, so
// this is safe.
let r = input.choose(&mut rng).unwrap();
output
.write_all(r)
.map_err_context(|| "write failed".to_string())?;
output
.write_all(&[opts.sep])
.map_err_context(|| "write failed".to_string())?;
}
} else {
let (shuffled, _) = input.partial_shuffle(&mut rng, opts.head_count);
for r in shuffled {
output
.write_all(r)
.map_err_context(|| "write failed".to_string())?;
output
.write_all(&[opts.sep])
.map_err_context(|| "write failed".to_string())?;
}
}
Ok(())
}
fn parse_range(input_range: &str) -> Result<(usize, usize), String> {
if let Some((from, to)) = input_range.split_once('-') {
let begin = from
.parse::<usize>()
.map_err(|_| format!("invalid input range: {}", from.quote()))?;
let end = to
.parse::<usize>()
.map_err(|_| format!("invalid input range: {}", to.quote()))?;
Ok((begin, end + 1))
} else {
Err(format!("invalid input range: {}", input_range.quote()))
}
}
fn parse_head_count(headcounts: Vec<String>) -> Result<usize, String> {
let mut result = std::usize::MAX;
for count in headcounts {
match count.parse::<usize>() {
Ok(pv) => result = std::cmp::min(result, pv),
Err(_) => return Err(format!("invalid line count: {}", count.quote())),
}
}
Ok(result)
}
enum WrappedRng {
RngFile(rand_read_adapter::ReadRng<File>),
RngDefault(rand::rngs::ThreadRng),
}
impl RngCore for WrappedRng {
fn next_u32(&mut self) -> u32 {
match self {
Self::RngFile(r) => r.next_u32(),
Self::RngDefault(r) => r.next_u32(),
}
}
fn next_u64(&mut self) -> u64 {
match self {
Self::RngFile(r) => r.next_u64(),
Self::RngDefault(r) => r.next_u64(),
}
}
fn fill_bytes(&mut self, dest: &mut [u8]) {
match self {
Self::RngFile(r) => r.fill_bytes(dest),
Self::RngDefault(r) => r.fill_bytes(dest),
}
}
fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), rand::Error> {
match self {
Self::RngFile(r) => r.try_fill_bytes(dest),
Self::RngDefault(r) => r.try_fill_bytes(dest),
}
}
} | .version(crate_version!()) | random_line_split |
eda.py | # To add a new cell, type '# %%'
# To add a new markdown cell, type '# %% [markdown]'
# %% [markdown]
# # Predicting Scooter Users
# April 2020
#
# Part 2. Exploratory Data Analysis
#
# Submitted by: XU Yuting
# %% [markdown]
# ## Section 1. Import Data
# %%
# import relevant libraries
import sys
import pandas as pd
import pyodbc
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
# %%
# connect to SQL server database
# for confidentiality purposes I have removed the database connection details
conn = pyodbc.connect()
sql_query = "select date, hr, weather, temperature, feels_like_temperature, relative_humidity, windspeed, psi, guest_scooter from rental_data where date between '2011-01-01' and '2012-12-31'"
# load the dataset into a Pandas dataframe, df
df = pd.io.sql.read_sql(sql_query,conn)
# %% [markdown]
# ## Section 2. Examine the data columns
# %%
df.info()
# %%
df.describe()
# %% [markdown]
# Bearing in mind that the purpose of the exercise is to predict the number of scooters, given the other variables, we have to pay attention to the 'guest_scooter' column, and understand that this is a continuous numeric attribute.
# %%
df.head()
# %% [markdown]
# From the above we can see that:
# - There are negative values in the column "guest_scooter" --> the negative values can't be true, so they will need to be removed
# - Column 'weather' contains non-numeric values --> this will need to be scrutinized in detail and transformed to numeric values
# - Column 'date' is not numeric either --> we can extract the numeric attributes out into year, month, date
# - Column 'relative_humidity' and 'psi' contains value 0 --> This is quite unrealistic, so we can treat them as missing value too
# %% [markdown]
# We will take a closer look at the 'weather' column
# %%
df['weather'].value_counts()
# %% [markdown]
# Looks like 'weather' column is not so clean: there are typo errors and misspellings that will need to be cleaned up before they can be transformed to numeric values.
# %% [markdown]
# By examining the data columns above, we have concluded that there are some data cleaning/preprocessing tasks we will need to perform:
# - Column 'weather': clean up the typos, and transform to numeric data
# - Column 'guest_scooter': remove negative values
# - Column 'relative_humidity','psi': value 0 should be treated as null values
# - Column 'date': transform to numeric data e.g. year, month, date, day of week
#
# With these tasks in mind, we can now start to preprocess the data
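# %% [markdown]
# As a compact, illustrative sketch (the steps are carried out one by one in
# the cells below; `preprocess` is a hypothetical helper name and is not used
# elsewhere in this notebook):
# %%
def preprocess(frame):
    frame = frame.copy()
    frame['weather'] = frame['weather'].str.upper()
    # negative scooter counts are impossible, so treat them as missing
    frame['guest_scooter'] = frame['guest_scooter'].where(frame['guest_scooter'] >= 0)
    # a humidity or PSI of exactly 0 is unrealistic, so treat it as missing
    frame[['relative_humidity', 'psi']] = frame[['relative_humidity', 'psi']].replace(0, np.nan)
    frame['date'] = pd.to_datetime(frame['date'])
    return frame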
# %% [markdown]
# ## Section 3. Data Cleaning & Preprocessing
# %% [markdown]
# #### Task 1: Column 'Weather' -- Clean up typos and transform categorical data to numeric data
# %%
df['weather']=df['weather'].str.upper()
df['weather'].value_counts()
# %%
# now we correct some of the spellings
df.loc[df['weather']=='LEAR','weather']='CLEAR'
df.loc[df['weather']=='CLAR','weather']='CLEAR'
df.loc[df['weather']=='CLUDY','weather']='CLOUDY'
df.loc[df['weather']=='LOUDY','weather']='CLOUDY'
df.loc[df['weather']=='LIHT SNOW/RAIN','weather']='LIGHT SNOW/RAIN'
df['weather'].value_counts()
# %%
# map weather to integer
map_weather = {'CLEAR': 1, 'CLOUDY': 2,'LIGHT SNOW/RAIN':3,'HEAVY SNOW/RAIN':4}
df = df.replace({'weather': map_weather})  # assign back, otherwise the mapping is lost
# %% [markdown]
# We are set!
# %% [markdown]
# #### Task 2: Column 'guest_scooter' -- remove negative values
# %%
df['guest_scooter'].replace([-2,-1],np.nan,inplace=True)
df['guest_scooter'].describe()
# %% [markdown]
# Notice that the count has reduced. This means that some of the negative values have been converted to NULL
# %% [markdown]
# #### Task 3: Column 'relative_humidity','psi' - value 0 should be treated as null values
# %%
df['relative_humidity'].replace(0,np.nan,inplace=True)
df['relative_humidity'].describe()
# %%
df['psi'].replace(0,np.nan,inplace=True)
df['psi'].describe()
# %% [markdown]
# #### Task 4: Column 'date' - transform to numeric data e.g. year, month, date, day of week
# %%
# first we convert the date column to date object
dt=df['date']
df['date']=pd.to_datetime(dt)
# %%
type(df['date'].values[0])
# %% [markdown]
# Now we need to create new columns and extract the year, month, date, and weekday attributes
# %%
df['year']=df['date'].dt.year
df['month']=df['date'].dt.month
df['day']=df['date'].dt.day
df['weekday']=df['date'].apply(lambda x: x.weekday())
df.head()
# %% [markdown]
# In the 'weekday' column, we notice that the number ranges from 0 to 6. We are going to add 1 to the values.
# %%
df['weekday']=df['weekday']+1
df.head()
# %% [markdown]
# We are done with preprocessing! These steps will also be performed in the data cleaning/preprocessing phase of the MLP.
# Now on to exploring some basic relationships between the attributes.
# %% [markdown]
# ## Section 4. Data Visualization
#
#
# Bearing in mind that the final purpose of the exercise is to predict the number of active e-scooter riders, we should look for relationships between the other attributes and the target attribute (guest_scooter).
# %%
# first we look at the distribution of ridership/hour
plt.hist(df['guest_scooter'].dropna(),bins=50)
plt.xlabel('Number of Scooters/Hr')
plt.ylabel('Frequency')
plt.show()
# %% [markdown]
# We can see that the number of scooters/hour does not follow a normal distribution. It is heavily right-tailed.
# %%
# now we want to see if there's a longitudinal trend on daily total scooter number
group_by_day=df.groupby('date').guest_scooter.sum()
group_by_day.plot()
plt.xlabel('Date')
plt.ylabel('Daily Ridership')
plt.title("Total Number of Scooters/Day")
plt.show()
# %% [markdown]
# What we can observe from above is a clear seasonal trend: numbers spike up in hotter months (May - August), and drop in colder months (Oct - Apr). There are also large variations across days
# %%
# we can see the seasonal trends more clearly if we plot the total number of riders per month
group_by_month=df.groupby('month').guest_scooter.sum()
group_by_month.plot(kind='bar')
plt.xlabel('Month')
plt.ylabel('Scooter Number')
plt.title("Total number of scooters in each month over the study period")
plt.show()
# %%
sns.heatmap(pd.crosstab(df['month'],df['hr'],values=df['guest_scooter'],aggfunc='mean'),cmap='coolwarm')
plt.ylabel("Month")
plt.xlabel("Hour of day")
plt.title("Mean hourly scooters per hour")
# %%
sns.boxenplot(data=df, x="month",y="guest_scooter")
plt.title("Distribution of guest scooter numbers across months")
plt.show()
# %% [markdown]
# The number of scooters shows clear monthly and daily variations.
# What actually makes a difference across the months and hours is the climatic conditions, namely:
# - temperature
# - feels_like_temperature
# - relative_humidity
# - psi
# - windspeed
# - weather
#
# All these conditions will have some influence on the result. Interestingly, the boxenplot above also shows two ridership peaks in a year, one in March-May and the other in September-November. Since summer is usually the hottest time of the year, temperature alone cannot explain this pattern; other factors such as thermal comfort, wind, and humidity are likely to have an effect on scooter numbers too. We will look at this in detail.
# %% [markdown]
# Before we drill down into the climatic factors, there is another temporal scale we have not explored: variation within a day.
# %%
# resample by hour of day
mean_rider_by_hr = df.groupby('hr')['guest_scooter'].mean().sort_index()
mean_temp_by_hr = df.groupby('hr')['temperature'].mean().sort_index()
mean_fltemp_by_hr=df.groupby('hr')['feels_like_temperature'].mean().sort_index()
mean_psi_by_hr=df.groupby('hr')['psi'].mean().sort_index()
mean_rh_by_hr=df.groupby('hr')['relative_humidity'].mean().sort_index()
mean_windspeed_by_hr=df.groupby('hr')['windspeed'].mean().sort_index()
# %%
# merge into a new dataframe
frame={'rider':mean_rider_by_hr,
'temp':mean_temp_by_hr,
'fltemp':mean_fltemp_by_hr,
'psi':mean_psi_by_hr,
'rh':mean_rh_by_hr,
'windspeed':mean_windspeed_by_hr}
df_by_hr=pd.DataFrame(frame)
df_by_hr.head()
# %%
# plot the variables to 4 subplots
fig,ax=plt.subplots(2,2)
fig.tight_layout(pad=0.4, w_pad=6, h_pad=2.0)
ax[0,0].bar(df_by_hr.index,df_by_hr['rider'],alpha=0.2,color='grey')
ax2=ax[0,0].twinx()
ax2.plot(df_by_hr.index,df_by_hr['temp'],color='r')
ax2.plot(df_by_hr.index,df_by_hr['fltemp'],color='orange')
ax[0,0].set_ylabel('Mean Hourly Ridership')
ax2.set_ylabel("Temperature/Feels_like_temperature")
plt.legend(['Temperature','Feels_like_Temperature'])
ax[0,1].bar(df_by_hr.index,df_by_hr['rider'],alpha=0.2,color='grey')
ax3=ax[0,1].twinx()
ax3.plot(df_by_hr.index,df_by_hr['windspeed'],color='maroon')
ax[0,1].set_ylabel('Mean Hourly Ridership')
ax3.set_ylabel("Windspeed")
plt.legend(['Windspeed'])
ax[1,0].bar(df_by_hr.index,df_by_hr['rider'],alpha=0.2,color='grey')
ax4=ax[1,0].twinx()
ax4.plot(df_by_hr.index,df_by_hr['rh'],color='green')
ax[1,0].set_ylabel('Mean Hourly Ridership')
ax4.set_ylabel('Relative Humidity')
plt.legend(['Relative Humidity'])
ax[1,1].bar(df_by_hr.index,df_by_hr['rider'],alpha=0.2,color='grey')
ax5=ax[1,1].twinx()
ax5.plot(df_by_hr.index,df_by_hr['psi'],color='blue')
ax[1,1].set_ylabel('Mean Hourly Ridership')
ax5.set_ylabel("PSI")
plt.legend(['PSI'])
plt.show()
# %% [markdown]
# We can observe from the above that the shape of the hourly scooter counts follows windspeed most strongly and, to a lesser extent, temperature and feels_like_temperature. PSI does not seem to be a strong explanatory variable, while relative humidity shows a negative relationship.
#
# The exact influence of each factor may vary in different seasons.
# %% [markdown]
# We want to create the same graphs as above, but this time split them by season.
# We will split the months into:
# - spring: Mar, Apr, May
# - summer: June, July, Aug
# - fall: Sept, Oct, Nov
# - winter: Dec, Jan, Feb
# %%
# create a new column 'Season' and do the mapping by months
df['season']=df['month']
# %%
# map month into season
df['season'].replace([3,4,5],'Spring',inplace=True)
df['season'].replace([6,7,8],'Summer',inplace=True)
df['season'].replace([9,10,11],'Fall',inplace=True)
df['season'].replace([12,1,2],'Winter',inplace=True)
# %%
df.head()
# %%
df['season'].replace('Spring',1,inplace=True)
df['season'].replace('Summer',2,inplace=True)
df['season'].replace('Fall',3,inplace=True)
df['season'].replace('Winter',4,inplace=True)
df.head()
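# %% [markdown]
# An equivalent one-step mapping (sketch): going straight from month to the
# numeric season code avoids the chain of string replacements above.
# %%
# season_codes = {3: 1, 4: 1, 5: 1, 6: 2, 7: 2, 8: 2,
#                 9: 3, 10: 3, 11: 3, 12: 4, 1: 4, 2: 4}
# df['season'] = df['month'].map(season_codes)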
# %%
# how does temperature affect the ridership in different seasons?
sns.relplot(x="temperature",y="guest_scooter",data=df,kind="line",hue="season",alpha=0.3,palette='Set1',legend=False)
sns.set_style("white")
plt.title("Temperature vs Hourly Scooters")
plt.xlabel("Temperature")
plt.ylabel("Hourly Scooters")
plt.legend(['Spring','Summer','Fall','Winter'])
plt.show()
# %%
# how does relative humidity affect the ridership in different seasons?
sns.relplot(x="relative_humidity",y="guest_scooter",data=df,kind="line",hue="season",alpha=0.3,palette='Set1',legend=False)
sns.set_style("white")
plt.title("Relative Humidity vs Hourly Scooters")
plt.xlabel("Relative Humidity")
plt.ylabel("Hourly Scooters")
plt.legend(['Spring','Summer','Fall','Winter'])
plt.show()
# %%
# how does windspeed affect the ridership in different seasons?
sns.relplot(x="windspeed",y="guest_scooter",data=df,kind="line",hue="season",alpha=0.3,palette='Set1',legend=False)
sns.set_style("white")
plt.title("Windspeed vs Hourly Scooters")
plt.xlabel("Windspeed")
plt.ylabel("Hourly Scooters")
plt.legend(['Spring','Summer','Fall','Winter'])
plt.show()
# %%
# how does the feels-like temperature affect the ridership in different seasons?
sns.relplot(x="feels_like_temperature",y="guest_scooter",data=df,kind="line",hue="season",alpha=0.3,palette='Set1',legend=False)
sns.set_style("white")
plt.title("Feels_Like_Temperature vs Hourly Scooters")
plt.xlabel("Windspeed")
plt.ylabel("Hourly Scooters")
plt.legend(['Spring','Summer','Fall','Winter'])
plt.show()
# %% [markdown]
# ## Section 5. Conclusion
# %% [markdown]
# After this exercise, we have come to an understanding of:
# - the basic structure of this dataset, e.g. the target feature, 'guest_scooter', is a continuous numeric feature. This will help us choose the suitable models in our Machine Learning Pipeline later.
# - the preprocessing steps required on this dataset
# - Several attributes that might have strong influence on the number of scooters, especially hour of day, month/season, temperature, relative_humidity, and windspeed
#
# All these will come in handy when we build the Machine Learning Pipeline for predicting the scooter numbers.
form-field-config.type.ts | /**
* This file is part of OpenMediaVault.
*
* @license http://www.gnu.org/licenses/gpl.html GPL Version 3
* @author Volker Theile <[email protected]>
* @copyright Copyright (c) 2009-2021 Volker Theile
*
* OpenMediaVault is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* any later version.
*
* OpenMediaVault is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*/
import { Constraint } from '~/app/shared/models/constraint.type';
import { DataStore } from '~/app/shared/models/data-store.type';
import { DatatableColumn } from '~/app/shared/models/datatable-column.type';
import { Sorter } from '~/app/shared/models/sorter.type';
export type FormFieldConfig = {
// The following field controls are supported:
// .--------------------------------------------------------------------.
// | Type | Description |
// |-------------|------------------------------------------------------|
// | confObjUuid | This is a hidden field that contains an UUID. By |
// | | default it is set to the UUID that tells the backend |
  // |             | that it should handle the data to create a new      |
// | | database configuration object. |
// | hidden | A hidden form field. |
// | divider | Draws a horizontal line. |
// | paragraph | Displays a title and draws a horizontal line. |
// | ... | ... |
// | container | Align child fields in horizontal order. |
// '--------------------------------------------------------------------'
type:
| 'confObjUuid'
| 'hidden'
| 'divider'
| 'paragraph'
| 'button'
| 'iconButton'
| 'textInput'
| 'folderBrowser'
| 'numberInput'
| 'checkbox'
| 'textarea'
| 'fileInput'
| 'select'
| 'sharedFolderSelect'
| 'sslCertSelect'
| 'passwordInput'
| 'datePicker'
| 'datatable'
| 'slider'
| 'container';
name?: string;
label?: string;
placeholder?: string;
tooltip?: string;
hint?: string;
value?: any;
readonly?: boolean;
// Disable the field.
// Use a tokenized string to be able to mark the field as disabled
// dynamically based on its evaluation result. The special token
  // `_routeConfig` can be used to access the component's route configuration.
// Make sure the token will be evaluated to a boolean value.
//
// Example:
// { disabled: '{{ _routeConfig.data.editing | toboolean }}' }
// The component's route configuration:
// {
// path: 'edit/:name',
// component: UserFormPageComponent,
// data: {
// title: gettext('Edit'),
// editing: true,
// notificationTitle: gettext('Updated user "{{ name }}".')
// }
// }
disabled?: boolean | string;
// Modify the form field depending on a specified constraint. The
// constraint must be truthy to apply.
modifiers?: Array<{
type:
| 'disabled'
| 'enabled'
| 'checked'
| 'unchecked'
| 'focused'
| 'visible'
| 'hidden'
| 'value';
// Optional configuration used by modifiers. This is required by
// the 'value' modifier, e.g. '{{ <NAME> }}' to set the value
// of the given field.
typeConfig?: any;
// Apply the opposite type, e.g. `disabled` for `enabled`, if the
// constraint is falsy. Defaults to `true`.
opposite?: boolean;
// The constraint can access the current form field
// values, e.g. '{ field: '<NAME>' }'
constraint: Constraint;
}>;
autofocus?: boolean;
icon?: string;
submitValue?: boolean;
// The event name for control to update upon. Defaults to `change`.
updateOn?: 'change' | 'blur';
// The validators to ensure the form field content is
// in a valid state.
validators?: {
// Mark the field as required.
// Use a tokenized string to be able to mark the field as required
// dynamically based on its evaluation result. The special token
  // `_routeConfig` can be used to access the component's route configuration.
// Make sure the token will be evaluated to a boolean value.
required?: boolean | string;
// When the constraint succeeds and the control has
// an empty value, then the 'required' error is set.
requiredIf?: Constraint;
minLength?: number;
maxLength?: number;
min?: number;
max?: number;
email?: boolean;
custom?: Array<FormFieldConstraintValidator>;
pattern?: {
pattern: string;
errorData?: any;
};
patternType?:
| 'userName'
| 'groupName'
| 'shareName'
| 'email'
| 'ip'
| 'ipv4'
| 'ipv6'
| 'ipList'
| 'ipNetCidr'
| 'hostName'
| 'hostNameIpNetCidr'
| 'hostNameIpNetCidrList'
| 'domainName'
| 'domainNameList'
| 'domainNameIp'
| 'domainNameIpList'
| 'port'
| 'time'
| 'sshPubKey'
| 'sshPubKeyRfc4716'
| 'sshPubKeyOpenSsh'
| 'netmask';
};
// --- container ---
fields?: Array<FormFieldConfig>;
// Fields in a container will respect the 'flex' configuration.
// Specifies the size of the field in percent.
flex?: number;
// --- button | paragraph ---
text?: string;
// --- button | iconButton ---
click?: () => void;
request?: {
// The name of the RPC service.
service: string;
// The name of the RPC.
method: string;
// The RPC parameters. The parameters will be formatted using
// the values from the parent form.
params?: Record<string, any>;
// Set `true` if the RPC is a background task.
task?: boolean;
// If a message is defined, then the UI will be blocked and
// the message is displayed while the request is running.
progressMessage?: string;
// Display a notification when the request was successful.
successNotification?: string;
// Navigate to this URL when the request was successful.
// The URL will be formatted using the values from the parent
// form. The RPC response is accessible via '_response'.
// Example:
// /foo/bar/{{ xyz }}/{{ _response['baz'] }}
// where `xyz` will be replaced by the value of the form field
// named `xyz` and `_response['baz']` by the property `baz` of
// the map/object returned by the RPC.
// Example:
// /externalRedirect/{{ _response['url'] }}
// Redirect to an external URL. The URL must not be escaped,
// this will be done automatically.
successUrl?: string;
};
// The URL will be formatted using the parent form field values.
url?: string;
// --- folderBrowser ---
dirType?: 'sharedfolder' | 'mntent';
// The name of the field that contains the UUID of the
// shared folder or mount point configuration object.
dirRefIdField?: string;
// --- numberInput | slider ---
  step?: number;
  // --- numberInput | password | textInput ---
  autocomplete?: string;
// Note, this button is only visible if the browser supports
// that. The following requirements must be met:
// - The HTTPS protocol is used. localhost is also supported.
// - The site is not embedded in an iFrame.
hasCopyToClipboardButton?: boolean;
// --- textarea | fileInput ---
rows?: number;
cols?: number;
// --- textarea | textInput | fileInput ---
// Use a monospaced font.
monospaced?: boolean;
// --- fileInput ---
accept?: string;
// --- select ---
multiple?: boolean;
// Defaults to 'value'.
valueField?: string;
// Defaults to 'text'.
textField?: string;
store?: DataStore;
// Add an empty option to be able to clear the selection.
hasEmptyOption?: boolean;
// The text displayed in the option with the empty value.
// Defaults to 'None'.
emptyOptionText?: string;
selectionChange?: (value: any) => void;
// --- paragraph ---
hasDivider?: boolean;
// --- datatable ---
columns?: Array<DatatableColumn>;
columnMode?: 'standard' | 'flex' | 'force';
hasActionBar?: boolean;
hasSearchField?: boolean;
hasHeader?: boolean;
hasFooter?: boolean;
selectionType?: 'none' | 'single' | 'multi';
limit?: number;
sorters?: Array<Sorter>;
actions?: Array<{
// Specifies a template which pre-configures the action button.
// add - Shows a form dialog. When the dialog is successfully
// closed, then the form values will be used to add a new
// row to the datatable.
// edit - Shows a form dialog which displays the data of the
// current selected row. The action button is only enabled
// when one row is selected. When the dialog is
// successfully closed, then the form values are used
// to update the current selected row.
// delete - The action button is only enabled when one row is
// selected. If pressed, the current selected row will
// be removed from the datatable.
template: 'add' | 'edit' | 'delete';
dialogConfig?: {
// The dialog title.
title?: string;
// Width of the dialog.
width?: string;
// Height of the dialog.
height?: string;
// The form fields of the dialog that is displayed when the 'Add'
// or 'Edit' button is pressed.
fields?: Array<FormFieldConfig>;
};
}>;
// Specifies the type of the array items. Defaults to `object`.
valueType?: 'string' | 'integer' | 'number' | 'object';
};
export type FormFieldConstraintValidator = {
// When the constraint is falsy, then the specified
// error code is set.
constraint: Constraint;
// The error code, e.g. 'required' or 'email'.
// Defaults to 'constraint'.
errorCode?: string;
// The error data, e.g. a boolean `true` or the message displayed
// below the form field.
errorData?: any;
};
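// Illustrative sketch only: a hypothetical configuration built from the types
// above (the names and values are invented, not taken from OpenMediaVault;
// the exact `Constraint` shape is defined elsewhere, in constraint.type).
//
// const portField: FormFieldConfig = {
//   type: 'numberInput',
//   name: 'port',
//   label: 'Port',
//   value: 8080,
//   validators: {
//     required: true,
//     patternType: 'port'
//   }
// };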
lib.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// TODO we're going to allocate a whole bunch of temp Strings, is it worth
// keeping some scratch mem for this and running our own StrPool?
// TODO for lint violations of names, emit a refactor script
#[macro_use]
extern crate log;
extern crate syntex_syntax as syntax;
extern crate rustc_serialize;
extern crate strings;
extern crate unicode_segmentation;
extern crate regex;
extern crate diff;
extern crate term;
use syntax::ast;
use syntax::codemap::{mk_sp, CodeMap, Span};
use syntax::errors::Handler;
use syntax::errors::emitter::{ColorConfig, EmitterWriter};
use syntax::parse::{self, ParseSess};
use std::io::stdout;
use std::ops::{Add, Sub};
use std::path::Path;
use std::rc::Rc;
use std::collections::HashMap;
use std::fmt;
use issues::{BadIssueSeeker, Issue};
use filemap::FileMap;
use visitor::FmtVisitor;
use config::Config;
#[macro_use]
mod utils;
pub mod config;
pub mod filemap;
mod visitor;
mod checkstyle;
mod items;
mod missed_spans;
mod lists;
mod types;
mod expr;
mod imports;
mod issues;
mod rewrite;
mod string;
mod comment;
mod modules;
pub mod rustfmt_diff;
mod chains;
mod macros;
mod patterns;
const MIN_STRING: usize = 10;
// When we get scoped annotations, we should have rustfmt::skip.
const SKIP_ANNOTATION: &'static str = "rustfmt_skip";
pub trait Spanned {
fn span(&self) -> Span;
}
impl Spanned for ast::Expr {
fn span(&self) -> Span {
self.span
}
}
impl Spanned for ast::Pat {
fn span(&self) -> Span {
self.span
}
}
impl Spanned for ast::Ty {
fn span(&self) -> Span {
self.span
}
}
impl Spanned for ast::Arg {
fn span(&self) -> Span {
if items::is_named_arg(self) {
mk_sp(self.pat.span.lo, self.ty.span.hi)
} else {
self.ty.span
}
}
}
#[derive(Copy, Clone, Debug)]
pub struct Indent {
// Width of the block indent, in characters. Must be a multiple of
// Config::tab_spaces.
pub block_indent: usize,
// Alignment in characters.
pub alignment: usize,
}
impl Indent {
pub fn new(block_indent: usize, alignment: usize) -> Indent {
Indent {
block_indent: block_indent,
alignment: alignment,
}
}
pub fn empty() -> Indent {
Indent::new(0, 0)
}
pub fn block_indent(mut self, config: &Config) -> Indent {
self.block_indent += config.tab_spaces;
self
}
pub fn block_unindent(mut self, config: &Config) -> Indent {
self.block_indent -= config.tab_spaces;
self
}
pub fn width(&self) -> usize {
self.block_indent + self.alignment
}
pub fn to_string(&self, config: &Config) -> String {
let (num_tabs, num_spaces) = if config.hard_tabs {
(self.block_indent / config.tab_spaces, self.alignment)
} else {
(0, self.block_indent + self.alignment)
};
let num_chars = num_tabs + num_spaces;
let mut indent = String::with_capacity(num_chars);
for _ in 0..num_tabs {
indent.push('\t')
}
for _ in 0..num_spaces {
indent.push(' ')
}
indent
}
}
impl Add for Indent {
type Output = Indent;
fn add(self, rhs: Indent) -> Indent {
Indent {
            block_indent: self.block_indent + rhs.block_indent,
            alignment: self.alignment + rhs.alignment,
        }
    }
}
type Output = Indent;
fn sub(self, rhs: Indent) -> Indent {
Indent::new(self.block_indent - rhs.block_indent,
self.alignment - rhs.alignment)
}
}
impl Add<usize> for Indent {
type Output = Indent;
fn add(self, rhs: usize) -> Indent {
Indent::new(self.block_indent, self.alignment + rhs)
}
}
impl Sub<usize> for Indent {
type Output = Indent;
fn sub(self, rhs: usize) -> Indent {
Indent::new(self.block_indent, self.alignment - rhs)
}
}
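// Illustrative examples of the arithmetic above (hypothetical asserts, not
// part of the original source):
//
// let a = Indent::new(4, 2) + Indent::new(4, 1); // block 8, alignment 3
// assert_eq!(a.width(), 11);
// assert_eq!((a - 3).alignment, 0); // Sub<usize> only shrinks the alignment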
pub enum ErrorKind {
// Line has exceeded character limit
LineOverflow,
// Line ends in whitespace
TrailingWhitespace,
// TO-DO or FIX-ME item without an issue number
BadIssue(Issue),
}
impl fmt::Display for ErrorKind {
fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
match *self {
ErrorKind::LineOverflow => write!(fmt, "line exceeded maximum length"),
ErrorKind::TrailingWhitespace => write!(fmt, "left behind trailing whitespace"),
ErrorKind::BadIssue(issue) => write!(fmt, "found {}", issue),
}
}
}
// Formatting errors that are identified *after* rustfmt has run.
pub struct FormattingError {
line: u32,
kind: ErrorKind,
}
impl FormattingError {
fn msg_prefix(&self) -> &str {
match self.kind {
ErrorKind::LineOverflow |
ErrorKind::TrailingWhitespace => "Rustfmt failed at",
ErrorKind::BadIssue(_) => "WARNING:",
}
}
fn msg_suffix(&self) -> &str {
match self.kind {
ErrorKind::LineOverflow |
ErrorKind::TrailingWhitespace => "(sorry)",
ErrorKind::BadIssue(_) => "",
}
}
}
pub struct FormatReport {
// Maps stringified file paths to their associated formatting errors.
file_error_map: HashMap<String, Vec<FormattingError>>,
}
impl FormatReport {
pub fn warning_count(&self) -> usize {
self.file_error_map.iter().map(|(_, ref errors)| errors.len()).fold(0, |acc, x| acc + x)
}
}
impl fmt::Display for FormatReport {
// Prints all the formatting errors.
fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
for (file, errors) in &self.file_error_map {
for error in errors {
try!(write!(fmt,
"{} {}:{}: {} {}\n",
error.msg_prefix(),
file,
error.line,
error.kind,
error.msg_suffix()));
}
}
Ok(())
}
}
// Formatting which depends on the AST.
fn fmt_ast(krate: &ast::Crate,
parse_session: &ParseSess,
main_file: &Path,
config: &Config)
-> FileMap {
let mut file_map = FileMap::new();
for (path, module) in modules::list_files(krate, parse_session.codemap()) {
if config.skip_children && path.as_path() != main_file {
continue;
}
let path = path.to_str().unwrap();
if config.verbose {
println!("Formatting {}", path);
}
let mut visitor = FmtVisitor::from_codemap(parse_session, config);
visitor.format_separate_mod(module);
file_map.insert(path.to_owned(), visitor.buffer);
}
file_map
}
// Formatting done on a char by char or line by line basis.
// TODO(#209) warn on bad license
// TODO(#20) other stuff for parity with make tidy
pub fn fmt_lines(file_map: &mut FileMap, config: &Config) -> FormatReport {
let mut truncate_todo = Vec::new();
let mut report = FormatReport { file_error_map: HashMap::new() };
// Iterate over the chars in the file map.
for (f, text) in file_map.iter() {
let mut trims = vec![];
let mut last_wspace: Option<usize> = None;
let mut line_len = 0;
let mut cur_line = 1;
let mut newline_count = 0;
let mut errors = vec![];
let mut issue_seeker = BadIssueSeeker::new(config.report_todo, config.report_fixme);
for (c, b) in text.chars() {
if c == '\r' {
line_len += c.len_utf8();
continue;
}
// Add warnings for bad todos/ fixmes
if let Some(issue) = issue_seeker.inspect(c) {
errors.push(FormattingError {
line: cur_line,
kind: ErrorKind::BadIssue(issue),
});
}
if c == '\n' {
// Check for (and record) trailing whitespace.
if let Some(lw) = last_wspace {
trims.push((cur_line, lw, b));
line_len -= b - lw;
}
// Check for any line width errors we couldn't correct.
if line_len > config.max_width {
errors.push(FormattingError {
line: cur_line,
kind: ErrorKind::LineOverflow,
});
}
line_len = 0;
cur_line += 1;
newline_count += 1;
last_wspace = None;
} else {
newline_count = 0;
line_len += c.len_utf8();
if c.is_whitespace() {
if last_wspace.is_none() {
last_wspace = Some(b);
}
} else {
last_wspace = None;
}
}
}
if newline_count > 1 {
debug!("track truncate: {} {} {}", f, text.len, newline_count);
truncate_todo.push((f.to_owned(), text.len - newline_count + 1))
}
for &(l, _, _) in &trims {
errors.push(FormattingError {
line: l,
kind: ErrorKind::TrailingWhitespace,
});
}
report.file_error_map.insert(f.to_owned(), errors);
}
for (f, l) in truncate_todo {
file_map.get_mut(&f).unwrap().truncate(l);
}
report
}
pub fn format_string(input: String, config: &Config) -> FileMap {
let path = "stdin";
let codemap = Rc::new(CodeMap::new());
let tty_handler = Handler::with_tty_emitter(ColorConfig::Auto,
None,
true,
false,
codemap.clone());
let mut parse_session = ParseSess::with_span_handler(tty_handler, codemap.clone());
let krate = parse::parse_crate_from_source_str(path.to_owned(),
input,
Vec::new(),
&parse_session);
// Suppress error output after parsing.
let silent_emitter = Box::new(EmitterWriter::new(Box::new(Vec::new()), None, codemap.clone()));
parse_session.span_diagnostic = Handler::with_emitter(true, false, silent_emitter);
// FIXME: we still use a FileMap even though we only have
// one file, because fmt_lines requires a FileMap
let mut file_map = FileMap::new();
// do the actual formatting
let mut visitor = FmtVisitor::from_codemap(&parse_session, config);
visitor.format_separate_mod(&krate.module);
// append final newline
visitor.buffer.push_str("\n");
file_map.insert(path.to_owned(), visitor.buffer);
file_map
}
pub fn format(file: &Path, config: &Config) -> FileMap {
let codemap = Rc::new(CodeMap::new());
let tty_handler = Handler::with_tty_emitter(ColorConfig::Auto,
None,
true,
false,
codemap.clone());
let mut parse_session = ParseSess::with_span_handler(tty_handler, codemap.clone());
let krate = parse::parse_crate_from_file(file, Vec::new(), &parse_session);
// Suppress error output after parsing.
let silent_emitter = Box::new(EmitterWriter::new(Box::new(Vec::new()), None, codemap.clone()));
parse_session.span_diagnostic = Handler::with_emitter(true, false, silent_emitter);
let mut file_map = fmt_ast(&krate, &parse_session, file, config);
// For some reason, the codemap does not include terminating
// newlines so we must add one on for each file. This is sad.
filemap::append_newlines(&mut file_map);
file_map
}
pub fn run(file: &Path, config: &Config) {
let mut result = format(file, config);
print!("{}", fmt_lines(&mut result, config));
let out = stdout();
let write_result = filemap::write_all_files(&result, out, config);
if let Err(msg) = write_result {
println!("Error writing files: {}", msg);
}
}
// Similar to run, but takes an input String instead of a file to format
pub fn run_from_stdin(input: String, config: &Config) {
let mut result = format_string(input, config);
fmt_lines(&mut result, config);
let mut out = stdout();
let write_result = filemap::write_file(&result["stdin"], "stdin", &mut out, config);
if let Err(msg) = write_result {
panic!("Error writing to stdout: {}", msg);
}
}
lib.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// TODO we're going to allocate a whole bunch of temp Strings, is it worth
// keeping some scratch mem for this and running our own StrPool?
// TODO for lint violations of names, emit a refactor script
#[macro_use]
extern crate log;
extern crate syntex_syntax as syntax;
extern crate rustc_serialize;
extern crate strings;
extern crate unicode_segmentation;
extern crate regex;
extern crate diff;
extern crate term;
use syntax::ast;
use syntax::codemap::{mk_sp, CodeMap, Span};
use syntax::errors::Handler;
use syntax::errors::emitter::{ColorConfig, EmitterWriter};
use syntax::parse::{self, ParseSess};
use std::io::stdout;
use std::ops::{Add, Sub};
use std::path::Path;
use std::rc::Rc;
use std::collections::HashMap;
use std::fmt;
use issues::{BadIssueSeeker, Issue};
use filemap::FileMap;
use visitor::FmtVisitor;
use config::Config;
#[macro_use]
mod utils;
pub mod config;
pub mod filemap;
mod visitor;
mod checkstyle;
mod items;
mod missed_spans;
mod lists;
mod types;
mod expr;
mod imports;
mod issues;
mod rewrite;
mod string;
mod comment;
mod modules;
pub mod rustfmt_diff;
mod chains;
mod macros;
mod patterns;
const MIN_STRING: usize = 10;
// When we get scoped annotations, we should have rustfmt::skip.
const SKIP_ANNOTATION: &'static str = "rustfmt_skip";
pub trait Spanned {
fn span(&self) -> Span;
}
impl Spanned for ast::Expr {
    fn span(&self) -> Span {
self.span
}
}
impl Spanned for ast::Pat {
fn span(&self) -> Span {
self.span
}
}
impl Spanned for ast::Ty {
fn span(&self) -> Span {
self.span
}
}
impl Spanned for ast::Arg {
fn span(&self) -> Span {
if items::is_named_arg(self) {
mk_sp(self.pat.span.lo, self.ty.span.hi)
} else {
self.ty.span
}
}
}
#[derive(Copy, Clone, Debug)]
pub struct Indent {
// Width of the block indent, in characters. Must be a multiple of
// Config::tab_spaces.
pub block_indent: usize,
// Alignment in characters.
pub alignment: usize,
}
impl Indent {
pub fn new(block_indent: usize, alignment: usize) -> Indent {
Indent {
block_indent: block_indent,
alignment: alignment,
}
}
pub fn empty() -> Indent {
Indent::new(0, 0)
}
pub fn block_indent(mut self, config: &Config) -> Indent {
self.block_indent += config.tab_spaces;
self
}
pub fn block_unindent(mut self, config: &Config) -> Indent {
self.block_indent -= config.tab_spaces;
self
}
pub fn width(&self) -> usize {
self.block_indent + self.alignment
}
pub fn to_string(&self, config: &Config) -> String {
let (num_tabs, num_spaces) = if config.hard_tabs {
(self.block_indent / config.tab_spaces, self.alignment)
} else {
(0, self.block_indent + self.alignment)
};
let num_chars = num_tabs + num_spaces;
let mut indent = String::with_capacity(num_chars);
for _ in 0..num_tabs {
indent.push('\t')
}
for _ in 0..num_spaces {
indent.push(' ')
}
indent
}
}
impl Add for Indent {
type Output = Indent;
fn add(self, rhs: Indent) -> Indent {
Indent {
block_indent: self.block_indent + rhs.block_indent,
alignment: self.alignment + rhs.alignment,
}
}
}
impl Sub for Indent {
type Output = Indent;
fn sub(self, rhs: Indent) -> Indent {
Indent::new(self.block_indent - rhs.block_indent,
self.alignment - rhs.alignment)
}
}
impl Add<usize> for Indent {
type Output = Indent;
fn add(self, rhs: usize) -> Indent {
Indent::new(self.block_indent, self.alignment + rhs)
}
}
impl Sub<usize> for Indent {
type Output = Indent;
fn sub(self, rhs: usize) -> Indent {
Indent::new(self.block_indent, self.alignment - rhs)
}
}
pub enum ErrorKind {
// Line has exceeded character limit
LineOverflow,
// Line ends in whitespace
TrailingWhitespace,
// TO-DO or FIX-ME item without an issue number
BadIssue(Issue),
}
impl fmt::Display for ErrorKind {
fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
match *self {
ErrorKind::LineOverflow => write!(fmt, "line exceeded maximum length"),
ErrorKind::TrailingWhitespace => write!(fmt, "left behind trailing whitespace"),
ErrorKind::BadIssue(issue) => write!(fmt, "found {}", issue),
}
}
}
// Formatting errors that are identified *after* rustfmt has run.
pub struct FormattingError {
line: u32,
kind: ErrorKind,
}
impl FormattingError {
fn msg_prefix(&self) -> &str {
match self.kind {
ErrorKind::LineOverflow |
ErrorKind::TrailingWhitespace => "Rustfmt failed at",
ErrorKind::BadIssue(_) => "WARNING:",
}
}
fn msg_suffix(&self) -> &str {
match self.kind {
ErrorKind::LineOverflow |
ErrorKind::TrailingWhitespace => "(sorry)",
ErrorKind::BadIssue(_) => "",
}
}
}
pub struct FormatReport {
// Maps stringified file paths to their associated formatting errors.
file_error_map: HashMap<String, Vec<FormattingError>>,
}
impl FormatReport {
pub fn warning_count(&self) -> usize {
self.file_error_map.iter().map(|(_, ref errors)| errors.len()).fold(0, |acc, x| acc + x)
}
}
impl fmt::Display for FormatReport {
// Prints all the formatting errors.
fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
for (file, errors) in &self.file_error_map {
for error in errors {
try!(write!(fmt,
"{} {}:{}: {} {}\n",
error.msg_prefix(),
file,
error.line,
error.kind,
error.msg_suffix()));
}
}
Ok(())
}
}
// Formatting which depends on the AST.
fn fmt_ast(krate: &ast::Crate,
parse_session: &ParseSess,
main_file: &Path,
config: &Config)
-> FileMap {
let mut file_map = FileMap::new();
for (path, module) in modules::list_files(krate, parse_session.codemap()) {
if config.skip_children && path.as_path() != main_file {
continue;
}
let path = path.to_str().unwrap();
if config.verbose {
println!("Formatting {}", path);
}
let mut visitor = FmtVisitor::from_codemap(parse_session, config);
visitor.format_separate_mod(module);
file_map.insert(path.to_owned(), visitor.buffer);
}
file_map
}
// Formatting done on a char by char or line by line basis.
// TODO(#209) warn on bad license
// TODO(#20) other stuff for parity with make tidy
pub fn fmt_lines(file_map: &mut FileMap, config: &Config) -> FormatReport {
let mut truncate_todo = Vec::new();
let mut report = FormatReport { file_error_map: HashMap::new() };
// Iterate over the chars in the file map.
for (f, text) in file_map.iter() {
let mut trims = vec![];
let mut last_wspace: Option<usize> = None;
let mut line_len = 0;
let mut cur_line = 1;
let mut newline_count = 0;
let mut errors = vec![];
let mut issue_seeker = BadIssueSeeker::new(config.report_todo, config.report_fixme);
for (c, b) in text.chars() {
if c == '\r' {
line_len += c.len_utf8();
continue;
}
// Add warnings for bad todos/ fixmes
if let Some(issue) = issue_seeker.inspect(c) {
errors.push(FormattingError {
line: cur_line,
kind: ErrorKind::BadIssue(issue),
});
}
if c == '\n' {
// Check for (and record) trailing whitespace.
if let Some(lw) = last_wspace {
trims.push((cur_line, lw, b));
line_len -= b - lw;
}
// Check for any line width errors we couldn't correct.
if line_len > config.max_width {
errors.push(FormattingError {
line: cur_line,
kind: ErrorKind::LineOverflow,
});
}
line_len = 0;
cur_line += 1;
newline_count += 1;
last_wspace = None;
} else {
newline_count = 0;
line_len += c.len_utf8();
if c.is_whitespace() {
if last_wspace.is_none() {
last_wspace = Some(b);
}
} else {
last_wspace = None;
}
}
}
if newline_count > 1 {
debug!("track truncate: {} {} {}", f, text.len, newline_count);
truncate_todo.push((f.to_owned(), text.len - newline_count + 1))
}
for &(l, _, _) in &trims {
errors.push(FormattingError {
line: l,
kind: ErrorKind::TrailingWhitespace,
});
}
report.file_error_map.insert(f.to_owned(), errors);
}
for (f, l) in truncate_todo {
file_map.get_mut(&f).unwrap().truncate(l);
}
report
}
pub fn format_string(input: String, config: &Config) -> FileMap {
let path = "stdin";
let codemap = Rc::new(CodeMap::new());
let tty_handler = Handler::with_tty_emitter(ColorConfig::Auto,
None,
true,
false,
codemap.clone());
let mut parse_session = ParseSess::with_span_handler(tty_handler, codemap.clone());
let krate = parse::parse_crate_from_source_str(path.to_owned(),
input,
Vec::new(),
&parse_session);
// Suppress error output after parsing.
let silent_emitter = Box::new(EmitterWriter::new(Box::new(Vec::new()), None, codemap.clone()));
parse_session.span_diagnostic = Handler::with_emitter(true, false, silent_emitter);
// FIXME: we still use a FileMap even though we only have
// one file, because fmt_lines requires a FileMap
let mut file_map = FileMap::new();
// do the actual formatting
let mut visitor = FmtVisitor::from_codemap(&parse_session, config);
visitor.format_separate_mod(&krate.module);
// append final newline
visitor.buffer.push_str("\n");
file_map.insert(path.to_owned(), visitor.buffer);
file_map
}
pub fn format(file: &Path, config: &Config) -> FileMap {
let codemap = Rc::new(CodeMap::new());
let tty_handler = Handler::with_tty_emitter(ColorConfig::Auto,
None,
true,
false,
codemap.clone());
let mut parse_session = ParseSess::with_span_handler(tty_handler, codemap.clone());
let krate = parse::parse_crate_from_file(file, Vec::new(), &parse_session);
// Suppress error output after parsing.
let silent_emitter = Box::new(EmitterWriter::new(Box::new(Vec::new()), None, codemap.clone()));
parse_session.span_diagnostic = Handler::with_emitter(true, false, silent_emitter);
let mut file_map = fmt_ast(&krate, &parse_session, file, config);
// For some reason, the codemap does not include terminating
// newlines so we must add one on for each file. This is sad.
filemap::append_newlines(&mut file_map);
file_map
}
pub fn run(file: &Path, config: &Config) {
let mut result = format(file, config);
print!("{}", fmt_lines(&mut result, config));
let out = stdout();
let write_result = filemap::write_all_files(&result, out, config);
if let Err(msg) = write_result {
println!("Error writing files: {}", msg);
}
}
// Similar to run, but takes an input String instead of a file to format
pub fn run_from_stdin(input: String, config: &Config) {
let mut result = format_string(input, config);
fmt_lines(&mut result, config);
let mut out = stdout();
let write_result = filemap::write_file(&result["stdin"], "stdin", &mut out, config);
if let Err(msg) = write_result {
panic!("Error writing to stdout: {}", msg);
}
}
trajectory_tracking.py | #!/usr/bin/env python3
import rospy
from nav_msgs.msg import Odometry
from geometry_msgs.msg import *
from rospy.core import rospyinfo
from std_msgs import msg
from tf.transformations import euler_from_quaternion
from gazebo_msgs.msg import ModelStates
import yaml
import matplotlib.pyplot as plt
from sensor_msgs.msg import LaserScan
import numpy as np
from trajectory_generation import Trajectory_generation
from Linear_control import Linear_control_law, nonLinear_control_law
import goalpos as g
import dist_obj as dist
import math
class Trajectory_tracking():
#attributes
t = []
x_d = []
y_d = []
v_d = []
w_d = []
theta_d = []
q=[]
dotx_d=[]
doty_d=[]
appov=[]
appow=[]
appox = []
appoy = []
appoteta = []
appopsi = []
thetaprec=0
A_park=[]
def __init__(self):
print("Starting node Trajectory control")
rospy.init_node('trajectory_tracking', anonymous=True)
self.twist_pub = rospy.Publisher('/cmd_vel', Twist, queue_size=10)
rospy.Subscriber('/ground_truth/state',Odometry, self.odometryCb)
rospy.Subscriber("/gazebo/model_states", ModelStates, self.callback)
self.a=dist.laser()
msg = rospy.wait_for_message("/scan", LaserScan, timeout=5)
self.a.get_flag(msg)
def callback(self, data):
# robot pose from ModelStates
indice = data.name.index('ferrari')
data_pose_x = data.pose[indice].position.x
data_pose_y = data.pose[indice].position.y
self.data_pose= np.array([data_pose_x,data_pose_y])
return self.data_pose
def odometryCb(self,msg):
#current robot pose
x = round(msg.pose.pose.position.x,4)
y = round(msg.pose.pose.position.y,4)
theta = round(self.get_angle_pose(msg.pose.pose),4)
        y=round(y-1.4*np.cos(theta+np.pi),4) ## move the reference point from the centre of the car to the centre of the rear axle
        x=round(x+1.4*np.sin(theta+np.pi),4) ## move the reference point from the centre of the car to the centre of the rear axle
self.q = np.array([x, y, theta])
return self.q
def get_angle_pose(self, quaternion_pose):
#compute angle from quaternion
#
q = [quaternion_pose.orientation.x,
quaternion_pose.orientation.y,
quaternion_pose.orientation.z,
quaternion_pose.orientation.w]
roll, pitch, yaw = euler_from_quaternion(q)
theta = yaw
tol=0.1
if abs(abs(theta)-abs(self.thetaprec))>2*np.pi-tol and self.thetaprec!=0:
theta=theta+2*np.pi-tol
else:
pass
self.thetaprec=theta
return theta
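    # A common one-line alternative to the threshold-based unwrapping above
    # (sketch only): renormalising with atan2 maps any angle into [-pi, pi].
    # def wrap_angle(angle):
    #     return math.atan2(math.sin(angle), math.cos(angle))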
def trajectory_generation(self, traj,a):
data = rospy.wait_for_message("/gazebo/model_states", ModelStates, timeout=5)
posizione = self.callback(data)
x = round(posizione[0],1)
y = round(posizione[1],1)
tg = Trajectory_generation()
q_i = self.get_pose()
self.trajectory=traj
if(self.trajectory == "parallel_parking" ):
(self.x_d, self.y_d,self.dotx_d,self.doty_d,self.v_d, self.w_d , self.theta_d , self.psi, self.A_park) =tg.parallel_parking_trajectory(q_i, self.t,a)
            # self.A_park is the starting point of the parking manoeuvre, i.e. the point level with the parking space that follows the free one
def get_laser(self):
        flag_free=self.a.park()[0] # flag indicating whether a free parking space has been found
        flag_occ=self.a.park()[1] # flag indicating whether the space lies before (2), alongside (1) or after (0)
if flag_free==1:
self.send_velocities(0,0,0)
self.send_velocities(0,0,0)
self.send_velocities(0,0,0)
self.send_velocities(0,0,0)
return flag_free,flag_occ
def get_pose(self):
#get robot position updated from callback
x = self.q[0]
y = self.q[1]
theta = self.q[2]
return np.array([x, y, theta])
    def get_error(self, T,traj):
        if(self.trajectory == "parallel_parking" ):
            (x, y, theta) = self.get_pose() # NB: x and y sit on the rear axle, not at the centre of the car
        else:
            (a, b, theta) = self.get_pose() # only theta is used from here
            x=self.data_pose[0]
            y=self.data_pose[1]
        #compute error
        e1 = (self.x_d[T] - x) * np.cos(theta) + (self.y_d[T] - y ) * np.sin(theta)
        e2 = -(self.x_d[T] - x) * np.sin(theta) + (self.y_d[T] - y ) * np.cos(theta)
        # theta (from odometry) flips sign beyond 3.14 (it reads 3.14 as -3.14 and then decreases towards 0), so wrap the error back into [-pi, pi]
        e3 = self.theta_d[T] - theta if len(self.theta_d) else 0
        if e3>np.pi :
            e3-=2*np.pi
        elif e3<-np.pi:
            e3+=2*np.pi
        else:
            pass
        print("x_d:{} and x_odom:{} sample:{}".format(self.x_d[T][0],x,T))
        print("y_d:{} and y_odom:{} sample:{}".format(self.y_d[T][0],y,T))
        print("theta_d:{} and theta_odom:{} sample:{}".format(self.theta_d[T],theta,T))
        return np.array([float(e1), float(e2), e3])
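    # Worked example of the frame rotation above (added for clarity): with the
    # robot at (0, 0) facing theta = pi/2 and a target at (1, 0),
    # e1 = 1*cos(pi/2) = 0 and e2 = -1*sin(pi/2) = -1, i.e. the target sits
    # directly to the robot's right with no longitudinal error.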
def unicicle_Linear_control(self,traj,zeta,a):
rospy.sleep(0.1) # need small time to setup q in callback
max_t = self.t[len(self.t) - 1]
len_t = len(self.t)
self.trajectory=traj
if(self.trajectory == "parallel_parking" ):
for i in np.arange(0, len_t):
now = rospy.get_time()
err = self.get_error(i,self.trajectory)
                if round(0.03*len_t)<=i<=round(0.87*len_t) : # use the controller between 3% and 87% of the trajectory
(v, w) = Linear_control_law(err, self.v_d[i], self.w_d[i],zeta,a)
                else: # use the desired values in the initial and final parts
                    # (this avoids controller spikes caused by values of v and w close to zero)
v=self.v_d[i]
w=self.w_d[i]
# (v, w) = Linear_control_law(err, self.v_d[i], self.w_d[i],zeta,a)
print("theta_d:{} and theta_odom:{} sample:{}".format(self.theta_d[i], self.q[2] , i) )
print("v_d:{} and v:{} sample:{}".format(self.v_d[i], v , i) )
print("w_d:{} and w:{} sample:{}".format(-self.w_d[i], w , i) )
print('Errors{}'.format(err))
self.send_velocities(v, w)
diff = rospy.get_time() - now
rospy.sleep(max_t/len_t + 0.0058)
self.appov.append(v)
self.appow.append(w)
self.appox.append(self.q[0])
self.appoy.append(self.q[1])
self.appoteta.append(self.q[2])
# self.appopsi.append(math.atan(w*2.85/v))
else:
pass
self.send_velocities(0,0,0)
self.send_velocities(0,0,0)
self.send_velocities(0,0,0)
self.send_velocities(0,0,0)
self.plot_wmr()
def plot_wmr(self):
plot1 = plt.figure(1)
plt.title('path')
plt.plot(self.x_d,self.y_d)
plt.plot(self.appox ,self.appoy )
plt.plot()
plt.xlabel('x')
plt.ylabel('y')
plot2 = plt.figure(2)
        plt.title('velocity')
plt.plot(self.t,self.v_d)
plt.plot(self.t,self.appov)
plt.xlabel('time')
        plt.ylabel('Linear velocity')
plot3 = plt.figure(3)
plt.plot(self.t,self.w_d)
plt.plot(self.t,self.appow)
plt.xlabel('time')
        plt.ylabel('Angular velocity')
plot4 = plt.figure(4)
plt.plot(self.t,self.theta_d)
plt.plot(self.t,self.appoteta)
plt.xlabel('time')
        plt.ylabel('theta')
# plot5 = plt.figure(5)
# plt.plot(self.t,self.psi)
# plt.plot(self.t,self.appopsi)
# plt.xlabel('time')
# plt.ylabel('psi')
plt.show()
def send_velocities(self, v, w, theta=None):
twist_msg = Twist() # Creating a new message to send to the robot
twist_msg.linear.x = v
twist_msg.angular.z = -w
self.twist_pub.publish(twist_msg)
def to_point(self):
toltheta=0.2
tol=0.05
vel=2
q_i = self.get_pose()
if np.pi/2-toltheta<=q_i[2]<=toltheta+np.pi/2 or -np.pi/2-toltheta<=q_i[2]<=toltheta-np.pi/2:
while q_i[0]<=self.A_park[0][0]-tol or q_i[0]>=self.A_park[0][0]+tol:
q_i = self.get_pose()
self.send_velocities(vel,0,0)
print("x_odom:{} and x_A:{}".format(q_i[0], round(self.A_park[0][0],4)))
elif -toltheta<=q_i[2]<=toltheta or np.pi-toltheta<=q_i[2]<=toltheta+np.pi:
while q_i[1]<=self.A_park[1][0]-tol or q_i[1]>=self.A_park[1][0]+tol:
q_i = self.get_pose()
self.send_velocities(vel,0,0)
print("y_odom:{} and y_A:{}".format(q_i[1], round(self.A_park[1][0],4)))
self.send_velocities(0,0,0)
self.send_velocities(0,0,0)
self.send_velocities(0,0,0)
self.send_velocities(0,0,0)
print("STOP")
print("Asse posteriore: x:{} and y:{}. Punto A: x:{} and y:{}".format(self.q[0], self.q[1],self.A_park[0][0],self.A_park[1][0]))
if __name__ == "__main__":
yaml_package_name = rospy.get_param('~yaml_package_name', 'object_spawner')
yaml_relative_path = rospy.get_param('~yaml_relative_path', '/config/parcheggi2.yaml')
m = g.parse_yaml(yaml_package_name,yaml_relative_path)
tt=Trajectory_tracking()
tt.t = np.linspace(0, 5, 1500)
trajectory = "parallel_parking"
toltheta=0.1
if np.pi-toltheta<=tt.q[2]<=np.pi+toltheta or -toltheta<=tt.q[2]<=toltheta:
        a=tt.data_pose[1] # extract y from the odometry
    else:
        a=tt.data_pose[0] # extract x from the odometry
    # FORWARD DRIVE SECTION
    while tt.get_laser()[0]==0 and abs(a)<=13: # 13 marks the end of the parking street
if tt.get_laser()[0]==1:
print("Park Found")
else:
print("Park not Found")
tt.send_velocities(3,0,0)
if np.pi-toltheta<=tt.q[2]<=np.pi+toltheta or -toltheta<=tt.q[2]<=toltheta:
            a=tt.data_pose[1] # extract y from the odometry
        else:
            a=tt.data_pose[0] # extract x from the odometry
tt.send_velocities(0,0,0)
tt.send_velocities(0,0,0)
tt.send_velocities(0,0,0)
if tt.get_laser()[0]==1:
        park=g.findpark(tt.q,m,tt.get_laser()[1]) # centre coordinates of the free parking space (x,y,theta)
        print("Park Coordinate={} ".format(park))
tt.trajectory_generation(trajectory,park) # trajectory generation
print("Park beginning point (A): x={} and y={}".format(tt.A_park[0][0],tt.A_park[1][0]))
if len(tt.A_park)>0:
tt.to_point()
rospy.sleep(1)
            zeta= 0.9 # parameters for the linear control law
a= 1.45
tt.t = np.linspace(0, 5, 1500)
trajectory = "parallel_parking"
tt.trajectory_generation(trajectory,park)
tt.unicicle_Linear_control(trajectory,zeta,a)
else:
print("No Free Spot")
| if(self.trajectory == "parallel_parking" ):
(x, y, theta) = self.get_pose() #NB: i punti x e y sono sull'asse posteriore, non è il centro della macchina
else:
(a, b, theta) = self.get_pose() #prendo solo theta
x=self.data_pose[0]
y=self.data_pose[1]
#compute error
e1 = (self.x_d[T] - x) * np.cos(theta) + (self.y_d[T] - y ) * np.sin(theta)
e2 = -(self.x_d[T] - x) * np.sin(theta) + (self.y_d[T] - y ) * np.cos(theta)
# theta (derivante dall'odometria) quando va oltre 3,14 si inverte di segno (vede il 3,14 come -3.14 e va verso 0 come negativo)
e3 = self.theta_d[T] - theta if len(self.theta_d) else 0
if e3>np.pi :
e3-=2*np.pi
elif e3<-np.pi:
e3+=2*np.pi
else:
pass
print("x_d:{} and x_odom:{} sample:{}".format(self.x_d[T][0],x,T))
print("y_d:{} and y_odom:{} sample:{}".format(self.y_d[T][0],y,T))
print("theta_d:{} and theta_odom:{} sample:{}".format(self.theta_d[T],theta,T))
return np.array([float(e1), float(e2), e3])
| identifier_body |
trajectory_tracking.py | #!/usr/bin/env python3
import rospy
from nav_msgs.msg import Odometry
from geometry_msgs.msg import *
from rospy.core import rospyinfo
from std_msgs import msg
from tf.transformations import euler_from_quaternion
from gazebo_msgs.msg import ModelStates
import yaml
import matplotlib.pyplot as plt
from sensor_msgs.msg import LaserScan
import numpy as np
from trajectory_generation import Trajectory_generation
from Linear_control import Linear_control_law, nonLinear_control_law
import goalpos as g
import dist_obj as dist
import math
class Trajectory_tracking():
#attributes
t = []
x_d = []
y_d = []
v_d = []
w_d = []
theta_d = []
q=[]
dotx_d=[]
doty_d=[]
appov=[]
appow=[]
appox = []
appoy = []
appoteta = []
appopsi = []
thetaprec=0
A_park=[]
def __init__(self):
print("Starting node Trajectory control")
rospy.init_node('trajectory_tracking', anonymous=True)
self.twist_pub = rospy.Publisher('/cmd_vel', Twist, queue_size=10)
rospy.Subscriber('/ground_truth/state',Odometry, self.odometryCb)
rospy.Subscriber("/gazebo/model_states", ModelStates, self.callback)
self.a=dist.laser()
msg = rospy.wait_for_message("/scan", LaserScan, timeout=5)
self.a.get_flag(msg)
def callback(self, data):
# robot pose from ModelStates
indice = data.name.index('ferrari')
data_pose_x = data.pose[indice].position.x
data_pose_y = data.pose[indice].position.y
self.data_pose= np.array([data_pose_x,data_pose_y])
return self.data_pose
def odometryCb(self,msg):
#current robot pose
x = round(msg.pose.pose.position.x,4)
y = round(msg.pose.pose.position.y,4)
theta = round(self.get_angle_pose(msg.pose.pose),4)
        y=round(y-1.4*np.cos(theta+np.pi),4) ## move the reference point from the centre of the car to the centre of the rear axle
        x=round(x+1.4*np.sin(theta+np.pi),4) ## move the reference point from the centre of the car to the centre of the rear axle
self.q = np.array([x, y, theta])
return self.q
def get_angle_pose(self, quaternion_pose):
#compute angle from quaternion
#
q = [quaternion_pose.orientation.x,
quaternion_pose.orientation.y,
quaternion_pose.orientation.z,
quaternion_pose.orientation.w]
roll, pitch, yaw = euler_from_quaternion(q)
theta = yaw
tol=0.1
if abs(abs(theta)-abs(self.thetaprec))>2*np.pi-tol and self.thetaprec!=0:
theta=theta+2*np.pi-tol
else:
pass
self.thetaprec=theta
return theta
def trajectory_generation(self, traj,a):
data = rospy.wait_for_message("/gazebo/model_states", ModelStates, timeout=5)
posizione = self.callback(data)
x = round(posizione[0],1)
y = round(posizione[1],1)
tg = Trajectory_generation()
q_i = self.get_pose()
self.trajectory=traj
if(self.trajectory == "parallel_parking" ):
(self.x_d, self.y_d,self.dotx_d,self.doty_d,self.v_d, self.w_d , self.theta_d , self.psi, self.A_park) =tg.parallel_parking_trajectory(q_i, self.t,a)
            # self.A_park is the starting point of the parking manoeuvre, i.e. the point level with the parking space that follows the free one
def get_laser(self):
        flag_free=self.a.park()[0] # flag indicating whether a free parking space has been found
        flag_occ=self.a.park()[1] # flag indicating whether the space lies before (2), alongside (1) or after (0)
if flag_free==1:
self.send_velocities(0,0,0)
self.send_velocities(0,0,0)
self.send_velocities(0,0,0)
self.send_velocities(0,0,0)
return flag_free,flag_occ
def get_pose(self):
#get robot position updated from callback
x = self.q[0]
y = self.q[1]
theta = self.q[2]
return np.array([x, y, theta])
def get_error(self, T,traj):
if(self.trajectory == "parallel_parking" ):
            (x, y, theta) = self.get_pose() # NB: x and y sit on the rear axle, not at the centre of the car
else:
            (a, b, theta) = self.get_pose() # only theta is used from here
x=self.data_pose[0]
y=self.data_pose[1]
#compute error
e1 = (self.x_d[T] - x) * np.cos(theta) + (self.y_d[T] - y ) * np.sin(theta)
e2 = -(self.x_d[T] - x) * np.sin(theta) + (self.y_d[T] - y ) * np.cos(theta)
        # theta (from odometry) flips sign beyond 3.14 (it reads 3.14 as -3.14 and then decreases towards 0), so wrap the error back into [-pi, pi]
e3 = self.theta_d[T] - theta if len(self.theta_d) else 0
if e3>np.pi :
e3-=2*np.pi
elif e3<-np.pi:
e3+=2*np.pi
else:
pass
print("x_d:{} and x_odom:{} sample:{}".format(self.x_d[T][0],x,T))
print("y_d:{} and y_odom:{} sample:{}".format(self.y_d[T][0],y,T))
print("theta_d:{} and theta_odom:{} sample:{}".format(self.theta_d[T],theta,T))
return np.array([float(e1), float(e2), e3])
def unicicle_Linear_control(self,traj,zeta,a):
rospy.sleep(0.1) # need small time to setup q in callback
max_t = self.t[len(self.t) - 1]
len_t = len(self.t)
self.trajectory=traj
if(self.trajectory == "parallel_parking" ):
for i in np.arange(0, len_t):
now = rospy.get_time()
err = self.get_error(i,self.trajectory)
                if round(0.03*len_t)<=i<=round(0.87*len_t) : # use the controller between 3% and 87% of the trajectory
(v, w) = Linear_control_law(err, self.v_d[i], self.w_d[i],zeta,a)
                else: # use the desired values in the initial and final parts
                    # (this avoids controller spikes caused by values of v and w close to zero)
v=self.v_d[i]
w=self.w_d[i]
# (v, w) = Linear_control_law(err, self.v_d[i], self.w_d[i],zeta,a)
print("theta_d:{} and theta_odom:{} sample:{}".format(self.theta_d[i], self.q[2] , i) )
print("v_d:{} and v:{} sample:{}".format(self.v_d[i], v , i) )
print("w_d:{} and w:{} sample:{}".format(-self.w_d[i], w , i) )
print('Errors{}'.format(err))
self.send_velocities(v, w)
diff = rospy.get_time() - now
rospy.sleep(max_t/len_t + 0.0058)
self.appov.append(v)
self.appow.append(w)
self.appox.append(self.q[0])
self.appoy.append(self.q[1])
self.appoteta.append(self.q[2])
# self.appopsi.append(math.atan(w*2.85/v))
else:
pass
self.send_velocities(0,0,0)
self.send_velocities(0,0,0)
self.send_velocities(0,0,0)
self.send_velocities(0,0,0)
self.plot_wmr()
def plot_wmr(self):
plot1 = plt.figure(1)
plt.title('path')
plt.plot(self.x_d,self.y_d)
plt.plot(self.appox ,self.appoy )
plt.plot()
plt.xlabel('x')
plt.ylabel('y')
plot2 = plt.figure(2)
plt.title('velocity')
plt.plot(self.t,self.v_d)
plt.plot(self.t,self.appov)
plt.xlabel('time')
plt.ylabel('linear velocity')
plot3 = plt.figure(3)
plt.plot(self.t,self.w_d)
plt.plot(self.t,self.appow)
plt.xlabel('time')
plt.ylabel('angular velocity')
plot4 = plt.figure(4)
plt.plot(self.t,self.theta_d)
plt.plot(self.t,self.appoteta)
plt.xlabel('time')
plt.ylabel('theta')
# plot5 = plt.figure(5)
# plt.plot(self.t,self.psi)
# plt.plot(self.t,self.appopsi)
# plt.xlabel('time')
# plt.ylabel('psi')
plt.show()
def send_velocities(self, v, w, theta=None):
twist_msg = Twist() # Creating a new message to send to the robot
twist_msg.linear.x = v
twist_msg.angular.z = -w
self.twist_pub.publish(twist_msg)
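# to_point drives the car straight ahead until the rear axle reaches the
# maneuver start point A_park, tracking x or y depending on the heading:
# within toltheta of +/-pi/2 it compares x, near 0 or pi it compares y.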
def to_point(self):
toltheta=0.2
tol=0.05
vel=2
q_i = self.get_pose()
if np.pi/2-toltheta<=q_i[2]<=toltheta+np.pi/2 or -np.pi/2-toltheta<=q_i[2]<=toltheta-np.pi/2:
while q_i[0]<=self.A_park[0][0]-tol or q_i[0]>=self.A_park[0][0]+tol:
q_i = self.get_pose()
self.send_velocities(vel,0,0)
print("x_odom:{} and x_A:{}".format(q_i[0], round(self.A_park[0][0],4)))
elif -toltheta<=q_i[2]<=toltheta or np.pi-toltheta<=q_i[2]<=toltheta+np.pi:
while q_i[1]<=self.A_park[1][0]-tol or q_i[1]>=self.A_park[1][0]+tol:
q_i = self.get_pose()
self.send_velocities(vel,0,0)
print("y_odom:{} and y_A:{}".format(q_i[1], round(self.A_park[1][0],4)))
self.send_velocities(0,0,0)
self.send_velocities(0,0,0)
self.send_velocities(0,0,0)
self.send_velocities(0,0,0)
print("STOP")
print("Asse posteriore: x:{} and y:{}. Punto A: x:{} and y:{}".format(self.q[0], self.q[1],self.A_park[0][0],self.A_park[1][0]))
if __name__ == "__main__":
yaml_package_name = rospy.get_param('~yaml_package_name', 'object_spawner')
yaml_relative_path = rospy.get_param('~yaml_relative_path', '/config/parcheggi2.yaml')
m = g.parse_yaml(yaml_package_name,yaml_relative_path)
tt=Trajectory_tracking()
tt.t = np.linspace(0, 5, 1500)
trajectory = "parallel_parking"
toltheta=0.1
if np.pi-toltheta<=tt.q[2]<=np.pi+toltheta or -toltheta<=tt.q[2]<=toltheta:
a=tt.data_pose[1] # extract y from odometry
else:
a=tt.data_pose[0] # extract x from odometry
# FORWARD-DRIVE SECTION
while tt.get_laser()[0]==0 and abs(a)<=13: # 13 marks the end of the parking street
if tt.get_laser()[0]==1:
print("Park Found")
else:
print("Park not Found")
tt.send_velocities(3,0,0)
if np.pi-toltheta<=tt.q[2]<=np.pi+toltheta or -toltheta<=tt.q[2]<=toltheta:
a=tt.data_pose[1] # extract y from odometry
else:
a=tt.data_pose[0] # extract x from odometry
tt.send_velocities(0,0,0)
tt.send_velocities(0,0,0)
tt.send_velocities(0,0,0)
if tt.get_laser()[0]==1:
park=g.findpark(tt.q,m,tt.get_laser()[1]) # coordinates of the center of the free spot (x,y,theta)
print("Park Coordinates={} ".format(park))
tt.trajectory_generation(trajectory,park) # trajectory generation
print("Park beginning point (A): x={} and y={}".format(tt.A_park[0][0],tt.A_park[1][0]))
if len(tt.A_park)>0:
tt.to_point()
rospy.sleep(1)
zeta= 0.9 # parameters for the linear controller
a= 1.45
tt.t = np.linspace(0, 5, 1500)
trajectory = "parallel_parking"
tt.trajectory_generation(trajectory,park)
tt.unicicle_Linear_control(trajectory,zeta,a)
else:
print("No Free Spot")
| oint(sel | identifier_name |
srvcenter.go | /*
* Copyright 2013 Nan Deng
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package msgcenter
import (
"encoding/json"
"errors"
"fmt"
"github.com/uniqush/uniqush-conn/evthandler"
"github.com/uniqush/uniqush-conn/msgcache"
"github.com/uniqush/uniqush-conn/proto"
"github.com/uniqush/uniqush-conn/proto/server"
"github.com/uniqush/uniqush-conn/push"
"strings"
"sync"
"time"
)
type eventConnIn struct {
errChan chan error
conn server.Conn
}
type eventConnLeave struct {
conn server.Conn
err error
}
type Result struct {
Err error `json:"err,omitempty"`
ConnId string `json:"connId,omitempty"`
Visible bool `json:"visible"`
}
func (self *Result) Error() string {
b, _ := json.Marshal(self)
return string(b)
}
type ServiceConfig struct {
MaxNrConns int
MaxNrUsers int
MaxNrConnsPerUser int
MsgCache msgcache.Cache
LoginHandler evthandler.LoginHandler
LogoutHandler evthandler.LogoutHandler
MessageHandler evthandler.MessageHandler
ForwardRequestHandler evthandler.ForwardRequestHandler
ErrorHandler evthandler.ErrorHandler
// Push related web hooks
SubscribeHandler evthandler.SubscribeHandler
UnsubscribeHandler evthandler.UnsubscribeHandler
PushHandler evthandler.PushHandler
PushService push.Push
}
type writeMessageRequest struct {
user string
msg *proto.Message
ttl time.Duration
extra map[string]string
resChan chan<- []*Result
}
type serviceCenter struct {
serviceName string
config *ServiceConfig
fwdChan chan<- *server.ForwardRequest
writeReqChan chan *writeMessageRequest
connIn chan *eventConnIn
connLeave chan *eventConnLeave
subReqChan chan *server.SubscribeRequest
pushServiceLock sync.RWMutex
}
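// serviceCenter follows an actor-like design: a single process() goroutine
// owns the connection map, and every mutation (new connection, leave,
// subscription, message write) reaches it through one of the channels above,
// so the map itself needs no locking.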
var ErrTooManyConns = errors.New("too many connections")
var ErrInvalidConnType = errors.New("invalid connection type")
func (self *serviceCenter) ReceiveForward(fwdreq *server.ForwardRequest) {
shouldFwd := false
if self.config != nil {
if self.config.ForwardRequestHandler != nil {
shouldFwd = self.config.ForwardRequestHandler.ShouldForward(fwdreq)
maxttl := self.config.ForwardRequestHandler.MaxTTL()
if fwdreq.TTL < 1*time.Second || fwdreq.TTL > maxttl {
fwdreq.TTL = maxttl
}
}
}
if !shouldFwd {
return
}
receiver := fwdreq.Receiver
extra := getPushInfo(fwdreq.Message, nil, true)
self.SendMessage(receiver, fwdreq.Message, extra, fwdreq.TTL)
}
func getPushInfo(msg *proto.Message, extra map[string]string, fwd bool) map[string]string {
if extra == nil {
extra = make(map[string]string, len(msg.Header)+3)
}
if fwd {
for k, v := range msg.Header {
if strings.HasPrefix(k, "notif.") {
if strings.HasPrefix(k, "notif.uniqush.") {
// forward message should not contain reserved fields
continue
}
extra[k] = v
delete(msg.Header, k)
}
}
extra["uniqush.sender"] = msg.Sender
extra["uniqush.sender-service"] = msg.SenderService
}
if msg.Header != nil {
if title, ok := msg.Header["title"]; ok {
if _, ok = extra["notif.msg"]; !ok {
extra["notif.msg"] = title
}
}
}
extra["notif.uniqush.msgsize"] = fmt.Sprintf("%v", msg.Size())
return extra
}
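// getPushInfo flattens a message into the key/value form expected by the
// push service: for forwarded messages it promotes "notif.*" headers into
// the push payload (dropping reserved "notif.uniqush.*" keys) and records
// the sender; in all cases it attaches the title and the message size.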
func (self *serviceCenter) shouldPush(service, username string, msg *proto.Message, extra map[string]string, fwd bool) bool {
if self.config != nil {
if self.config.PushHandler != nil {
info := getPushInfo(msg, extra, fwd)
return self.config.PushHandler.ShouldPush(service, username, info)
}
}
return false
}
func (self *serviceCenter) subscribe(req *server.SubscribeRequest) {
if req == nil {
return
}
if self.config != nil {
if self.config.PushService != nil {
if req.Subscribe {
self.config.PushService.Subscribe(req.Service, req.Username, req.Params)
} else {
self.config.PushService.Unsubscribe(req.Service, req.Username, req.Params)
}
}
}
}
func (self *serviceCenter) nrDeliveryPoints(service, username string) int {
n := 0
if self.config != nil {
if self.config.PushService != nil {
n = self.config.PushService.NrDeliveryPoints(service, username)
}
}
return n
}
func (self *serviceCenter) pushNotif(service, username string, msg *proto.Message, extra map[string]string, msgIds []string, fwd bool) {
if self.config != nil {
if self.config.PushService != nil {
info := getPushInfo(msg, extra, fwd)
err := self.config.PushService.Push(service, username, info, msgIds)
if err != nil {
self.reportError(service, username, "", "", err)
}
}
}
}
func (self *serviceCenter) reportError(service, username, connId, addr string, err error) {
if self.config != nil {
if self.config.ErrorHandler != nil {
go self.config.ErrorHandler.OnError(service, username, connId, addr, err)
}
}
}
func (self *serviceCenter) reportLogin(service, username, connId, addr string) {
if self.config != nil {
if self.config.LoginHandler != nil {
go self.config.LoginHandler.OnLogin(service, username, connId, addr)
}
}
}
func (self *serviceCenter) reportMessage(connId string, msg *proto.Message) {
if self.config != nil {
if self.config.MessageHandler != nil {
go self.config.MessageHandler.OnMessage(connId, msg)
}
}
}
func (self *serviceCenter) reportLogout(service, username, connId, addr string, err error) {
if self.config != nil {
if self.config.LogoutHandler != nil {
go self.config.LogoutHandler.OnLogout(service, username, connId, addr, err)
}
}
}
func (self *serviceCenter) cacheMessage(service, username string, msg *proto.Message, ttl time.Duration) (id string, err error) {
if self.config != nil {
if self.config.MsgCache != nil {
id, err = self.config.MsgCache.CacheMessage(service, username, msg, ttl)
}
}
return
}
type connWriteErr struct {
conn server.Conn
err error
}
func (self *serviceCenter) process(maxNrConns, maxNrConnsPerUser, maxNrUsers int) {
connMap := newTreeBasedConnMap()
nrConns := 0
for {
select {
case connInEvt := <-self.connIn:
if maxNrConns > 0 && nrConns >= maxNrConns {
if connInEvt.errChan != nil {
connInEvt.errChan <- ErrTooManyConns
}
continue
}
err := connMap.AddConn(connInEvt.conn, maxNrConnsPerUser, maxNrUsers)
if err != nil {
if connInEvt.errChan != nil {
connInEvt.errChan <- err
}
continue
}
nrConns++
if connInEvt.errChan != nil {
connInEvt.errChan <- nil
}
case leaveEvt := <-self.connLeave:
deleted := connMap.DelConn(leaveEvt.conn)
fmt.Printf("delete a connection %v under user %v; deleted: %v\n", leaveEvt.conn.UniqId(), leaveEvt.conn.Username(), deleted)
leaveEvt.conn.Close()
if deleted {
nrConns--
conn := leaveEvt.conn
self.reportLogout(conn.Service(), conn.Username(), conn.UniqId(), conn.RemoteAddr().String(), leaveEvt.err)
}
case subreq := <-self.subReqChan:
self.pushServiceLock.Lock()
self.subscribe(subreq)
self.pushServiceLock.Unlock()
case wreq := <-self.writeReqChan:
conns := connMap.GetConn(wreq.user)
res := make([]*Result, 0, len(conns))
errConns := make([]*connWriteErr, 0, len(conns))
n := 0
mid, err := self.cacheMessage(self.serviceName, wreq.user, wreq.msg, wreq.ttl)
if err != nil {
self.reportError(self.serviceName, wreq.user, "", "", err)
// reply before bailing out so a blocked SendMessage caller is not leaked
if wreq.resChan != nil {
wreq.resChan <- res
}
continue
}
for _, conn := range conns {
if conn == nil {
continue
}
var err error
sconn, ok := conn.(server.Conn)
if !ok {
continue
}
err = sconn.SendMessage(wreq.msg, wreq.extra, wreq.ttl, mid)
if err != nil {
errConns = append(errConns, &connWriteErr{sconn, err})
res = append(res, &Result{err, sconn.UniqId(), sconn.Visible()})
self.reportError(sconn.Service(), sconn.Username(), sconn.UniqId(), sconn.RemoteAddr().String(), err)
continue
} else {
res = append(res, &Result{nil, sconn.UniqId(), sconn.Visible()})
}
if sconn.Visible() {
n++
}
}
if n == 0 {
msg := wreq.msg
extra := wreq.extra
username := wreq.user
service := self.serviceName
fwd := false
if len(msg.Sender) > 0 && len(msg.SenderService) > 0 {
if msg.Sender != username || msg.SenderService != service {
fwd = true
}
}
go func() {
should := self.shouldPush(service, username, msg, extra, fwd)
if !should {
return
}
self.pushServiceLock.RLock()
defer self.pushServiceLock.RUnlock()
n := self.nrDeliveryPoints(service, username)
if n <= 0 {
return
}
msgIds := []string{mid}
self.pushNotif(service, username, msg, extra, msgIds, fwd)
}()
}
if wreq.resChan != nil {
wreq.resChan <- res
}
// close all connections with error:
go func() {
for _, e := range errConns {
fmt.Printf("Need to remove connection %v\n", e.conn.UniqId())
self.connLeave <- &eventConnLeave{conn: e.conn, err: e.err}
}
}()
}
}
}
func (self *serviceCenter) SendMessage(username string, msg *proto.Message, extra map[string]string, ttl time.Duration) []*Result {
req := new(writeMessageRequest)
ch := make(chan []*Result)
req.msg = msg
req.user = username
req.ttl = ttl
req.resChan = ch
req.extra = extra
self.writeReqChan <- req
res := <-ch
return res
}
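// SendMessage is a synchronous request/response over writeReqChan: it blocks
// until process() has attempted delivery on every connection of the user and
// replied with one Result per connection.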
func (self *serviceCenter) serveConn(conn server.Conn) {
conn.SetForwardRequestChannel(self.fwdChan)
conn.SetSubscribeRequestChan(self.subReqChan)
var err error
defer func() {
self.connLeave <- &eventConnLeave{conn: conn, err: err}
}()
for {
var msg *proto.Message
msg, err = conn.ReadMessage()
if err != nil {
return
}
self.reportMessage(conn.UniqId(), msg)
}
}
func (self *serviceCenter) NewConn(conn server.Conn) error {
usr := conn.Username()
if len(usr) == 0 || strings.Contains(usr, ":") || strings.Contains(usr, "\n") {
return fmt.Errorf("[Username=%v] invalid username", usr)
}
evt := new(eventConnIn)
ch := make(chan error)
conn.SetMessageCache(self.config.MsgCache)
evt.conn = conn
evt.errChan = ch
self.connIn <- evt
err := <-ch
if err == nil {
go self.serveConn(conn)
self.reportLogin(conn.Service(), usr, conn.UniqId(), conn.RemoteAddr().String())
}
return err
}
func newServiceCenter(serviceName string, conf *ServiceConfig, fwdChan chan<- *server.ForwardRequest) *serviceCenter {
ret := new(serviceCenter)
ret.config = conf
if ret.config == nil {
ret.config = new(ServiceConfig)
}
ret.serviceName = serviceName
ret.fwdChan = fwdChan
ret.connIn = make(chan *eventConnIn)
ret.connLeave = make(chan *eventConnLeave)
ret.writeReqChan = make(chan *writeMessageRequest)
ret.subReqChan = make(chan *server.SubscribeRequest)
go ret.process(conf.MaxNrConns, conf.MaxNrConnsPerUser, conf.MaxNrUsers)
return ret
}
srvcenter.go | /*
* Copyright 2013 Nan Deng
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package msgcenter
import (
"encoding/json"
"errors"
"fmt"
"github.com/uniqush/uniqush-conn/evthandler"
"github.com/uniqush/uniqush-conn/msgcache"
"github.com/uniqush/uniqush-conn/proto"
"github.com/uniqush/uniqush-conn/proto/server"
"github.com/uniqush/uniqush-conn/push"
"strings"
"sync"
"time"
)
type eventConnIn struct {
errChan chan error
conn server.Conn
}
type eventConnLeave struct {
conn server.Conn
err error
}
type Result struct {
Err error `json:"err,omitempty"`
ConnId string `json:"connId,omitempty"`
Visible bool `json:"visible"`
}
func (self *Result) Error() string {
b, _ := json.Marshal(self)
return string(b)
}
type ServiceConfig struct {
MaxNrConns int
MaxNrUsers int
MaxNrConnsPerUser int
MsgCache msgcache.Cache
LoginHandler evthandler.LoginHandler
LogoutHandler evthandler.LogoutHandler
MessageHandler evthandler.MessageHandler
ForwardRequestHandler evthandler.ForwardRequestHandler
ErrorHandler evthandler.ErrorHandler
// Push related web hooks
SubscribeHandler evthandler.SubscribeHandler
UnsubscribeHandler evthandler.UnsubscribeHandler
PushHandler evthandler.PushHandler
PushService push.Push
}
type writeMessageRequest struct {
user string
msg *proto.Message
ttl time.Duration
extra map[string]string
resChan chan<- []*Result
}
type serviceCenter struct {
serviceName string
config *ServiceConfig
fwdChan chan<- *server.ForwardRequest
writeReqChan chan *writeMessageRequest
connIn chan *eventConnIn
connLeave chan *eventConnLeave
subReqChan chan *server.SubscribeRequest
pushServiceLock sync.RWMutex
}
var ErrTooManyConns = errors.New("too many connections")
var ErrInvalidConnType = errors.New("invalid connection type")
func (self *serviceCenter) ReceiveForward(fwdreq *server.ForwardRequest) |
func getPushInfo(msg *proto.Message, extra map[string]string, fwd bool) map[string]string {
if extra == nil {
extra = make(map[string]string, len(msg.Header)+3)
}
if fwd {
for k, v := range msg.Header {
if strings.HasPrefix(k, "notif.") {
if strings.HasPrefix(k, "notif.uniqush.") {
// forward message should not contain reserved fields
continue
}
extra[k] = v
delete(msg.Header, k)
}
}
extra["uniqush.sender"] = msg.Sender
extra["uniqush.sender-service"] = msg.SenderService
}
if msg.Header != nil {
if title, ok := msg.Header["title"]; ok {
if _, ok = extra["notif.msg"]; !ok {
extra["notif.msg"] = title
}
}
}
extra["notif.uniqush.msgsize"] = fmt.Sprintf("%v", msg.Size())
return extra
}
func (self *serviceCenter) shouldPush(service, username string, msg *proto.Message, extra map[string]string, fwd bool) bool {
if self.config != nil {
if self.config.PushHandler != nil {
info := getPushInfo(msg, extra, fwd)
return self.config.PushHandler.ShouldPush(service, username, info)
}
}
return false
}
func (self *serviceCenter) subscribe(req *server.SubscribeRequest) {
if req == nil {
return
}
if self.config != nil {
if self.config.PushService != nil {
if req.Subscribe {
self.config.PushService.Subscribe(req.Service, req.Username, req.Params)
} else {
self.config.PushService.Unsubscribe(req.Service, req.Username, req.Params)
}
}
}
}
func (self *serviceCenter) nrDeliveryPoints(service, username string) int {
n := 0
if self.config != nil {
if self.config.PushService != nil {
n = self.config.PushService.NrDeliveryPoints(service, username)
}
}
return n
}
func (self *serviceCenter) pushNotif(service, username string, msg *proto.Message, extra map[string]string, msgIds []string, fwd bool) {
if self.config != nil {
if self.config.PushService != nil {
info := getPushInfo(msg, extra, fwd)
err := self.config.PushService.Push(service, username, info, msgIds)
if err != nil {
self.reportError(service, username, "", "", err)
}
}
}
}
func (self *serviceCenter) reportError(service, username, connId, addr string, err error) {
if self.config != nil {
if self.config.ErrorHandler != nil {
go self.config.ErrorHandler.OnError(service, username, connId, addr, err)
}
}
}
func (self *serviceCenter) reportLogin(service, username, connId, addr string) {
if self.config != nil {
if self.config.LoginHandler != nil {
go self.config.LoginHandler.OnLogin(service, username, connId, addr)
}
}
}
func (self *serviceCenter) reportMessage(connId string, msg *proto.Message) {
if self.config != nil {
if self.config.MessageHandler != nil {
go self.config.MessageHandler.OnMessage(connId, msg)
}
}
}
func (self *serviceCenter) reportLogout(service, username, connId, addr string, err error) {
if self.config != nil {
if self.config.LogoutHandler != nil {
go self.config.LogoutHandler.OnLogout(service, username, connId, addr, err)
}
}
}
func (self *serviceCenter) cacheMessage(service, username string, msg *proto.Message, ttl time.Duration) (id string, err error) {
if self.config != nil {
if self.config.MsgCache != nil {
id, err = self.config.MsgCache.CacheMessage(service, username, msg, ttl)
}
}
return
}
type connWriteErr struct {
conn server.Conn
err error
}
func (self *serviceCenter) process(maxNrConns, maxNrConnsPerUser, maxNrUsers int) {
connMap := newTreeBasedConnMap()
nrConns := 0
for {
select {
case connInEvt := <-self.connIn:
if maxNrConns > 0 && nrConns >= maxNrConns {
if connInEvt.errChan != nil {
connInEvt.errChan <- ErrTooManyConns
}
continue
}
err := connMap.AddConn(connInEvt.conn, maxNrConnsPerUser, maxNrUsers)
if err != nil {
if connInEvt.errChan != nil {
connInEvt.errChan <- err
}
continue
}
nrConns++
if connInEvt.errChan != nil {
connInEvt.errChan <- nil
}
case leaveEvt := <-self.connLeave:
deleted := connMap.DelConn(leaveEvt.conn)
fmt.Printf("delete a connection %v under user %v; deleted: %v\n", leaveEvt.conn.UniqId(), leaveEvt.conn.Username(), deleted)
leaveEvt.conn.Close()
if deleted {
nrConns--
conn := leaveEvt.conn
self.reportLogout(conn.Service(), conn.Username(), conn.UniqId(), conn.RemoteAddr().String(), leaveEvt.err)
}
case subreq := <-self.subReqChan:
self.pushServiceLock.Lock()
self.subscribe(subreq)
self.pushServiceLock.Unlock()
case wreq := <-self.writeReqChan:
conns := connMap.GetConn(wreq.user)
res := make([]*Result, 0, len(conns))
errConns := make([]*connWriteErr, 0, len(conns))
n := 0
mid, err := self.cacheMessage(self.serviceName, wreq.user, wreq.msg, wreq.ttl)
if err != nil {
self.reportError(self.serviceName, wreq.user, "", "", err)
continue
}
for _, conn := range conns {
if conn == nil {
continue
}
var err error
sconn, ok := conn.(server.Conn)
if !ok {
continue
}
err = sconn.SendMessage(wreq.msg, wreq.extra, wreq.ttl, mid)
if err != nil {
errConns = append(errConns, &connWriteErr{sconn, err})
res = append(res, &Result{err, sconn.UniqId(), sconn.Visible()})
self.reportError(sconn.Service(), sconn.Username(), sconn.UniqId(), sconn.RemoteAddr().String(), err)
continue
} else {
res = append(res, &Result{nil, sconn.UniqId(), sconn.Visible()})
}
if sconn.Visible() {
n++
}
}
if n == 0 {
msg := wreq.msg
extra := wreq.extra
username := wreq.user
service := self.serviceName
fwd := false
if len(msg.Sender) > 0 && len(msg.SenderService) > 0 {
if msg.Sender != username || msg.SenderService != service {
fwd = true
}
}
go func() {
should := self.shouldPush(service, username, msg, extra, fwd)
if !should {
return
}
self.pushServiceLock.RLock()
defer self.pushServiceLock.RUnlock()
n := self.nrDeliveryPoints(service, username)
if n <= 0 {
return
}
msgIds := []string{mid}
self.pushNotif(service, username, msg, extra, msgIds, fwd)
}()
}
if wreq.resChan != nil {
wreq.resChan <- res
}
// close all connections with error:
go func() {
for _, e := range errConns {
fmt.Printf("Need to remove connection %v\n", e.conn.UniqId())
self.connLeave <- &eventConnLeave{conn: e.conn, err: e.err}
}
}()
}
}
}
func (self *serviceCenter) SendMessage(username string, msg *proto.Message, extra map[string]string, ttl time.Duration) []*Result {
req := new(writeMessageRequest)
ch := make(chan []*Result)
req.msg = msg
req.user = username
req.ttl = ttl
req.resChan = ch
req.extra = extra
self.writeReqChan <- req
res := <-ch
return res
}
func (self *serviceCenter) serveConn(conn server.Conn) {
conn.SetForwardRequestChannel(self.fwdChan)
conn.SetSubscribeRequestChan(self.subReqChan)
var err error
defer func() {
self.connLeave <- &eventConnLeave{conn: conn, err: err}
}()
for {
var msg *proto.Message
msg, err = conn.ReadMessage()
if err != nil {
return
}
self.reportMessage(conn.UniqId(), msg)
}
}
func (self *serviceCenter) NewConn(conn server.Conn) error {
usr := conn.Username()
if len(usr) == 0 || strings.Contains(usr, ":") || strings.Contains(usr, "\n") {
return fmt.Errorf("[Username=%v] Invalid Username")
}
evt := new(eventConnIn)
ch := make(chan error)
conn.SetMessageCache(self.config.MsgCache)
evt.conn = conn
evt.errChan = ch
self.connIn <- evt
err := <-ch
if err == nil {
go self.serveConn(conn)
self.reportLogin(conn.Service(), usr, conn.UniqId(), conn.RemoteAddr().String())
}
return err
}
func newServiceCenter(serviceName string, conf *ServiceConfig, fwdChan chan<- *server.ForwardRequest) *serviceCenter {
ret := new(serviceCenter)
ret.config = conf
if ret.config == nil {
ret.config = new(ServiceConfig)
}
ret.serviceName = serviceName
ret.fwdChan = fwdChan
ret.connIn = make(chan *eventConnIn)
ret.connLeave = make(chan *eventConnLeave)
ret.writeReqChan = make(chan *writeMessageRequest)
ret.subReqChan = make(chan *server.SubscribeRequest)
go ret.process(conf.MaxNrConns, conf.MaxNrConnsPerUser, conf.MaxNrUsers)
return ret
}
| {
shouldFwd := false
if self.config != nil {
if self.config.ForwardRequestHandler != nil {
shouldFwd = self.config.ForwardRequestHandler.ShouldForward(fwdreq)
maxttl := self.config.ForwardRequestHandler.MaxTTL()
if fwdreq.TTL < 1*time.Second || fwdreq.TTL > maxttl {
fwdreq.TTL = maxttl
}
}
}
if !shouldFwd {
return
}
receiver := fwdreq.Receiver
extra := getPushInfo(fwdreq.Message, nil, true)
self.SendMessage(receiver, fwdreq.Message, extra, fwdreq.TTL)
} | identifier_body |
srvcenter.go | /*
* Copyright 2013 Nan Deng
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package msgcenter
import (
"encoding/json"
"errors"
"fmt"
"github.com/uniqush/uniqush-conn/evthandler"
"github.com/uniqush/uniqush-conn/msgcache"
"github.com/uniqush/uniqush-conn/proto"
"github.com/uniqush/uniqush-conn/proto/server"
"github.com/uniqush/uniqush-conn/push"
"strings"
"sync"
"time"
)
type eventConnIn struct {
errChan chan error
conn server.Conn
}
type eventConnLeave struct {
conn server.Conn
err error
}
type Result struct {
Err error `json:"err,omitempty"`
ConnId string `json:"connId,omitempty"`
Visible bool `json:"visible"`
}
func (self *Result) Error() string {
b, _ := json.Marshal(self)
return string(b)
}
type ServiceConfig struct {
MaxNrConns int
MaxNrUsers int
MaxNrConnsPerUser int
MsgCache msgcache.Cache
LoginHandler evthandler.LoginHandler
LogoutHandler evthandler.LogoutHandler
MessageHandler evthandler.MessageHandler
ForwardRequestHandler evthandler.ForwardRequestHandler
ErrorHandler evthandler.ErrorHandler
// Push related web hooks
SubscribeHandler evthandler.SubscribeHandler
UnsubscribeHandler evthandler.UnsubscribeHandler
PushHandler evthandler.PushHandler
PushService push.Push
}
type writeMessageRequest struct {
user string
msg *proto.Message
ttl time.Duration
extra map[string]string
resChan chan<- []*Result
}
type serviceCenter struct {
serviceName string
config *ServiceConfig
fwdChan chan<- *server.ForwardRequest
writeReqChan chan *writeMessageRequest
connIn chan *eventConnIn
connLeave chan *eventConnLeave
subReqChan chan *server.SubscribeRequest
pushServiceLock sync.RWMutex
}
var ErrTooManyConns = errors.New("too many connections")
var ErrInvalidConnType = errors.New("invalid connection type")
func (self *serviceCenter) ReceiveForward(fwdreq *server.ForwardRequest) {
shouldFwd := false
if self.config != nil {
if self.config.ForwardRequestHandler != nil {
shouldFwd = self.config.ForwardRequestHandler.ShouldForward(fwdreq)
maxttl := self.config.ForwardRequestHandler.MaxTTL()
if fwdreq.TTL < 1*time.Second || fwdreq.TTL > maxttl {
fwdreq.TTL = maxttl
}
}
}
if !shouldFwd {
return
}
receiver := fwdreq.Receiver
extra := getPushInfo(fwdreq.Message, nil, true)
self.SendMessage(receiver, fwdreq.Message, extra, fwdreq.TTL)
}
func getPushInfo(msg *proto.Message, extra map[string]string, fwd bool) map[string]string {
if extra == nil {
extra = make(map[string]string, len(msg.Header)+3)
}
if fwd {
for k, v := range msg.Header {
if strings.HasPrefix(k, "notif.") {
if strings.HasPrefix(k, "notif.uniqush.") {
// forward message should not contain reserved fields
continue
}
extra[k] = v
delete(msg.Header, k)
}
}
extra["uniqush.sender"] = msg.Sender
extra["uniqush.sender-service"] = msg.SenderService
}
if msg.Header != nil {
if title, ok := msg.Header["title"]; ok {
if _, ok = extra["notif.msg"]; !ok {
extra["notif.msg"] = title
}
}
}
extra["notif.uniqush.msgsize"] = fmt.Sprintf("%v", msg.Size())
return extra
}
func (self *serviceCenter) shouldPush(service, username string, msg *proto.Message, extra map[string]string, fwd bool) bool {
if self.config != nil {
if self.config.PushHandler != nil {
info := getPushInfo(msg, extra, fwd)
return self.config.PushHandler.ShouldPush(service, username, info)
}
}
return false
}
func (self *serviceCenter) subscribe(req *server.SubscribeRequest) {
if req == nil {
return
}
if self.config != nil {
if self.config.PushService != nil {
if req.Subscribe {
self.config.PushService.Subscribe(req.Service, req.Username, req.Params)
} else {
self.config.PushService.Unsubscribe(req.Service, req.Username, req.Params)
}
}
}
}
func (self *serviceCenter) nrDeliveryPoints(service, username string) int {
n := 0
if self.config != nil {
if self.config.PushService != nil {
n = self.config.PushService.NrDeliveryPoints(service, username)
}
}
return n
}
func (self *serviceCenter) pushNotif(service, username string, msg *proto.Message, extra map[string]string, msgIds []string, fwd bool) {
if self.config != nil {
if self.config.PushService != nil {
info := getPushInfo(msg, extra, fwd)
err := self.config.PushService.Push(service, username, info, msgIds)
if err != nil {
self.reportError(service, username, "", "", err)
}
}
}
}
func (self *serviceCenter) reportError(service, username, connId, addr string, err error) {
if self.config != nil {
if self.config.ErrorHandler != nil {
go self.config.ErrorHandler.OnError(service, username, connId, addr, err)
}
}
}
func (self *serviceCenter) reportLogin(service, username, connId, addr string) {
if self.config != nil {
if self.config.LoginHandler != nil {
go self.config.LoginHandler.OnLogin(service, username, connId, addr)
}
}
}
func (self *serviceCenter) reportMessage(connId string, msg *proto.Message) {
if self.config != nil {
if self.config.MessageHandler != nil {
go self.config.MessageHandler.OnMessage(connId, msg)
}
}
}
func (self *serviceCenter) reportLogout(service, username, connId, addr string, err error) {
if self.config != nil {
if self.config.LogoutHandler != nil {
go self.config.LogoutHandler.OnLogout(service, username, connId, addr, err)
}
}
}
func (self *serviceCenter) cacheMessage(service, username string, msg *proto.Message, ttl time.Duration) (id string, err error) {
if self.config != nil {
if self.config.MsgCache != nil {
id, err = self.config.MsgCache.CacheMessage(service, username, msg, ttl)
}
}
return
}
type connWriteErr struct {
conn server.Conn
err error
}
func (self *serviceCenter) process(maxNrConns, maxNrConnsPerUser, maxNrUsers int) {
connMap := newTreeBasedConnMap()
nrConns := 0
for {
select {
case connInEvt := <-self.connIn:
if maxNrConns > 0 && nrConns >= maxNrConns {
if connInEvt.errChan != nil {
connInEvt.errChan <- ErrTooManyConns
}
continue
}
err := connMap.AddConn(connInEvt.conn, maxNrConnsPerUser, maxNrUsers)
if err != nil {
if connInEvt.errChan != nil {
connInEvt.errChan <- err
}
continue
}
nrConns++
if connInEvt.errChan != nil {
connInEvt.errChan <- nil
}
case leaveEvt := <-self.connLeave:
deleted := connMap.DelConn(leaveEvt.conn)
fmt.Printf("delete a connection %v under user %v; deleted: %v\n", leaveEvt.conn.UniqId(), leaveEvt.conn.Username(), deleted)
leaveEvt.conn.Close()
if deleted {
nrConns--
conn := leaveEvt.conn
self.reportLogout(conn.Service(), conn.Username(), conn.UniqId(), conn.RemoteAddr().String(), leaveEvt.err)
}
case subreq := <-self.subReqChan:
self.pushServiceLock.Lock()
self.subscribe(subreq)
self.pushServiceLock.Unlock()
case wreq := <-self.writeReqChan:
conns := connMap.GetConn(wreq.user)
res := make([]*Result, 0, len(conns))
errConns := make([]*connWriteErr, 0, len(conns))
n := 0
mid, err := self.cacheMessage(self.serviceName, wreq.user, wreq.msg, wreq.ttl)
if err != nil {
self.reportError(self.serviceName, wreq.user, "", "", err)
continue
}
for _, conn := range conns {
if conn == nil {
continue
}
var err error
sconn, ok := conn.(server.Conn)
if !ok {
continue
}
err = sconn.SendMessage(wreq.msg, wreq.extra, wreq.ttl, mid)
if err != nil {
errConns = append(errConns, &connWriteErr{sconn, err})
res = append(res, &Result{err, sconn.UniqId(), sconn.Visible()})
self.reportError(sconn.Service(), sconn.Username(), sconn.UniqId(), sconn.RemoteAddr().String(), err)
continue
} else {
res = append(res, &Result{nil, sconn.UniqId(), sconn.Visible()})
}
if sconn.Visible() {
n++
}
}
if n == 0 |
if wreq.resChan != nil {
wreq.resChan <- res
}
// close all connections with error:
go func() {
for _, e := range errConns {
fmt.Printf("Need to remove connection %v\n", e.conn.UniqId())
self.connLeave <- &eventConnLeave{conn: e.conn, err: e.err}
}
}()
}
}
}
func (self *serviceCenter) SendMessage(username string, msg *proto.Message, extra map[string]string, ttl time.Duration) []*Result {
req := new(writeMessageRequest)
ch := make(chan []*Result)
req.msg = msg
req.user = username
req.ttl = ttl
req.resChan = ch
req.extra = extra
self.writeReqChan <- req
res := <-ch
return res
}
func (self *serviceCenter) serveConn(conn server.Conn) {
conn.SetForwardRequestChannel(self.fwdChan)
conn.SetSubscribeRequestChan(self.subReqChan)
var err error
defer func() {
self.connLeave <- &eventConnLeave{conn: conn, err: err}
}()
for {
var msg *proto.Message
msg, err = conn.ReadMessage()
if err != nil {
return
}
self.reportMessage(conn.UniqId(), msg)
}
}
func (self *serviceCenter) NewConn(conn server.Conn) error {
usr := conn.Username()
if len(usr) == 0 || strings.Contains(usr, ":") || strings.Contains(usr, "\n") {
return fmt.Errorf("[Username=%v] Invalid Username")
}
evt := new(eventConnIn)
ch := make(chan error)
conn.SetMessageCache(self.config.MsgCache)
evt.conn = conn
evt.errChan = ch
self.connIn <- evt
err := <-ch
if err == nil {
go self.serveConn(conn)
self.reportLogin(conn.Service(), usr, conn.UniqId(), conn.RemoteAddr().String())
}
return err
}
func newServiceCenter(serviceName string, conf *ServiceConfig, fwdChan chan<- *server.ForwardRequest) *serviceCenter {
ret := new(serviceCenter)
ret.config = conf
if ret.config == nil {
ret.config = new(ServiceConfig)
}
ret.serviceName = serviceName
ret.fwdChan = fwdChan
ret.connIn = make(chan *eventConnIn)
ret.connLeave = make(chan *eventConnLeave)
ret.writeReqChan = make(chan *writeMessageRequest)
ret.subReqChan = make(chan *server.SubscribeRequest)
go ret.process(conf.MaxNrConns, conf.MaxNrConnsPerUser, conf.MaxNrUsers)
return ret
}
| {
msg := wreq.msg
extra := wreq.extra
username := wreq.user
service := self.serviceName
fwd := false
if len(msg.Sender) > 0 && len(msg.SenderService) > 0 {
if msg.Sender != username || msg.SenderService != service {
fwd = true
}
}
go func() {
should := self.shouldPush(service, username, msg, extra, fwd)
if !should {
return
}
self.pushServiceLock.RLock()
defer self.pushServiceLock.RUnlock()
n := self.nrDeliveryPoints(service, username)
if n <= 0 {
return
}
msgIds := []string{mid}
self.pushNotif(service, username, msg, extra, msgIds, fwd)
}()
} | conditional_block |
srvcenter.go | /*
* Copyright 2013 Nan Deng
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package msgcenter
import (
"encoding/json"
"errors"
"fmt"
"github.com/uniqush/uniqush-conn/evthandler"
"github.com/uniqush/uniqush-conn/msgcache"
"github.com/uniqush/uniqush-conn/proto"
"github.com/uniqush/uniqush-conn/proto/server"
"github.com/uniqush/uniqush-conn/push"
"strings"
"sync"
"time"
)
type eventConnIn struct {
errChan chan error
conn server.Conn
}
type eventConnLeave struct {
conn server.Conn
err error
}
type Result struct {
Err error `json:"err,omitempty"`
ConnId string `json:"connId,omitempty"`
Visible bool `json:"visible"`
}
func (self *Result) Error() string {
b, _ := json.Marshal(self)
return string(b)
}
type ServiceConfig struct {
MaxNrConns int
MaxNrUsers int
MaxNrConnsPerUser int
MsgCache msgcache.Cache
LoginHandler evthandler.LoginHandler
LogoutHandler evthandler.LogoutHandler
MessageHandler evthandler.MessageHandler
ForwardRequestHandler evthandler.ForwardRequestHandler
ErrorHandler evthandler.ErrorHandler
// Push related web hooks
SubscribeHandler evthandler.SubscribeHandler
UnsubscribeHandler evthandler.UnsubscribeHandler
PushHandler evthandler.PushHandler
PushService push.Push
}
type writeMessageRequest struct {
user string
msg *proto.Message
ttl time.Duration
extra map[string]string
resChan chan<- []*Result
}
type serviceCenter struct {
serviceName string
config *ServiceConfig
fwdChan chan<- *server.ForwardRequest
writeReqChan chan *writeMessageRequest
connIn chan *eventConnIn
connLeave chan *eventConnLeave
subReqChan chan *server.SubscribeRequest
pushServiceLock sync.RWMutex
}
var ErrTooManyConns = errors.New("too many connections")
var ErrInvalidConnType = errors.New("invalid connection type")
func (self *serviceCenter) ReceiveForward(fwdreq *server.ForwardRequest) {
shouldFwd := false
if self.config != nil {
if self.config.ForwardRequestHandler != nil {
shouldFwd = self.config.ForwardRequestHandler.ShouldForward(fwdreq)
maxttl := self.config.ForwardRequestHandler.MaxTTL()
if fwdreq.TTL < 1*time.Second || fwdreq.TTL > maxttl {
fwdreq.TTL = maxttl
}
}
}
if !shouldFwd {
return
}
receiver := fwdreq.Receiver
extra := getPushInfo(fwdreq.Message, nil, true)
self.SendMessage(receiver, fwdreq.Message, extra, fwdreq.TTL)
}
func getPushInfo(msg *proto.Message, extra map[string]string, fwd bool) map[string]string {
if extra == nil {
extra = make(map[string]string, len(msg.Header)+3)
}
if fwd {
for k, v := range msg.Header {
if strings.HasPrefix(k, "notif.") {
if strings.HasPrefix(k, "notif.uniqush.") {
// forward message should not contain reserved fields
continue
}
extra[k] = v
delete(msg.Header, k)
}
}
extra["uniqush.sender"] = msg.Sender
extra["uniqush.sender-service"] = msg.SenderService
}
if msg.Header != nil {
if title, ok := msg.Header["title"]; ok {
if _, ok = extra["notif.msg"]; !ok {
extra["notif.msg"] = title
}
}
}
extra["notif.uniqush.msgsize"] = fmt.Sprintf("%v", msg.Size())
return extra
}
func (self *serviceCenter) shouldPush(service, username string, msg *proto.Message, extra map[string]string, fwd bool) bool {
if self.config != nil {
if self.config.PushHandler != nil {
info := getPushInfo(msg, extra, fwd)
return self.config.PushHandler.ShouldPush(service, username, info)
}
}
return false
}
func (self *serviceCenter) subscribe(req *server.SubscribeRequest) {
if req == nil {
return
}
if self.config != nil {
if self.config.PushService != nil {
if req.Subscribe {
self.config.PushService.Subscribe(req.Service, req.Username, req.Params)
} else {
self.config.PushService.Unsubscribe(req.Service, req.Username, req.Params)
}
}
}
}
func (self *serviceCenter) nrDeliveryPoints(service, username string) int {
n := 0
if self.config != nil {
if self.config.PushService != nil {
n = self.config.PushService.NrDeliveryPoints(service, username)
}
}
return n
}
func (self *serviceCenter) pushNotif(service, username string, msg *proto.Message, extra map[string]string, msgIds []string, fwd bool) {
if self.config != nil {
if self.config.PushService != nil {
info := getPushInfo(msg, extra, fwd)
err := self.config.PushService.Push(service, username, info, msgIds)
if err != nil {
self.reportError(service, username, "", "", err)
}
}
}
}
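// The report* helpers below fire the corresponding web hooks on their own
// goroutines, so a slow handler never blocks the caller.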
func (self *serviceCenter) reportError(service, username, connId, addr string, err error) {
if self.config != nil {
if self.config.ErrorHandler != nil {
go self.config.ErrorHandler.OnError(service, username, connId, addr, err)
}
}
}
func (self *serviceCenter) reportLogin(service, username, connId, addr string) {
if self.config != nil {
if self.config.LoginHandler != nil {
go self.config.LoginHandler.OnLogin(service, username, connId, addr)
}
}
}
func (self *serviceCenter) reportMessage(connId string, msg *proto.Message) {
if self.config != nil {
if self.config.MessageHandler != nil {
go self.config.MessageHandler.OnMessage(connId, msg)
}
}
}
func (self *serviceCenter) reportLogout(service, username, connId, addr string, err error) {
if self.config != nil {
if self.config.LogoutHandler != nil {
go self.config.LogoutHandler.OnLogout(service, username, connId, addr, err)
}
}
}
func (self *serviceCenter) cacheMessage(service, username string, msg *proto.Message, ttl time.Duration) (id string, err error) {
if self.config != nil {
if self.config.MsgCache != nil {
id, err = self.config.MsgCache.CacheMessage(service, username, msg, ttl)
}
}
return
}
type connWriteErr struct {
conn server.Conn
err error
}
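// process is the per-service event loop. It is the only goroutine that
// touches connMap, so connection adds, removals, subscription changes and
// message writes are all serialized here without extra locking.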
func (self *serviceCenter) process(maxNrConns, maxNrConnsPerUser, maxNrUsers int) {
connMap := newTreeBasedConnMap()
nrConns := 0
for {
select {
case connInEvt := <-self.connIn:
if maxNrConns > 0 && nrConns >= maxNrConns {
if connInEvt.errChan != nil {
connInEvt.errChan <- ErrTooManyConns
}
continue
}
err := connMap.AddConn(connInEvt.conn, maxNrConnsPerUser, maxNrUsers)
if err != nil {
if connInEvt.errChan != nil {
connInEvt.errChan <- err
}
continue
}
nrConns++
if connInEvt.errChan != nil {
connInEvt.errChan <- nil
}
case leaveEvt := <-self.connLeave:
deleted := connMap.DelConn(leaveEvt.conn)
fmt.Printf("delete a connection %v under user %v; deleted: %v\n", leaveEvt.conn.UniqId(), leaveEvt.conn.Username(), deleted)
leaveEvt.conn.Close()
if deleted {
nrConns--
conn := leaveEvt.conn
self.reportLogout(conn.Service(), conn.Username(), conn.UniqId(), conn.RemoteAddr().String(), leaveEvt.err)
}
case subreq := <-self.subReqChan:
self.pushServiceLock.Lock()
self.subscribe(subreq)
self.pushServiceLock.Unlock()
case wreq := <-self.writeReqChan:
conns := connMap.GetConn(wreq.user)
res := make([]*Result, 0, len(conns))
errConns := make([]*connWriteErr, 0, len(conns))
n := 0
mid, err := self.cacheMessage(self.serviceName, wreq.user, wreq.msg, wreq.ttl)
if err != nil {
self.reportError(self.serviceName, wreq.user, "", "", err)
// Reply before continuing so a caller blocked in SendMessage is not leaked.
if wreq.resChan != nil {
wreq.resChan <- []*Result{{Err: err}}
}
continue
}
for _, conn := range conns {
if conn == nil {
continue
}
var err error
sconn, ok := conn.(server.Conn)
if !ok {
continue
}
err = sconn.SendMessage(wreq.msg, wreq.extra, wreq.ttl, mid)
if err != nil {
errConns = append(errConns, &connWriteErr{sconn, err})
res = append(res, &Result{err, sconn.UniqId(), sconn.Visible()})
self.reportError(sconn.Service(), sconn.Username(), sconn.UniqId(), sconn.RemoteAddr().String(), err)
continue
} else {
res = append(res, &Result{nil, sconn.UniqId(), sconn.Visible()})
}
if sconn.Visible() {
n++
}
}
if n == 0 {
msg := wreq.msg
extra := wreq.extra
username := wreq.user
service := self.serviceName
fwd := false
if len(msg.Sender) > 0 && len(msg.SenderService) > 0 {
if msg.Sender != username || msg.SenderService != service {
fwd = true
}
}
go func() {
should := self.shouldPush(service, username, msg, extra, fwd)
if !should {
return
}
self.pushServiceLock.RLock()
defer self.pushServiceLock.RUnlock()
n := self.nrDeliveryPoints(service, username)
if n <= 0 {
return
}
msgIds := []string{mid}
self.pushNotif(service, username, msg, extra, msgIds, fwd)
}()
}
if wreq.resChan != nil {
wreq.resChan <- res
}
// close all connections with error:
go func() {
for _, e := range errConns {
fmt.Printf("Need to remove connection %v\n", e.conn.UniqId())
self.connLeave <- &eventConnLeave{conn: e.conn, err: e.err}
}
}()
}
}
}
func (self *serviceCenter) SendMessage(username string, msg *proto.Message, extra map[string]string, ttl time.Duration) []*Result {
req := new(writeMessageRequest)
ch := make(chan []*Result)
req.msg = msg
req.user = username
req.ttl = ttl
req.resChan = ch
req.extra = extra
self.writeReqChan <- req
res := <-ch
return res
}
func (self *serviceCenter) serveConn(conn server.Conn) {
conn.SetForwardRequestChannel(self.fwdChan)
conn.SetSubscribeRequestChan(self.subReqChan)
var err error
defer func() {
self.connLeave <- &eventConnLeave{conn: conn, err: err}
}()
for {
var msg *proto.Message
msg, err = conn.ReadMessage()
if err != nil {
return
}
self.reportMessage(conn.UniqId(), msg)
}
}
func (self *serviceCenter) NewConn(conn server.Conn) error {
usr := conn.Username()
if len(usr) == 0 || strings.Contains(usr, ":") || strings.Contains(usr, "\n") {
return fmt.Errorf("[Username=%v] Invalid Username")
}
evt := new(eventConnIn)
ch := make(chan error)
conn.SetMessageCache(self.config.MsgCache)
evt.conn = conn
evt.errChan = ch
self.connIn <- evt
err := <-ch
if err == nil {
go self.serveConn(conn)
self.reportLogin(conn.Service(), usr, conn.UniqId(), conn.RemoteAddr().String())
}
return err
}
func newServiceCenter(serviceName string, conf *ServiceConfig, fwdChan chan<- *server.ForwardRequest) *serviceCenter {
ret := new(serviceCenter)
ret.config = conf
if ret.config == nil {
ret.config = new(ServiceConfig)
}
ret.serviceName = serviceName
ret.fwdChan = fwdChan
ret.connIn = make(chan *eventConnIn)
ret.connLeave = make(chan *eventConnLeave)
ret.writeReqChan = make(chan *writeMessageRequest)
ret.subReqChan = make(chan *server.SubscribeRequest)
// Read the limits from ret.config, which is non-nil even when conf was nil.
go ret.process(ret.config.MaxNrConns, ret.config.MaxNrConnsPerUser, ret.config.MaxNrUsers)
return ret
}
wireguard.go
package cmd
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"net"
"net/http"
"os"
"strings"
"text/template"
"github.com/AlecAivazis/survey/v2"
"github.com/olekukonko/tablewriter"
"github.com/spf13/cobra"
"github.com/superfly/flyctl/api"
"github.com/superfly/flyctl/cmdctx"
"github.com/superfly/flyctl/docstrings"
"github.com/superfly/flyctl/internal/client"
"github.com/superfly/flyctl/internal/wireguard"
"github.com/superfly/flyctl/pkg/agent"
)
func newWireGuardCommand(client *client.Client) *Command {
cmd := BuildCommandKS(nil, nil, docstrings.Get("wireguard"), client, requireSession)
cmd.Aliases = []string{"wg"}
child := func(parent *Command, fn RunFn, ds string) *Command {
return BuildCommandKS(parent, fn, docstrings.Get(ds), client, requireSession)
}
child(cmd, runWireGuardList, "wireguard.list").Args = cobra.MaximumNArgs(1)
child(cmd, runWireGuardCreate, "wireguard.create").Args = cobra.MaximumNArgs(4)
child(cmd, runWireGuardRemove, "wireguard.remove").Args = cobra.MaximumNArgs(2)
child(cmd, runWireGuardStat, "wireguard.status").Args = cobra.MaximumNArgs(2)
child(cmd, runWireGuardResetPeer, "wireguard.reset").Args = cobra.MaximumNArgs(1)
tokens := child(cmd, nil, "wireguard.token")
child(tokens, runWireGuardTokenList, "wireguard.token.list").Args = cobra.MaximumNArgs(1)
child(tokens, runWireGuardTokenCreate, "wireguard.token.create").Args = cobra.MaximumNArgs(2)
child(tokens, runWireGuardTokenDelete, "wireguard.token.delete").Args = cobra.MaximumNArgs(3)
child(tokens, runWireGuardTokenStartPeer, "wireguard.token.start").Args = cobra.MaximumNArgs(4)
child(tokens, runWireGuardTokenUpdatePeer, "wireguard.token.update").Args = cobra.MaximumNArgs(2)
return cmd
}
func argOrPromptImpl(ctx *cmdctx.CmdContext, nth int, prompt string, first bool) (string, error) {
if len(ctx.Args) >= (nth + 1) {
return ctx.Args[nth], nil
}
val := ""
err := survey.AskOne(&survey.Input{
Message: prompt,
}, &val)
return val, err
}
func argOrPromptLoop(ctx *cmdctx.CmdContext, nth int, prompt, last string) (string, error) {
return argOrPromptImpl(ctx, nth, prompt, last == "")
}
func argOrPrompt(ctx *cmdctx.CmdContext, nth int, prompt string) (string, error) {
return argOrPromptImpl(ctx, nth, prompt, true)
}
func orgByArg(cmdCtx *cmdctx.CmdContext) (*api.Organization, error) {
ctx := cmdCtx.Command.Context()
client := cmdCtx.Client.API()
if len(cmdCtx.Args) == 0 {
org, err := selectOrganization(ctx, client, "", nil)
if err != nil {
return nil, err
}
return org, nil
}
return client.FindOrganizationBySlug(ctx, cmdCtx.Args[0])
}
func runWireGuardList(cmdCtx *cmdctx.CmdContext) error {
ctx := cmdCtx.Command.Context()
client := cmdCtx.Client.API()
org, err := orgByArg(cmdCtx)
if err != nil {
return err
}
peers, err := client.GetWireGuardPeers(ctx, org.Slug)
if err != nil {
return err
}
if cmdCtx.OutputJSON() {
cmdCtx.WriteJSON(peers)
return nil
}
table := tablewriter.NewWriter(cmdCtx.Out)
table.SetHeader([]string{
"Name",
"Region",
"Peer IP",
})
for _, peer := range peers {
table.Append([]string{peer.Name, peer.Region, peer.Peerip})
}
table.Render()
return nil
}
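// generateWgConf renders a client-side WireGuard config for the peer. The
// peer address is a /120; AllowedIPs is widened to the enclosing /48, and
// the DNS server is assumed to live at ::3 within that network (see the BUG
// note below).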
func generateWgConf(peer *api.CreatedWireGuardPeer, privkey string, w io.Writer) {
templateStr := `
[Interface]
PrivateKey = {{.Meta.Privkey}}
Address = {{.Peer.Peerip}}/120
DNS = {{.Meta.DNS}}
[Peer]
PublicKey = {{.Peer.Pubkey}}
AllowedIPs = {{.Meta.AllowedIPs}}
Endpoint = {{.Peer.Endpointip}}:51820
PersistentKeepalive = 15
`
data := struct {
Peer *api.CreatedWireGuardPeer
Meta struct {
Privkey string
AllowedIPs string
DNS string
}
}{
Peer: peer,
}
addr := net.ParseIP(peer.Peerip).To16()
for i := 6; i < 16; i++ {
addr[i] = 0
}
// BUG(tqbf): can't stay this way
data.Meta.AllowedIPs = fmt.Sprintf("%s/48", addr)
addr[15] = 3
data.Meta.DNS = fmt.Sprintf("%s", addr)
data.Meta.Privkey = privkey
tmpl := template.Must(template.New("name").Parse(templateStr))
tmpl.Execute(w, &data)
}
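// resolveOutputWriter keeps prompting until it can open the requested file
// (exclusive create, mode 0600, since the output may hold a private key), or
// returns os.Stdout; mustClose tells the caller whether Close is its job.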
func resolveOutputWriter(ctx *cmdctx.CmdContext, idx int, prompt string) (w io.WriteCloser, mustClose bool, err error) {
var (
f *os.File
filename string
)
for {
filename, err = argOrPromptLoop(ctx, idx, prompt, filename)
if err != nil {
return nil, false, err
}
if filename == "" {
fmt.Println("Provide a filename (or 'stdout')")
continue
}
if filename == "stdout" {
return os.Stdout, false, nil
}
f, err = os.OpenFile(filename, os.O_WRONLY|os.O_CREATE|os.O_EXCL, 0600)
if err == nil {
return f, true, nil
}
fmt.Printf("Can't create '%s': %s\n", filename, err)
}
}
func runWireGuardResetPeer(ctx *cmdctx.CmdContext) error {
org, err := orgByArg(ctx)
if err != nil {
return err
}
client := ctx.Client.API()
agentclient, err := agent.Establish(context.Background(), client)
if err != nil {
return err
}
conf, err := agentclient.Reestablish(context.Background(), org.Slug)
if err != nil {
return err
}
fmt.Printf("New WireGuard peer for organization '%s': '%s'\n", org.Slug, conf.WireGuardState.Name)
return nil
}
func runWireGuardCreate(ctx *cmdctx.CmdContext) error {
org, err := orgByArg(ctx)
if err != nil {
return err
}
var region string
var name string
if len(ctx.Args) > 1 && ctx.Args[1] != "" {
region = ctx.Args[1]
}
if len(ctx.Args) > 2 && ctx.Args[2] != "" {
name = ctx.Args[2]
}
state, err := wireguard.Create(ctx.Client.API(), org, region, name)
if err != nil {
return err
}
data := &state.Peer
fmt.Printf(`
!!!! WARNING: Output includes private key. Private keys cannot be recovered !!!!
!!!! after creating the peer; if you lose the key, you'll need to remove !!!!
!!!! and re-add the peering connection. !!!!
`)
w, shouldClose, err := resolveOutputWriter(ctx, 3, "Filename to store WireGuard configuration in, or 'stdout': ")
if err != nil {
return err
}
if shouldClose {
defer w.Close()
}
generateWgConf(data, state.LocalPrivate, w)
if shouldClose {
filename := w.(*os.File).Name()
fmt.Printf("Wrote WireGuard configuration to %s; load in your WireGuard client\n", filename)
}
return nil
}
func runWireGuardRemove(cmdCtx *cmdctx.CmdContext) error {
ctx := cmdCtx.Command.Context()
client := cmdCtx.Client.API()
org, err := orgByArg(cmdCtx)
if err != nil {
return err
}
var name string
if len(cmdCtx.Args) >= 2 {
name = cmdCtx.Args[1]
} else {
name, err = selectWireGuardPeer(ctx, cmdCtx.Client.API(), org.Slug)
if err != nil {
return err
}
}
fmt.Printf("Removing WireGuard peer \"%s\" for organization %s\n", name, org.Slug)
err = client.RemoveWireGuardPeer(ctx, org, name)
if err != nil {
return err
}
fmt.Println("Removed peer.")
return wireguard.PruneInvalidPeers(ctx, cmdCtx.Client.API())
}
func runWireGuardStat(cmdCtx *cmdctx.CmdContext) error {
ctx := cmdCtx.Command.Context()
client := cmdCtx.Client.API()
org, err := orgByArg(cmdCtx)
if err != nil {
return err
}
var name string
if len(cmdCtx.Args) >= 2 {
name = cmdCtx.Args[1]
} else {
name, err = selectWireGuardPeer(ctx, cmdCtx.Client.API(), org.Slug)
if err != nil {
return err
}
}
status, err := client.GetWireGuardPeerStatus(ctx, org.Slug, name)
if err != nil {
return err
}
fmt.Printf("Alive: %+v\n", status.Live)
if status.WgError != "" {
fmt.Printf("Gateway error: %s\n", status.WgError)
}
if !status.Live {
return nil
}
if status.Endpoint != "" {
fmt.Printf("Last Source Address: %s\n", status.Endpoint)
}
ago := ""
if status.SinceAdded != "" {
ago = " (" + status.SinceAdded + " ago)"
}
if status.LastHandshake != "" {
fmt.Printf("Last Handshake At: %s%s\n", status.LastHandshake, ago)
}
ago = ""
if status.SinceHandshake != "" {
ago = " (" + status.SinceHandshake + " ago)"
}
fmt.Printf("Installed On Gateway At: %s%s\n", status.Added, ago)
fmt.Printf("Traffic: rx:%d tx:%d\n", status.Rx, status.Tx)
return nil
}
func runWireGuardTokenList(cmdCtx *cmdctx.CmdContext) error {
ctx := cmdCtx.Command.Context()
client := cmdCtx.Client.API()
org, err := orgByArg(cmdCtx)
if err != nil {
return err
}
tokens, err := client.GetDelegatedWireGuardTokens(ctx, org.Slug)
if err != nil {
return err
}
if cmdCtx.OutputJSON() {
cmdCtx.WriteJSON(tokens)
return nil
}
table := tablewriter.NewWriter(cmdCtx.Out)
table.SetHeader([]string{
"Name",
})
for _, peer := range tokens {
table.Append([]string{peer.Name})
}
table.Render()
return nil
}
func runWireGuardTokenCreate(cmdCtx *cmdctx.CmdContext) error {
ctx := cmdCtx.Command.Context()
client := cmdCtx.Client.API()
org, err := orgByArg(cmdCtx)
if err != nil {
return err
}
name, err := argOrPrompt(cmdCtx, 1, "Memorable name for WireGuard token: ")
if err != nil {
return err
}
data, err := client.CreateDelegatedWireGuardToken(ctx, org, name)
if err != nil {
return err
}
fmt.Printf(`
!!!! WARNING: Output includes credential information. Credentials cannot !!!!
!!!! be recovered after creation; if you lose the token, you'll need to !!!!
!!!! remove and and re-add it. !!!!
To use a token to create a WireGuard connection, you can use curl:
curl -v --request POST \
-H "Authorization: Bearer ${WG_TOKEN}" \
-H "Content-Type: application/json" \
--data '{"name": "node-1", \
"group": "k8s", \
"pubkey": "'"${WG_PUBKEY}"'", \
"region": "dev"}' \
https://fly.io/api/v3/wire_guard_peers
We'll return 'us' (our local 6PN address), 'them' (the gateway IP address),
and 'pubkey' (the public key of the gateway), which you can inject into a
"wg.con".
`)
w, shouldClose, err := resolveOutputWriter(cmdCtx, 2, "Filename to store WireGuard token in, or 'stdout': ")
if err != nil {
return err
}
if shouldClose {
defer w.Close()
}
fmt.Fprintf(w, "FLY_WIREGUARD_TOKEN=%s\n", data.Token)
return nil
}
func runWireGuardTokenDelete(cmdCtx *cmdctx.CmdContext) error {
ctx := cmdCtx.Command.Context()
client := cmdCtx.Client.API()
org, err := orgByArg(cmdCtx)
if err != nil {
return err
}
kv, err := argOrPrompt(cmdCtx, 1, "'name:<name>' or 'token:<token>': ")
if err != nil {
return err
}
tup := strings.SplitN(kv, ":", 2)
if len(tup) != 2 || (tup[0] != "name" && tup[0] != "token") {
return fmt.Errorf("format is name:<name> or token:<token>")
}
fmt.Printf("Removing WireGuard token \"%s\" for organization %s\n", kv, org.Slug)
if tup[0] == "name" {
err = client.DeleteDelegatedWireGuardToken(ctx, org, &tup[1], nil)
} else {
err = client.DeleteDelegatedWireGuardToken(ctx, org, nil, &tup[1])
}
if err != nil {
return err
}
fmt.Println("Removed token.")
return nil
}
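// tokenRequest talks to the wire_guard_peers REST endpoint directly with a
// delegated bearer token, bypassing the normal authenticated API client.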
func tokenRequest(method, path, token string, data interface{}) (*http.Response, error) {
buf := &bytes.Buffer{}
if err := json.NewEncoder(buf).Encode(data); err != nil {
return nil, err
}
req, err := http.NewRequest(method,
fmt.Sprintf("https://fly.io/api/v3/wire_guard_peers%s", path),
buf)
if err != nil {
return nil, err
}
req.Header.Add("Authorization", "Bearer "+token)
req.Header.Add("Content-Type", "application/json")
return (&http.Client{}).Do(req)
}
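// Request and response payloads for the wire_guard_peers endpoint.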
type StartPeerJson struct {
Name string `json:"name"`
Group string `json:"group"`
Pubkey string `json:"pubkey"`
Region string `json:"region"`
}
type UpdatePeerJson struct {
Pubkey string `json:"pubkey"`
}
type PeerStatusJson struct {
Us string `json:"us"`
Them string `json:"them"`
Pubkey string `json:"key"`
Error string `json:"error"`
}
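// generateTokenConf writes a WireGuard config for a token-provisioned peer;
// the 'them' gateway address returned by the API doubles as the tunnel
// endpoint.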
func generateTokenConf(ctx *cmdctx.CmdContext, idx int, stat *PeerStatusJson, privkey string) error {
fmt.Printf(`
!!!! WARNING: Output includes private key. Private keys cannot be recovered !!!!
!!!! after creating the peer; if you lose the key, you'll need to rekey !!!!
!!!! the peering connection. !!!!
`)
w, shouldClose, err := resolveOutputWriter(ctx, idx, "Filename to store WireGuard configuration in, or 'stdout': ")
if err != nil {
return err
}
if shouldClose {
defer w.Close()
}
generateWgConf(&api.CreatedWireGuardPeer{
Peerip: stat.Us,
Pubkey: stat.Pubkey,
Endpointip: stat.Them,
}, privkey, w)
if shouldClose {
filename := w.(*os.File).Name()
fmt.Printf("Wrote WireGuard configuration to %s; load in your WireGuard client\n", filename)
}
return nil
}
func runWireGuardTokenStartPeer(ctx *cmdctx.CmdContext) error {
token := os.Getenv("FLY_WIREGUARD_TOKEN")
if token == "" {
return fmt.Errorf("set FLY_WIREGUARD_TOKEN env")
}
name, err := argOrPrompt(ctx, 0, "Name (DNS-compatible) for peer: ")
if err != nil {
return err
}
group, err := argOrPrompt(ctx, 1, "Peer group (i.e. 'k8s'): ")
if err != nil {
return err
}
region, err := argOrPrompt(ctx, 2, "Gateway region: ")
if err != nil {
return err
}
pubkey, privatekey := wireguard.C25519pair()
body := &StartPeerJson{
Name: name,
Group: group,
Pubkey: pubkey,
Region: region,
}
resp, err := tokenRequest("POST", "", token, body)
if err != nil {
return err
}
peerStatus := &PeerStatusJson{}
if err = json.NewDecoder(resp.Body).Decode(peerStatus); err != nil {
if resp.StatusCode != 200 {
return fmt.Errorf("server returned error: %s %w", resp.Status, err)
}
return err
}
if peerStatus.Error != "" {
return fmt.Errorf("WireGuard API error: %s", peerStatus.Error)
}
if err = generateTokenConf(ctx, 3, peerStatus, privatekey); err != nil {
return err
}
return nil
}
func runWireGuardTokenUpdatePeer(ctx *cmdctx.CmdContext) error {
token := os.Getenv("FLY_WIREGUARD_TOKEN")
if token == "" {
return fmt.Errorf("set FLY_WIREGUARD_TOKEN env")
}
name, err := argOrPrompt(ctx, 0, "Name (DNS-compatible) for peer: ")
if err != nil {
return err
}
pubkey, privatekey := wireguard.C25519pair()
body := &StartPeerJson{
Pubkey: pubkey,
}
resp, err := tokenRequest("PUT", "/"+name, token, body)
if err != nil {
return err
}
peerStatus := &PeerStatusJson{}
if err = json.NewDecoder(resp.Body).Decode(peerStatus); err != nil {
if resp.StatusCode != 200 {
return fmt.Errorf("server returned error: %s %w", resp.Status, err)
}
return err
}
if peerStatus.Error != "" {
return fmt.Errorf("WireGuard API error: %s", peerStatus.Error)
}
if err = generateTokenConf(ctx, 1, peerStatus, privatekey); err != nil {
return err
}
return nil
}
wireguard.go | package cmd
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"net"
"net/http"
"os"
"strings"
"text/template"
"github.com/AlecAivazis/survey/v2"
"github.com/olekukonko/tablewriter"
"github.com/spf13/cobra"
"github.com/superfly/flyctl/api"
"github.com/superfly/flyctl/cmdctx"
"github.com/superfly/flyctl/docstrings"
"github.com/superfly/flyctl/internal/client"
"github.com/superfly/flyctl/internal/wireguard"
"github.com/superfly/flyctl/pkg/agent"
)
func newWireGuardCommand(client *client.Client) *Command {
cmd := BuildCommandKS(nil, nil, docstrings.Get("wireguard"), client, requireSession)
cmd.Aliases = []string{"wg"}
child := func(parent *Command, fn RunFn, ds string) *Command {
return BuildCommandKS(parent, fn, docstrings.Get(ds), client, requireSession)
}
child(cmd, runWireGuardList, "wireguard.list").Args = cobra.MaximumNArgs(1)
child(cmd, runWireGuardCreate, "wireguard.create").Args = cobra.MaximumNArgs(4)
child(cmd, runWireGuardRemove, "wireguard.remove").Args = cobra.MaximumNArgs(2)
child(cmd, runWireGuardStat, "wireguard.status").Args = cobra.MaximumNArgs(2)
child(cmd, runWireGuardResetPeer, "wireguard.reset").Args = cobra.MaximumNArgs(1)
tokens := child(cmd, nil, "wireguard.token")
child(tokens, runWireGuardTokenList, "wireguard.token.list").Args = cobra.MaximumNArgs(1)
child(tokens, runWireGuardTokenCreate, "wireguard.token.create").Args = cobra.MaximumNArgs(2)
child(tokens, runWireGuardTokenDelete, "wireguard.token.delete").Args = cobra.MaximumNArgs(3)
child(tokens, runWireGuardTokenStartPeer, "wireguard.token.start").Args = cobra.MaximumNArgs(4)
child(tokens, runWireGuardTokenUpdatePeer, "wireguard.token.update").Args = cobra.MaximumNArgs(2)
return cmd
}
func argOrPromptImpl(ctx *cmdctx.CmdContext, nth int, prompt string, first bool) (string, error) {
if len(ctx.Args) >= (nth + 1) {
return ctx.Args[nth], nil
}
val := ""
err := survey.AskOne(&survey.Input{
Message: prompt,
}, &val)
return val, err
}
func argOrPromptLoop(ctx *cmdctx.CmdContext, nth int, prompt, last string) (string, error) {
return argOrPromptImpl(ctx, nth, prompt, last == "")
}
func argOrPrompt(ctx *cmdctx.CmdContext, nth int, prompt string) (string, error) {
return argOrPromptImpl(ctx, nth, prompt, true)
}
func orgByArg(cmdCtx *cmdctx.CmdContext) (*api.Organization, error) {
ctx := cmdCtx.Command.Context()
client := cmdCtx.Client.API()
if len(cmdCtx.Args) == 0 {
org, err := selectOrganization(ctx, client, "", nil)
if err != nil {
return nil, err
}
return org, nil
}
return client.FindOrganizationBySlug(ctx, cmdCtx.Args[0])
}
func runWireGuardList(cmdCtx *cmdctx.CmdContext) error {
ctx := cmdCtx.Command.Context()
client := cmdCtx.Client.API()
org, err := orgByArg(cmdCtx)
if err != nil {
return err
}
peers, err := client.GetWireGuardPeers(ctx, org.Slug)
if err != nil {
return err
}
if cmdCtx.OutputJSON() {
cmdCtx.WriteJSON(peers)
return nil
}
table := tablewriter.NewWriter(cmdCtx.Out)
table.SetHeader([]string{
"Name",
"Region",
"Peer IP",
})
for _, peer := range peers {
table.Append([]string{peer.Name, peer.Region, peer.Peerip})
}
table.Render()
return nil
}
func generateWgConf(peer *api.CreatedWireGuardPeer, privkey string, w io.Writer) {
templateStr := `
[Interface]
PrivateKey = {{.Meta.Privkey}}
Address = {{.Peer.Peerip}}/120
DNS = {{.Meta.DNS}}
[Peer]
PublicKey = {{.Peer.Pubkey}}
AllowedIPs = {{.Meta.AllowedIPs}}
Endpoint = {{.Peer.Endpointip}}:51820
PersistentKeepalive = 15
`
data := struct {
Peer *api.CreatedWireGuardPeer
Meta struct {
Privkey string
AllowedIPs string
DNS string
}
}{
Peer: peer,
}
addr := net.ParseIP(peer.Peerip).To16()
for i := 6; i < 16; i++ {
addr[i] = 0
}
// BUG(tqbf): can't stay this way
data.Meta.AllowedIPs = fmt.Sprintf("%s/48", addr)
addr[15] = 3
data.Meta.DNS = fmt.Sprintf("%s", addr)
data.Meta.Privkey = privkey
tmpl := template.Must(template.New("name").Parse(templateStr))
tmpl.Execute(w, &data)
}
func | (ctx *cmdctx.CmdContext, idx int, prompt string) (w io.WriteCloser, mustClose bool, err error) {
var (
f *os.File
filename string
)
for {
filename, err = argOrPromptLoop(ctx, idx, prompt, filename)
if err != nil {
return nil, false, err
}
if filename == "" {
fmt.Println("Provide a filename (or 'stdout')")
continue
}
if filename == "stdout" {
return os.Stdout, false, nil
}
f, err = os.OpenFile(filename, os.O_WRONLY|os.O_CREATE|os.O_EXCL, 0600)
if err == nil {
return f, true, nil
}
fmt.Printf("Can't create '%s': %s\n", filename, err)
}
}
func runWireGuardResetPeer(ctx *cmdctx.CmdContext) error {
org, err := orgByArg(ctx)
if err != nil {
return err
}
client := ctx.Client.API()
agentclient, err := agent.Establish(context.Background(), client)
if err != nil {
return err
}
conf, err := agentclient.Reestablish(context.Background(), org.Slug)
if err != nil {
return err
}
fmt.Printf("New WireGuard peer for organization '%s': '%s'\n", org.Slug, conf.WireGuardState.Name)
return nil
}
func runWireGuardCreate(ctx *cmdctx.CmdContext) error {
org, err := orgByArg(ctx)
if err != nil {
return err
}
var region string
var name string
if len(ctx.Args) > 1 && ctx.Args[1] != "" {
region = ctx.Args[1]
}
if len(ctx.Args) > 2 && ctx.Args[2] != "" {
name = ctx.Args[2]
}
state, err := wireguard.Create(ctx.Client.API(), org, region, name)
if err != nil {
return err
}
data := &state.Peer
fmt.Printf(`
!!!! WARNING: Output includes private key. Private keys cannot be recovered !!!!
!!!! after creating the peer; if you lose the key, you'll need to remove !!!!
!!!! and re-add the peering connection. !!!!
`)
w, shouldClose, err := resolveOutputWriter(ctx, 3, "Filename to store WireGuard configuration in, or 'stdout': ")
if err != nil {
return err
}
if shouldClose {
defer w.Close()
}
generateWgConf(data, state.LocalPrivate, w)
if shouldClose {
filename := w.(*os.File).Name()
fmt.Printf("Wrote WireGuard configuration to %s; load in your WireGuard client\n", filename)
}
return nil
}
func runWireGuardRemove(cmdCtx *cmdctx.CmdContext) error {
ctx := cmdCtx.Command.Context()
client := cmdCtx.Client.API()
org, err := orgByArg(cmdCtx)
if err != nil {
return err
}
var name string
if len(cmdCtx.Args) >= 2 {
name = cmdCtx.Args[1]
} else {
name, err = selectWireGuardPeer(ctx, cmdCtx.Client.API(), org.Slug)
if err != nil {
return err
}
}
fmt.Printf("Removing WireGuard peer \"%s\" for organization %s\n", name, org.Slug)
err = client.RemoveWireGuardPeer(ctx, org, name)
if err != nil {
return err
}
fmt.Println("Removed peer.")
return wireguard.PruneInvalidPeers(ctx, cmdCtx.Client.API())
}
func runWireGuardStat(cmdCtx *cmdctx.CmdContext) error {
ctx := cmdCtx.Command.Context()
client := cmdCtx.Client.API()
org, err := orgByArg(cmdCtx)
if err != nil {
return err
}
var name string
if len(cmdCtx.Args) >= 2 {
name = cmdCtx.Args[1]
} else {
name, err = selectWireGuardPeer(ctx, cmdCtx.Client.API(), org.Slug)
if err != nil {
return err
}
}
status, err := client.GetWireGuardPeerStatus(ctx, org.Slug, name)
if err != nil {
return err
}
fmt.Printf("Alive: %+v\n", status.Live)
if status.WgError != "" {
fmt.Printf("Gateway error: %s\n", status.WgError)
}
if !status.Live {
return nil
}
if status.Endpoint != "" {
fmt.Printf("Last Source Address: %s\n", status.Endpoint)
}
ago := ""
if status.SinceAdded != "" {
ago = " (" + status.SinceAdded + " ago)"
}
if status.LastHandshake != "" {
fmt.Printf("Last Handshake At: %s%s\n", status.LastHandshake, ago)
}
ago = ""
if status.SinceHandshake != "" {
ago = " (" + status.SinceHandshake + " ago)"
}
fmt.Printf("Installed On Gateway At: %s%s\n", status.Added, ago)
fmt.Printf("Traffic: rx:%d tx:%d\n", status.Rx, status.Tx)
return nil
}
func runWireGuardTokenList(cmdCtx *cmdctx.CmdContext) error {
ctx := cmdCtx.Command.Context()
client := cmdCtx.Client.API()
org, err := orgByArg(cmdCtx)
if err != nil {
return err
}
tokens, err := client.GetDelegatedWireGuardTokens(ctx, org.Slug)
if err != nil {
return err
}
if cmdCtx.OutputJSON() {
cmdCtx.WriteJSON(tokens)
return nil
}
table := tablewriter.NewWriter(cmdCtx.Out)
table.SetHeader([]string{
"Name",
})
for _, peer := range tokens {
table.Append([]string{peer.Name})
}
table.Render()
return nil
}
func runWireGuardTokenCreate(cmdCtx *cmdctx.CmdContext) error {
ctx := cmdCtx.Command.Context()
client := cmdCtx.Client.API()
org, err := orgByArg(cmdCtx)
if err != nil {
return err
}
name, err := argOrPrompt(cmdCtx, 1, "Memorable name for WireGuard token: ")
if err != nil {
return err
}
data, err := client.CreateDelegatedWireGuardToken(ctx, org, name)
if err != nil {
return err
}
fmt.Printf(`
!!!! WARNING: Output includes credential information. Credentials cannot !!!!
!!!! be recovered after creation; if you lose the token, you'll need to !!!!
!!!! remove and and re-add it. !!!!
To use a token to create a WireGuard connection, you can use curl:
curl -v --request POST
-H "Authorization: Bearer ${WG_TOKEN}"
-H "Content-Type: application/json"
--data '{"name": "node-1", \
"group": "k8s", \
"pubkey": "'"${WG_PUBKEY}"'", \
"region": "dev"}'
http://fly.io/api/v3/wire_guard_peers
We'll return 'us' (our local 6PN address), 'them' (the gateway IP address),
and 'pubkey' (the public key of the gateway), which you can inject into a
"wg.con".
`)
w, shouldClose, err := resolveOutputWriter(cmdCtx, 2, "Filename to store WireGuard token in, or 'stdout': ")
if err != nil {
return err
}
if shouldClose {
defer w.Close()
}
fmt.Fprintf(w, "FLY_WIREGUARD_TOKEN=%s\n", data.Token)
return nil
}
func runWireGuardTokenDelete(cmdCtx *cmdctx.CmdContext) error {
ctx := cmdCtx.Command.Context()
client := cmdCtx.Client.API()
org, err := orgByArg(cmdCtx)
if err != nil {
return err
}
kv, err := argOrPrompt(cmdCtx, 1, "'name:<name>' or token:<token>': ")
if err != nil {
return err
}
tup := strings.SplitN(kv, ":", 2)
if len(tup) != 2 || (tup[0] != "name" && tup[0] != "token") {
return fmt.Errorf("format is name:<name> or token:<token>")
}
fmt.Printf("Removing WireGuard token \"%s\" for organization %s\n", kv, org.Slug)
if tup[0] == "name" {
err = client.DeleteDelegatedWireGuardToken(ctx, org, &tup[1], nil)
} else {
err = client.DeleteDelegatedWireGuardToken(ctx, org, nil, &tup[1])
}
if err != nil {
return err
}
fmt.Println("Removed token.")
return nil
}
func tokenRequest(method, path, token string, data interface{}) (*http.Response, error) {
buf := &bytes.Buffer{}
if err := json.NewEncoder(buf).Encode(data); err != nil {
return nil, err
}
req, err := http.NewRequest(method,
fmt.Sprintf("https://fly.io/api/v3/wire_guard_peers%s", path),
buf)
if err != nil {
return nil, err
}
req.Header.Add("Authorization", "Bearer "+token)
req.Header.Add("Content-Type", "application/json")
return (&http.Client{}).Do(req)
}
type StartPeerJson struct {
Name string `json:"name"`
Group string `json:"group"`
Pubkey string `json:"pubkey"`
Region string `json:"region"`
}
type UpdatePeerJson struct {
Pubkey string `json:"pubkey"`
}
type PeerStatusJson struct {
Us string `json:"us"`
Them string `json:"them"`
Pubkey string `json:"key"`
Error string `json:"error"`
}
func generateTokenConf(ctx *cmdctx.CmdContext, idx int, stat *PeerStatusJson, privkey string) error {
fmt.Printf(`
!!!! WARNING: Output includes private key. Private keys cannot be recovered !!!!
!!!! after creating the peer; if you lose the key, you'll need to rekey !!!!
!!!! the peering connection. !!!!
`)
w, shouldClose, err := resolveOutputWriter(ctx, idx, "Filename to store WireGuard configuration in, or 'stdout': ")
if err != nil {
return err
}
if shouldClose {
defer w.Close()
}
generateWgConf(&api.CreatedWireGuardPeer{
Peerip: stat.Us,
Pubkey: stat.Pubkey,
Endpointip: stat.Them,
}, privkey, w)
if shouldClose {
filename := w.(*os.File).Name()
fmt.Printf("Wrote WireGuard configuration to %s; load in your WireGuard client\n", filename)
}
return nil
}
func runWireGuardTokenStartPeer(ctx *cmdctx.CmdContext) error {
token := os.Getenv("FLY_WIREGUARD_TOKEN")
if token == "" {
return fmt.Errorf("set FLY_WIREGUARD_TOKEN env")
}
name, err := argOrPrompt(ctx, 0, "Name (DNS-compatible) for peer: ")
if err != nil {
return err
}
group, err := argOrPrompt(ctx, 1, "Peer group (i.e. 'k8s'): ")
if err != nil {
return err
}
region, err := argOrPrompt(ctx, 2, "Gateway region: ")
if err != nil {
return err
}
pubkey, privatekey := wireguard.C25519pair()
body := &StartPeerJson{
Name: name,
Group: group,
Pubkey: pubkey,
Region: region,
}
resp, err := tokenRequest("POST", "", token, body)
if err != nil {
return err
}
peerStatus := &PeerStatusJson{}
if err = json.NewDecoder(resp.Body).Decode(peerStatus); err != nil {
if resp.StatusCode != 200 {
return fmt.Errorf("server returned error: %s %w", resp.Status, err)
}
return err
}
if peerStatus.Error != "" {
return fmt.Errorf("WireGuard API error: %s", peerStatus.Error)
}
if err = generateTokenConf(ctx, 3, peerStatus, privatekey); err != nil {
return err
}
return nil
}
func runWireGuardTokenUpdatePeer(ctx *cmdctx.CmdContext) error {
token := os.Getenv("FLY_WIREGUARD_TOKEN")
if token == "" {
return fmt.Errorf("set FLY_WIREGUARD_TOKEN env")
}
name, err := argOrPrompt(ctx, 0, "Name (DNS-compatible) for peer: ")
if err != nil {
return err
}
pubkey, privatekey := wireguard.C25519pair()
body := &StartPeerJson{
Pubkey: pubkey,
}
resp, err := tokenRequest("PUT", "/"+name, token, body)
if err != nil {
return err
}
peerStatus := &PeerStatusJson{}
if err = json.NewDecoder(resp.Body).Decode(peerStatus); err != nil {
if resp.StatusCode != 200 {
return fmt.Errorf("server returned error: %s %w", resp.Status, err)
}
return err
}
if peerStatus.Error != "" {
return fmt.Errorf("WireGuard API error: %s", peerStatus.Error)
}
if err = generateTokenConf(ctx, 1, peerStatus, privatekey); err != nil {
return err
}
return nil
}
| resolveOutputWriter | identifier_name |
wireguard.go | package cmd
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"net"
"net/http"
"os"
"strings"
"text/template"
"github.com/AlecAivazis/survey/v2"
"github.com/olekukonko/tablewriter"
"github.com/spf13/cobra"
"github.com/superfly/flyctl/api"
"github.com/superfly/flyctl/cmdctx"
"github.com/superfly/flyctl/docstrings"
"github.com/superfly/flyctl/internal/client"
"github.com/superfly/flyctl/internal/wireguard"
"github.com/superfly/flyctl/pkg/agent"
)
func newWireGuardCommand(client *client.Client) *Command {
cmd := BuildCommandKS(nil, nil, docstrings.Get("wireguard"), client, requireSession)
cmd.Aliases = []string{"wg"}
child := func(parent *Command, fn RunFn, ds string) *Command {
return BuildCommandKS(parent, fn, docstrings.Get(ds), client, requireSession)
}
child(cmd, runWireGuardList, "wireguard.list").Args = cobra.MaximumNArgs(1)
child(cmd, runWireGuardCreate, "wireguard.create").Args = cobra.MaximumNArgs(4)
child(cmd, runWireGuardRemove, "wireguard.remove").Args = cobra.MaximumNArgs(2)
child(cmd, runWireGuardStat, "wireguard.status").Args = cobra.MaximumNArgs(2)
child(cmd, runWireGuardResetPeer, "wireguard.reset").Args = cobra.MaximumNArgs(1)
tokens := child(cmd, nil, "wireguard.token")
child(tokens, runWireGuardTokenList, "wireguard.token.list").Args = cobra.MaximumNArgs(1)
child(tokens, runWireGuardTokenCreate, "wireguard.token.create").Args = cobra.MaximumNArgs(2)
child(tokens, runWireGuardTokenDelete, "wireguard.token.delete").Args = cobra.MaximumNArgs(3)
child(tokens, runWireGuardTokenStartPeer, "wireguard.token.start").Args = cobra.MaximumNArgs(4)
child(tokens, runWireGuardTokenUpdatePeer, "wireguard.token.update").Args = cobra.MaximumNArgs(2)
return cmd
}
func argOrPromptImpl(ctx *cmdctx.CmdContext, nth int, prompt string, first bool) (string, error) {
if len(ctx.Args) >= (nth + 1) {
return ctx.Args[nth], nil
}
val := ""
err := survey.AskOne(&survey.Input{
Message: prompt,
}, &val)
return val, err
}
func argOrPromptLoop(ctx *cmdctx.CmdContext, nth int, prompt, last string) (string, error) {
return argOrPromptImpl(ctx, nth, prompt, last == "")
}
func argOrPrompt(ctx *cmdctx.CmdContext, nth int, prompt string) (string, error) {
return argOrPromptImpl(ctx, nth, prompt, true)
}
func orgByArg(cmdCtx *cmdctx.CmdContext) (*api.Organization, error) {
ctx := cmdCtx.Command.Context()
client := cmdCtx.Client.API()
if len(cmdCtx.Args) == 0 {
org, err := selectOrganization(ctx, client, "", nil)
if err != nil {
return nil, err
}
return org, nil
}
return client.FindOrganizationBySlug(ctx, cmdCtx.Args[0])
}
func runWireGuardList(cmdCtx *cmdctx.CmdContext) error {
ctx := cmdCtx.Command.Context()
client := cmdCtx.Client.API()
org, err := orgByArg(cmdCtx)
if err != nil {
return err
}
peers, err := client.GetWireGuardPeers(ctx, org.Slug)
if err != nil {
return err
}
if cmdCtx.OutputJSON() {
cmdCtx.WriteJSON(peers)
return nil
}
table := tablewriter.NewWriter(cmdCtx.Out)
table.SetHeader([]string{
"Name",
"Region",
"Peer IP",
})
for _, peer := range peers {
table.Append([]string{peer.Name, peer.Region, peer.Peerip})
}
table.Render()
return nil
}
func generateWgConf(peer *api.CreatedWireGuardPeer, privkey string, w io.Writer) {
templateStr := `
[Interface]
PrivateKey = {{.Meta.Privkey}}
Address = {{.Peer.Peerip}}/120
DNS = {{.Meta.DNS}}
[Peer]
PublicKey = {{.Peer.Pubkey}}
AllowedIPs = {{.Meta.AllowedIPs}}
Endpoint = {{.Peer.Endpointip}}:51820
PersistentKeepalive = 15
`
data := struct {
Peer *api.CreatedWireGuardPeer
Meta struct {
Privkey string
AllowedIPs string
DNS string
}
}{
Peer: peer,
}
addr := net.ParseIP(peer.Peerip).To16()
for i := 6; i < 16; i++ {
addr[i] = 0
}
// BUG(tqbf): can't stay this way
data.Meta.AllowedIPs = fmt.Sprintf("%s/48", addr)
addr[15] = 3
data.Meta.DNS = fmt.Sprintf("%s", addr)
data.Meta.Privkey = privkey
tmpl := template.Must(template.New("name").Parse(templateStr))
tmpl.Execute(w, &data)
}
func resolveOutputWriter(ctx *cmdctx.CmdContext, idx int, prompt string) (w io.WriteCloser, mustClose bool, err error) {
var (
f *os.File
filename string
)
for {
filename, err = argOrPromptLoop(ctx, idx, prompt, filename)
if err != nil {
return nil, false, err
}
if filename == "" {
fmt.Println("Provide a filename (or 'stdout')")
continue
}
if filename == "stdout" {
return os.Stdout, false, nil
}
f, err = os.OpenFile(filename, os.O_WRONLY|os.O_CREATE|os.O_EXCL, 0600)
if err == nil {
return f, true, nil
}
fmt.Printf("Can't create '%s': %s\n", filename, err)
}
}
func runWireGuardResetPeer(ctx *cmdctx.CmdContext) error {
org, err := orgByArg(ctx)
if err != nil {
return err
}
client := ctx.Client.API()
agentclient, err := agent.Establish(context.Background(), client)
if err != nil {
return err
}
conf, err := agentclient.Reestablish(context.Background(), org.Slug)
if err != nil {
return err
}
fmt.Printf("New WireGuard peer for organization '%s': '%s'\n", org.Slug, conf.WireGuardState.Name)
return nil
}
func runWireGuardCreate(ctx *cmdctx.CmdContext) error {
org, err := orgByArg(ctx)
if err != nil {
return err
}
var region string
var name string
if len(ctx.Args) > 1 && ctx.Args[1] != "" {
region = ctx.Args[1]
}
if len(ctx.Args) > 2 && ctx.Args[2] != "" {
name = ctx.Args[2]
}
state, err := wireguard.Create(ctx.Client.API(), org, region, name)
if err != nil {
return err
}
data := &state.Peer
fmt.Printf(`
!!!! WARNING: Output includes private key. Private keys cannot be recovered !!!!
!!!! after creating the peer; if you lose the key, you'll need to remove !!!!
!!!! and re-add the peering connection. !!!!
`)
w, shouldClose, err := resolveOutputWriter(ctx, 3, "Filename to store WireGuard configuration in, or 'stdout': ")
if err != nil {
return err
}
if shouldClose {
defer w.Close()
}
generateWgConf(data, state.LocalPrivate, w)
if shouldClose {
filename := w.(*os.File).Name()
fmt.Printf("Wrote WireGuard configuration to %s; load in your WireGuard client\n", filename)
}
return nil
}
func runWireGuardRemove(cmdCtx *cmdctx.CmdContext) error {
ctx := cmdCtx.Command.Context()
client := cmdCtx.Client.API()
org, err := orgByArg(cmdCtx)
if err != nil {
return err
}
var name string
if len(cmdCtx.Args) >= 2 {
name = cmdCtx.Args[1]
} else {
name, err = selectWireGuardPeer(ctx, cmdCtx.Client.API(), org.Slug)
if err != nil {
return err
}
}
fmt.Printf("Removing WireGuard peer \"%s\" for organization %s\n", name, org.Slug)
err = client.RemoveWireGuardPeer(ctx, org, name)
if err != nil {
return err
}
| }
func runWireGuardStat(cmdCtx *cmdctx.CmdContext) error {
ctx := cmdCtx.Command.Context()
client := cmdCtx.Client.API()
org, err := orgByArg(cmdCtx)
if err != nil {
return err
}
var name string
if len(cmdCtx.Args) >= 2 {
name = cmdCtx.Args[1]
} else {
name, err = selectWireGuardPeer(ctx, cmdCtx.Client.API(), org.Slug)
if err != nil {
return err
}
}
status, err := client.GetWireGuardPeerStatus(ctx, org.Slug, name)
if err != nil {
return err
}
fmt.Printf("Alive: %+v\n", status.Live)
if status.WgError != "" {
fmt.Printf("Gateway error: %s\n", status.WgError)
}
if !status.Live {
return nil
}
if status.Endpoint != "" {
fmt.Printf("Last Source Address: %s\n", status.Endpoint)
}
ago := ""
if status.SinceAdded != "" {
ago = " (" + status.SinceAdded + " ago)"
}
if status.LastHandshake != "" {
fmt.Printf("Last Handshake At: %s%s\n", status.LastHandshake, ago)
}
ago = ""
if status.SinceHandshake != "" {
ago = " (" + status.SinceHandshake + " ago)"
}
fmt.Printf("Installed On Gateway At: %s%s\n", status.Added, ago)
fmt.Printf("Traffic: rx:%d tx:%d\n", status.Rx, status.Tx)
return nil
}
func runWireGuardTokenList(cmdCtx *cmdctx.CmdContext) error {
ctx := cmdCtx.Command.Context()
client := cmdCtx.Client.API()
org, err := orgByArg(cmdCtx)
if err != nil {
return err
}
tokens, err := client.GetDelegatedWireGuardTokens(ctx, org.Slug)
if err != nil {
return err
}
if cmdCtx.OutputJSON() {
cmdCtx.WriteJSON(tokens)
return nil
}
table := tablewriter.NewWriter(cmdCtx.Out)
table.SetHeader([]string{
"Name",
})
for _, peer := range tokens {
table.Append([]string{peer.Name})
}
table.Render()
return nil
}
func runWireGuardTokenCreate(cmdCtx *cmdctx.CmdContext) error {
ctx := cmdCtx.Command.Context()
client := cmdCtx.Client.API()
org, err := orgByArg(cmdCtx)
if err != nil {
return err
}
name, err := argOrPrompt(cmdCtx, 1, "Memorable name for WireGuard token: ")
if err != nil {
return err
}
data, err := client.CreateDelegatedWireGuardToken(ctx, org, name)
if err != nil {
return err
}
fmt.Printf(`
!!!! WARNING: Output includes credential information. Credentials cannot !!!!
!!!! be recovered after creation; if you lose the token, you'll need to !!!!
!!!! remove and and re-add it. !!!!
To use a token to create a WireGuard connection, you can use curl:
curl -v --request POST
-H "Authorization: Bearer ${WG_TOKEN}"
-H "Content-Type: application/json"
--data '{"name": "node-1", \
"group": "k8s", \
"pubkey": "'"${WG_PUBKEY}"'", \
"region": "dev"}'
http://fly.io/api/v3/wire_guard_peers
We'll return 'us' (our local 6PN address), 'them' (the gateway IP address),
and 'pubkey' (the public key of the gateway), which you can inject into a
"wg.con".
`)
w, shouldClose, err := resolveOutputWriter(cmdCtx, 2, "Filename to store WireGuard token in, or 'stdout': ")
if err != nil {
return err
}
if shouldClose {
defer w.Close()
}
fmt.Fprintf(w, "FLY_WIREGUARD_TOKEN=%s\n", data.Token)
return nil
}
func runWireGuardTokenDelete(cmdCtx *cmdctx.CmdContext) error {
ctx := cmdCtx.Command.Context()
client := cmdCtx.Client.API()
org, err := orgByArg(cmdCtx)
if err != nil {
return err
}
kv, err := argOrPrompt(cmdCtx, 1, "'name:<name>' or token:<token>': ")
if err != nil {
return err
}
tup := strings.SplitN(kv, ":", 2)
if len(tup) != 2 || (tup[0] != "name" && tup[0] != "token") {
return fmt.Errorf("format is name:<name> or token:<token>")
}
fmt.Printf("Removing WireGuard token \"%s\" for organization %s\n", kv, org.Slug)
if tup[0] == "name" {
err = client.DeleteDelegatedWireGuardToken(ctx, org, &tup[1], nil)
} else {
err = client.DeleteDelegatedWireGuardToken(ctx, org, nil, &tup[1])
}
if err != nil {
return err
}
fmt.Println("Removed token.")
return nil
}
func tokenRequest(method, path, token string, data interface{}) (*http.Response, error) {
buf := &bytes.Buffer{}
if err := json.NewEncoder(buf).Encode(data); err != nil {
return nil, err
}
req, err := http.NewRequest(method,
fmt.Sprintf("https://fly.io/api/v3/wire_guard_peers%s", path),
buf)
if err != nil {
return nil, err
}
req.Header.Add("Authorization", "Bearer "+token)
req.Header.Add("Content-Type", "application/json")
return (&http.Client{}).Do(req)
}
type StartPeerJson struct {
Name string `json:"name"`
Group string `json:"group"`
Pubkey string `json:"pubkey"`
Region string `json:"region"`
}
type UpdatePeerJson struct {
Pubkey string `json:"pubkey"`
}
type PeerStatusJson struct {
Us string `json:"us"`
Them string `json:"them"`
Pubkey string `json:"key"`
Error string `json:"error"`
}
func generateTokenConf(ctx *cmdctx.CmdContext, idx int, stat *PeerStatusJson, privkey string) error {
fmt.Printf(`
!!!! WARNING: Output includes private key. Private keys cannot be recovered !!!!
!!!! after creating the peer; if you lose the key, you'll need to rekey !!!!
!!!! the peering connection. !!!!
`)
w, shouldClose, err := resolveOutputWriter(ctx, idx, "Filename to store WireGuard configuration in, or 'stdout': ")
if err != nil {
return err
}
if shouldClose {
defer w.Close()
}
generateWgConf(&api.CreatedWireGuardPeer{
Peerip: stat.Us,
Pubkey: stat.Pubkey,
Endpointip: stat.Them,
}, privkey, w)
if shouldClose {
filename := w.(*os.File).Name()
fmt.Printf("Wrote WireGuard configuration to %s; load in your WireGuard client\n", filename)
}
return nil
}
func runWireGuardTokenStartPeer(ctx *cmdctx.CmdContext) error {
token := os.Getenv("FLY_WIREGUARD_TOKEN")
if token == "" {
return fmt.Errorf("set FLY_WIREGUARD_TOKEN env")
}
name, err := argOrPrompt(ctx, 0, "Name (DNS-compatible) for peer: ")
if err != nil {
return err
}
group, err := argOrPrompt(ctx, 1, "Peer group (i.e. 'k8s'): ")
if err != nil {
return err
}
region, err := argOrPrompt(ctx, 2, "Gateway region: ")
if err != nil {
return err
}
pubkey, privatekey := wireguard.C25519pair()
body := &StartPeerJson{
Name: name,
Group: group,
Pubkey: pubkey,
Region: region,
}
resp, err := tokenRequest("POST", "", token, body)
if err != nil {
return err
}
peerStatus := &PeerStatusJson{}
if err = json.NewDecoder(resp.Body).Decode(peerStatus); err != nil {
if resp.StatusCode != 200 {
return fmt.Errorf("server returned error: %s %w", resp.Status, err)
}
return err
}
if peerStatus.Error != "" {
return fmt.Errorf("WireGuard API error: %s", peerStatus.Error)
}
if err = generateTokenConf(ctx, 3, peerStatus, privatekey); err != nil {
return err
}
return nil
}
func runWireGuardTokenUpdatePeer(ctx *cmdctx.CmdContext) error {
token := os.Getenv("FLY_WIREGUARD_TOKEN")
if token == "" {
return fmt.Errorf("set FLY_WIREGUARD_TOKEN env")
}
name, err := argOrPrompt(ctx, 0, "Name (DNS-compatible) for peer: ")
if err != nil {
return err
}
pubkey, privatekey := wireguard.C25519pair()
body := &StartPeerJson{
Pubkey: pubkey,
}
resp, err := tokenRequest("PUT", "/"+name, token, body)
if err != nil {
return err
}
peerStatus := &PeerStatusJson{}
if err = json.NewDecoder(resp.Body).Decode(peerStatus); err != nil {
if resp.StatusCode != 200 {
return fmt.Errorf("server returned error: %s %w", resp.Status, err)
}
return err
}
if peerStatus.Error != "" {
return fmt.Errorf("WireGuard API error: %s", peerStatus.Error)
}
if err = generateTokenConf(ctx, 1, peerStatus, privatekey); err != nil {
return err
}
return nil
} | fmt.Println("Removed peer.")
return wireguard.PruneInvalidPeers(ctx, cmdCtx.Client.API()) | random_line_split |
wireguard.go | package cmd
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"net"
"net/http"
"os"
"strings"
"text/template"
"github.com/AlecAivazis/survey/v2"
"github.com/olekukonko/tablewriter"
"github.com/spf13/cobra"
"github.com/superfly/flyctl/api"
"github.com/superfly/flyctl/cmdctx"
"github.com/superfly/flyctl/docstrings"
"github.com/superfly/flyctl/internal/client"
"github.com/superfly/flyctl/internal/wireguard"
"github.com/superfly/flyctl/pkg/agent"
)
func newWireGuardCommand(client *client.Client) *Command {
cmd := BuildCommandKS(nil, nil, docstrings.Get("wireguard"), client, requireSession)
cmd.Aliases = []string{"wg"}
child := func(parent *Command, fn RunFn, ds string) *Command {
return BuildCommandKS(parent, fn, docstrings.Get(ds), client, requireSession)
}
child(cmd, runWireGuardList, "wireguard.list").Args = cobra.MaximumNArgs(1)
child(cmd, runWireGuardCreate, "wireguard.create").Args = cobra.MaximumNArgs(4)
child(cmd, runWireGuardRemove, "wireguard.remove").Args = cobra.MaximumNArgs(2)
child(cmd, runWireGuardStat, "wireguard.status").Args = cobra.MaximumNArgs(2)
child(cmd, runWireGuardResetPeer, "wireguard.reset").Args = cobra.MaximumNArgs(1)
tokens := child(cmd, nil, "wireguard.token")
child(tokens, runWireGuardTokenList, "wireguard.token.list").Args = cobra.MaximumNArgs(1)
child(tokens, runWireGuardTokenCreate, "wireguard.token.create").Args = cobra.MaximumNArgs(2)
child(tokens, runWireGuardTokenDelete, "wireguard.token.delete").Args = cobra.MaximumNArgs(3)
child(tokens, runWireGuardTokenStartPeer, "wireguard.token.start").Args = cobra.MaximumNArgs(4)
child(tokens, runWireGuardTokenUpdatePeer, "wireguard.token.update").Args = cobra.MaximumNArgs(2)
return cmd
}
func argOrPromptImpl(ctx *cmdctx.CmdContext, nth int, prompt string, first bool) (string, error) {
if len(ctx.Args) >= (nth + 1) {
return ctx.Args[nth], nil
}
val := ""
err := survey.AskOne(&survey.Input{
Message: prompt,
}, &val)
return val, err
}
func argOrPromptLoop(ctx *cmdctx.CmdContext, nth int, prompt, last string) (string, error) {
return argOrPromptImpl(ctx, nth, prompt, last == "")
}
func argOrPrompt(ctx *cmdctx.CmdContext, nth int, prompt string) (string, error) {
return argOrPromptImpl(ctx, nth, prompt, true)
}
func orgByArg(cmdCtx *cmdctx.CmdContext) (*api.Organization, error) {
ctx := cmdCtx.Command.Context()
client := cmdCtx.Client.API()
if len(cmdCtx.Args) == 0 {
org, err := selectOrganization(ctx, client, "", nil)
if err != nil {
return nil, err
}
return org, nil
}
return client.FindOrganizationBySlug(ctx, cmdCtx.Args[0])
}
func runWireGuardList(cmdCtx *cmdctx.CmdContext) error {
ctx := cmdCtx.Command.Context()
client := cmdCtx.Client.API()
org, err := orgByArg(cmdCtx)
if err != nil {
return err
}
peers, err := client.GetWireGuardPeers(ctx, org.Slug)
if err != nil {
return err
}
if cmdCtx.OutputJSON() {
cmdCtx.WriteJSON(peers)
return nil
}
table := tablewriter.NewWriter(cmdCtx.Out)
table.SetHeader([]string{
"Name",
"Region",
"Peer IP",
})
for _, peer := range peers {
table.Append([]string{peer.Name, peer.Region, peer.Peerip})
}
table.Render()
return nil
}
func generateWgConf(peer *api.CreatedWireGuardPeer, privkey string, w io.Writer) {
templateStr := `
[Interface]
PrivateKey = {{.Meta.Privkey}}
Address = {{.Peer.Peerip}}/120
DNS = {{.Meta.DNS}}
[Peer]
PublicKey = {{.Peer.Pubkey}}
AllowedIPs = {{.Meta.AllowedIPs}}
Endpoint = {{.Peer.Endpointip}}:51820
PersistentKeepalive = 15
`
data := struct {
Peer *api.CreatedWireGuardPeer
Meta struct {
Privkey string
AllowedIPs string
DNS string
}
}{
Peer: peer,
}
addr := net.ParseIP(peer.Peerip).To16()
for i := 6; i < 16; i++ {
addr[i] = 0
}
// BUG(tqbf): can't stay this way
data.Meta.AllowedIPs = fmt.Sprintf("%s/48", addr)
addr[15] = 3
data.Meta.DNS = fmt.Sprintf("%s", addr)
data.Meta.Privkey = privkey
tmpl := template.Must(template.New("name").Parse(templateStr))
tmpl.Execute(w, &data)
}
func resolveOutputWriter(ctx *cmdctx.CmdContext, idx int, prompt string) (w io.WriteCloser, mustClose bool, err error) {
var (
f *os.File
filename string
)
for {
filename, err = argOrPromptLoop(ctx, idx, prompt, filename)
if err != nil {
return nil, false, err
}
if filename == "" {
fmt.Println("Provide a filename (or 'stdout')")
continue
}
if filename == "stdout" {
return os.Stdout, false, nil
}
f, err = os.OpenFile(filename, os.O_WRONLY|os.O_CREATE|os.O_EXCL, 0600)
if err == nil {
return f, true, nil
}
fmt.Printf("Can't create '%s': %s\n", filename, err)
}
}
func runWireGuardResetPeer(ctx *cmdctx.CmdContext) error {
org, err := orgByArg(ctx)
if err != nil {
return err
}
client := ctx.Client.API()
agentclient, err := agent.Establish(context.Background(), client)
if err != nil {
return err
}
conf, err := agentclient.Reestablish(context.Background(), org.Slug)
if err != nil {
return err
}
fmt.Printf("New WireGuard peer for organization '%s': '%s'\n", org.Slug, conf.WireGuardState.Name)
return nil
}
func runWireGuardCreate(ctx *cmdctx.CmdContext) error {
org, err := orgByArg(ctx)
if err != nil {
return err
}
var region string
var name string
if len(ctx.Args) > 1 && ctx.Args[1] != "" {
region = ctx.Args[1]
}
if len(ctx.Args) > 2 && ctx.Args[2] != "" {
name = ctx.Args[2]
}
state, err := wireguard.Create(ctx.Client.API(), org, region, name)
if err != nil {
return err
}
data := &state.Peer
fmt.Printf(`
!!!! WARNING: Output includes private key. Private keys cannot be recovered !!!!
!!!! after creating the peer; if you lose the key, you'll need to remove !!!!
!!!! and re-add the peering connection. !!!!
`)
w, shouldClose, err := resolveOutputWriter(ctx, 3, "Filename to store WireGuard configuration in, or 'stdout': ")
if err != nil {
return err
}
if shouldClose {
defer w.Close()
}
generateWgConf(data, state.LocalPrivate, w)
if shouldClose {
filename := w.(*os.File).Name()
fmt.Printf("Wrote WireGuard configuration to %s; load in your WireGuard client\n", filename)
}
return nil
}
func runWireGuardRemove(cmdCtx *cmdctx.CmdContext) error {
ctx := cmdCtx.Command.Context()
client := cmdCtx.Client.API()
org, err := orgByArg(cmdCtx)
if err != nil {
return err
}
var name string
if len(cmdCtx.Args) >= 2 {
name = cmdCtx.Args[1]
} else {
name, err = selectWireGuardPeer(ctx, cmdCtx.Client.API(), org.Slug)
if err != nil {
return err
}
}
fmt.Printf("Removing WireGuard peer \"%s\" for organization %s\n", name, org.Slug)
err = client.RemoveWireGuardPeer(ctx, org, name)
if err != nil {
return err
}
fmt.Println("Removed peer.")
return wireguard.PruneInvalidPeers(ctx, cmdCtx.Client.API())
}
func runWireGuardStat(cmdCtx *cmdctx.CmdContext) error {
ctx := cmdCtx.Command.Context()
client := cmdCtx.Client.API()
org, err := orgByArg(cmdCtx)
if err != nil {
return err
}
var name string
if len(cmdCtx.Args) >= 2 {
name = cmdCtx.Args[1]
} else {
name, err = selectWireGuardPeer(ctx, cmdCtx.Client.API(), org.Slug)
if err != nil {
return err
}
}
status, err := client.GetWireGuardPeerStatus(ctx, org.Slug, name)
if err != nil {
return err
}
fmt.Printf("Alive: %+v\n", status.Live)
if status.WgError != "" {
fmt.Printf("Gateway error: %s\n", status.WgError)
}
if !status.Live {
return nil
}
if status.Endpoint != "" {
fmt.Printf("Last Source Address: %s\n", status.Endpoint)
}
ago := ""
if status.SinceAdded != "" {
ago = " (" + status.SinceAdded + " ago)"
}
if status.LastHandshake != "" {
fmt.Printf("Last Handshake At: %s%s\n", status.LastHandshake, ago)
}
ago = ""
if status.SinceHandshake != "" {
ago = " (" + status.SinceHandshake + " ago)"
}
fmt.Printf("Installed On Gateway At: %s%s\n", status.Added, ago)
fmt.Printf("Traffic: rx:%d tx:%d\n", status.Rx, status.Tx)
return nil
}
func runWireGuardTokenList(cmdCtx *cmdctx.CmdContext) error {
ctx := cmdCtx.Command.Context()
client := cmdCtx.Client.API()
org, err := orgByArg(cmdCtx)
if err != nil {
return err
}
tokens, err := client.GetDelegatedWireGuardTokens(ctx, org.Slug)
if err != nil {
return err
}
if cmdCtx.OutputJSON() {
cmdCtx.WriteJSON(tokens)
return nil
}
table := tablewriter.NewWriter(cmdCtx.Out)
table.SetHeader([]string{
"Name",
})
for _, peer := range tokens {
table.Append([]string{peer.Name})
}
table.Render()
return nil
}
func runWireGuardTokenCreate(cmdCtx *cmdctx.CmdContext) error {
ctx := cmdCtx.Command.Context()
client := cmdCtx.Client.API()
org, err := orgByArg(cmdCtx)
if err != nil {
return err
}
name, err := argOrPrompt(cmdCtx, 1, "Memorable name for WireGuard token: ")
if err != nil {
return err
}
data, err := client.CreateDelegatedWireGuardToken(ctx, org, name)
if err != nil {
return err
}
fmt.Printf(`
!!!! WARNING: Output includes credential information. Credentials cannot !!!!
!!!! be recovered after creation; if you lose the token, you'll need to !!!!
!!!! remove and and re-add it. !!!!
To use a token to create a WireGuard connection, you can use curl:
curl -v --request POST
-H "Authorization: Bearer ${WG_TOKEN}"
-H "Content-Type: application/json"
--data '{"name": "node-1", \
"group": "k8s", \
"pubkey": "'"${WG_PUBKEY}"'", \
"region": "dev"}'
http://fly.io/api/v3/wire_guard_peers
We'll return 'us' (our local 6PN address), 'them' (the gateway IP address),
and 'pubkey' (the public key of the gateway), which you can inject into a
"wg.con".
`)
w, shouldClose, err := resolveOutputWriter(cmdCtx, 2, "Filename to store WireGuard token in, or 'stdout': ")
if err != nil {
return err
}
if shouldClose {
defer w.Close()
}
fmt.Fprintf(w, "FLY_WIREGUARD_TOKEN=%s\n", data.Token)
return nil
}
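// runWireGuardTokenDelete removes a delegated WireGuard token, addressed as either name:<name> or token:<token>.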
func runWireGuardTokenDelete(cmdCtx *cmdctx.CmdContext) error {
ctx := cmdCtx.Command.Context()
client := cmdCtx.Client.API()
org, err := orgByArg(cmdCtx)
if err != nil {
return err
}
kv, err := argOrPrompt(cmdCtx, 1, "'name:<name>' or 'token:<token>': ")
if err != nil {
return err
}
tup := strings.SplitN(kv, ":", 2)
if len(tup) != 2 || (tup[0] != "name" && tup[0] != "token") {
return fmt.Errorf("format is name:<name> or token:<token>")
}
fmt.Printf("Removing WireGuard token \"%s\" for organization %s\n", kv, org.Slug)
if tup[0] == "name" {
err = client.DeleteDelegatedWireGuardToken(ctx, org, &tup[1], nil)
} else {
err = client.DeleteDelegatedWireGuardToken(ctx, org, nil, &tup[1])
}
if err != nil {
return err
}
fmt.Println("Removed token.")
return nil
}
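// tokenRequest sends a JSON-encoded request to the delegated WireGuard peers API, authorized with the given Bearer token.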
func tokenRequest(method, path, token string, data interface{}) (*http.Response, error) {
buf := &bytes.Buffer{}
if err := json.NewEncoder(buf).Encode(data); err != nil {
return nil, err
}
req, err := http.NewRequest(method,
fmt.Sprintf("https://fly.io/api/v3/wire_guard_peers%s", path),
buf)
if err != nil {
return nil, err
}
req.Header.Add("Authorization", "Bearer "+token)
req.Header.Add("Content-Type", "application/json")
return (&http.Client{}).Do(req)
}
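// Request and response payloads for the delegated WireGuard peers API.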
type StartPeerJson struct {
Name string `json:"name"`
Group string `json:"group"`
Pubkey string `json:"pubkey"`
Region string `json:"region"`
}
type UpdatePeerJson struct {
Pubkey string `json:"pubkey"`
}
type PeerStatusJson struct {
Us string `json:"us"`
Them string `json:"them"`
Pubkey string `json:"key"`
Error string `json:"error"`
}
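// generateTokenConf writes the WireGuard configuration for a freshly created or rekeyed peer to a file or stdout, warning that the private key is shown only once.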
func generateTokenConf(ctx *cmdctx.CmdContext, idx int, stat *PeerStatusJson, privkey string) error {
fmt.Printf(`
!!!! WARNING: Output includes private key. Private keys cannot be recovered !!!!
!!!! after creating the peer; if you lose the key, you'll need to rekey !!!!
!!!! the peering connection. !!!!
`)
w, shouldClose, err := resolveOutputWriter(ctx, idx, "Filename to store WireGuard configuration in, or 'stdout': ")
if err != nil {
return err
}
if shouldClose {
defer w.Close()
}
generateWgConf(&api.CreatedWireGuardPeer{
Peerip: stat.Us,
Pubkey: stat.Pubkey,
Endpointip: stat.Them,
}, privkey, w)
if shouldClose {
filename := w.(*os.File).Name()
fmt.Printf("Wrote WireGuard configuration to %s; load in your WireGuard client\n", filename)
}
return nil
}
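// runWireGuardTokenStartPeer registers a new peer through the delegated-token API and writes the resulting WireGuard configuration.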
func runWireGuardTokenStartPeer(ctx *cmdctx.CmdContext) error {
token := os.Getenv("FLY_WIREGUARD_TOKEN")
if token == "" {
return fmt.Errorf("set FLY_WIREGUARD_TOKEN env")
}
name, err := argOrPrompt(ctx, 0, "Name (DNS-compatible) for peer: ")
if err != nil {
return err
}
group, err := argOrPrompt(ctx, 1, "Peer group (i.e. 'k8s'): ")
if err != nil {
return err
}
region, err := argOrPrompt(ctx, 2, "Gateway region: ")
if err != nil {
return err
}
pubkey, privatekey := wireguard.C25519pair()
body := &StartPeerJson{
Name: name,
Group: group,
Pubkey: pubkey,
Region: region,
}
resp, err := tokenRequest("POST", "", token, body)
if err != nil {
return err
}
defer resp.Body.Close()
peerStatus := &PeerStatusJson{}
if err = json.NewDecoder(resp.Body).Decode(peerStatus); err != nil {
if resp.StatusCode != 200 {
return fmt.Errorf("server returned error: %s %w", resp.Status, err)
}
return err
}
if peerStatus.Error != "" {
return fmt.Errorf("WireGuard API error: %s", peerStatus.Error)
}
if err = generateTokenConf(ctx, 3, peerStatus, privatekey); err != nil {
return err
}
return nil
}
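// runWireGuardTokenUpdatePeer generates a fresh keypair for an existing peer, submits the new public key, and writes the updated configuration.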
func runWireGuardTokenUpdatePeer(ctx *cmdctx.CmdContext) error {
token := os.Getenv("FLY_WIREGUARD_TOKEN")
if token == "" {
return fmt.Errorf("set FLY_WIREGUARD_TOKEN env")
}
name, err := argOrPrompt(ctx, 0, "Name (DNS-compatible) for peer: ")
if err != nil {
return err
}
pubkey, privatekey := wireguard.C25519pair()
body := &UpdatePeerJson{
Pubkey: pubkey,
}
resp, err := tokenRequest("PUT", "/"+name, token, body)
if err != nil {
return err
}
defer resp.Body.Close()
peerStatus := &PeerStatusJson{}
if err = json.NewDecoder(resp.Body).Decode(peerStatus); err != nil {
if resp.StatusCode != 200 {
return fmt.Errorf("server returned error: %s %w", resp.Status, err)
}
return err
}
if peerStatus.Error != "" {
return fmt.Errorf("WireGuard API error: %s", peerStatus.Error)
}
if err = generateTokenConf(ctx, 1, peerStatus, privatekey); err != nil {
return err
}
return nil
}
nba-game-list.ts | import { Component, OnInit } from '@angular/core';
import { Refresher, AlertController, NavController, Platform } from 'ionic-angular';
import { DatePicker, DatePickerOptions } from 'ionic-native';
import { NBATeamDataType } from '../../base-data-type/nba-team-datatype';
import { NBATeamMap } from '../../services/nba-team-map/nba-team-map';
import { NBADataServices } from '../../services/nba-data-services/nba-data-services';
import { NBAGameDetailPage } from '../nba-game-detail/nba-game-detail';
@Component({
templateUrl: 'build/pages/nba-game-list/nba-game-list.html'
})
export class NBAGameListPage implements OnInit{
NBATeamMapData: any[];
NBAGameList: any[] = []; //initialize to avoid "Cannot read property 'push' of undefined"
gameCount: number;
gameDate: string;
ChangedDate: string;
selectedYear: string;
constructor(private NBAteammap: NBATeamMap, private NBAdataservices: NBADataServices,
private GamealertCtrl: AlertController, public navCtrl: NavController, public platform: Platform ) {
this.NBATeamMapData = NBAteammap.getNBATeamArrayData();
}
ngOnInit() {
let nowLocalTime: Date = new Date();
//use the America/Los_Angeles time zone because L.A. games are the last to tip off each day.
let SpecificDateTimeArray: any[] = this.GetSpecificTimeZoneFormat(nowLocalTime, -7);
this.ChangedDate = SpecificDateTimeArray[0] + SpecificDateTimeArray[1] + SpecificDateTimeArray[2];
this.gameDate = SpecificDateTimeArray[0] + '-' + SpecificDateTimeArray[1] + '-' + SpecificDateTimeArray[2];
this.selectedYear = SpecificDateTimeArray[0];
this.GetNBAGameList(this.ChangedDate).then(() => null).catch(this.handleError);
}
doRefresh(refresher: Refresher) {
this.GetNBAGameList(this.ChangedDate).then(() => refresher.complete()).catch(this.handleError);
}
OpenDatePicker(): void {
let options: DatePickerOptions;
if (this.platform.is('android')) {
options = {
date: new Date(),
mode: 'datetime',
androidTheme : 3
}
}
else {
options = {
date: new Date(),
mode: 'datetime'
}
}
DatePicker.show(options).then(
date => {
let SpecificDateTimeArray: any[] = this.GetSpecificTimeZoneFormat(date, -7);
this.ChangedDate = SpecificDateTimeArray[0] + SpecificDateTimeArray[1] + SpecificDateTimeArray[2];
this.gameDate = SpecificDateTimeArray[0] + '-' + SpecificDateTimeArray[1] + '-' + SpecificDateTimeArray[2];
this.selectedYear = SpecificDateTimeArray[0];
this.GetNBAGameList(this.ChangedDate).then(() => null).catch(this.handleError);
},
error => console.log('Error occurred while getting date: ', error)
);
}
GameItemTapped(event, GameItem) {
this.navCtrl.push(NBAGameDetailPage, {
GameItem: GameItem,
SelectedYear: this.selectedYear
});
}
/**
* @Param: nowDateTime, UTC
* @Example: UTC => +8: "Asia/Shanghai", -4: "America/New_York", -7: "America/Los_Angeles"
* @Remark: "Asia/Shanghai" => GMT+8 (CST)
* "America/New_York" => GMT-4 (EDT)
* "America/Los_Angeles" => GMT-7 (PDT)
*/
private GetSpecificTimeZoneFormat(SpecificLocalDateTime: Date, UTC: number): any[] {
let YMDDateArray: any[] = []; //assign an initial value, otherwise: cannot access property 'push' of undefined
//Return the number of milliseconds since 1970/01/01
let localTime: number = SpecificLocalDateTime.getTime();
//Return the timezone difference between UTC and Local Time
//By default, this method returns the time zone offset in minutes, so convert this value to milliseconds for easier manipulation:
//Note that a negative return value from getTimezoneOffset() indicates that the current location is ahead of UTC,
//while a positive value indicates that the location is behind UTC. (ex: Taiwan is UTC+8 => 8 * 60 = -480)
//Obtain local UTC offset and convert to msec
let localOffset: number = SpecificLocalDateTime.getTimezoneOffset() * 60000;
//Obtain UTC time in msec
let utc = localTime + localOffset;
// Obtain and add destination's UTC time offset. For example, America/Los_Angeles which is UTC - 7 hours
let offset = UTC;
//Note: In case you're wondering how I arrived at 3600000 as the multiplication factor, remember that
//1000 milliseconds = 1 second, and 1 hour = 3600 seconds. Therefore, converting hours to milliseconds
//involves multiplying by 3600 * 1000 = 3600000.
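//Example: for America/Los_Angeles (UTC-7) the adjustment below is 3600000 * (-7) = -25200000 msec.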
let timezone = utc + (3600000 * offset);
let anotherTimeZoneTime = new Date(timezone);
let nowYear: string = anotherTimeZoneTime.getFullYear().toString();
//Months are zero based
let nowMonth: string = (anotherTimeZoneTime.getMonth() + 1) < 10 ? "0" + (anotherTimeZoneTime.getMonth() + 1).toString() : (anotherTimeZoneTime.getMonth() + 1).toString();
let nowDate: string = anotherTimeZoneTime.getDate() < 10 ? "0" + anotherTimeZoneTime.getDate().toString() : anotherTimeZoneTime.getDate().toString();
//let nowHour: string = anotherTimeZoneTime.getHours() < 10 ? "0" + anotherTimeZoneTime.getHours().toString() : anotherTimeZoneTime.getHours().toString();
//let nowMinute: string = anotherTimeZoneTime.getMinutes().toString();
//let nowSecond: string = anotherTimeZoneTime.getSeconds().toString();
YMDDateArray.push(nowYear, nowMonth, nowDate);
return YMDDateArray;
//[0]:year, [1]:month, [2]:day
}
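/**
* Fetches the games for the given date (yyyyMMdd), rebuilds NBAGameList for the template,
* and shows an alert when the selected day has no games.
*/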
private GetNBAGameList(GameDate: string): Promise<any> {
return this.NBAdataservices.GetGameGeneral(GameDate)
.then(gameGeneral => {
//clear the previous game data before every fetch
this.NBAGameList.length = 0;
let unstartArray: any[] = gameGeneral['unstart'];
let liveArray: any[] = gameGeneral['live'];
let overArray: any[] = gameGeneral['over'];
this.gameCount = unstartArray.length + liveArray.length + overArray.length;
if(this.gameCount !== 0) {
//The concat() method is used to join two or more arrays.
let totalArray: any[] = unstartArray.concat(liveArray, overArray);
totalArray.forEach(EachGameitem => {
let GameID: string = EachGameitem['gameID'];
let GameDate: string = EachGameitem['gameDate'];
let HomeTeamID: string = EachGameitem['home']['TeamID'];
let HomeAbbr: string = EachGameitem['home']['TeamAbbr'];
let VisitorTeamID: string = EachGameitem['visitor']['TeamID'];
let VisitorAbbr: string = EachGameitem['visitor']['TeamAbbr'];
let GameProcess: string;
let GameType: string = EachGameitem['GameProcessType'];
switch(GameType) {
case 'unstart':
GameProcess = EachGameitem['GameDate'].replace(/\s*ET\s*/, '');
break;
case 'live':
GameProcess = EachGameitem['process']['Quarter'] + ' ';
GameProcess += EachGameitem['process']['Time'].replace(/\s+/, '');
break;
case 'over':
GameProcess = 'Final';
break;
default:
return;
}
let HomeScore: number = EachGameitem['home']['Score'];
let VisitorScore: number = EachGameitem['visitor']['Score'];
//Get City, Team Name, Logo, and Color from the TeamMap service by Abbreviation, using Array.filter to find the match.
//The filter returns a single-element array because Abbreviation is unique.
let HomeMatchAbbrArray: any[] = this.NBATeamMapData.filter(data => data['Abbreviation'] === HomeAbbr);
let VisitorMatchAbbrArray: any[] = this.NBATeamMapData.filter(data => data['Abbreviation'] === VisitorAbbr);
let HomeTeamColor: string = HomeMatchAbbrArray[0]['Color'];
let HomeTeamCity: string = HomeMatchAbbrArray[0]['City'];
let VisitorTeamCity: string = VisitorMatchAbbrArray[0]['City'];
let HomeTeamName: string = HomeMatchAbbrArray[0]['Team'];
let VisitorTeamName: string = VisitorMatchAbbrArray[0]['Team'];
let HomeTeamLogo: string = HomeMatchAbbrArray[0]['imgGameUrl'];
let VisitorTeamLogo: string = VisitorMatchAbbrArray[0]['imgGameUrl'];
//Finally, we build an array containing all the information we want to show in the HTML, and use this array
//as the data source of the ion-card, which uses *ngFor to produce the game list.
this.NBAGameList.push({
GameID: GameID,
GameDate: GameDate,
HomeTeamID: HomeTeamID,
HomeTeamColor: HomeTeamColor,
HomeTeamCity: HomeTeamCity,
VisitorTeamID: VisitorTeamID,
VisitorTeamCity: VisitorTeamCity,
HomeTeamName: HomeTeamName,
VisitorTeamName: VisitorTeamName,
HomeTeamLogo: HomeTeamLogo,
VisitorTeamLogo: VisitorTeamLogo,
GameProcess: GameProcess,
HomeScore: HomeScore,
VisitorScore: VisitorScore
});
});
}
else {
this.gameCount = 0;
let alert = this.GamealertCtrl.create({
title: 'Oops!',
subTitle: 'There are no games today or on the day you selected.',
buttons: ['OK']
});
alert.present();
}
})
.catch(this.handleError);
}
private handleError(error: any): Promise<any> {
console.error('An error occurred', error);
return Promise.reject(error.message || error);
}
}
|
ddglCtrl.js | require([ 'jquery', 'app', 'common', 'validate','pagination', 'bootstrap', 'timepicker', 'dateZh' ], function($, app, common,validate) {
$('title').text('订单管理');
var deliverytype = ['','上门提货','自送网点'];
var iscreditapply = ['否','是'];
//date picker widgets
$('#date-start, #date-end').datetimepicker({
format : "yyyy-mm-dd hh:ii:ss",
language : "zh-CN",
todayBtn : 1,
autoclose : true,
todayHighlight : true,
forceParse : true,
minuteStep : 10
});
var totalPages = 0;
var pageSize = '10';
var first_load = true;
var reload = true;
$(document).ready(function() {
showOrderByPage(1);
});
//placeholder polyfill for IE: start
$(function() {
if (!placeholderSupport()) { // check whether the browser supports the placeholder attribute
$('[placeholder]').focus(function() {
var input = $(this);
if (input.val() == input.attr('placeholder')) {
input.val('');
input.removeClass('placeholder');
}
}).blur(function() {
var input = $(this);
if (input.val() == ''
|| input.val() == input.attr('placeholder')) {
input.addClass('placeholder');
input.val(input.attr('placeholder'));
}
}).blur();
};
});
function placeholderSupport() {
return 'placeholder' in document.createElement('input');
}
//placeholder polyfill for IE: end
//clear button
$('.clearBtn').on('click', function() {
$("input[name='orderid-query']").val('');
$("input[name='sender-query']").val('');
$("input[name='receiver-query']").val('');
$("input[name='username-query']").val('');
$("select[name='state-query']").val('');
$("select[name='iscreditapply-query']").val('');
$('.setTableLength').val(10);
$("#date-start").val('');
$("#date-end").val('');
});
$(".setTableLength").change(function() {
reload = true;
showOrderByPage(1);
});
$(".searchBtn").click(function() {
reload = true;
showOrderByPage(1);
});
$('.clearBtn').on('click', function() {
$("input[name='loginname-query']").val('');
$("select[name='status-query']").val('');
$("#date-start").val('');
$("#date-end").val('');
$("input[name='vipname-nickName']").val('');
$("select[name='status-query']").val('');
$("select[name='viprole-query']").val('');
});
$.validator.addMethod("isPhone", function(value, element) {
var length = value.length;
return this.optional(element) || (length == 11 && /^(((13[0-9]{1})|(17[0-9]{1})|(15[0-9]{1})|(18[0-9]{1}))+\d{8})$/.test(value))||/^((0\d{2,3})-)?(\d{7,8})(-(\d{3,}))?$/.test(value);
}, "手机、电话格式不正确");
/** add-form validation: start **/
$('#addStaffForm').validate({
rules: {
'nickname': {
required: true
},
'loginname': {
required: true
},
'phone':{
isPhone:true
}
},
messages: {
'nickname': {
required: "请填写姓名"
},
'loginname': {
required: "请填写登录帐号"
},
'phone': { }
},
errorClass: "error",
success: 'valid',
unhighlight: function(element, errorClass, validClass) { //validation passed
$(element).tooltip('destroy').removeClass(errorClass);
},
errorPlacement: function(label, element) {
$(element).tooltip('destroy'); /* required */
$(element).attr('title', $(label).text()).tooltip('show');
},
onclick: true
});
/** add-form validation: end **/
/** reset add-form data: start **/
$('.add').click(function(e) {
$(".errorMsg").html("");
$("#addStaffForm").find("input[name='nickname']").val('');
$("#addStaffForm").find("input[name='loginname']").val('');
$("#addStaffForm").find("select[name='role']").val('管理员');
$("#addStaffForm").find("input[name='phone']").val('');
});
/** reset add-form data: end **/
/** add staff: start **/
$('#btn_addstaff').click(function(e) {
var isValid = $('#addStaffForm').valid();
if (isValid) {
$.post(ctx+"/staff/savestaff", $('#addStaffForm').serialize(),function (data) {
var result = data.result;
if(result!=null && result=="noauthority") {
common.alert1('你无此权限!');
return;
}
if(data.result!=null && data.result=="againLogin") {
$('.commonConfirm').modal('hide');
common.alert1("登录过期,请重新登录");
setTimeout("location.href = ctx + '/login.html'",1000);
}else if(data.result=='success'){
$('.modal-staff').modal('hide');
$('.mySuccessModal1').modal({show:true,backdrop: 'static', keyboard: false});
common.mySuccessModal1('添加成功!','如需继续添加,请点击返回继续添加',
'返回继续添加','前往后台用户列表',
function () {
$('.mySuccessModal1').modal('hide');
$('.modal-order').modal('hide');
showOrderByPage(1);
},
function () {
$('.modal-staff').modal('show');
});
$("#addStaffForm").find("input[name='nickname']").val('');
$("#addStaffForm").find("input[name='loginname']").val('');
$("#addStaffForm").find("select[name='role']").val('管理员');
$("#addStaffForm").find("input[name='phone']").val('');
}else if(data.result=='fail'){
$(".errorMsg").html(data.message);
}else{
$(".errorMsg").html("信息提交错误");
}
},"json");
}
});
/** add staff: end **/
/** edit-form validation: start **/
$('#updateStaffForm').validate({
rules: {
'nickname': {
required: true
},
'phone':{
isPhone:true
}
},
messages: {
'nickname': {
required: "请填写姓名"
},
'phone': {
}
},
errorClass: "error",
success: 'valid',
unhighlight: function(element, errorClass, validClass) { //validation passed
$(element).tooltip('destroy').removeClass(errorClass);
},
errorPlacement: function(label, element) {
$(element).tooltip('destroy'); /* required */
$(element).attr('title', $(label).text()).tooltip('show');
},
onclick: true
});
/** edit-form validation: end **/
/** update staff: start **/
$("#btn_updatestaff").click(function() {
var isValid = $('#updateStaffForm').valid();
if (isValid) {
$.post(ctx+"/staff/updatestaff", $('#updateStaffForm').serialize(),function (data) {
var result = data.result;
if(result!=null && result=="noauthority") {
common.alert1('你无此权限!');
return;
}
if(data.result!=null && data.result=="againLogin") {
$('.commonConfirm').modal('hide');
common.alert1("登录过期,请重新登录");
setTimeout("location.href = ctx + '/login.html'",1000);
}else if(data.result=='success'){
$(".modal-view").modal('hide');
common.mySuccessModal2('修改成功!');
showOrderByPage(1);
}else{
$(".updateMsg").html(data.message);
}
},"json");
}
});
/** update staff: end **/
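// showOrderByPage requests one page of orders using the current filter inputs, renders the table rows plus a totals row, and (re)initializes pagination.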
function showOrderByPage(pageNum) {
if ($(".all").is(':checked')) {
$(".all").removeAttr('checked');
}
$("#dataBody").append(loadingHtml);
if($('.alert-warning').length>0){
$('.alert-warning').remove();
}
$.getJSON(ctx + '/adminorder/showorderbypage', {
pageNum : pageNum,
pageSize : $('.setTableLength').val() || 10,
startTime : $("#date-start").val(),
endTime : $("#date-end").val(),
orderid : $("input[name='orderid-query']").val(),
sender : $("input[name='sender-query']").val(),
receiver : $("input[name='receiver-query']").val(),
state : $("select[name='state-query']").val(),
iscreditapply : $("select[name='iscreditapply-query']").val(),
username : $("input[name='username-query']").val(),
},
function(data){
if($('.loading')){
$('.loading').remove();
}
var sumnum = 0;
var sumweight = 0.0;
var sumvalume = 0.0;
var sumorderfee = 0.0;
var sumagencyfund = 0.0;
var result = data.result;
if(result!=null && result=="noauthority") {
common.alert1('你无此权限!');
return;
}
if(data.result!=null && data.result=="againLogin") {
common.alert1("登录过期,请重新登录");
setTimeout("location.href = ctx + '/login.html'",1000);
} else if(data.result.length == 0) {
$('.table-responsive').append('<div class="alert alert-warning no-result text-center" role="alert">没有找到记录... </div>');
} else{
var datapage = data.result;
var datalist = datapage.result;
var totalCount = datapage.totalCount;
var pageNum = datapage.pageNo;
var pageSize = datapage.pageSize;
var totalPages = datapage.totalPages;
$(".totalCount").html(totalCount);
$(".totalPages").html(totalPages);
$(".pageNo").html(pageNum);
var tempHtml="";
var currentorderid = $('#orderid').val();
var currentUtype = $('#currentUtype').val();
for(var i=0;i<datalist.length;i++){
var order = datalist[i];
var no = (pageNum-1)*pageSize+i+1;
tempHtml +="<tr data-id='"+order.id+"' data-pageno='" + pageNum + "'>";
// tempHtml +="<td><div class='checkbox'><label><input type='checkbox' data-orderid="+order.id+" /></label></div></td>";
tempHtml +="<td class='excel'>"+no+"</td>";
tempHtml +="<td class='excel'>"+isnull(order.orderid)+"</td>";
tempHtml +="<td class='excel'>"+isnull(order.username)+"</td>";
tempHtml +="<td class='excel'>"+isnull(order.sender)+"</td>";
tempHtml +="<td class='excel'>"+isnull(order.senderphone)+"</td>";
tempHtml +="<td class='excel'>"+isnull(order.receiver)+"</td>";
tempHtml +="<td class='excel'>"+isnull(order.receiverphone)+"</td>";
tempHtml +="<td class='excel'>"+isnull(order.goodsname)+"</td>";
tempHtml +="<td class='excel'>"+isnull(order.goodsnum)+"</td>";
tempHtml +="<td class='excel'>"+isnull(order.weight)+"</td>";
tempHtml +="<td class='excel'>"+isnull(order.volume)+"</td>";
tempHtml +="<td class='excel'>"+isnull(order.transportway)+"</td>";
tempHtml +="<td class='excel'>"+isnull(order.sendfee)+"</td>";
tempHtml +="<td class='excel'>"+isnull(order.orgname)+"</td>";
tempHtml +="<td class='excel'>"+isnull(deliverytype[order.deliverytype])+"</td>";
tempHtml +="<td class='excel'>"+isnull(order.pickupgoodstime)+"</td>";
tempHtml +="<td class='excel'>"+isnull(order.settletype)+"</td>";
tempHtml +="<td class='excel'>"+isnull(order.agencyfund)+"</td>";
tempHtml +="<td class='excel'>"+isnull(order.state)+"</td>";
tempHtml +="<td class='excel'>"+isnull(order.createtime)+"</td>";
tempHtml +="<td><button type='button' data-toggle='modal' data-target='.modal-edityd' onclick='showOrderInfo(" +order.id+")' class='btn btn-primary btn-xs'>查看</button> </td>";
tempHtml +="</tr>";
sumnum += order.goodsnum;
sumweight += order.weight;
sumvalume += order.volume;
sumorderfee += order.sendfee;
sumagencyfund += order.agencyfund;
if($('.alert-warning').length>0){
$('.alert-warning').remove();
}
}
var sl=parseFloat(sumvalume).toFixed(2);
var sf=parseFloat(sumorderfee).toFixed(2);
tempHtml +="<tr>" +
"<td style='padding: 6px 5px 3px;'>总计</td> <td></td> <td></td> <td></td> <td></td> <td></td> <td></td> <td></td> " +
"<td>"+sumnum+"</td>" +
"<td>"+sumweight+"</td>" +
"<td>"+sl+"</td>" +
"<td></td>" +
"<td>"+sf+"</td>" +
"<td></td> <td></td> <td></td> <td></td>" +
"<td>"+sumagencyfund+"</td>" +
"<td></td> <td></td> <td></td>" +
"</tr>";
if(datalist.length === 0&&$('.alert-warning').length<=0) {
$('.table-responsive').append('<div class="alert alert-warning no-result text-center" role="alert">没有找到记录 </div>');
}
$('#dataBody').html(tempHtml);
if (reload) {
reload = false;
first_load = true;
var initPagination = function() {
$("#Pagination").pagination(totalCount, {
num_edge_entries : 1, // number of edge page links
num_display_entries : 4, // number of body page links
callback: pageselectCallback,
prev_text : "«",
next_text : "»",
items_per_page : pageSize,
link_to : 'javascript:void(0);'
});
}();
/** pagination: end **/
}
}
}).fail(function (err) {
if($('.loading')){
$('.loading').remove();
}
common.alert('服务器错误!请刷新页面重试或联系管理员',"确定");
});
}
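// pageselectCallback is invoked by the pagination plugin; the call made during initialization is skipped.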
function pageselectCallback(page_index, jq) {
if (first_load) { //第一次初始化不需要请求
first_load = false;
return false;
}
pageNum = page_index + 1;
showOrderByPage(pageNum);
}
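// isnull/isnumber map missing values to display defaults for table cells.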
function isnull(value) {
if (value == null || value == '' || typeof (value) == "undefined" || value == '/')
return "--";
else
return value;
}
function isnumber(value) {
if (value == null || value == '')
return "0.00";
else
return value;
}
});
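// showOrderInfo loads a single order by id and fills the read-only fields of the detail modal.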
function showOrderInfo(id){
$.getJSON(ctx + '/adminorder/showorderinfo', {
id : id
}, function(data) {
if(data != null){
$(".modal-edityd").find("input[name='orderid']").val(data.orderid);
$(".modal-edityd").find("input[name='orgname']").val(data.orgname);
$(".modal-edityd").find("select[name='transportway']").val(data.transportway);
$(".modal-edityd").find("input[name='createtime']").val(data.createtime);
$(".modal-edityd").find("select[name='settletype']").val(data.settletype);
$(".modal-edityd").find("select[name='ifnotifydispatch']").val(data.ifnotifydispatch);
$(".modal-edityd").find("select[name='paytype']").val(data.paytype);
$(".modal-edityd").find("select[name='dispatchtype']").val(data.dispatchtype);
$(".modal-edityd").find("input[name='agencyfund']").val(data.agencyfund);
$(".modal-edityd").find("input[name='receiptnum']").val(data.receiptnum);
$(".modal-edityd").find("input[name='receiptno']").val(data.receiptno);
$(".modal-edityd").find("input[name='receiver']").val(data.receiver);
$(".modal-edityd").find("input[name='receiverphone']").val(data.receiverphone);
$(".modal-edityd").find("input[name='sender']").val(data.sender);
$(".modal-edityd").find("input[name='senderphone']").val(data.senderphone);
$(".modal-edityd").find("input[name='destname']").val(data.destname);
$(".modal-edityd").find("input[name='receivercompany']").val(data.receivercompany);
$(".modal-edityd").find("input[name='sendvipname']").val(data.sendvipname);
$(".modal-edityd").find("input[name='sendercompany']").val(data.sendercompany);
$(".modal-edityd").find("input[name='receiverpcdt']").val(data.receiverpcdt);
$(".modal-edityd").find("input[name='senderpcdt']").val(data.senderpcdt);
$(".modal-edityd").find("input[name='receiveraddress']").val(data.receiveraddress);
$(".modal-edityd").find("input[name='senderaddress']").val(data.senderaddress);
$(".modal-edityd").find("input[name='goodsname']").val(data.goodsname);
$(".modal-edityd").find("input[name='goodsnum']").val(data.goodsnum);
$(".modal-edityd").find("input[name='sendfee']").val(data.sendfee);
$(".modal-edityd").find("input[name='freightcollectfee']").val(data.freightcollectfee);
$(".modal-edityd").find("input[name='weight']").val(data.weight);
$(".modal-edityd").find("input[name='volume']").val(data.volume);
$(".modal-edityd").find("input[name='goodsvalue']").val(data.goodsvalue);
$(".modal-edityd").find("input[name='securefee']").val(data.securefee);
$(".modal-edityd").find("select[name='packtype']").val(data.packtype);
$(".modal-edityd").find("input[name='remark']").val(data.remark);
}else{
alert("查询失败");
}
});
}
tempHtml +="</tr>";
sumnum += order.goodsnum;
sumweight += order.weight;
sumvalume += order.volume;
sumorderfee += order.sendfee;
sumagencyfund += order.agencyfund;
if($('.alert-warning').length>0){
$('.alert-warning').remove();
}
}
var sl=parseFloat(sumvalume).toFixed(2);
var sf=parseFloat(sumorderfee).toFixed(2);
tempHtml +="<tr>" +
"<td style='padding: 6px 5px 3px;'>总计</td> <td></td> <td></td> <td></td> <td></td> <td></td> <td></td> <td></td> " +
"<td>"+sumnum+"</td>" +
"<td>"+sumweight+"</td>" +
"<td>"+sl+"</td>" +
"<td></td>" +
"<td>"+sf+"</td>" +
"<td></td> <td></td> <td></td> <td></td>" +
"<td>"+sumagencyfund+"</td>" +
"<td></td> <td></td> <td></td>" +
"</tr>";
if(datalist.length === 0&&$('.alert-warning').length<=0) {
$('.table-responsive').append('<div class="alert alert-warning no-result text-center" role="alert">没有找到记录 </div>');
}
$('#dataBody').html(tempHtml);
if (reload) {
reload = false;
first_load = true;
var initPagination = function() {
$("#Pagination").pagination(totalCount, {
num_edge_entries : 1, // 边缘页数
num_display_entries : 4, // 主体页数
callback: pageselectCallback,
prev_text : "«",
next_text : "»",
items_per_page : pageSize,
link_to : 'javascript:void(0);'
});
}();
/**分页e**/
}
}
}).fail(function (err) {
if($('.loading')){
$('.loading').remove();
}
common.alert('服务器错误!请刷新页面重试或联系管理员',"确定");
});
}
function pageselectCallback(page_index, jq) {
if (first_load) { //第一次初始化不需要请求
first_load = false;
return false;
}
pageNum = page_index + 1;
showOrderByPage(pageNum);
}
function isnull(value) {
if (value == null || value == '' || typeof (value) == "undefined" || value == '/')
return "--";
else
return value;
}
function isnumber(value) {
if (value == null || value == '')
return "0.00";
else
return value;
}
});
function showOrderInfo(id){
$.getJSON(ctx + '/adminorder/showorderinfo', {
id : id
}, function(data) {
if(data != null){
$(".modal-edityd").find("input[name='orderid']").val(data.orderid);
$(".modal-edityd").find("input[name='orgname']").val(data.orgname);
$(".modal-edityd").find("select[name='transportway']").val(data.transportway);
$(".modal-edityd").find("input[name='createtime']").val(data.createtime);
$(".modal-edityd").find("select[name='settletype']").val(data.settletype);
$(".modal-edityd").find("select[name='ifnotifydispatch']").val(data.ifnotifydispatch);
$(".modal-edityd").find("select[name='paytype']").val(data.paytype | );
$(".modal-edityd").find("select[name='dispatchtype']").val(data.dispatchtype);
$(".modal-edityd").find("input[name='agencyfund']").val(data.agencyfund);
$(".modal-edityd").find("input[name='receiptnum']").val(data.receiptnum);
$(".modal-edityd").find("input[name='receiptno']").val(data.receiptno);
$(".modal-edityd").find("input[name='receiver']").val(data.receiver);
$(".modal-edityd").find("input[name='receiverphone']").val(data.receiverphone);
$(".modal-edityd").find("input[name='sender']").val(data.sender);
$(".modal-edityd").find("input[name='senderphone']").val(data.senderphone);
$(".modal-edityd").find("input[name='destname']").val(data.destname);
$(".modal-edityd").find("input[name='receivercompany']").val(data.receivercompany);
$(".modal-edityd").find("input[name='sendvipname']").val(data.sendvipname);
$(".modal-edityd").find("input[name='sendercompany']").val(data.sendercompany);
$(".modal-edityd").find("input[name='receiverpcdt']").val(data.receiverpcdt);
$(".modal-edityd").find("input[name='senderpcdt']").val(data.senderpcdt);
$(".modal-edityd").find("input[name='receiveraddress']").val(data.receiveraddress);
$(".modal-edityd").find("input[name='senderaddress']").val(data.senderaddress);
$(".modal-edityd").find("input[name='goodsname']").val(data.goodsname);
$(".modal-edityd").find("input[name='goodsnum']").val(data.goodsnum);
$(".modal-edityd").find("input[name='sendfee']").val(data.sendfee);
$(".modal-edityd").find("input[name='freightcollectfee']").val(data.freightcollectfee);
$(".modal-edityd").find("input[name='weight']").val(data.weight);
$(".modal-edityd").find("input[name='volume']").val(data.volume);
$(".modal-edityd").find("input[name='goodsvalue']").val(data.goodsvalue);
$(".modal-edityd").find("input[name='securefee']").val(data.securefee);
$(".modal-edityd").find("select[name='packtype']").val(data.packtype);
$(".modal-edityd").find("input[name='remark']").val(data.remark);
}else{
alert("查询失败");
}
});
}
| conditional_block |
|
// ddglCtrl.js
require([ 'jquery', 'app', 'common', 'validate', 'pagination', 'bootstrap', 'timepicker', 'dateZh' ], function($, app, common, validate) {
$('title').text('订单管理');
var deliverytype = ['','上门提货','自送网点'];
var iscreditapply = ['否','是'];
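	// index 0 is a placeholder so that deliverytype[order.deliverytype] can be
	// looked up directly with the backend's 1-based delivery-type codes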
	// date pickers for the query time range
$('#date-start, #date-end').datetimepicker({
format : "yyyy-mm-dd hh:ii:ss",
language : "zh-CN",
todayBtn : 1,
autoclose : true,
todayHighlight : true,
forceParse : true,
minuteStep : 10
});
var totalPages = 0;
var pageSize = '10';
var first_load = true;
var reload = true;
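	// reload=true forces the pagination widget to be rebuilt after the next
	// query; first_load swallows the callback the plugin fires on creation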
$(document).ready(function() {
showOrderByPage(1);
});
	// placeholder polyfill for old IE - start
$(function() {
		if (!placeholderSupport()) { // does this browser support the placeholder attribute?
$('[placeholder]').focus(function() {
var input = $(this);
if (input.val() == input.attr('placeholder')) {
input.val('');
input.removeClass('placeholder');
}
}).blur(function() {
var input = $(this);
if (input.val() == ''
|| input.val() == input.attr('placeholder')) {
input.addClass('placeholder');
input.val(input.attr('placeholder'));
}
}).blur();
		}
});
function placeholderSupport() {
return 'placeholder' in document.createElement('input');
}
	// placeholder polyfill for old IE - end
	// reset the query form
$('.clearBtn').on('click', function() {
$("input[name='orderid-query']").val('');
$("input[name='sender-query']").val('');
$("input[name='receiver-query']").val('');
$("input[name='username-query']").val('');
$("select[name='state-query']").val('');
$("select[name='iscreditapply-query']").val('');
$('.setTableLength').val(10);
$("#date-start").val('');
$("#date-end").val('');
});
$(".setTableLength").change(function() {
reload = true;
showOrderByPage(1);
});
$(".searchBtn").click(function() {
reload = true;
showOrderByPage(1);
});
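	// NOTE: the clearBtn handler below also resets fields (loginname/vipname/
	// viprole) that do not exist on this page - most likely copied from another
	// admin page; val('') on an empty jQuery selection is a no-op, so it is harmless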
$('.clearBtn').on('click', function() {
$("input[name='loginname-query']").val('');
$("select[name='status-query']").val('');
$("#date-start").val('');
$("#date-end").val('');
$("input[name='vipname-nickName']").val('');
$("select[name='status-query']").val('');
$("select[name='viprole-query']").val('');
});
$.validator.addMethod("isPhone", function(value, element) {
var length = value.length;
return this.optional(element) || (length == 11 && /^(((13[0-9]{1})|(17[0-9]{1})|(15[0-9]{1})|(18[0-9]{1}))+\d{8})$/.test(value))||/^((0\d{2,3})-)?(\d{7,8})(-(\d{3,}))?$/.test(value);
}, "手机、电话格式不正确");
	/** add-staff form validation - start **/
$('#addStaffForm').validate({
rules: {
'nickname': {
required: true
},
'loginname': {
required: true
},
'phone':{
isPhone:true
}
},
messages: {
'nickname': {
required: "请填写姓名"
},
'loginname': {
required: "请填写登录帐号"
},
'phone': { }
},
errorClass: "error",
success: 'valid',
		unhighlight: function(element, errorClass, validClass) { // field passed validation
$(element).tooltip('destroy').removeClass(errorClass);
},
errorPlacement: function(label, element) {
			$(element).tooltip('destroy'); /* required: drop any stale tooltip first */
$(element).attr('title', $(label).text()).tooltip('show');
},
onclick: true
});
	/** add-staff form validation - end **/
	/** reset the add-staff dialog - start **/
$('.add').click(function(e) {
$(".errorMsg").html("");
$("#addStaffForm").find("input[name='nickname']").val('');
$("#addStaffForm").find("input[name='loginname']").val('');
$("#addStaffForm").find("select[name='role']").val('管理员');
$("#addStaffForm").find("input[name='phone']").val('');
	});
	/** reset the add-staff dialog - end **/
	/** save new staff - start **/
$('#btn_addstaff').click(function(e) {
		var isValid = $('#addStaffForm').valid();
if (isValid) {
$.post(ctx+"/staff/savestaff", $('#addStaffForm').serialize(),function (data) {
var result = data.result;
if(result!=null && result=="noauthority") {
common.alert1('你无此权限!');
return;
}
if(data.result!=null && data.result=="againLogin") {
$('.commonConfirm').modal('hide');
common.alert1("登录过期,请重新登录");
					setTimeout(function() { location.href = ctx + '/login.html'; }, 1000);
}else if(data.result=='success'){
$('.modal-staff').modal('hide');
$('.mySuccessModal1').modal({show:true,backdrop: 'static', keyboard: false});
common.mySuccessModal1('添加成功!','如需继续添加,请点击返回继续添加',
'返回继续添加','前往后台用户列表',
function () {
$('.mySuccessModal1').modal('hide');
$('.modal-order').modal('hide');
showOrderByPage(1);
},
function () {
$('.modal-staff').modal('show');
});
$("#addStaffForm").find("input[name='nickname']").val('');
$("#addStaffForm").find("input[name='loginname']").val('');
$("#addStaffForm").find("select[name='role']").val('管理员');
$("#addStaffForm").find("input[name='phone']").val('');
}else if(data.result=='fail'){
$(".errorMsg").html(data.message);
}else{
$(".errorMsg").html("信息提交错误");
}
},"json");
}
});
	/** save new staff - end **/
	/** update-staff form validation - start **/
$('#updateStaffForm').validate({
rules: {
'nickname': {
required: true
},
'phone':{
isPhone:true
}
},
messages: {
'nickname': {
required: "请填写姓名"
},
'phone': {
}
},
errorClass: "error",
success: 'valid',
		unhighlight: function(element, errorClass, validClass) { // field passed validation
$(element).tooltip('destroy').removeClass(errorClass);
},
errorPlacement: function(label, element) {
			$(element).tooltip('destroy'); /* required: drop any stale tooltip first */
$(element).attr('title', $(label).text()).tooltip('show');
},
onclick: true
});
	/** update-staff form validation - end **/
	/** update staff - start **/
$("#btn_updatestaff").click(function() {
		var isValid = $('#updateStaffForm').valid();
if (isValid) {
$.post(ctx+"/staff/updatestaff", $('#updateStaffForm').serialize(),function (data) {
var result = data.result;
if(result!=null && result=="noauthority") {
common.alert1('你无此权限!');
return;
}
if(data.result!=null && data.result=="againLogin") {
$('.commonConfirm').modal('hide');
common.alert1("登录过期,请重新登录");;
setTimeout("location.href = ctx + '/login.html'",1000);
}else if(data.result=='success'){
$(".modal-view").modal('hide');
common.mySuccessModal2('修改成功!');
showOrderByPage(1);
}else{
$(".updateMsg").html(data.message);
}
},"json");
}
});
	/** update staff - end **/
function showOrderByPage(pageNum) {
if ($(".all").is(':checked')) {
$(".all").removeAttr('checked');
}
$("#dataBody").append(loadingHtml);
if($('.alert-warning').length>0){
$('.alert-warning').remove();
}
$.getJSON(ctx + '/adminorder/showorderbypage', {
pageNum : pageNum,
pageSize : $('.setTableLength').val() || 10,
startTime : $("#date-start").val(),
endTime : $("#date-end").val(),
orderid : $("input[name='orderid-query']").val(),
sender : $("input[name='sender-query']").val(),
receiver : $("input[name='receiver-query']").val(),
state : $("select[name='state-query").val(),
iscreditapply : $("select[name='iscreditapply-query").val(),
username : $("input[name='username-query']").val(),
},
function(data){
			if ($('.loading').length) {
				$('.loading').remove();
			}
var sumnum = 0;
var sumweight = 0.0;
			var sumvolume = 0.0;
var sumorderfee = 0.0;
var sumagencyfund = 0.0;
var result = data.result;
if(result!=null && result=="noauthority") {
common.alert1('你无此权限!');
return;
}
if(data.result!=null && data.result=="againLogin") {
common.alert1("登录过期,请重新登录");
				setTimeout(function() { location.href = ctx + '/login.html'; }, 1000);
} else if(data.result.length == 0) {
$('.table-responsive').append('<div class="alert alert-warning no-result text-center" role="alert">没有找到记录... </div>');
} else{
var datapage = data.result;
var datalist = datapage.result;
var totalCount = datapage.totalCount;
var pageNum = datapage.pageNo;
var pageSize = datapage.pageSize;
var totalPages = datapage.totalPages;
$(".totalCount").html(totalCount);
$(".totalPages").html(totalPages);
$(".pageNo").html(pageNum);
var tempHtml="";
var currentorderid = $('#orderid').val();
var currentUtype = $('#currentUtype').val();
for(var i=0;i<datalist.length;i++){
var order = datalist[i];
var no = (pageNum-1)*pageSize+i+1;
tempHtml +="<tr data-id='"+order.id+"' data-pageno='" + pageNum + "'>";
// tempHtml +="<td><div class='checkbox'><label><input type='checkbox' data-orderid="+order.id+" /></label></div></td>";
tempHtml +="<td class='excel'>"+no+"</td>";
tempHtml +="<td class='excel'>"+isnull(order.orderid)+"</td>";
tempHtml +="<td class='excel'>"+isnull(order.username)+"</td>";
tempHtml +="<td class='excel'>"+isnull(order.sender)+"</td>";
tempHtml +="<td class='excel'>"+isnull(order.senderphone)+"</td>";
tempHtml +="<td class='excel'>"+isnull(order.receiver)+"</td>";
tempHtml +="<td class='excel'>"+isnull(order.receiverphone)+"</td>";
tempHtml +="<td class='excel'>"+isnull(order.goodsname)+"</td>";
tempHtml +="<td class='excel'>"+isnull(order.goodsnum)+"</td>";
tempHtml +="<td class='excel'>"+isnull(order.weight)+"</td>";
tempHtml +="<td class='excel'>"+isnull(order.volume)+"</td>";
tempHtml +="<td class='excel'>"+isnull(order.transportway)+"</td>";
tempHtml +="<td class='excel'>"+isnull(order.sendfee)+"</td>";
tempHtml +="<td class='excel'>"+isnull(order.orgname)+"</td>";
tempHtml +="<td class='excel'>"+isnull(deliverytype[order.deliverytype])+"</td>";
tempHtml +="<td class='excel'>"+isnull(order.pickupgoodstime)+"</td>";
tempHtml +="<td class='excel'>"+isnull(order.settletype)+"</td>";
tempHtml +="<td class='excel'>"+isnull(order.agencyfund)+"</td>";
tempHtml +="<td class='excel'>"+isnull(order.state)+"</td>";
tempHtml +="<td class='excel'>"+isnull(order.createtime)+"</td>";
tempHtml +="<td><button type='button' data-toggle='modal' data-target='.modal-edityd' onclick='showOrderInfo(" +order.id+")' class='btn btn-primary btn-xs'>查看</button> </td>";
tempHtml +="</tr>";
				// Number(...) || 0 guards against missing fields so the totals never become NaN
				sumnum += Number(order.goodsnum) || 0;
				sumweight += Number(order.weight) || 0;
				sumvolume += Number(order.volume) || 0;
				sumorderfee += Number(order.sendfee) || 0;
				sumagencyfund += Number(order.agencyfund) || 0;
if($('.alert-warning').length>0){
$('.alert-warning').remove();
}
}
			var sl = sumvolume.toFixed(2);
			var sf = sumorderfee.toFixed(2);
tempHtml +="<tr>" +
"<td style='padding: 6px 5px 3px;'>总计</td> <td></td> <td></td> <td></td> <td></td> <td></td> <td></td> <td></td> " +
"<td>"+sumnum+"</td>" +
"<td>"+sumweight+"</td>" +
"<td>"+sl+"</td>" +
"<td></td>" +
"<td>"+sf+"</td>" +
"<td></td> <td></td> <td></td> <td></td>" +
"<td>"+sumagencyfund+"</td>" +
"<td></td> <td></td> <td></td>" +
"</tr>";
if(datalist.length === 0&&$('.alert-warning').length<=0) {
$('.table-responsive').append('<div class="alert alert-warning no-result text-center" role="alert">没有找到记录 </div>');
}
$('#dataBody').html(tempHtml);
if (reload) {
reload = false;
first_load = true;
var initPagination = function() {
$("#Pagination").pagination(totalCount, {
				num_edge_entries : 1, // pages shown at each edge
				num_display_entries : 4, // pages shown around the current one
callback: pageselectCallback,
prev_text : "«",
next_text : "»",
items_per_page : pageSize,
link_to : 'javascript:void(0);'
});
}();
			/** pagination - end **/
}
}
}).fail(function (err) {
		if ($('.loading').length) {
$('.loading').remove();
}
common.alert('服务器错误!请刷新页面重试或联系管理员',"确定");
});
}
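	/* Response shape expected from /adminorder/showorderbypage, inferred from the
	 * parsing logic above (the backend stays authoritative for the exact fields):
	 *   { "result": { "result": [ ...order rows... ], "totalCount": 123,
	 *     "pageNo": 1, "pageSize": 10, "totalPages": 13 } }
	 * On failure "result" is the string "noauthority" or "againLogin" instead.
	 */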
function pageselectCallback(page_index, jq) {
		if (first_load) { // the pagination plugin fires its callback once on init; skip that first request
first_load = false;
return false;
}
pageNum = page_index + 1;
showOrderByPage(pageNum);
}
function isnull(value) {
if (value == null || value == '' || typeof (value) == "undefined" || value == '/')
return "--";
else
return value;
}
function isnumber(value) {
if (value == null || value == '')
return "0.00";
else
return value;
}
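	// isnumber currently has no callers in this file; the totals above guard
	// against missing values with Number(...) || 0 instead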
});
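// showOrderInfo must stay global: the rows rendered above invoke it through an
// inline onclick='showOrderInfo(id)' attribute.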
function showOrderInfo(id){
$.getJSON(ctx + '/adminorder/showorderinfo', {
id : id
}, function(data) {
if(data != null){
$(".modal-edityd").find("input[name='orderid']").val(data.orderid);
$(".modal-edityd").find("input[name='orgname']").val(data.orgname);
$(".modal-edityd").find("select[name='transportway']").val(data.transportway);
$(".modal-edityd").find("input[name='createtime']").val(data.createtime);
$(".modal-edityd").find("select[name='settletype']").val(data.settletype);
$(".modal-edityd").find("select[name='ifnotifydispatch']").val(data.ifnotifydispatch);
$(".modal-edityd").find("select[name='paytype']").val(data.paytype);
$(".modal-edityd").find("select[name='dispatchtype']").val(data.dispatchtype);
$(".modal-edityd").find("input[name='agencyfund']").val(data.agencyfund);
$(".modal-edityd").find("input[name='receiptnum']").val(data.receiptnum);
$(".modal-edityd").find("input[name='receiptno']").val(data.receiptno);
$(".modal-edityd").find("input[name='receiver']").val(data.receiver);
$(".modal-edityd").find("input[name='receiverphone']").val(data.receiverphone);
$(".modal-edityd").find("input[name='sender']").val(data.sender);
$(".modal-edityd").find("input[name='senderphone']").val(data.senderphone);
$(".modal-edityd").find("input[name='destname']").val(data.destname);
$(".modal-edityd").find("input[name='receivercompany']").val(data.receivercompany);
$(".modal-edityd").find("input[name='sendvipname']").val(data.sendvipname);
$(".modal-edityd").find("input[name='sendercompany']").val(data.sendercompany);
$(".modal-edityd").find("input[name='receiverpcdt']").val(data.receiverpcdt);
$(".modal-edityd").find("input[name='senderpcdt']").val(data.senderpcdt);
$(".modal-edityd").find("input[name='receiveraddress']").val(data.receiveraddress);
$(".modal-edityd").find("input[name='senderaddress']").val(data.senderaddress);
$(".modal-edityd").find("input[name='goodsname']").val(data.goodsname);
$(".modal-edityd").find("input[name='goodsnum']").val(data.goodsnum);
$(".modal-edityd").find("input[name='sendfee']").val(data.sendfee);
$(".modal-edityd").find("input[name='freightcollectfee']").val(data.freightcollectfee);
$(".modal-edityd").find("input[name='weight']").val(data.weight);
$(".modal-edityd").find("input[name='volume']").val(data.volume);
$(".modal-edityd").find("input[name='goodsvalue']").val(data.goodsvalue);
$(".modal-edityd").find("input[name='securefee']").val(data.securefee);
$(".modal-edityd").find("select[name='packtype']").val(data.packtype);
$(".modal-edityd").find("input[name='remark']").val(data.remark);
}else{
alert("查询失败");
}
});
}
# SiteInfoDialog.py
# -*- coding: utf-8 -*-
# Copyright (c) 2011 - 2019 Detlev Offenbach <[email protected]>
#
"""
Module implementing a dialog to show some information about a site.
"""
from PyQt5.QtCore import pyqtSlot, QUrl, Qt
from PyQt5.QtGui import QPixmap, QImage, QPainter, QColor, QBrush
from PyQt5.QtNetwork import QNetworkRequest, QNetworkReply
from PyQt5.QtWidgets import (
QDialog, QTreeWidgetItem, QGraphicsScene, QMenu, QApplication,
QGraphicsPixmapItem
)
from E5Gui import E5MessageBox, E5FileDialog
from .Ui_SiteInfoDialog import Ui_SiteInfoDialog
from ..Tools import Scripts, WebBrowserTools
from ..WebBrowserPage import WebBrowserPage
import UI.PixmapCache
class SiteInfoDialog(QDialog, Ui_SiteInfoDialog):
"""
Class implementing a dialog to show some information about a site.
"""
okStyle = "QLabel { color : white; background-color : green; }"
nokStyle = "QLabel { color : white; background-color : red; }"
def __init__(self, browser, parent=None):
"""
Constructor
@param browser reference to the browser window (HelpBrowser)
@param parent reference to the parent widget (QWidget)
"""
super(SiteInfoDialog, self).__init__(parent)
self.setupUi(self)
self.setWindowFlags(Qt.Window)
# put icons
self.tabWidget.setTabIcon(
0, UI.PixmapCache.getIcon("siteinfo-general.png"))
self.tabWidget.setTabIcon(
1, UI.PixmapCache.getIcon("siteinfo-media.png"))
self.__imageReply = None
self.__baseUrl = browser.url()
title = browser.title()
		# prepare the background of the image preview
self.__imagePreviewStandardBackground = (
self.imagePreview.backgroundBrush()
)
color1 = QColor(220, 220, 220)
color2 = QColor(160, 160, 160)
self.__tilePixmap = QPixmap(8, 8)
self.__tilePixmap.fill(color1)
tilePainter = QPainter(self.__tilePixmap)
tilePainter.fillRect(0, 0, 4, 4, color2)
tilePainter.fillRect(4, 4, 4, 4, color2)
tilePainter.end()
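		# the 8x8 two-colour tile gives the preview a checkerboard background,
		# which keeps transparent image areas visible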
# populate General tab
self.heading.setText("<b>{0}</b>".format(title))
self.siteAddressLabel.setText(self.__baseUrl.toString())
if self.__baseUrl.scheme() in ["https"]:
self.securityLabel.setStyleSheet(SiteInfoDialog.okStyle)
self.securityLabel.setText(
self.tr('<b>Connection is encrypted.</b>'))
else:
self.securityLabel.setStyleSheet(SiteInfoDialog.nokStyle)
self.securityLabel.setText(
self.tr('<b>Connection is not encrypted.</b>'))
browser.page().runJavaScript(
"document.charset", WebBrowserPage.SafeJsWorld,
lambda res: self.encodingLabel.setText(res))
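		# the charset may be refined below once the <meta> tags are processed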
# populate Meta tags
browser.page().runJavaScript(Scripts.getAllMetaAttributes(),
WebBrowserPage.SafeJsWorld,
self.__processMetaAttributes)
# populate Media tab
browser.page().runJavaScript(Scripts.getAllImages(),
WebBrowserPage.SafeJsWorld,
self.__processImageTags)
def __processImageTags(self, res):
"""
Private method to process the image tags.
@param res result of the JavaScript script
@type list of dict
"""
for img in res:
src = img["src"]
alt = img["alt"]
if not alt:
if src.find("/") == -1:
alt = src
else:
pos = src.rfind("/")
alt = src[pos + 1:]
if not src or not alt:
continue
QTreeWidgetItem(self.imagesTree, [alt, src])
for col in range(self.imagesTree.columnCount()):
self.imagesTree.resizeColumnToContents(col)
if self.imagesTree.columnWidth(0) > 300:
self.imagesTree.setColumnWidth(0, 300)
self.imagesTree.setCurrentItem(self.imagesTree.topLevelItem(0))
self.imagesTree.setContextMenuPolicy(Qt.CustomContextMenu)
self.imagesTree.customContextMenuRequested.connect(
self.__imagesTreeContextMenuRequested)
def __processMetaAttributes(self, res):
"""
Private method to process the meta attributes.
@param res result of the JavaScript script
@type list of dict
"""
for meta in res:
content = meta["content"]
name = meta["name"]
if not name:
name = meta["httpequiv"]
if not name or not content:
continue
if meta["charset"]:
self.encodingLabel.setText(meta["charset"])
if "charset=" in content:
self.encodingLabel.setText(
content[content.index("charset=") + 8:])
QTreeWidgetItem(self.tagsTree, [name, content])
for col in range(self.tagsTree.columnCount()):
			self.tagsTree.resizeColumnToContents(col)
@pyqtSlot(QTreeWidgetItem, QTreeWidgetItem)
def on_imagesTree_currentItemChanged(self, current, previous):
"""
Private slot to show a preview of the selected image.
@param current current image entry (QTreeWidgetItem)
@param previous old current entry (QTreeWidgetItem)
"""
if current is None:
return
imageUrl = QUrl(current.text(1))
if imageUrl.isRelative():
imageUrl = self.__baseUrl.resolved(imageUrl)
pixmap = QPixmap()
loading = False
if imageUrl.scheme() == "data":
encodedUrl = current.text(1).encode("utf-8")
imageData = encodedUrl[encodedUrl.find(b",") + 1:]
pixmap = WebBrowserTools.pixmapFromByteArray(imageData)
elif imageUrl.scheme() == "file":
pixmap = QPixmap(imageUrl.toLocalFile())
elif imageUrl.scheme() == "qrc":
pixmap = QPixmap(imageUrl.toString()[3:])
else:
if self.__imageReply is not None:
self.__imageReply.deleteLater()
self.__imageReply = None
from WebBrowser.WebBrowserWindow import WebBrowserWindow
self.__imageReply = WebBrowserWindow.networkManager().get(
QNetworkRequest(imageUrl))
self.__imageReply.finished.connect(self.__imageReplyFinished)
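			# remote images are fetched asynchronously through the shared
			# network manager; the finished handler renders the reply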
loading = True
self.__showLoadingText()
if not loading:
self.__showPixmap(pixmap)
@pyqtSlot()
def __imageReplyFinished(self):
"""
Private slot handling the loading of an image.
"""
if self.__imageReply.error() != QNetworkReply.NoError:
return
data = self.__imageReply.readAll()
self.__showPixmap(QPixmap.fromImage(QImage.fromData(data)))
def __showPixmap(self, pixmap):
"""
Private method to show a pixmap in the preview pane.
@param pixmap pixmap to be shown
@type QPixmap
"""
scene = QGraphicsScene(self.imagePreview)
if pixmap.isNull():
self.imagePreview.setBackgroundBrush(
self.__imagePreviewStandardBackground)
scene.addText(self.tr("Preview not available."))
else:
self.imagePreview.setBackgroundBrush(QBrush(self.__tilePixmap))
scene.addPixmap(pixmap)
self.imagePreview.setScene(scene)
def __showLoadingText(self):
"""
Private method to show some text while loading an image.
"""
self.imagePreview.setBackgroundBrush(
self.__imagePreviewStandardBackground)
scene = QGraphicsScene(self.imagePreview)
scene.addText(self.tr("Loading..."))
self.imagePreview.setScene(scene)
def __imagesTreeContextMenuRequested(self, pos):
"""
Private slot to show a context menu for the images list.
@param pos position for the menu (QPoint)
"""
itm = self.imagesTree.itemAt(pos)
if itm is None:
return
menu = QMenu()
act = menu.addAction(self.tr("Copy Image Location to Clipboard"))
act.setData(itm.text(1))
act.triggered.connect(lambda: self.__copyAction(act))
act = menu.addAction(self.tr("Copy Image Name to Clipboard"))
act.setData(itm.text(0))
act.triggered.connect(lambda: self.__copyAction(act))
menu.addSeparator()
act = menu.addAction(self.tr("Save Image"))
act.setData(self.imagesTree.indexOfTopLevelItem(itm))
act.triggered.connect(lambda: self.__saveImage(act))
menu.exec_(self.imagesTree.viewport().mapToGlobal(pos))
def __copyAction(self, act):
"""
Private slot to copy the image URL or the image name to the clipboard.
@param act reference to the action that triggered
@type QAction
"""
QApplication.clipboard().setText(act.data())
def __saveImage(self, act):
"""
Private slot to save the selected image to disk.
@param act reference to the action that triggered
@type QAction
"""
index = act.data()
itm = self.imagesTree.topLevelItem(index)
if itm is None:
return
if (
not self.imagePreview.scene() or
len(self.imagePreview.scene().items()) == 0
):
return
pixmapItem = self.imagePreview.scene().items()[0]
if not isinstance(pixmapItem, QGraphicsPixmapItem):
return
if pixmapItem.pixmap().isNull():
E5MessageBox.warning(
self,
self.tr("Save Image"),
self.tr(
"""<p>This preview is not available.</p>"""))
return
imageFileName = WebBrowserTools.getFileNameFromUrl(QUrl(itm.text(1)))
index = imageFileName.rfind(".")
if index != -1:
imageFileName = imageFileName[:index] + ".png"
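		# the preview pixmap is always written out as PNG (see save() below),
		# so the suggested file name gets a .png extension up front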
filename = E5FileDialog.getSaveFileName(
self,
self.tr("Save Image"),
imageFileName,
self.tr("All Files (*)"),
E5FileDialog.Options(E5FileDialog.DontConfirmOverwrite))
if not filename:
return
if not pixmapItem.pixmap().save(filename, "PNG"):
E5MessageBox.critical(
self,
self.tr("Save Image"),
self.tr(
"""<p>Cannot write to file <b>{0}</b>.</p>""")
.format(filename))
return
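# Typical usage (sketch only - assumes a browser widget from this package that
# provides url(), title() and page(), such as the WebBrowserView):
#
#     dlg = SiteInfoDialog(browser, parent=mainWindow)
#     dlg.show()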
nokStyle = "QLabel { color : white; background-color : red; }"
def __init__(self, browser, parent=None):
"""
Constructor
@param browser reference to the browser window (HelpBrowser)
@param parent reference to the parent widget (QWidget)
"""
super(SiteInfoDialog, self).__init__(parent)
self.setupUi(self)
self.setWindowFlags(Qt.Window)
# put icons
self.tabWidget.setTabIcon(
0, UI.PixmapCache.getIcon("siteinfo-general.png"))
self.tabWidget.setTabIcon(
1, UI.PixmapCache.getIcon("siteinfo-media.png"))
self.__imageReply = None
self.__baseUrl = browser.url()
title = browser.title()
        # prepare the background of the image preview
self.__imagePreviewStandardBackground = (
self.imagePreview.backgroundBrush()
)
color1 = QColor(220, 220, 220)
color2 = QColor(160, 160, 160)
self.__tilePixmap = QPixmap(8, 8)
self.__tilePixmap.fill(color1)
tilePainter = QPainter(self.__tilePixmap)
tilePainter.fillRect(0, 0, 4, 4, color2)
tilePainter.fillRect(4, 4, 4, 4, color2)
tilePainter.end()
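        # the result is an 8x8 two-tone checkerboard; __showPixmap() tiles it
        # behind non-null pixmaps so that transparent regions stay visible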
# populate General tab
self.heading.setText("<b>{0}</b>".format(title))
self.siteAddressLabel.setText(self.__baseUrl.toString())
if self.__baseUrl.scheme() in ["https"]:
self.securityLabel.setStyleSheet(SiteInfoDialog.okStyle)
self.securityLabel.setText(
self.tr('<b>Connection is encrypted.</b>'))
else:
self.securityLabel.setStyleSheet(SiteInfoDialog.nokStyle)
self.securityLabel.setText(
self.tr('<b>Connection is not encrypted.</b>'))
browser.page().runJavaScript(
"document.charset", WebBrowserPage.SafeJsWorld,
lambda res: self.encodingLabel.setText(res))
# populate Meta tags
browser.page().runJavaScript(Scripts.getAllMetaAttributes(),
WebBrowserPage.SafeJsWorld,
self.__processMetaAttributes)
# populate Media tab
browser.page().runJavaScript(Scripts.getAllImages(),
WebBrowserPage.SafeJsWorld,
self.__processImageTags)
def __processImageTags(self, res):
"""
Private method to process the image tags.
@param res result of the JavaScript script
@type list of dict
"""
for img in res:
src = img["src"]
alt = img["alt"]
if not alt:
if src.find("/") == -1:
alt = src
else:
pos = src.rfind("/")
alt = src[pos + 1:]
if not src or not alt:
continue
QTreeWidgetItem(self.imagesTree, [alt, src])
for col in range(self.imagesTree.columnCount()):
self.imagesTree.resizeColumnToContents(col)
if self.imagesTree.columnWidth(0) > 300:
self.imagesTree.setColumnWidth(0, 300)
self.imagesTree.setCurrentItem(self.imagesTree.topLevelItem(0))
self.imagesTree.setContextMenuPolicy(Qt.CustomContextMenu)
self.imagesTree.customContextMenuRequested.connect(
self.__imagesTreeContextMenuRequested)
def __processMetaAttributes(self, res):
"""
Private method to process the meta attributes.
@param res result of the JavaScript script
@type list of dict
"""
for meta in res:
content = meta["content"]
name = meta["name"]
if not name:
name = meta["httpequiv"]
if not name or not content:
continue
if meta["charset"]:
self.encodingLabel.setText(meta["charset"])
if "charset=" in content:
self.encodingLabel.setText(
content[content.index("charset=") + 8:])
QTreeWidgetItem(self.tagsTree, [name, content])
for col in range(self.tagsTree.columnCount()):
self.tagsTree.resizeColumnToContents(col)
@pyqtSlot(QTreeWidgetItem, QTreeWidgetItem)
def on_imagesTree_currentItemChanged(self, current, previous):
"""
Private slot to show a preview of the selected image.
@param current current image entry (QTreeWidgetItem)
@param previous old current entry (QTreeWidgetItem)
"""
if current is None:
return
imageUrl = QUrl(current.text(1))
if imageUrl.isRelative():
imageUrl = self.__baseUrl.resolved(imageUrl)
pixmap = QPixmap()
loading = False
if imageUrl.scheme() == "data":
encodedUrl = current.text(1).encode("utf-8")
imageData = encodedUrl[encodedUrl.find(b",") + 1:]
pixmap = WebBrowserTools.pixmapFromByteArray(imageData)
elif imageUrl.scheme() == "file":
pixmap = QPixmap(imageUrl.toLocalFile())
elif imageUrl.scheme() == "qrc":
pixmap = QPixmap(imageUrl.toString()[3:])
else:
if self.__imageReply is not None:
self.__imageReply.deleteLater()
self.__imageReply = None
from WebBrowser.WebBrowserWindow import WebBrowserWindow
self.__imageReply = WebBrowserWindow.networkManager().get(
QNetworkRequest(imageUrl))
self.__imageReply.finished.connect(self.__imageReplyFinished)
loading = True
self.__showLoadingText()
if not loading:
self.__showPixmap(pixmap)
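        # The "data" branch above relies on the data-URL layout (illustrative):
        # "data:image/png;base64,<payload>" -- the code keeps everything after
        # the first comma and hands it to pixmapFromByteArray().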
@pyqtSlot()
def __imageReplyFinished(self):
"""
Private slot handling the loading of an image.
"""
if self.__imageReply.error() != QNetworkReply.NoError:
return
data = self.__imageReply.readAll()
self.__showPixmap(QPixmap.fromImage(QImage.fromData(data)))
def __showPixmap(self, pixmap):
"""
Private method to show a pixmap in the preview pane.
@param pixmap pixmap to be shown
@type QPixmap
"""
scene = QGraphicsScene(self.imagePreview)
if pixmap.isNull():
self.imagePreview.setBackgroundBrush(
self.__imagePreviewStandardBackground)
scene.addText(self.tr("Preview not available."))
else:
self.imagePreview.setBackgroundBrush(QBrush(self.__tilePixmap))
scene.addPixmap(pixmap)
self.imagePreview.setScene(scene)
def __showLoadingText(self):
"""
Private method to show some text while loading an image.
"""
self.imagePreview.setBackgroundBrush(
self.__imagePreviewStandardBackground)
scene = QGraphicsScene(self.imagePreview)
scene.addText(self.tr("Loading..."))
self.imagePreview.setScene(scene)
def __imagesTreeContextMenuRequested(self, pos):
"""
Private slot to show a context menu for the images list.
@param pos position for the menu (QPoint)
"""
itm = self.imagesTree.itemAt(pos)
if itm is None:
return
menu = QMenu()
act = menu.addAction(self.tr("Copy Image Location to Clipboard"))
act.setData(itm.text(1))
act.triggered.connect(lambda: self.__copyAction(act))
act = menu.addAction(self.tr("Copy Image Name to Clipboard"))
act.setData(itm.text(0))
act.triggered.connect(lambda: self.__copyAction(act))
menu.addSeparator()
act = menu.addAction(self.tr("Save Image"))
act.setData(self.imagesTree.indexOfTopLevelItem(itm))
act.triggered.connect(lambda: self.__saveImage(act))
menu.exec_(self.imagesTree.viewport().mapToGlobal(pos))
def __copyAction(self, act):
"""
Private slot to copy the image URL or the image name to the clipboard.
@param act reference to the action that triggered
@type QAction
"""
QApplication.clipboard().setText(act.data())
def __saveImage(self, act):
"""
Private slot to save the selected image to disk.
@param act reference to the action that triggered
@type QAction
"""
index = act.data()
itm = self.imagesTree.topLevelItem(index)
if itm is None:
return
if (
not self.imagePreview.scene() or
len(self.imagePreview.scene().items()) == 0
): |
pixmapItem = self.imagePreview.scene().items()[0]
if not isinstance(pixmapItem, QGraphicsPixmapItem):
return
if pixmapItem.pixmap().isNull():
E5MessageBox.warning(
self,
self.tr("Save Image"),
self.tr(
"""<p>This preview is not available.</p>"""))
return
imageFileName = WebBrowserTools.getFileNameFromUrl(QUrl(itm.text(1)))
index = imageFileName.rfind(".")
if index != -1:
imageFileName = imageFileName[:index] + ".png"
filename = E5FileDialog.getSaveFileName(
self,
self.tr("Save Image"),
imageFileName,
self.tr("All Files (*)"),
E5FileDialog.Options(E5FileDialog.DontConfirmOverwrite))
if not filename:
return
if not pixmapItem.pixmap().save(filename, "PNG"):
E5MessageBox.critical(
self,
self.tr("Save Image"),
self.tr(
"""<p>Cannot write to file <b>{0}</b>.</p>""")
.format(filename))
return | return | random_line_split |
kNN.py | # -*- coding:UTF-8 -*-
import numpy as np
import operator
def createDataSet():
dataMat = np.array([[1.0,1.1], [1.0,1.0], [0.0,1.0], [0.0,1.1]])
labelVec = ['love', 'love', 'hate', 'hate']
return dataMat, labelVec
def classify0(inX, dataMat, labelVec, k):
    # number of training samples
    dataSize = dataMat.shape[0]
    # tile inX so it has the same shape as the training set
    inXMat = np.tile(inX, (dataSize, 1))
    # Euclidean distance from inX to every training point
    diffMat = inXMat - dataMat
    sqDiffMat = diffMat ** 2
    sqDistance = sqDiffMat.sum(axis = 1)
    distances = sqDistance ** 0.5
    sortedDistIndicies = distances.argsort()
    classCount = {}
    # take the k nearest points in order of increasing distance
    for i in range(k):
        voteILabel = labelVec[sortedDistIndicies[i]]
        classCount[voteILabel] = classCount.get(voteILabel, 0) + 1
    # count the labels among the k nearest points; the most frequent label is inX's class
sortedClassCount = sorted(classCount.iteritems(), key=operator.itemgetter(1), reverse=True)
return sortedClassCount[0][0]
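# Quick sanity check (illustrative, using the toy set from createDataSet above):
#   dataMat, labelVec = createDataSet()
#   classify0([0.9, 1.0], dataMat, labelVec, 3)  # -> 'love' (2 of 3 neighbors)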
'''
Min-max normalization: rescale every feature column to [0, 1].
'''
def autoNorm(dataMat):
    # size of the training set
    dataSize = dataMat.shape[0]
    # column-wise minimum
    minVec = np.min(dataMat, axis = 0)
    minMat = np.tile(minVec, (dataSize, 1))
    # column-wise maximum
    maxVec = np.max(dataMat, axis = 0)
    maxMat = np.tile(maxVec, (dataSize, 1))
    # per-column range: max - min
    diffMat = maxMat - minMat
    # shift the data by the column minimum
    diffDataMat = dataMat - minMat
    # normalize
normMat = diffDataMat / diffMat
return normMat
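# Example (hypothetical input):
#   autoNorm(np.array([[0., 10.], [5., 20.], [10., 30.]]))
# rescales each column via (x - min) / (max - min) and returns
#   [[0. , 0. ], [0.5, 0.5], [1. , 1. ]]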
def file2matrix(filename):
fr = open(filename)
lines = fr.readlines()
numberOfLines = len(lines)
dataMat = np.zeros((numberOfLines, 3))
classLabelVec = []
index = 0
for line in lines:
listFromLine = line.strip().split('\t')
dataMat[index, :] = listFromLine[0:3]
classLabelVec.append(int(listFromLine[-1]))
index += 1
return dataMat, classLabelVec
def datingClassTest():
hoRatio = 0.50
datingDataMat, datingLabelVec = file2matrix('datingTestSet2.txt')
    # normalize the features
    normMat = autoNorm(datingDataMat)
    m = normMat.shape[0]
    # number of samples held out for testing
numTestVecs = int(m * hoRatio)
errorCount = 0.0
for i in range(numTestVecs):
classifierResult = classify0(normMat[i,:], normMat[numTestVecs:m,:], datingLabelVec[numTestVecs:m], 3)
print "the classifier came back with: %d, the real answer is: %d" % (classifierResult, datingLabelVec[i])
if (classifierResult != datingLabelVec[i]):
errorCount += 1.0
print "the total error rate is:%f" % (errorCount/float(numTestVecs))
print errorCount
def img2vector(filename):
returnVec = np.zeros((1, 1024))
fr = open(filename)
for i in range(32):
lineStr = fr.readline()
for j in range(32):
returnVec[0, i * 32 + j] = int(l |
return returnVec
def handwritingClassTest():
from os import listdir
from sklearn.neighbors import KNeighborsClassifier as kNN
hwLabels = []
    # file names in the training directory
    trainingFileList = listdir('trainingDigits')
    m = len(trainingFileList)
    trainingMat = np.zeros((m, 1024))
    # gather the training vectors and their labels
    for i in range(m):
        fileNameStr = trainingFileList[i]
        classNumStr = int(fileNameStr.split('_')[0])
        hwLabels.append(classNumStr)
        trainingMat[i, :] = img2vector('trainingDigits/%s' % fileNameStr)
    # build the kNN classifier
    neigh = kNN(n_neighbors = 10, algorithm = 'auto')
    # fit the model; trainingMat is the training matrix and hwLabels its labels
    neigh.fit(trainingMat, hwLabels)
testFileList = listdir('testDigits')
errorCount = 0.0
mTest = len(testFileList)
for i in range(mTest):
fileNameStr = testFileList[i]
classNumStr = int(fileNameStr.split('_')[0])
vectorUnderTest = img2vector('testDigits/%s' % fileNameStr)
#classifierResult = classify0(vectorUnderTest, trainingMat, hwLabels, 3)
        classifierResult = neigh.predict(vectorUnderTest)[0]
print "the classifier came back with: %d, the real answer is: %d" % (classifierResult, classNumStr)
if (classifierResult != classNumStr):
errorCount += 1.0
print "\nthe total number of errors is: %d" % errorCount
print "\nthe total error rate is: %f" % (errorCount / float(mTest))
return trainingFileList
"""
Description: visualize the dating data set.
Parameters:
    datingDataMat - feature matrix
    datingLabels - class labels
Returns:
    None
Modify:
    2017-03-24
"""
def showdatas(datingDataMat, datingLabels):
import matplotlib.pyplot as plt
import matplotlib.lines as mlines
    # set up a font that can render CJK characters (kept for reference)
    #font = FontProperties(fname=r"c:\windows\fonts\simsun.ttc", size=14)
    # split the figure canvas into a grid; the axes share neither x nor y, and the canvas size is (13,8)
    # with nrows=2, ncols=2 the canvas is divided into four panels; axs[0][0] is the first panel of the first row
fig, axs = plt.subplots(nrows=2, ncols=2,sharex=False, sharey=False, figsize=(13,8))
numberOfLabels = len(datingLabels)
LabelsColors = []
for i in datingLabels:
if i == 1:
LabelsColors.append('black')
if i == 2:
LabelsColors.append('orange')
if i == 3:
LabelsColors.append('red')
    # scatter plot of columns 0 (frequent flyer miles) and 1 (video game time); point size 15, alpha 0.5
    axs[0][0].scatter(x=datingDataMat[:,0], y=datingDataMat[:,1], color=LabelsColors,s=15, alpha=.5)
    # set the title and the x/y axis labels
    axs0_title_text = axs[0][0].set_title(u'Frequent flyer miles earned per year vs. time spent playing video games (%)')
    axs0_xlabel_text = axs[0][0].set_xlabel(u'Frequent flyer miles earned per year')
    axs0_ylabel_text = axs[0][0].set_ylabel(u'Time spent playing video games (%)')
    plt.setp(axs0_title_text, size=9, weight='bold', color='red')
    plt.setp(axs0_xlabel_text, size=7, weight='bold', color='black')
    plt.setp(axs0_ylabel_text, size=7, weight='bold', color='black')
    # scatter plot of columns 0 (frequent flyer miles) and 2 (ice cream); point size 15, alpha 0.5
    axs[0][1].scatter(x=datingDataMat[:,0], y=datingDataMat[:,2], color=LabelsColors,s=15, alpha=.5)
    # set the title and the x/y axis labels
    axs1_title_text = axs[0][1].set_title(u'Frequent flyer miles earned per year vs. liters of ice cream consumed per week')
    axs1_xlabel_text = axs[0][1].set_xlabel(u'Frequent flyer miles earned per year')
    axs1_ylabel_text = axs[0][1].set_ylabel(u'Liters of ice cream consumed per week')
    plt.setp(axs1_title_text, size=9, weight='bold', color='red')
    plt.setp(axs1_xlabel_text, size=7, weight='bold', color='black')
    plt.setp(axs1_ylabel_text, size=7, weight='bold', color='black')
    # scatter plot of columns 1 (video game time) and 2 (ice cream); point size 15, alpha 0.5
    axs[1][0].scatter(x=datingDataMat[:,1], y=datingDataMat[:,2], color=LabelsColors,s=15, alpha=.5)
    # set the title and the x/y axis labels
    axs2_title_text = axs[1][0].set_title(u'Time spent playing video games (%) vs. liters of ice cream consumed per week')
    axs2_xlabel_text = axs[1][0].set_xlabel(u'Time spent playing video games (%)')
    axs2_ylabel_text = axs[1][0].set_ylabel(u'Liters of ice cream consumed per week')
    plt.setp(axs2_title_text, size=9, weight='bold', color='red')
    plt.setp(axs2_xlabel_text, size=7, weight='bold', color='black')
    plt.setp(axs2_ylabel_text, size=7, weight='bold', color='black')
    # build the legend entries
    didntLike = mlines.Line2D([], [], color='black', marker='.',
                      markersize=6, label='didntLike')
    smallDoses = mlines.Line2D([], [], color='orange', marker='.',
                      markersize=6, label='smallDoses')
    largeDoses = mlines.Line2D([], [], color='red', marker='.',
                      markersize=6, label='largeDoses')
    # attach the legend to each panel
    axs[0][0].legend(handles=[didntLike,smallDoses,largeDoses])
    axs[0][1].legend(handles=[didntLike,smallDoses,largeDoses])
    axs[1][0].legend(handles=[didntLike,smallDoses,largeDoses])
    # show the figure
plt.show()
def showDataPlot():
import matplotlib.pyplot as plt
plt.plot([1, 2, 3, 4], [1, 4, 9, 16], 'ro')
plt.axis([0, 6, 0, 20])
plt.ylabel('some numbers')
plt.show()
def showDifPlot():
import numpy as np
import matplotlib.pyplot as plt
# evenly sampled time at 200ms intervals
t = np.arange(0., 5., 0.2)
# red dashes, blue squares and green triangles
plt.plot(t, t, 'r--', t, t**2, 'bs', t, t**3, 'g^')
plt.show()
def showPic():
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import numpy as np
img=mpimg.imread('stinkbug.png')
print img
imgplot = plt.imshow(img)
plt.show()
    # single RGB channel (grayscale view)
    lum_img = img[:, :, 0]
    plt.imshow(lum_img)
    plt.show()
    # heat map
    plt.imshow(lum_img, cmap="hot")
    plt.show()
    # spectral colormap with a colorbar
imgplot = plt.imshow(lum_img)
imgplot.set_cmap('nipy_spectral')
plt.colorbar()
plt.show()
def showHistogram():
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import numpy as np
img=mpimg.imread('stinkbug.png')
lum_img = img[:, :, 0]
plt.hist(lum_img.ravel(), bins=256, range=(0.0, 1.0), fc='k', ec='k')
plt.show()
def showComplex():
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import numpy as np
img=mpimg.imread('stinkbug.png')
lum_img = img[:, :, 0]
imgplot = plt.imshow(lum_img, clim=(0.0, 0.7))
plt.show()
def showImages():
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import numpy as np
from PIL import Image
img = Image.open('stinkbug.png')
img.thumbnail((64, 64), Image.ANTIALIAS) # resizes image in-place
imgplot = plt.imshow(img, interpolation="bicubic")
plt.show()
def showSubplot():
import matplotlib.pyplot as plt
ax1 = plt.subplot2grid((3, 3), (0, 0), colspan=3)
ax2 = plt.subplot2grid((3, 3), (1, 0), colspan=2)
ax3 = plt.subplot2grid((3, 3), (1, 2), rowspan=2)
ax4 = plt.subplot2grid((3, 3), (2, 0))
ax5 = plt.subplot2grid((3, 3), (2, 1))
plt.show()
def showGredSpec():
    import matplotlib.pyplot as plt
    import matplotlib.gridspec as gridspec
gs = gridspec.GridSpec(3, 3)
ax1 = plt.subplot(gs[0, :])
ax2 = plt.subplot(gs[1, :-1])
ax3 = plt.subplot(gs[1:, -1])
ax4 = plt.subplot(gs[-1, 0])
ax5 = plt.subplot(gs[-1, -2])
plt.show()
def showDemoGridSpec():
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import numpy as np
try:
from itertools import product
except ImportError:
# product is new in v 2.6
def product(*args, **kwds):
pools = map(tuple, args) * kwds.get('repeat', 1)
result = [[]]
for pool in pools:
result = [x+[y] for x in result for y in pool]
for prod in result:
yield tuple(prod)
def squiggle_xy(a, b, c, d, i=np.arange(0.0, 2*np.pi, 0.05)):
return np.sin(i*a)*np.cos(i*b), np.sin(i*c)*np.cos(i*d)
fig = plt.figure(figsize=(8, 8))
# gridspec inside gridspec
outer_grid = gridspec.GridSpec(4, 4, wspace=0.0, hspace=0.0)
for i in range(16):
inner_grid = gridspec.GridSpecFromSubplotSpec(3, 3,
subplot_spec=outer_grid[i], wspace=0.0, hspace=0.0)
a, b = int(i/4)+1,i%4+1
for j, (c, d) in enumerate(product(range(1, 4), repeat=2)):
ax = plt.Subplot(fig, inner_grid[j])
ax.plot(*squiggle_xy(a, b, c, d))
ax.set_xticks([])
ax.set_yticks([])
fig.add_subplot(ax)
all_axes = fig.get_axes()
#show only the outside spines
for ax in all_axes:
for sp in ax.spines.values():
sp.set_visible(False)
if ax.is_first_row():
ax.spines['top'].set_visible(True)
if ax.is_last_row():
ax.spines['bottom'].set_visible(True)
if ax.is_first_col():
ax.spines['left'].set_visible(True)
if ax.is_last_col():
ax.spines['right'].set_visible(True)
plt.show()
if __name__ == '__main__':
handwritingClassTest() | ineStr[i]) | identifier_name |
kNN.py | # -*- coding:UTF-8 -*-
import numpy as np
import operator
def createDataSet():
dataMat = np.array([[1.0,1.1], [1.0,1.0], [0.0,1.0], [0.0,1.1]])
labelVec = ['love', 'love', 'hate', 'hate']
return dataMat, labelVec
def classify0(inX, dataMat, labelVec, k):
    # number of training samples
    dataSize = dataMat.shape[0]
    # tile inX so it has the same shape as the training set
    inXMat = np.tile(inX, (dataSize, 1))
    # Euclidean distance from inX to every training point
    diffMat = inXMat - dataMat
    sqDiffMat = diffMat ** 2
    sqDistance = sqDiffMat.sum(axis = 1)
    distances = sqDistance ** 0.5
    sortedDistIndicies = distances.argsort()
    classCount = {}
    # take the k nearest points in order of increasing distance
    for i in range(k):
        voteILabel = labelVec[sortedDistIndicies[i]]
        classCount[voteILabel] = classCount.get(voteILabel, 0) + 1
    # count the labels among the k nearest points; the most frequent label is inX's class
sortedClassCount = sorted(classCount.iteritems(), key=operator.itemgetter(1), reverse=True)
return sortedClassCount[0][0]
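# Illustrative call, assuming the toy data from createDataSet() above:
#   dataMat, labelVec = createDataSet()
#   classify0([0.0, 0.9], dataMat, labelVec, 3)  # -> 'hate'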
'''
Min-max normalization: rescale every feature column to [0, 1].
'''
def autoNorm(dataMat):
    # size of the training set
    dataSize = dataMat.shape[0]
    # column-wise minimum
    minVec = np.min(dataMat, axis = 0)
    minMat = np.tile(minVec, (dataSize, 1))
    # column-wise maximum
    maxVec = np.max(dataMat, axis = 0)
    maxMat = np.tile(maxVec, (dataSize, 1))
    # per-column range: max - min
    diffMat = maxMat - minMat
    # shift the data by the column minimum
    diffDataMat = dataMat - minMat
    # normalize
normMat = diffDataMat / diffMat
return normMat
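# Illustrative input/output: each column is mapped by (x - min) / (max - min),
# e.g. autoNorm(np.array([[1., 100.], [2., 200.], [3., 300.]])) returns
# [[0. , 0. ], [0.5, 0.5], [1. , 1. ]]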
def file2matrix(filename):
fr = open(filename)
lines = fr.readlines()
numberOfLines = len(lines)
dataMat = np.zeros((numberOfLines, 3))
classLabelVec = []
index = 0
for line in lines:
listFromLine = line.strip().split('\t')
dataMat[index, :] = listFromLine[0:3]
classLabelVec.append(int(listFromLine[-1]))
index += 1
return dataMat, classLabelVec
def datingClassTest():
hoRatio = 0.50
datingDataMat, datingLabelVec = file2matrix('datingTestSet2.txt')
    # normalize the features
    normMat = autoNorm(datingDataMat)
    m = normMat.shape[0]
    # number of samples held out for testing
numTestVecs = int(m * hoRatio)
errorCount = 0.0
for i in range(numTestVecs):
classifierResult = classify0(normMat[i,:], normMat[numTestVecs:m,:], datingLabelVec[numTestVecs:m], 3)
print "the classifier came back with: %d, the real answer is: %d" % (classifierResult, datingLabelVec[i])
if (classifierResult != datingLabelVec[i]):
errorCount += 1.0
print "the total error rate is:%f" % (errorCount/float(numTestVecs))
print errorCount
def img2vector(filename):
returnVec = np.zeros((1, 1024))
fr = open(filename)
for i in range(32):
lineStr = fr.readline()
for j in range(32):
            returnVec[0, i * 32 + j] = int(lineStr[j])
return returnVec
def handwritingClassTest():
from os import listdir
from sklearn.neighbors import KNeighborsClassifier as kNN
hwLabels = []
    # file names in the training directory
    trainingFileList = listdir('trainingDigits')
    m = len(trainingFileList)
    trainingMat = np.zeros((m, 1024))
    # gather the training vectors and their labels
    for i in range(m):
        fileNameStr = trainingFileList[i]
        classNumStr = int(fileNameStr.split('_')[0])
        hwLabels.append(classNumStr)
        trainingMat[i, :] = img2vector('trainingDigits/%s' % fileNameStr)
    # build the kNN classifier
    neigh = kNN(n_neighbors = 10, algorithm = 'auto')
    # fit the model; trainingMat is the training matrix and hwLabels its labels
    neigh.fit(trainingMat, hwLabels)
testFileList = listdir('testDigits')
errorCount = 0.0
mTest = len(testFileList)
for i in range(mTest):
fileNameStr = testFileList[i]
classNumStr = int(fileNameStr.split('_')[0])
vectorUnderTest = img2vector('testDigits/%s' % fileNameStr)
#classifierResult = classify0(vectorUnderTest, trainingMat, hwLabels, 3)
        classifierResult = neigh.predict(vectorUnderTest)[0]
print "the classifier came back with: %d, the real answer is: %d" % (classifierResult, classNumStr)
if (classifierResult != classNumStr):
errorCount += 1.0
print "\nthe total number of errors is: %d" % errorCount
print "\nthe total error rate is: %f" % (errorCount / float(mTest))
return trainingFileList
"""
Description: visualize the dating data set.
Parameters:
    datingDataMat - feature matrix
    datingLabels - class labels
Returns:
    None
Modify:
    2017-03-24
"""
def showdatas(datingDataMat, datingLabels):
import matplotlib.pyplot as plt
import matplotlib.lines as mlines
    # set up a font that can render CJK characters (kept for reference)
    #font = FontProperties(fname=r"c:\windows\fonts\simsun.ttc", size=14)
    # split the figure canvas into a grid; the axes share neither x nor y, and the canvas size is (13,8)
    # with nrows=2, ncols=2 the canvas is divided into four panels; axs[0][0] is the first panel of the first row
fig, axs = plt.subplots(nrows=2, ncols=2,sharex=False, sharey=False, figsize=(13,8))
numberOfLabels = len(datingLabels)
LabelsColors = []
for i in datingLabels:
if i == 1:
LabelsColors.append('black')
if i == 2:
LabelsColors.append('orange')
if i == 3:
LabelsColors.append('red')
    # scatter plot of columns 0 (frequent flyer miles) and 1 (video game time); point size 15, alpha 0.5
    axs[0][0].scatter(x=datingDataMat[:,0], y=datingDataMat[:,1], color=LabelsColors,s=15, alpha=.5)
    # set the title and the x/y axis labels
    axs0_title_text = axs[0][0].set_title(u'Frequent flyer miles earned per year vs. time spent playing video games (%)')
    axs0_xlabel_text = axs[0][0].set_xlabel(u'Frequent flyer miles earned per year')
    axs0_ylabel_text = axs[0][0].set_ylabel(u'Time spent playing video games (%)')
    plt.setp(axs0_title_text, size=9, weight='bold', color='red')
    plt.setp(axs0_xlabel_text, size=7, weight='bold', color='black')
    plt.setp(axs0_ylabel_text, size=7, weight='bold', color='black')
    # scatter plot of columns 0 (frequent flyer miles) and 2 (ice cream); point size 15, alpha 0.5
    axs[0][1].scatter(x=datingDataMat[:,0], y=datingDataMat[:,2], color=LabelsColors,s=15, alpha=.5)
    # set the title and the x/y axis labels
    axs1_title_text = axs[0][1].set_title(u'Frequent flyer miles earned per year vs. liters of ice cream consumed per week')
    axs1_xlabel_text = axs[0][1].set_xlabel(u'Frequent flyer miles earned per year')
    axs1_ylabel_text = axs[0][1].set_ylabel(u'Liters of ice cream consumed per week')
    plt.setp(axs1_title_text, size=9, weight='bold', color='red')
    plt.setp(axs1_xlabel_text, size=7, weight='bold', color='black')
    plt.setp(axs1_ylabel_text, size=7, weight='bold', color='black')
    # scatter plot of columns 1 (video game time) and 2 (ice cream); point size 15, alpha 0.5
    axs[1][0].scatter(x=datingDataMat[:,1], y=datingDataMat[:,2], color=LabelsColors,s=15, alpha=.5)
    # set the title and the x/y axis labels
    axs2_title_text = axs[1][0].set_title(u'Time spent playing video games (%) vs. liters of ice cream consumed per week')
    axs2_xlabel_text = axs[1][0].set_xlabel(u'Time spent playing video games (%)')
    axs2_ylabel_text = axs[1][0].set_ylabel(u'Liters of ice cream consumed per week')
    plt.setp(axs2_title_text, size=9, weight='bold', color='red')
    plt.setp(axs2_xlabel_text, size=7, weight='bold', color='black')
    plt.setp(axs2_ylabel_text, size=7, weight='bold', color='black')
    # build the legend entries
    didntLike = mlines.Line2D([], [], color='black', marker='.',
                      markersize=6, label='didntLike')
    smallDoses = mlines.Line2D([], [], color='orange', marker='.',
                      markersize=6, label='smallDoses')
    largeDoses = mlines.Line2D([], [], color='red', marker='.',
                      markersize=6, label='largeDoses')
    # attach the legend to each panel
    axs[0][0].legend(handles=[didntLike,smallDoses,largeDoses])
    axs[0][1].legend(handles=[didntLike,smallDoses,largeDoses])
    axs[1][0].legend(handles=[didntLike,smallDoses,largeDoses])
    # show the figure
plt.show()
def showDataPlot():
import matplotlib.pyplot as plt
plt.plot([1, 2, 3, 4], [1, 4, 9, 16], 'ro')
plt.axis([0, 6, 0, 20])
plt.ylabel('some numbers')
plt.show()
def showDifPlot():
import numpy as np
import matplotlib.pyplot as plt
# evenly sampled time at 200ms intervals
t = np.arange(0., 5., 0.2)
# red dashes, blue squares and green triangles
plt.plot(t, t, 'r--', t, t**2, 'bs', t, t**3, 'g^')
plt.show()
def showPic():
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import numpy as np
img=mpimg.imread('stinkbug.png')
print img
imgplot = plt.imshow(img)
plt.show()
    # single RGB channel (grayscale view)
    lum_img = img[:, :, 0]
    plt.imshow(lum_img)
    plt.show()
    # heat map
    plt.imshow(lum_img, cmap="hot")
    plt.show()
    # spectral colormap with a colorbar
imgplot = plt.imshow(lum_img)
imgplot.set_cmap('nipy_spectral')
plt.colorbar()
plt.show()
def showHistogram():
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import numpy as np
img=mpimg.imread('stinkbug.png')
lum_img = img[:, :, 0]
plt.hist(lum_img.ravel(), bins=256, range=(0.0, 1.0), fc='k', ec='k')
plt.show()
def showComplex():
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import numpy as np
img=mpimg.imread('stinkbug.png')
lum_img = img[:, :, 0]
imgplot = plt.imshow(lum_img, clim=(0.0, 0.7))
plt.show()
def showImages():
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import numpy as np
from PIL import Image
img = Image.open('stinkbug.png')
img.thumbnail((64, 64), Image.ANTIALIAS) # resizes image in-place
imgplot = plt.imshow(img, interpolation="bicubic")
plt.show()
def showSubplot():
import matplotlib.pyplot as plt
ax1 = plt.subplot2grid((3, 3), (0, 0), colspan=3)
ax2 = plt.subplot2grid((3, 3), (1, 0), colspan=2)
ax3 = plt.subplot2grid((3, 3), (1, 2), rowspan=2)
ax4 = plt.subplot2grid((3, 3), (2, 0))
ax5 = plt.subplot2grid((3, 3), (2, 1))
plt.show()
def showGredSpec():
    import matplotlib.pyplot as plt
    import matplotlib.gridspec as gridspec
gs = gridspec.GridSpec(3, 3)
ax1 = plt.subplot(gs[0, :])
ax2 = plt.subplot(gs[1, :-1])
ax3 = plt.subplot(gs[1:, -1])
ax4 = plt.subplot(gs[-1, 0])
ax5 = plt.subplot(gs[-1, -2])
plt.show()
def showDemoGridSpec():
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import numpy as np
try:
from itertools import product
except ImportError:
# product is new in v 2.6
def product(*args, **kwds):
pools = map(tuple, args) * kwds.get('repeat', 1)
result = [[]]
for pool in pools:
result = [x+[y] for x in result for y in pool]
for prod in result:
yield tuple(prod)
def squiggle_xy(a, b, c, d, i=np.arange(0.0, 2*np.pi, 0.05)):
return np.sin(i*a)*np.cos(i*b), np.sin(i*c)*np.cos(i*d)
fig = plt.figure(figsize=(8, 8))
# gridspec inside gridspec
outer_grid = gridspec.Gr | ate(product(range(1, 4), repeat=2)):
ax = plt.Subplot(fig, inner_grid[j])
ax.plot(*squiggle_xy(a, b, c, d))
ax.set_xticks([])
ax.set_yticks([])
fig.add_subplot(ax)
all_axes = fig.get_axes()
#show only the outside spines
for ax in all_axes:
for sp in ax.spines.values():
sp.set_visible(False)
if ax.is_first_row():
ax.spines['top'].set_visible(True)
if ax.is_last_row():
ax.spines['bottom'].set_visible(True)
if ax.is_first_col():
ax.spines['left'].set_visible(True)
if ax.is_last_col():
ax.spines['right'].set_visible(True)
plt.show()
if __name__ == '__main__':
handwritingClassTest() | idSpec(4, 4, wspace=0.0, hspace=0.0)
for i in range(16):
inner_grid = gridspec.GridSpecFromSubplotSpec(3, 3,
subplot_spec=outer_grid[i], wspace=0.0, hspace=0.0)
a, b = int(i/4)+1,i%4+1
for j, (c, d) in enumer | identifier_body |
kNN.py | # -*- coding:UTF-8 -*-
import numpy as np
import operator
def createDataSet():
dataMat = np.array([[1.0,1.1], [1.0,1.0], [0.0,1.0], [0.0,1.1]])
labelVec = ['love', 'love', 'hate', 'hate']
return dataMat, labelVec
def classify0(inX, dataMat, labelVec, k):
    # number of training samples
    dataSize = dataMat.shape[0]
    # tile inX so it has the same shape as the training set
    inXMat = np.tile(inX, (dataSize, 1))
    # Euclidean distance from inX to every training point
    diffMat = inXMat - dataMat
    sqDiffMat = diffMat ** 2
    sqDistance = sqDiffMat.sum(axis = 1)
    distances = sqDistance ** 0.5
    sortedDistIndicies = distances.argsort()
    classCount = {}
    # take the k nearest points in order of increasing distance
    for i in range(k):
        voteILabel = labelVec[sortedDistIndicies[i]]
        classCount[voteILabel] = classCount.get(voteILabel, 0) + 1
    # count the labels among the k nearest points; the most frequent label is inX's class
sortedClassCount = sorted(classCount.iteritems(), key=operator.itemgetter(1), reverse=True)
return sortedClassCount[0][0]
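# Example (hypothetical query point) against the toy createDataSet() data:
#   dataMat, labelVec = createDataSet()
#   classify0([1.0, 1.2], dataMat, labelVec, 3)  # -> 'love'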
'''
Min-max normalization: rescale every feature column to [0, 1].
'''
def autoNorm(dataMat):
    # size of the training set
    dataSize = dataMat.shape[0]
    # column-wise minimum
    minVec = np.min(dataMat, axis = 0)
    minMat = np.tile(minVec, (dataSize, 1))
    # column-wise maximum
    maxVec = np.max(dataMat, axis = 0)
    maxMat = np.tile(maxVec, (dataSize, 1))
    # per-column range: max - min
    diffMat = maxMat - minMat
    # shift the data by the column minimum
    diffDataMat = dataMat - minMat
    # normalize
normMat = diffDataMat / diffMat
return normMat
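# Hypothetical example: autoNorm(np.array([[2., 0.], [4., 5.], [6., 10.]]))
# yields [[0. , 0. ], [0.5, 0.5], [1. , 1. ]] since every column is rescaled
# by (x - min) / (max - min)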
def file2matrix(filename):
fr = open(filename)
lines = fr.readlines()
numberOfLines = len(lines)
dataMat = np.zeros((numberOfLines, 3))
classLabelVec = []
index = 0
for line in lines:
listFromLine = line.strip().split('\t')
dataMat[index, :] = listFromLine[0:3]
classLabelVec.append(int(listFromLine[-1]))
index += 1
return dataMat, classLabelVec
def datingClassTest():
hoRatio = 0.50
datingDataMat, datingLabelVec = file2matrix('datingTestSet2.txt')
    # normalize the features
    normMat = autoNorm(datingDataMat)
    m = normMat.shape[0]
    # number of samples held out for testing
numTestVecs = int(m * hoRatio)
errorCount = 0.0
for i in range(numTestVecs):
classifierResult = classify0(normMat[i,:], normMat[numTestVecs:m,:], datingLabelVec[numTestVecs:m], 3)
print "the classifier came back with: %d, the real answer is: %d" % (classifierResult, datingLabelVec[i])
if (classifierResult != datingLabelVec[i]):
errorCount += 1.0
print "the total error rate is:%f" % (errorCount/float(numTestVecs))
print errorCount
def img2vector(filename):
returnVec = np.zeros((1, 1024))
fr = open(filename)
for i in range(32):
lineStr = fr.readline()
for j in range(32):
            returnVec[0, i * 32 + j] = int(lineStr[j])
return returnVec
def handwritingClassTest():
from os import listdir
from sklearn.neighbors import KNeighborsClassifier as kNN
hwLabels = []
    # file names in the training directory
    trainingFileList = listdir('trainingDigits')
    m = len(trainingFileList)
    trainingMat = np.zeros((m, 1024))
    # gather the training vectors and their labels
    for i in range(m):
        fileNameStr = trainingFileList[i]
        classNumStr = int(fileNameStr.split('_')[0])
        hwLabels.append(classNumStr)
        trainingMat[i, :] = img2vector('trainingDigits/%s' % fileNameStr)
    # build the kNN classifier
    neigh = kNN(n_neighbors = 10, algorithm = 'auto')
    # fit the model; trainingMat is the training matrix and hwLabels its labels
    neigh.fit(trainingMat, hwLabels)
testFileList = listdir('testDigits')
errorCount = 0.0
mTest = len(testFileList)
for i in range(mTest):
fileNameStr = testFileList[i]
classNumStr = int(fileNameStr.split('_')[0])
vectorUnderTest = img2vector('testDigits/%s' % fileNameStr)
#classifierResult = classify0(vectorUnderTest, trainingMat, hwLabels, 3)
        classifierResult = neigh.predict(vectorUnderTest)[0]
print "the classifier came back with: %d, the real answer is: %d" % (classifierResult, classNumStr)
if (classifierResult != classNumStr):
errorCount += 1.0
print "\nthe total number of errors is: %d" % errorCount
print "\nthe total error rate is: %f" % (errorCount / float(mTest))
return trainingFileList
"""
Description: visualize the dating data set.
Parameters:
    datingDataMat - feature matrix
    datingLabels - class labels
Returns:
    None
Modify:
    2017-03-24
"""
def showdatas(datingDataMat, datingLabels):
import matplotlib.pyplot as plt
import matplotlib.lines as mlines
    # set up a font that can render CJK characters (kept for reference)
    #font = FontProperties(fname=r"c:\windows\fonts\simsun.ttc", size=14)
    # split the figure canvas into a grid; the axes share neither x nor y, and the canvas size is (13,8)
    # with nrows=2, ncols=2 the canvas is divided into four panels; axs[0][0] is the first panel of the first row
fig, axs = plt.subplots(nrows=2, ncols=2,sharex=False, sharey=False, figsize=(13,8))
numberOfLabels = len(datingLabels)
LabelsColors = []
for i in datingLabels:
if i == 1:
LabelsColors.append('black')
if i == 2:
LabelsColors.append('orange')
if i == 3:
LabelsColors.append('red')
    # scatter plot of columns 0 (frequent flyer miles) and 1 (video game time); point size 15, alpha 0.5
    axs[0][0].scatter(x=datingDataMat[:,0], y=datingDataMat[:,1], color=LabelsColors,s=15, alpha=.5)
    # set the title and the x/y axis labels
    axs0_title_text = axs[0][0].set_title(u'Frequent flyer miles earned per year vs. time spent playing video games (%)')
    axs0_xlabel_text = axs[0][0].set_xlabel(u'Frequent flyer miles earned per year')
    axs0_ylabel_text = axs[0][0].set_ylabel(u'Time spent playing video games (%)')
    plt.setp(axs0_title_text, size=9, weight='bold', color='red')
    plt.setp(axs0_xlabel_text, size=7, weight='bold', color='black')
    plt.setp(axs0_ylabel_text, size=7, weight='bold', color='black')
    # scatter plot of columns 0 (frequent flyer miles) and 2 (ice cream); point size 15, alpha 0.5
    axs[0][1].scatter(x=datingDataMat[:,0], y=datingDataMat[:,2], color=LabelsColors,s=15, alpha=.5)
    # set the title and the x/y axis labels
    axs1_title_text = axs[0][1].set_title(u'Frequent flyer miles earned per year vs. liters of ice cream consumed per week')
    axs1_xlabel_text = axs[0][1].set_xlabel(u'Frequent flyer miles earned per year')
    axs1_ylabel_text = axs[0][1].set_ylabel(u'Liters of ice cream consumed per week')
    plt.setp(axs1_title_text, size=9, weight='bold', color='red')
    plt.setp(axs1_xlabel_text, size=7, weight='bold', color='black')
    plt.setp(axs1_ylabel_text, size=7, weight='bold', color='black')
    # scatter plot of columns 1 (video game time) and 2 (ice cream); point size 15, alpha 0.5
    axs[1][0].scatter(x=datingDataMat[:,1], y=datingDataMat[:,2], color=LabelsColors,s=15, alpha=.5)
    # set the title and the x/y axis labels
    axs2_title_text = axs[1][0].set_title(u'Time spent playing video games (%) vs. liters of ice cream consumed per week')
    axs2_xlabel_text = axs[1][0].set_xlabel(u'Time spent playing video games (%)')
    axs2_ylabel_text = axs[1][0].set_ylabel(u'Liters of ice cream consumed per week')
    plt.setp(axs2_title_text, size=9, weight='bold', color='red')
    plt.setp(axs2_xlabel_text, size=7, weight='bold', color='black')
    plt.setp(axs2_ylabel_text, size=7, weight='bold', color='black')
    # build the legend entries
    didntLike = mlines.Line2D([], [], color='black', marker='.',
                      markersize=6, label='didntLike')
    smallDoses = mlines.Line2D([], [], color='orange', marker='.',
                      markersize=6, label='smallDoses')
    largeDoses = mlines.Line2D([], [], color='red', marker='.',
                      markersize=6, label='largeDoses')
    # attach the legend to each panel
    axs[0][0].legend(handles=[didntLike,smallDoses,largeDoses])
    axs[0][1].legend(handles=[didntLike,smallDoses,largeDoses])
    axs[1][0].legend(handles=[didntLike,smallDoses,largeDoses])
    # show the figure
plt.show()
def showDataPlot():
import matplotlib.pyplot as plt
plt.plot([1, 2, 3, 4], [1, 4, 9, 16], 'ro')
plt.axis([0, 6, 0, 20])
plt.ylabel('some numbers')
plt.show()
def showDifPlot():
import numpy as np
import matplotlib.pyplot as plt
# evenly sampled time at 200ms intervals
t = np.arange(0., 5., 0.2)
# red dashes, blue squares and green triangles
plt.plot(t, t, 'r--', t, t**2, 'bs', t, t**3, 'g^')
plt.show()
def showPic():
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import numpy as np
img=mpimg.imread('stinkbug.png')
print img
imgplot = plt.imshow(img)
plt.show()
    # single RGB channel (grayscale view)
    lum_img = img[:, :, 0]
    plt.imshow(lum_img)
    plt.show()
    # heat map
    plt.imshow(lum_img, cmap="hot")
    plt.show()
    # spectral colormap with a colorbar
imgplot = plt.imshow(lum_img)
imgplot.set_cmap('nipy_spectral')
plt.colorbar()
plt.show()
def showHistogram():
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import numpy as np
img=mpimg.imread('stinkbug.png')
lum_img = img[:, :, 0]
plt.hist(lum_img.ravel(), bins=256, range=(0.0, 1.0), fc='k', ec='k')
plt.show()
def showComplex():
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import numpy as np
img=mpimg.imread('stinkbug.png')
lum_img = img[:, :, 0]
imgplot = plt.imshow(lum_img, clim=(0.0, 0.7))
plt.show()
def showImages():
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import numpy as np
from PIL import Image
img = Image.open('stinkbug.png')
img.thumbnail((64, 64), Image.ANTIALIAS) # resizes image in-place
imgplot = plt.imshow(img, interpolation="bicubic")
plt.show()
def showSubplot():
import matplotlib.pyplot as plt
ax1 = plt.subplot2grid((3, 3), (0, 0), colspan=3)
ax2 = plt.subplot2grid((3, 3), (1, 0), colspan=2)
ax3 = plt.subplot2grid((3, 3), (1, 2), rowspan=2)
ax4 = plt.subplot2grid((3, 3), (2, 0))
ax5 = plt.subplot2grid((3, 3), (2, 1))
plt.show()
def showGredSpec():
    import matplotlib.pyplot as plt
    import matplotlib.gridspec as gridspec
gs = gridspec.GridSpec(3, 3)
ax1 = plt.subplot(gs[0, :])
ax2 = plt.subplot(gs[1, :-1])
ax3 = plt.subplot(gs[1:, -1])
ax4 = plt.subplot(gs[-1, 0])
ax5 = plt.subplot(gs[-1, -2])
plt.show()
def showDemoGridSpec():
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import numpy as np
try:
from itertools import product
except ImportError:
# product is new in v 2.6
def product(*args, **kwds):
pools = map(tuple, args) * kwds.get('repeat', 1)
result = [[]]
for pool in pools:
result = [x+[y] for x in result for y in pool]
for prod in result:
yield tuple(prod)
def squiggle_xy(a, b, c, d, i=np.arange(0.0, 2*np.pi, 0.05)):
return np.sin(i*a)*np.cos(i*b), np.sin(i*c)*np.cos(i*d) | outer_grid = gridspec.GridSpec(4, 4, wspace=0.0, hspace=0.0)
for i in range(16):
inner_grid = gridspec.GridSpecFromSubplotSpec(3, 3,
subplot_spec=outer_grid[i], wspace=0.0, hspace=0.0)
a, b = int(i/4)+1,i%4+1
for j, (c, d) in enumerate(product(range(1, 4), repeat=2)):
ax = plt.Subplot(fig, inner_grid[j])
ax.plot(*squiggle_xy(a, b, c, d))
ax.set_xticks([])
ax.set_yticks([])
fig.add_subplot(ax)
all_axes = fig.get_axes()
#show only the outside spines
for ax in all_axes:
for sp in ax.spines.values():
sp.set_visible(False)
if ax.is_first_row():
ax.spines['top'].set_visible(True)
if ax.is_last_row():
ax.spines['bottom'].set_visible(True)
if ax.is_first_col():
ax.spines['left'].set_visible(True)
if ax.is_last_col():
ax.spines['right'].set_visible(True)
plt.show()
if __name__ == '__main__':
handwritingClassTest() |
fig = plt.figure(figsize=(8, 8))
# gridspec inside gridspec | random_line_split |
kNN.py | # -*- coding:UTF-8 -*-
import numpy as np
import operator
def createDataSet():
dataMat = np.array([[1.0,1.1], [1.0,1.0], [0.0,1.0], [0.0,1.1]])
labelVec = ['love', 'love', 'hate', 'hate']
return dataMat, labelVec
def classify0(inX, dataMat, labelVec, k):
    # number of training samples
    dataSize = dataMat.shape[0]
    # tile inX so it has the same shape as the training set
    inXMat = np.tile(inX, (dataSize, 1))
    # Euclidean distance from inX to every training point
    diffMat = inXMat - dataMat
    sqDiffMat = diffMat ** 2
    sqDistance = sqDiffMat.sum(axis = 1)
    distances = sqDistance ** 0.5
    sortedDistIndicies = distances.argsort()
    classCount = {}
    # take the k nearest points in order of increasing distance
    for i in range(k):
        voteILabel = labelVec[sortedDistIndicies[i]]
        classCount[voteILabel] = classCount.get(voteILabel, 0) + 1
    # count the labels among the k nearest points; the most frequent label is inX's class
sortedClassCount = sorted(classCount.iteritems(), key=operator.itemgetter(1), reverse=True)
return sortedClassCount[0][0]
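# For reference, an illustrative invocation with the createDataSet() sample:
#   dataMat, labelVec = createDataSet()
#   classify0([0.1, 1.0], dataMat, labelVec, 3)  # -> 'hate'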
'''
Min-max normalization: rescale every feature column to [0, 1].
'''
def autoNorm(dataMat):
    # size of the training set
    dataSize = dataMat.shape[0]
    # column-wise minimum
    minVec = np.min(dataMat, axis = 0)
    minMat = np.tile(minVec, (dataSize, 1))
    # column-wise maximum
    maxVec = np.max(dataMat, axis = 0)
    maxMat = np.tile(maxVec, (dataSize, 1))
    # per-column range: max - min
    diffMat = maxMat - minMat
    # shift the data by the column minimum
    diffDataMat = dataMat - minMat
    # normalize
normMat = diffDataMat / diffMat
return normMat
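# Example with made-up numbers: autoNorm(np.array([[0., 8.], [1., 10.], [2., 12.]]))
# returns [[0. , 0. ], [0.5, 0.5], [1. , 1. ]] -- each column rescaled by
# (x - min) / (max - min)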
def file2matrix(filename):
fr = open(filename)
lines = fr.readlines()
numberOfLines = len(lines)
dataMat = np.zeros((numberOfLines, 3))
classLabelVec = []
index = 0
for line in lines:
listFromLine = line.strip().split('\t')
dataMat[index, :] = listFromLine[0:3]
classLabelVec.append(int(listFromLine[-1]))
index += 1
return dataMat, classLabelVec
def datingClassTest():
hoRatio = 0.50
datingDataMat, datingLabelVec = file2matrix('datingTestSet2.txt')
    # normalize the features
    normMat = autoNorm(datingDataMat)
    m = normMat.shape[0]
    # number of samples held out for testing
numTestVecs = int(m * hoRatio)
errorCount = 0.0
for i in range(numTestVecs):
classifierResult = classify0(normMat[i,:], normMat[numTestVecs:m,:], datingLabelVec[numTestVecs:m], 3)
print "the classifier came back with: %d, the real answer is: %d" % (classifierResult, datingLabelV | lineStr = fr.readline()
for j in range(32):
            returnVec[0, i * 32 + j] = int(lineStr[j])
return returnVec
def handwritingClassTest():
from os import listdir
from sklearn.neighbors import KNeighborsClassifier as kNN
hwLabels = []
    # file names in the training directory
    trainingFileList = listdir('trainingDigits')
    m = len(trainingFileList)
    trainingMat = np.zeros((m, 1024))
    # gather the training vectors and their labels
    for i in range(m):
        fileNameStr = trainingFileList[i]
        classNumStr = int(fileNameStr.split('_')[0])
        hwLabels.append(classNumStr)
        trainingMat[i, :] = img2vector('trainingDigits/%s' % fileNameStr)
    # build the kNN classifier
    neigh = kNN(n_neighbors = 10, algorithm = 'auto')
    # fit the model; trainingMat is the training matrix and hwLabels its labels
    neigh.fit(trainingMat, hwLabels)
testFileList = listdir('testDigits')
errorCount = 0.0
mTest = len(testFileList)
for i in range(mTest):
fileNameStr = testFileList[i]
classNumStr = int(fileNameStr.split('_')[0])
vectorUnderTest = img2vector('testDigits/%s' % fileNameStr)
#classifierResult = classify0(vectorUnderTest, trainingMat, hwLabels, 3)
        classifierResult = neigh.predict(vectorUnderTest)[0]
print "the classifier came back with: %d, the real answer is: %d" % (classifierResult, classNumStr)
if (classifierResult != classNumStr):
errorCount += 1.0
print "\nthe total number of errors is: %d" % errorCount
print "\nthe total error rate is: %f" % (errorCount / float(mTest))
return trainingFileList
"""
Description: visualize the dating data set.
Parameters:
    datingDataMat - feature matrix
    datingLabels - class labels
Returns:
    None
Modify:
    2017-03-24
"""
def showdatas(datingDataMat, datingLabels):
import matplotlib.pyplot as plt
import matplotlib.lines as mlines
    # set up a font that can render CJK characters (kept for reference)
    #font = FontProperties(fname=r"c:\windows\fonts\simsun.ttc", size=14)
    # split the figure canvas into a grid; the axes share neither x nor y, and the canvas size is (13,8)
    # with nrows=2, ncols=2 the canvas is divided into four panels; axs[0][0] is the first panel of the first row
fig, axs = plt.subplots(nrows=2, ncols=2,sharex=False, sharey=False, figsize=(13,8))
numberOfLabels = len(datingLabels)
LabelsColors = []
for i in datingLabels:
if i == 1:
LabelsColors.append('black')
if i == 2:
LabelsColors.append('orange')
if i == 3:
LabelsColors.append('red')
    # scatter plot of columns 0 (frequent flyer miles) and 1 (video game time); point size 15, alpha 0.5
    axs[0][0].scatter(x=datingDataMat[:,0], y=datingDataMat[:,1], color=LabelsColors,s=15, alpha=.5)
    # set the title and the x/y axis labels
    axs0_title_text = axs[0][0].set_title(u'Frequent flyer miles earned per year vs. time spent playing video games (%)')
    axs0_xlabel_text = axs[0][0].set_xlabel(u'Frequent flyer miles earned per year')
    axs0_ylabel_text = axs[0][0].set_ylabel(u'Time spent playing video games (%)')
    plt.setp(axs0_title_text, size=9, weight='bold', color='red')
    plt.setp(axs0_xlabel_text, size=7, weight='bold', color='black')
    plt.setp(axs0_ylabel_text, size=7, weight='bold', color='black')
    # scatter plot of columns 0 (frequent flyer miles) and 2 (ice cream); point size 15, alpha 0.5
    axs[0][1].scatter(x=datingDataMat[:,0], y=datingDataMat[:,2], color=LabelsColors,s=15, alpha=.5)
    # set the title and the x/y axis labels
    axs1_title_text = axs[0][1].set_title(u'Frequent flyer miles earned per year vs. liters of ice cream consumed per week')
    axs1_xlabel_text = axs[0][1].set_xlabel(u'Frequent flyer miles earned per year')
    axs1_ylabel_text = axs[0][1].set_ylabel(u'Liters of ice cream consumed per week')
    plt.setp(axs1_title_text, size=9, weight='bold', color='red')
    plt.setp(axs1_xlabel_text, size=7, weight='bold', color='black')
    plt.setp(axs1_ylabel_text, size=7, weight='bold', color='black')
    # scatter plot of columns 1 (video game time) and 2 (ice cream); point size 15, alpha 0.5
    axs[1][0].scatter(x=datingDataMat[:,1], y=datingDataMat[:,2], color=LabelsColors,s=15, alpha=.5)
    # set the title and the x/y axis labels
    axs2_title_text = axs[1][0].set_title(u'Time spent playing video games (%) vs. liters of ice cream consumed per week')
    axs2_xlabel_text = axs[1][0].set_xlabel(u'Time spent playing video games (%)')
    axs2_ylabel_text = axs[1][0].set_ylabel(u'Liters of ice cream consumed per week')
    plt.setp(axs2_title_text, size=9, weight='bold', color='red')
    plt.setp(axs2_xlabel_text, size=7, weight='bold', color='black')
    plt.setp(axs2_ylabel_text, size=7, weight='bold', color='black')
    # build the legend entries
    didntLike = mlines.Line2D([], [], color='black', marker='.',
                      markersize=6, label='didntLike')
    smallDoses = mlines.Line2D([], [], color='orange', marker='.',
                      markersize=6, label='smallDoses')
    largeDoses = mlines.Line2D([], [], color='red', marker='.',
                      markersize=6, label='largeDoses')
    # attach the legend to each panel
    axs[0][0].legend(handles=[didntLike,smallDoses,largeDoses])
    axs[0][1].legend(handles=[didntLike,smallDoses,largeDoses])
    axs[1][0].legend(handles=[didntLike,smallDoses,largeDoses])
    # show the figure
plt.show()
def showDataPlot():
import matplotlib.pyplot as plt
plt.plot([1, 2, 3, 4], [1, 4, 9, 16], 'ro')
plt.axis([0, 6, 0, 20])
plt.ylabel('some numbers')
plt.show()
def showDifPlot():
import numpy as np
import matplotlib.pyplot as plt
# evenly sampled time at 200ms intervals
t = np.arange(0., 5., 0.2)
# red dashes, blue squares and green triangles
plt.plot(t, t, 'r--', t, t**2, 'bs', t, t**3, 'g^')
plt.show()
def showPic():
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import numpy as np
img=mpimg.imread('stinkbug.png')
print img
imgplot = plt.imshow(img)
plt.show()
    # single RGB channel (grayscale view)
    lum_img = img[:, :, 0]
    plt.imshow(lum_img)
    plt.show()
    # heat map
    plt.imshow(lum_img, cmap="hot")
    plt.show()
    # spectral colormap with a colorbar
imgplot = plt.imshow(lum_img)
imgplot.set_cmap('nipy_spectral')
plt.colorbar()
plt.show()
def showHistogram():
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import numpy as np
img=mpimg.imread('stinkbug.png')
lum_img = img[:, :, 0]
plt.hist(lum_img.ravel(), bins=256, range=(0.0, 1.0), fc='k', ec='k')
plt.show()
def showComplex():
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import numpy as np
img=mpimg.imread('stinkbug.png')
lum_img = img[:, :, 0]
imgplot = plt.imshow(lum_img, clim=(0.0, 0.7))
plt.show()
def showImages():
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import numpy as np
from PIL import Image
img = Image.open('stinkbug.png')
img.thumbnail((64, 64), Image.ANTIALIAS) # resizes image in-place
imgplot = plt.imshow(img, interpolation="bicubic")
plt.show()
def showSubplot():
import matplotlib.pyplot as plt
ax1 = plt.subplot2grid((3, 3), (0, 0), colspan=3)
ax2 = plt.subplot2grid((3, 3), (1, 0), colspan=2)
ax3 = plt.subplot2grid((3, 3), (1, 2), rowspan=2)
ax4 = plt.subplot2grid((3, 3), (2, 0))
ax5 = plt.subplot2grid((3, 3), (2, 1))
plt.show()
def showGredSpec():
    import matplotlib.pyplot as plt
    import matplotlib.gridspec as gridspec
gs = gridspec.GridSpec(3, 3)
ax1 = plt.subplot(gs[0, :])
ax2 = plt.subplot(gs[1, :-1])
ax3 = plt.subplot(gs[1:, -1])
ax4 = plt.subplot(gs[-1, 0])
ax5 = plt.subplot(gs[-1, -2])
plt.show()
def showDemoGridSpec():
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import numpy as np
try:
from itertools import product
except ImportError:
# product is new in v 2.6
def product(*args, **kwds):
pools = map(tuple, args) * kwds.get('repeat', 1)
result = [[]]
for pool in pools:
result = [x+[y] for x in result for y in pool]
for prod in result:
yield tuple(prod)
def squiggle_xy(a, b, c, d, i=np.arange(0.0, 2*np.pi, 0.05)):
return np.sin(i*a)*np.cos(i*b), np.sin(i*c)*np.cos(i*d)
fig = plt.figure(figsize=(8, 8))
# gridspec inside gridspec
outer_grid = gridspec.GridSpec(4, 4, wspace=0.0, hspace=0.0)
for i in range(16):
inner_grid = gridspec.GridSpecFromSubplotSpec(3, 3,
subplot_spec=outer_grid[i], wspace=0.0, hspace=0.0)
a, b = int(i/4)+1,i%4+1
for j, (c, d) in enumerate(product(range(1, 4), repeat=2)):
ax = plt.Subplot(fig, inner_grid[j])
ax.plot(*squiggle_xy(a, b, c, d))
ax.set_xticks([])
ax.set_yticks([])
fig.add_subplot(ax)
all_axes = fig.get_axes()
#show only the outside spines
for ax in all_axes:
for sp in ax.spines.values():
sp.set_visible(False)
if ax.is_first_row():
ax.spines['top'].set_visible(True)
if ax.is_last_row():
ax.spines['bottom'].set_visible(True)
if ax.is_first_col():
ax.spines['left'].set_visible(True)
if ax.is_last_col():
ax.spines['right'].set_visible(True)
plt.show()
if __name__ == '__main__':
handwritingClassTest() | ec[i])
if (classifierResult != datingLabelVec[i]):
errorCount += 1.0
print "the total error rate is:%f" % (errorCount/float(numTestVecs))
print errorCount
def img2vector(filename):
returnVec = np.zeros((1, 1024))
fr = open(filename)
for i in range(32):
| conditional_block |
chat-ui.js | /**
* Copyright (c) 2016 OffGrid Networks. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var NKChatUI = function NKChatUI() {
// JQUERY UI ELEMENTS
this.$chatInput = $('.nkchat-window--message-input');
this.$chatInputButton = $('#nkchat-window--message-button');
this.$chatPane = $('.nkchat-box--pane');
this.$$chatItemClass = "nkchat-box--item";
this.$$chatItemHiddenClass = "nkchat-box--item_HIDDEN";
this.$$chatItemPrefix = "nkchat-box--item_";
this.$$chatItemTitleClass = "nkchat-box--message-title";
this.$roomCreate = $('#nkchat-create-room-input');
this.$roomCreateButton = $('#nkchat-create-room-button');
this.$loading = $('.loader');
this.$roomList = $('#nkchat-room-list');
this.$roomTitle = $('#nkchat-current-room');
this.$roomTitle2 = $('#nkchat-current-room-2');
this.$currentUserDivs = $('.nkchat-current-user');
this.$messageTemplates = $('.nkchat-box--item_HIDDEN');
this.$currentUserAvatars = $('.nkchat-current-avatar');
this.$userList = $('#nkchat-user-list');
// CURRENT CONTEXT
this._roomId = null;
this._digitalAssistantUserId = null;
this._digitalAssistantRoomId = null;
// CONSTANTS AND REGEX HELPERS
this.maxLengthUsername = 15;
this.maxLengthUsernameDisplay = 13;
this.maxLengthRoomName = 15;
this.maxLengthMessage = 120;
this.maxUserSearchResults = 100;
this.urlPattern = /\b(?:https?|ftp):\/\/[a-z0-9-+&@#\/%?=~_|!:,.;]*[a-z0-9-+&@#\/%=~_|]/gim;
this.pseudoUrlPattern = /(^|[^\/])(www\.[\S]+(\b|$))/gim;
var self = this;
// Initialize the UI
this.UIbindElements();
this.UIScrollToInput();
// Initialize the Chat
this._chat = new NKChatChannel('Noddy KitKat', function(user){
self._user = user;
self.refreshRooms();
self.UIPaintPrimaryUser();
self._chat.on('room-enter', self._onEnterRoom.bind(self));
self._chat.on('message-add', self._onNewMessage.bind(self));
self._chat.on('message-remove', self._onRemoveMessage.bind(self));
});
};
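// Minimal usage sketch (assumes the page already contains the jQuery-selected
// elements referenced above and that NKChatChannel is loaded; the names are
// taken from this file):
//   var chatUI = new NKChatUI();
//   // later, e.g.: chatUI.sendMessage('hello', function() { /* sent */ });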
Object.defineProperty(NKChatUI.prototype, "userid", {get: function() { return this._user.id; } });
Object.defineProperty(NKChatUI.prototype, "roomid", {get: function() { return this._roomId; } });
NKChatUI.prototype.UIbindElements = function () {
var self = this;
var _converse = function(userText) {
self.$loading.show();
self.sendMessage(userText, function() {
self.$loading.hide();
self.UIClearInput();
});
}
this.$chatInput.keyup(function (event) {
if (event.keyCode === 13) {
_converse($(this).val());
}
});
this.$chatInputButton.bind('click', { self: this }, function(event) {
_converse(self.$chatInput.val());
});
var _createRoom = function(roomName) {
self.$loading.show();
self.createRoom(roomName, function() {
self.$loading.hide();
self.$roomCreate.val('');
});
}
this.$roomCreateButton.bind('click', { self: this }, function(event) {
        _createRoom(self.$roomCreate.val());
});
this.$roomCreate.keyup(function (event) {
if (event.keyCode === 13) {
_createRoom($(this).val());
}
});
};
NKChatUI.prototype.UIScrollChatToBottom = function() {
var element = this.$chatPane;
element.animate({
scrollTop: element[0].scrollHeight
}, 420);
};
NKChatUI.prototype.UIScrollToInput = function() {
var element = this.$chatInput;
$('body, html').animate({
scrollTop: (element.offset().top - window.innerHeight + element[0].offsetHeight) + 20 + 'px'
});
};
NKChatUI.prototype.UIClearInput = function() {
this.$chatInput.val('');
};
NKChatUI.prototype.UIPaintRoomList = function(rooms, cb) {
var self = this;
var template = function(obj) { obj || (obj = {}); var __t, __p = '', __e = _.escape, __j = Array.prototype.join; function print() { __p += __j.call(arguments, '') } with (obj) { __p += '<li data-room-type=\'' + __e(type) + '\' data-room-id=\'' + __e(id) + '\' data-room-name=\'' + __e(name) + '\'>\n<a href=\'#!\' class=\'clearfix '; if (isRoomOpen) { ; __p += ' highlight '; }; __p += '\'>\n<i class=\'fa fa-hashtag\'></i><span class=\'left\' title=\'' + __e(name) + '\'>' + __e(nameTrimmed) + '</span><small class=\'label pull-right bg-green\'>public</small>\n</a>\n</li>'; } return __p };
var selectRoomListItem = function(e) {
var parent = $(this).parent(),
roomId = parent.data('room-id'),
roomName = parent.data('room-name');
self._chat.leaveRoom(self._roomId);
self._roomId = roomId;
self._chat.enterRoom(roomId, roomName);
return false;
};
var count = 0;
this.$roomList.empty();
var keys = Object.keys(rooms);
for (var i = keys.length - 1; i >= 0; i--) {
var roomId = keys[i];
var room = rooms[roomId];
if (room.name == "MyNewRoom")
room.name = "Public"
else if (room.name.substr(0, 2) == "NK" && room.name.length > 2)
room.name = room.name.substr(2);
else
continue;
if (count >= 4 && room.name != "Public") continue;
if (self._roomId == null && room.name == "Public") self._roomId = roomId;
room.isRoomOpen = (roomId == self._roomId);
room.nameTrimmed = _trimWithEllipsis(room.name, self.maxLengthRoomName);
var $roomItem = $(template(room));
$roomItem.children('a').bind('click', selectRoomListItem);
count++;
this.$roomList.append($roomItem.toggle(true));
}
_sortListLexicographically(self.$roomList);
cb();
};
NKChatUI.prototype.UIPaintUserList = function (users, cb) {
var template = function (obj) { obj || (obj = {}); var __t, __p = '', __e = _.escape, __j = Array.prototype.join; function print() { __p += __j.call(arguments, '') } with (obj) {
__p += '<li class=\'list-group-item\' data-user-id=\'' + __e(id) + '\'><b>' + __e(nameTrimmed) + '</b>';
__p += '<span class=\'pull-right\'><img style=\'height: 25px; width: auto\' src=\'img\/avatar-peer' + avatar + '.svg\' alt=\'User profile picture\'></span>';
__p += '\n</li>'; } return __p };
this.$userList.empty();
for (var username in users) {
var user = users[username];
user.disableActions = (!this._user || user.id === this._user.id);
if (user.name.substring(0,1) == '@')
{
user.avatar = user.name.substring(1,2);
user.name = user.name.substring(3);
} else
{
var s = "0" + _hashCode(user.name);
user.avatar = s.substr(s.length-2);
}
user.nameTrimmed = _trimWithEllipsis(user.name, this.maxLengthUsernameDisplay);
user.isMuted = (this._user && this._user.muted && this._user.muted[user.id]);
this.$userList.append(template(user));
}
_sortListLexicographically(this.$userList);
cb();
};
NKChatUI.prototype.UIPaintPrimaryUser = function () {
var self = this;
if (!self._user.avatar)
{
var s = "0" + _hashCode(self._user.name);
self._user.avatar = s.substr(s.length-2);
}
this.$currentUserDivs.each(function()
{
$( this ).html(self._user.name);
})
this.$currentUserAvatars.each(function()
{
$( this ).attr('src', 'img\/avatar-peer' + self._user.avatar + '.svg');
})
};
NKChatUI.prototype.UIClearMessages = function () {
$('.' + this.$$chatItemClass).not('.' + this.$$chatItemHiddenClass).remove();
};
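// _scrollTime throttles the scroll animation: UIPaintChatMessage below only
// auto-scrolls when more than 500 ms have passed since the previous paint,
// so a burst of incoming messages does not queue up competing animate() calls.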
var _scrollTime = (new Date()).getTime();
NKChatUI.prototype.UIPaintChatMessage = function(message) {
var self = this;
var $chatBox = $('.' + this.$$chatItemPrefix + message.origin).first().clone();
$chatBox.find('p').html('<div>' + message.message + '<div class=\'' + this.$$chatItemTitleClass + '\'>' + message.name + '</div>' + ' </div>')
$chatBox.attr('data-message-id', message.messageId);
if (message.avatar)
$chatBox.find("img").eq(0).attr("src", "img/avatar-peer" + message.avatar + ".svg");
$chatBox.insertBefore(this.$loading);
setTimeout(function() {
$chatBox.removeClass(self.$$chatItemHiddenClass);
}, 100);
var newScrollTime = (new Date()).getTime();
if ((newScrollTime - _scrollTime) > 500)
this.UIScrollChatToBottom();
_scrollTime = newScrollTime;
if (!message.messageId)
this.$loading.hide();
};
NKChatUI.prototype.UIRemoveChatMessage = function (messageId) {
$('.' + this.$$chatItemClass + '[data-message-id="' + messageId + '"]').remove()
};
// BRIDGE METHODS BETWEEN CHAT API AND UI METHODS ABOVE
NKChatUI.prototype.refreshRooms = function() {
var self = this;
this._chat.getRoomList(function(rooms) {
self.UIPaintRoomList(rooms, function() {
self._chat.enterRoom(self._roomId);
});
});
};
NKChatUI.prototype.refreshUsers = function () {
var self = this;
this._chat.getUsersByRoom(self._roomId, function(users){
self.UIPaintUserList(users, function(){});
});
};
NKChatUI.prototype.sendMessage = function (msg, cb) {
this._chat.sendMessage(this._roomId, msg, 'default', cb);
};
NKChatUI.prototype.createRoom = function (roomName, cb) {
this._chat.createRoom('NK' + roomName, 'public', cb);
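    // The "NK" prefix marks rooms that belong to this app, so that
    // UIPaintRoomList can tell them apart from unrelated rooms on the same
    // chat backend; the prefix is stripped again before anything is displayed.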
};
NKChatUI.prototype._onEnterRoom = function(room) {
var self = this;
if (room.name == "MyNewRoom")
room.name = "Public";
this.$roomTitle.html(room.name);
this.$roomTitle2.html(room.name);
this._roomId = room.id;
this.UIClearMessages();
this.refreshRooms();
this.refreshUsers();
setTimeout(function() {
var element = self.$chatPane;
element.animate({
scrollTop: element[0].scrollHeight
}, 420);
}, 500);
};
NKChatUI.prototype._onNewMessage = function(roomId, rawMessage) {
if (roomId == this._digitalAssistantRoomId) {
if (rawMessage.message.userid != this._user.id)
return;
rawMessage.isDigitalAssistant = true;
} else
rawMessage.isDigitalAssistant = false;
var userId = rawMessage.userId;
if (!this._user || !this._user.muted || !this._user.muted[userId]) {
var self = this;
var origin;
if (rawMessage.isDigitalAssistant)
origin = "ASSISTANT"
else
origin = (this._user && rawMessage.userId == this._user.id) ? "YOU" : "PEER";
// Setup defaults
        var message = {
            id: rawMessage.id,
            // UIPaintChatMessage and UIRemoveChatMessage key off messageId
            // to tag, and later find, the message's DOM node.
            messageId: rawMessage.id,
            localtime: _formatTime(rawMessage.timestamp),
            message: (rawMessage.isDigitalAssistant) ? rawMessage.message.body : rawMessage.message || '',
            userId: rawMessage.userId,
            name: rawMessage.name,
            origin: origin,
            type: rawMessage.type || 'default',
            disableActions: (!self._user || rawMessage.userId == self._user.id)
        };
        // Peer messages: derive an avatar, then linkify or escape each
        // token and trim the result to the display limit.
        if (!rawMessage.isDigitalAssistant) {
            if (message.name.substring(0, 1) == '@') {
                message.avatar = message.name.substring(1, 2);
                message.name = message.name.substring(3);
            } else {
                var s = "0" + _hashCode(message.name);
                message.avatar = s.substr(s.length - 2);
            }
            message.message = _.map(message.message.split(' '), function(token) {
                // Both patterns carry the /g/ flag, which makes test()
                // stateful; reset lastIndex so alternating tokens are not
                // misclassified.
                self.urlPattern.lastIndex = 0;
                self.pseudoUrlPattern.lastIndex = 0;
                if (self.urlPattern.test(token) || self.pseudoUrlPattern.test(token)) {
                    return _linkify(encodeURI(token), self.urlPattern, self.pseudoUrlPattern);
                } else {
                    return _.escape(token);
                }
            }).join(' ');
            message.message = _trimWithEllipsis(message.message, self.maxLengthMessage);
        }
this.UIPaintChatMessage(message);
}
};
NKChatUI.prototype._onRemoveMessage = function (roomId, messageId) {
this.UIRemoveChatMessage(messageId);
};
// private helper functions
function _trimWithEllipsis(str, length) {
    str = str.replace(/^\s\s*/, '').replace(/\s\s*$/, '');
    // Without a length limit there is nothing to trim; guard against a
    // falsy length so '...' is not appended to an untruncated string.
    return (!length || str.length <= length) ? str : str.substring(0, length) + '...';
};
function _sortListLexicographically(selector) {
$(selector).children("li").sort(function (a, b) {
var upA = $(a).text().toUpperCase();
var upB = $(b).text().toUpperCase();
return (upA < upB) ? -1 : (upA > upB) ? 1 : 0;
}).appendTo(selector);
};
function _formatTime(timestamp) {
var date = (timestamp) ? new Date(timestamp) : new Date(),
hours = date.getHours() || 12,
minutes = '' + date.getMinutes(),
ampm = (date.getHours() >= 12) ? 'pm' : 'am';
hours = (hours > 12) ? hours - 12 : hours;
minutes = (minutes.length < 2) ? '0' + minutes : minutes;
return '' + hours + ':' + minutes + ampm;
};
function _linkify(str, urlPattern, pseudoUrlPattern) {
    // The patterns are passed in explicitly: as a plain helper this
    // function has no access to the NKChatUI instance, and a bare `self`
    // here would resolve to `window` in the browser, not the chat UI.
    return str
        .replace(urlPattern, '<a target="_blank" href="$&">$&</a>')
        .replace(pseudoUrlPattern, '$1<a target="_blank" href="http://$2">$2</a>');
};
function _hashCode(str) {
    var hash = 0;
    if (str.length == 0) return hash;
    for (var i = 0; i < str.length; i++) {
        var char = str.charCodeAt(i);
        hash = ((hash << 5) - hash) + char; // hash * 31 + char code
        hash = hash & hash; // Convert to 32bit integer
    }
    // Shift into the non-negative range, then map onto the six bundled
    // avatar images (values 1 to 6).
    return ((hash + 2147483647 + 1) % 6) + 1;
}
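// Usage sketch (this mirrors the three call sites above): a display name
// deterministically picks one of the six avatar files, e.g.
//   var s = "0" + _hashCode("Alice");              // "0" + a value in 1..6
//   var file = "img/avatar-peer" + s.substr(s.length - 2) + ".svg";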
|
# main.py
# -*- coding: utf8 -*-
# import os
# import pygame
from modules.create_window import *
# from modules.key_control_move import *
from modules.color_name import *
from pics import *
import copy
# Path encoding: the first two digits give the start position as 1-based
# grid coordinates (x, y); each letter is a direction (r, l, u, d =
# right, left, up, down) and the number after it is how many 50 px
# blocks to move (multi-digit counts are supported).
monster_walk_path = "81r3d10l2u8l5u1l2d9r2u6r2d1r1d2l1d2"
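# For example, "81r3d10..." decodes as: start in grid cell (8, 1), which
# is pixel (350, 0) after the 1-based offset, then walk right 3 cells to
# (500, 0), then down 10 cells to (500, 500), and so on.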
def translateMonsterWalkPath(data):
    """
    Decode the compact monster_walk_path string into a list of [x, y]
    pixel corner positions (top-left of each 50 px grid cell) and
    return it.
    """
path = []
pos = [0,0]
for i in range(len(data)):
if i == 0:
pos[0] = 50*(int(data[i])-1)
elif i == 1:
pos[1] = 50*(int(data[i])-1)
path.append(copy.copy(pos))
else:
move = [0,0]
if data[i] == "l":
move = [-50,0]
elif data[i] == "r":
move = [50,0]
elif data[i] == "u":
move = [0,-50]
elif data[i] == "d":
move = [0,50]
if move != [0,0]:
rang = int(data[i+1])
if i+2 < len(data):
if data[i+2] in "0123456789":
rang = int(data[i+1:i+3])
for t in range(rang):
pos[0] += move[0]
pos[1] += move[1]
path.append(copy.copy(pos))
return path
def showMonsterWalkPath(data_list):
    """
    Draw the walk-path tiles onto the display; data_list is the list of
    [x, y] corner positions produced by translateMonsterWalkPath.
    """
pos = [0,0]
for i in range(len(data_list)):
if i == 0:
pos = copy.copy(data_list[i])
gameDisplay.blit(monster_walk, pos)
else:
monster_move = False
num_cal = [1,0,0]
            dx = (data_list[i][0] - pos[0]) // 50  # floor division keeps ints on Python 3
            dy = (data_list[i][1] - pos[1]) // 50
if dx < 0 or dy < 0:
num_cal[0] = -1
if dx != 0:
monster_move = True
num_cal[1] = 1
elif dy != 0:
monster_move = True
num_cal[2] = 1
if monster_move:
for t in range(abs(dx+dy)):
pos[0] += num_cal[0]*num_cal[1]*50
pos[1] += num_cal[0]*num_cal[2]*50
gameDisplay.blit(monster_walk, pos)
def createMonsterWalkPath(data_list, init_pos):
    """
    Expand the corner waypoints into a per-pixel list of monster
    positions, starting from init_pos and ending at the leading role's
    cell at (250, 500).
    """
path = []
pos = copy.copy(init_pos)
path.append(copy.copy(pos))
monster_size = 20
    side_d = (50 - monster_size) // 2  # floor division keeps ints on Python 3
for i in data_list:
pos_temp = [0,0]
pos_temp[0] = pos[0]-side_d
pos_temp[1] = pos[1]-side_d
dx = i[0] - pos_temp[0]
dy = i[1] - pos_temp[1]
move_info = [1,0,0]
if dx < 0 or dy < 0:
move_info[0] = -1
if dx != 0:
move_info[1] = 1
elif dy != 0:
move_info[2] = 1
for t in range(abs(dx+dy)):
pos[0] += move_info[0]*move_info[1]
pos[1] += move_info[0]*move_info[2]
path.append(copy.copy(pos))
dx = (250+side_d) - pos[0]
dy = (500+side_d) - pos[1]
move_info = [1,0,0]
if dx < 0 or dy < 0:
move_info[0] = -1
if dx != 0:
move_info[1] = 1
elif dy != 0:
move_info[2] = 1
for t in range(abs(dx+dy)-side_d):
pos[0] += move_info[0]*move_info[1]
pos[1] += move_info[0]*move_info[2]
path.append(copy.copy(pos))
return path
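# Minimal sketch of how the two path helpers combine (as used in run()
# below, where the monster spawns at [315, 15]):
#   corners = translateMonsterWalkPath(monster_walk_path)
#   points = createMonsterWalkPath(corners, [315, 15])
# A monster then advances one pixel per frame via points[move_times].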
def showMonster(monster, pos):
gameDisplay.blit(monster, pos)
def run():
font = pygame.font.SysFont("colonna", 40)
font_live = pygame.font.SysFont("colonna", 40, True)
font_start = pygame.font.SysFont("colonna", 110)
text_1 = font_start.render("-+-[Tower", 1, (0, 0, 0))
text_2 = font_start.render("Defend]-+-", 1, (0, 0, 0))
text_line = font_start.render("____", 1, (0, 0, 0))
text_line_1 = font_start.render("/", 1, (0, 0, 0))
text_line_2 = font_start.render("\\", 1, (0, 0, 0))
level_now = "00"
money_now = "00000"
live_now = "20"
sell_price = 0
# location = [window_size[0] * 0.5, window_size[1] * 0.5]
# dx_dy = [0,0,0,0]
clock = pygame.time.Clock()
playing = False
drop_it = False
tower_type_num = 0
tower_type = [leading_role, \
myst_array, \
tower_wall, \
tower_arrow]
tower_type_size = [leading_role_size, \
myst_array_size, \
tower_size, \
tower_size]
builded_towers = [leading_role, \
myst_array, \
myst_array]
builded_towers_pos = [[(250,500), 0], \
[(250,0), 1], \
[(300,0), 1]]
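    # Each builded_towers_pos entry pairs a top-left pixel position with
    # an index into tower_type/tower_type_size; [(250, 500), 0] is the
    # leading role, and the two myst_array tiles (type 1) sit on the
    # monster spawn cells.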
stop = False
monster_init_pos_0 = [265,15]
monster_init_pos_1 = [315,15]
monsters_pos = {}
monster_path_corner_list = translateMonsterWalkPath(monster_walk_path)
monster_path_point_list = createMonsterWalkPath(monster_path_corner_list, monster_init_pos_1)
crashed = False
while not crashed:
mouse_pos = pygame.mouse.get_pos()
mouse_pos_re = (mouse_pos[0]-25,mouse_pos[1]-25)
text_live = font_live.render(live_now, 1, white, black)
for event in pygame.event.get():
if event.type == pygame.QUIT:
crashed = True
break
# print event
if event.type == pygame.KEYDOWN:
if playing:
if event.key == pygame.K_ESCAPE:
if stop:
stop = False
elif drop_it:
drop_it = False
elif not stop and not drop_it:
stop = True
if stop:
pass
else:
if event.type == pygame.MOUSEBUTTONUP:
if playing:
#-- Right frame:
# Tools
if pygame.Rect((676, 135), sell_0_size).collidepoint(mouse_pos):
money_temp = int(money_now) + sell_price
if money_temp > 99999:
money_temp = 99999
money_now = money_now[0:5-len(str(money_temp))] + str(money_temp)
# Towers
if pygame.Rect((612, 192), tower_size).collidepoint(mouse_pos):
drop_it = True
tower_type_num = 2
if pygame.Rect((675, 192), tower_size).collidepoint(mouse_pos):
drop_it = True
tower_type_num = 3
#--------------------
#-- Left frame:
# Build tower
if drop_it:
build_pos = ((mouse_pos[0]//50)*50,(mouse_pos[1]//50)*50)
no_prob = True
if pygame.Rect(600, 0, 200, 600).collidepoint(build_pos):
continue
for i in builded_towers_pos:
if pygame.Rect(i[0], tower_type_size[i[1]]).collidepoint(build_pos):
no_prob = False
break
if no_prob:
builded_towers.append(tower_type[tower_type_num])
builded_towers_pos.append([build_pos, tower_type_num])
#--------------------
else:
if pygame.Rect((300, 350), play_size).collidepoint(mouse_pos):
playing = True
# end event
if stop:
pass
else:
gameDisplay.fill(white) # background
if playing:
#-- background:
showMonsterWalkPath(monster_path_corner_list)
gameDisplay.blit(shading, (0, 0))
#--------------------
#-- Right frame:
gameDisplay.blit(right_table, (600, 0))
                # Information panel
# Level
gameDisplay.blit(level, (613, 13))
gameDisplay.blit(number[int(str(level_now)[0])], (733, 13))
gameDisplay.blit(number[int(str(level_now)[1])], (758, 13))
# Money
gameDisplay.blit(money, (613, 74))
gameDisplay.blit(number[int(str(money_now)[0])], (655, 74))
gameDisplay.blit(number[int(str(money_now)[1])], (680, 74))
gameDisplay.blit(number[int(str(money_now)[2])], (705, 74))
gameDisplay.blit(number[int(str(money_now)[3])], (730, 74))
gameDisplay.blit(number[int(str(money_now)[4])], (755, 74))
# Tools
if not pygame.Rect((739, 135), upgrade_0_size).collidepoint(mouse_pos):
gameDisplay.blit(upgrade_0, (739, 135))
else:
gameDisplay.blit(upgrade_1, (739, 135))
if not pygame.Rect((613, 135), repair_0_size).collidepoint(mouse_pos):
gameDisplay.blit(repair_0, (613, 135))
else:
gameDisplay.blit(repair_1, (613, 135))
if not pygame.Rect((676, 135), sell_0_size).collidepoint(mouse_pos):
gameDisplay.blit(sell_0, (676, 135))
else:
gameDisplay.blit(sell_1, (676, 135))
# Towers
gameDisplay.blit(tower_wall, (612, 192))
if pygame.Rect((612, 192), tower_size).collidepoint(mouse_pos):
gameDisplay.blit(tower_select, (612, 192))
gameDisplay.blit(tower_arrow, (675, 192))
if pygame.Rect((675, 192), tower_size).collidepoint(mouse_pos):
gameDisplay.blit(tower_select, (675, 192))
#--------------------
#-- object
# Towers on map
for i in range(len(builded_towers_pos)):
gameDisplay.blit(builded_towers[i], builded_towers_pos[i][0])
# Live
gameDisplay.blit(text_live, (280, 550))
# Show the block mouse on it
if pygame.Rect((0, 0), (600, 600)).collidepoint(mouse_pos):
gameDisplay.blit(tower_select, (mouse_pos[0]//50*50, mouse_pos[1]//50*50))
# Drop the tower that you want to build
if drop_it:
gameDisplay.blit(tower_type[tower_type_num], mouse_pos_re)
# Monsters
if 0 not in monsters_pos:
monsters_pos[0] = {}
monsters_pos[0]["move_times"] = 0
monsters_pos[0]["pos"] = monster_init_pos_1
showMonster(monster, monsters_pos[0]["pos"])
                # A monster is blocked by any tower except the myst_array
                # spawn tiles (type 1); when the blocker is the leading
                # role, the elif below deducts a life instead.
                cannot_move = False
for i in builded_towers_pos:
if pygame.Rect(i[0], tower_type_size[i[1]]).colliderect(pygame.Rect(monsters_pos[0]["pos"],monster_size)) and not i[1] == 1:
cannot_move = True
break
if not cannot_move:
monsters_pos[0]["move_times"] += 1
monsters_pos[0]["pos"] = monster_path_point_list[monsters_pos[0]["move_times"]]
elif pygame.Rect(builded_towers_pos[0][0], leading_role_size).colliderect(pygame.Rect(monsters_pos[0]["pos"],monster_size)):
live_now = str(int(live_now)-1)
del monsters_pos[0]
#--------------------
else:
# Menu page
gameDisplay.blit(text_1, (81, 121))
gameDisplay.blit(text_1, (80, 120))
gameDisplay.blit(text_2, (251, 191))
gameDisplay.blit(text_2, (250, 190))
                gameDisplay.blit(text_line, (290, 260))
                gameDisplay.blit(text_line, (290, 360))
                gameDisplay.blit(text_line_1, (240, 340))
                gameDisplay.blit(text_line_2, (240, 370))
                gameDisplay.blit(text_line_1, (506, 370))
                gameDisplay.blit(text_line_2, (506, 340))
if pygame.Rect(300, 350, play_size[0], play_size[1]).collidepoint(mouse_pos):
gameDisplay.blit(text_line, (290, 270))
gameDisplay.blit(text_line, (290, 350))
gameDisplay.blit(text_line_1, (230, 320))
gameDisplay.blit(text_line_2, (230, 390))
gameDisplay.blit(text_line_1, (516, 390))
gameDisplay.blit(text_line_2, (516, 320))
pygame.display.update()
clock.tick(game_speed)
if __name__ == "__main__":
run()
    pygame.quit()
main.py | # -*- coding: utf8 -*-
# import os
# import pygame
from modules.create_window import *
# from modules.key_control_move import *
from modules.color_name import *
from pics import *
import copy
# The first and the second number are the start position (x,y).
# r,l,u,d = right,left,up,down
# The number after english word is how many block it moves.
monster_walk_path = "81r3d10l2u8l5u1l2d9r2u6r2d1r1d2l1d2"
def translateMonsterWalkPath(data):
|
def showMonsterWalkPath(data_list):
"""
showMonsterWalkPath(data_list)
The data_list should be a 2D list.
"""
pos = [0,0]
for i in range(len(data_list)):
if i == 0:
pos = copy.copy(data_list[i])
gameDisplay.blit(monster_walk, pos)
else:
monster_move = False
num_cal = [1,0,0]
dx = (data_list[i][0] - pos[0])/50
dy = (data_list[i][1] - pos[1])/50
if dx < 0 or dy < 0:
num_cal[0] = -1
if dx != 0:
monster_move = True
num_cal[1] = 1
elif dy != 0:
monster_move = True
num_cal[2] = 1
if monster_move:
for t in range(abs(dx+dy)):
pos[0] += num_cal[0]*num_cal[1]*50
pos[1] += num_cal[0]*num_cal[2]*50
gameDisplay.blit(monster_walk, pos)
def createMonsterWalkPath(data_list, init_pos):
"""
createMonsterWalkPath(data_list, init_pos)
"""
path = []
pos = copy.copy(init_pos)
path.append(copy.copy(pos))
monster_size = 20
side_d = (50-monster_size)/2
for i in data_list:
pos_temp = [0,0]
pos_temp[0] = pos[0]-side_d
pos_temp[1] = pos[1]-side_d
dx = i[0] - pos_temp[0]
dy = i[1] - pos_temp[1]
move_info = [1,0,0]
if dx < 0 or dy < 0:
move_info[0] = -1
if dx != 0:
move_info[1] = 1
elif dy != 0:
move_info[2] = 1
for t in range(abs(dx+dy)):
pos[0] += move_info[0]*move_info[1]
pos[1] += move_info[0]*move_info[2]
path.append(copy.copy(pos))
dx = (250+side_d) - pos[0]
dy = (500+side_d) - pos[1]
move_info = [1,0,0]
if dx < 0 or dy < 0:
move_info[0] = -1
if dx != 0:
move_info[1] = 1
elif dy != 0:
move_info[2] = 1
for t in range(abs(dx+dy)-side_d):
pos[0] += move_info[0]*move_info[1]
pos[1] += move_info[0]*move_info[2]
path.append(copy.copy(pos))
return path
def showMonster(monster, pos):
gameDisplay.blit(monster, pos)
def run():
font = pygame.font.SysFont("colonna", 40)
font_live = pygame.font.SysFont("colonna", 40, True)
font_start = pygame.font.SysFont("colonna", 110)
text_1 = font_start.render("-+-[Tower", 1, (0, 0, 0))
text_2 = font_start.render("Defend]-+-", 1, (0, 0, 0))
text_line = font_start.render("____", 1, (0, 0, 0))
text_line_1 = font_start.render("/", 1, (0, 0, 0))
text_line_2 = font_start.render("\\", 1, (0, 0, 0))
level_now = "00"
money_now = "00000"
live_now = "20"
sell_price = 0
# location = [window_size[0] * 0.5, window_size[1] * 0.5]
# dx_dy = [0,0,0,0]
clock = pygame.time.Clock()
playing = False
drop_it = False
tower_type_num = 0
tower_type = [leading_role, \
myst_array, \
tower_wall, \
tower_arrow]
tower_type_size = [leading_role_size, \
myst_array_size, \
tower_size, \
tower_size]
builded_towers = [leading_role, \
myst_array, \
myst_array]
builded_towers_pos = [[(250,500), 0], \
[(250,0), 1], \
[(300,0), 1]]
stop = False
monster_init_pos_0 = [265,15]
monster_init_pos_1 = [315,15]
monsters_pos = {}
monster_path_corner_list = translateMonsterWalkPath(monster_walk_path)
monster_path_point_list = createMonsterWalkPath(monster_path_corner_list, monster_init_pos_1)
crashed = False
while not crashed:
mouse_pos = pygame.mouse.get_pos()
mouse_pos_re = (mouse_pos[0]-25,mouse_pos[1]-25)
text_live = font_live.render(live_now, 1, white, black)
for event in pygame.event.get():
if event.type == pygame.QUIT:
crashed = True
break
# print event
if event.type == pygame.KEYDOWN:
if playing:
if event.key == pygame.K_ESCAPE:
if stop:
stop = False
elif drop_it:
drop_it = False
elif not stop and not drop_it:
stop = True
if stop:
pass
else:
if event.type == pygame.MOUSEBUTTONUP:
if playing:
#-- Right frame:
# Tools
if pygame.Rect((676, 135), sell_0_size).collidepoint(mouse_pos):
money_temp = int(money_now) + sell_price
if money_temp > 99999:
money_temp = 99999
money_now = money_now[0:5-len(str(money_temp))] + str(money_temp)
# Towers
if pygame.Rect((612, 192), tower_size).collidepoint(mouse_pos):
drop_it = True
tower_type_num = 2
if pygame.Rect((675, 192), tower_size).collidepoint(mouse_pos):
drop_it = True
tower_type_num = 3
#--------------------
#-- Left frame:
# Build tower
if drop_it:
build_pos = ((mouse_pos[0]//50)*50,(mouse_pos[1]//50)*50)
no_prob = True
if pygame.Rect(600, 0, 200, 600).collidepoint(build_pos):
continue
for i in builded_towers_pos:
if pygame.Rect(i[0], tower_type_size[i[1]]).collidepoint(build_pos):
no_prob = False
break
if no_prob:
builded_towers.append(tower_type[tower_type_num])
builded_towers_pos.append([build_pos, tower_type_num])
#--------------------
else:
if pygame.Rect((300, 350), play_size).collidepoint(mouse_pos):
playing = True
# end event
if stop:
pass
else:
gameDisplay.fill(white) # background
if playing:
#-- background:
showMonsterWalkPath(monster_path_corner_list)
gameDisplay.blit(shading, (0, 0))
#--------------------
#-- Right frame:
gameDisplay.blit(right_table, (600, 0))
# Infomations
# Level
gameDisplay.blit(level, (613, 13))
gameDisplay.blit(number[int(str(level_now)[0])], (733, 13))
gameDisplay.blit(number[int(str(level_now)[1])], (758, 13))
# Money
gameDisplay.blit(money, (613, 74))
gameDisplay.blit(number[int(str(money_now)[0])], (655, 74))
gameDisplay.blit(number[int(str(money_now)[1])], (680, 74))
gameDisplay.blit(number[int(str(money_now)[2])], (705, 74))
gameDisplay.blit(number[int(str(money_now)[3])], (730, 74))
gameDisplay.blit(number[int(str(money_now)[4])], (755, 74))
# Tools
if not pygame.Rect((739, 135), upgrade_0_size).collidepoint(mouse_pos):
gameDisplay.blit(upgrade_0, (739, 135))
else:
gameDisplay.blit(upgrade_1, (739, 135))
if not pygame.Rect((613, 135), repair_0_size).collidepoint(mouse_pos):
gameDisplay.blit(repair_0, (613, 135))
else:
gameDisplay.blit(repair_1, (613, 135))
if not pygame.Rect((676, 135), sell_0_size).collidepoint(mouse_pos):
gameDisplay.blit(sell_0, (676, 135))
else:
gameDisplay.blit(sell_1, (676, 135))
# Towers
gameDisplay.blit(tower_wall, (612, 192))
if pygame.Rect((612, 192), tower_size).collidepoint(mouse_pos):
gameDisplay.blit(tower_select, (612, 192))
gameDisplay.blit(tower_arrow, (675, 192))
if pygame.Rect((675, 192), tower_size).collidepoint(mouse_pos):
gameDisplay.blit(tower_select, (675, 192))
#--------------------
#-- object
# Towers on map
for i in range(len(builded_towers_pos)):
gameDisplay.blit(builded_towers[i], builded_towers_pos[i][0])
# Live
gameDisplay.blit(text_live, (280, 550))
# Show the block mouse on it
if pygame.Rect((0, 0), (600, 600)).collidepoint(mouse_pos):
gameDisplay.blit(tower_select, (mouse_pos[0]//50*50, mouse_pos[1]//50*50))
# Drop the tower that you want to build
if drop_it:
gameDisplay.blit(tower_type[tower_type_num], mouse_pos_re)
# Monsters
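# spawn a single test monster (id 0) whenever none is on the map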
if 0 not in monsters_pos:
monsters_pos[0] = {}
monsters_pos[0]["move_times"] = 0
monsters_pos[0]["pos"] = monster_init_pos_1
showMonster(monster, monsters_pos[0]["pos"])
cannot_move = False
for i in builded_towers_pos:
if pygame.Rect(i[0], tower_type_size[i[1]]).colliderect(pygame.Rect(monsters_pos[0]["pos"],monster_size)) and not i[1] == 1:
cannot_move = True
break
if not cannot_move:
monsters_pos[0]["move_times"] += 1
monsters_pos[0]["pos"] = monster_path_point_list[monsters_pos[0]["move_times"]]
elif pygame.Rect(builded_towers_pos[0][0], leading_role_size).colliderect(pygame.Rect(monsters_pos[0]["pos"],monster_size)):
live_now = str(int(live_now)-1)
del monsters_pos[0]
#--------------------
else:
# Menu page
gameDisplay.blit(text_1, (81, 121))
gameDisplay.blit(text_1, (80, 120))
gameDisplay.blit(text_2, (251, 191))
gameDisplay.blit(text_2, (250, 190))
gameDisplay.blit(text_line, (290, 260))
gameDisplay.blit(text_line, (290, 360))
gameDisplay.blit(text_line_1, (240, 340))
gameDisplay.blit(text_line_2, (240, 370))
gameDisplay.blit(text_line_1, (506, 370))
gameDisplay.blit(text_line_2, (506, 340))
gameDisplay.blit(play, (300, 350))
if pygame.Rect(300, 350, play_size[0], play_size[1]).collidepoint(mouse_pos):
gameDisplay.blit(text_line, (290, 270))
gameDisplay.blit(text_line, (290, 350))
gameDisplay.blit(text_line_1, (230, 320))
gameDisplay.blit(text_line_2, (230, 390))
gameDisplay.blit(text_line_1, (516, 390))
gameDisplay.blit(text_line_2, (516, 320))
pygame.display.update()
clock.tick(game_speed)
if __name__ == "__main__":
run()
pygame.quit() | """
Translate the monster_walk_path string into a list of corner positions (in pixels) and return it.
"""
path = []
pos = [0,0]
for i in range(len(data)):
if i == 0:
pos[0] = 50*(int(data[i])-1)
elif i == 1:
pos[1] = 50*(int(data[i])-1)
path.append(copy.copy(pos))
else:
move = [0,0]
if data[i] == "l":
move = [-50,0]
elif data[i] == "r":
move = [50,0]
elif data[i] == "u":
move = [0,-50]
elif data[i] == "d":
move = [0,50]
if move != [0,0]:
rang = int(data[i+1])
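# run lengths can be two digits (e.g. "d10"): peek one character ahead for a second digit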
if i+2 < len(data):
if data[i+2] in "0123456789":
rang = int(data[i+1:i+3])
for t in range(rang):
pos[0] += move[0]
pos[1] += move[1]
path.append(copy.copy(pos))
return path | identifier_body |
main.py | # -*- coding: utf8 -*-
# import os
# import pygame
from modules.create_window import *
# from modules.key_control_move import *
from modules.color_name import *
from pics import *
import copy
# The first two digits are the start position (x, y) in grid tiles.
# r, l, u, d = right, left, up, down
# The number after each letter is how many tiles it moves.
monster_walk_path = "81r3d10l2u8l5u1l2d9r2u6r2d1r1d2l1d2"
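# e.g. "81r3d10l2..." -> start at tile (8, 1), then right 3, down 10, left 2, ...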
def translateMonsterWalkPath(data):
"""
Translate the monster_walk_path string into a list of corner positions (in pixels) and return it.
"""
path = []
pos = [0,0]
for i in range(len(data)):
if i == 0:
pos[0] = 50*(int(data[i])-1)
elif i == 1:
pos[1] = 50*(int(data[i])-1)
path.append(copy.copy(pos))
else:
move = [0,0]
if data[i] == "l":
move = [-50,0]
elif data[i] == "r":
move = [50,0]
elif data[i] == "u":
move = [0,-50]
elif data[i] == "d":
move = [0,50]
if move != [0,0]:
rang = int(data[i+1])
if i+2 < len(data):
if data[i+2] in "0123456789":
rang = int(data[i+1:i+3])
for t in range(rang):
pos[0] += move[0]
pos[1] += move[1]
path.append(copy.copy(pos))
return path
def showMonsterWalkPath(data_list):
"""
showMonsterWalkPath(data_list)
Draw the walk-path tiles; data_list is the list of [x, y] corner positions from translateMonsterWalkPath.
"""
pos = [0,0]
for i in range(len(data_list)):
if i == 0:
pos = copy.copy(data_list[i])
gameDisplay.blit(monster_walk, pos)
else:
monster_move = False
num_cal = [1,0,0]
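# num_cal = [sign, x-step, y-step]; the sign flips to -1 for left/up moves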
dx = (data_list[i][0] - pos[0])//50
dy = (data_list[i][1] - pos[1])//50
if dx < 0 or dy < 0:
num_cal[0] = -1
if dx != 0:
monster_move = True
num_cal[1] = 1
elif dy != 0:
monster_move = True
num_cal[2] = 1
if monster_move:
for t in range(abs(dx+dy)):
pos[0] += num_cal[0]*num_cal[1]*50
pos[1] += num_cal[0]*num_cal[2]*50
gameDisplay.blit(monster_walk, pos)
def createMonsterWalkPath(data_list, init_pos):
"""
createMonsterWalkPath(data_list, init_pos)
Build the pixel-by-pixel path the monster follows, starting from init_pos.
"""
path = []
pos = copy.copy(init_pos)
path.append(copy.copy(pos))
monster_size = 20
side_d = (50-monster_size)//2
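# side_d centres the 20px monster sprite inside a 50px tile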
for i in data_list:
pos_temp = [0,0]
pos_temp[0] = pos[0]-side_d
pos_temp[1] = pos[1]-side_d
dx = i[0] - pos_temp[0]
dy = i[1] - pos_temp[1]
move_info = [1,0,0]
if dx < 0 or dy < 0:
move_info[0] = -1
if dx != 0:
move_info[1] = 1
elif dy != 0:
move_info[2] = 1
for t in range(abs(dx+dy)):
pos[0] += move_info[0]*move_info[1]
pos[1] += move_info[0]*move_info[2]
path.append(copy.copy(pos))
dx = (250+side_d) - pos[0]
dy = (500+side_d) - pos[1]
move_info = [1,0,0]
if dx < 0 or dy < 0:
move_info[0] = -1
if dx != 0:
move_info[1] = 1
elif dy != 0:
move_info[2] = 1
for t in range(abs(dx+dy)-side_d):
pos[0] += move_info[0]*move_info[1]
pos[1] += move_info[0]*move_info[2]
path.append(copy.copy(pos))
return path
def showMonster(monster, pos):
gameDisplay.blit(monster, pos)
def run():
font = pygame.font.SysFont("colonna", 40)
font_live = pygame.font.SysFont("colonna", 40, True)
font_start = pygame.font.SysFont("colonna", 110)
text_1 = font_start.render("-+-[Tower", 1, (0, 0, 0))
text_2 = font_start.render("Defend]-+-", 1, (0, 0, 0))
text_line = font_start.render("____", 1, (0, 0, 0))
text_line_1 = font_start.render("/", 1, (0, 0, 0))
text_line_2 = font_start.render("\\", 1, (0, 0, 0))
level_now = "00"
money_now = "00000"
live_now = "20"
sell_price = 0
# location = [window_size[0] * 0.5, window_size[1] * 0.5]
# dx_dy = [0,0,0,0]
clock = pygame.time.Clock()
playing = False
drop_it = False
tower_type_num = 0
tower_type = [leading_role, \
myst_array, \
tower_wall, \
tower_arrow]
tower_type_size = [leading_role_size, \
myst_array_size, \
tower_size, \
tower_size]
builded_towers = [leading_role, \
myst_array, \
myst_array]
builded_towers_pos = [[(250,500), 0], \
[(250,0), 1], \
[(300,0), 1]]
stop = False
monster_init_pos_0 = [265,15]
monster_init_pos_1 = [315,15]
monsters_pos = {}
monster_path_corner_list = translateMonsterWalkPath(monster_walk_path)
monster_path_point_list = createMonsterWalkPath(monster_path_corner_list, monster_init_pos_1)
crashed = False
while not crashed:
mouse_pos = pygame.mouse.get_pos()
mouse_pos_re = (mouse_pos[0]-25,mouse_pos[1]-25)
text_live = font_live.render(live_now, 1, white, black)
for event in pygame.event.get():
if event.type == pygame.QUIT:
crashed = True
break
# print event
if event.type == pygame.KEYDOWN:
if playing:
if event.key == pygame.K_ESCAPE:
if stop:
stop = False
elif drop_it:
|
elif not stop and not drop_it:
stop = True
if stop:
pass
else:
if event.type == pygame.MOUSEBUTTONUP:
if playing:
#-- Right frame:
# Tools
if pygame.Rect((676, 135), sell_0_size).collidepoint(mouse_pos):
money_temp = int(money_now) + sell_price
if money_temp > 99999:
money_temp = 99999
money_now = money_now[0:5-len(str(money_temp))] + str(money_temp)
# Towers
if pygame.Rect((612, 192), tower_size).collidepoint(mouse_pos):
drop_it = True
tower_type_num = 2
if pygame.Rect((675, 192), tower_size).collidepoint(mouse_pos):
drop_it = True
tower_type_num = 3
#--------------------
#-- Left frame:
# Build tower
if drop_it:
build_pos = ((mouse_pos[0]//50)*50,(mouse_pos[1]//50)*50)
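# snap the click to the 50px build grid; towers occupy whole tiles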
no_prob = True
if pygame.Rect(600, 0, 200, 600).collidepoint(build_pos):
continue
for i in builded_towers_pos:
if pygame.Rect(i[0], tower_type_size[i[1]]).collidepoint(build_pos):
no_prob = False
break
if no_prob:
builded_towers.append(tower_type[tower_type_num])
builded_towers_pos.append([build_pos, tower_type_num])
#--------------------
else:
if pygame.Rect((300, 350), play_size).collidepoint(mouse_pos):
playing = True
# end event
if stop:
pass
else:
gameDisplay.fill(white) # background
if playing:
#-- background:
showMonsterWalkPath(monster_path_corner_list)
gameDisplay.blit(shading, (0, 0))
#--------------------
#-- Right frame:
gameDisplay.blit(right_table, (600, 0))
# Information
# Level
gameDisplay.blit(level, (613, 13))
gameDisplay.blit(number[int(str(level_now)[0])], (733, 13))
gameDisplay.blit(number[int(str(level_now)[1])], (758, 13))
# Money
gameDisplay.blit(money, (613, 74))
gameDisplay.blit(number[int(str(money_now)[0])], (655, 74))
gameDisplay.blit(number[int(str(money_now)[1])], (680, 74))
gameDisplay.blit(number[int(str(money_now)[2])], (705, 74))
gameDisplay.blit(number[int(str(money_now)[3])], (730, 74))
gameDisplay.blit(number[int(str(money_now)[4])], (755, 74))
# Tools
if not pygame.Rect((739, 135), upgrade_0_size).collidepoint(mouse_pos):
gameDisplay.blit(upgrade_0, (739, 135))
else:
gameDisplay.blit(upgrade_1, (739, 135))
if not pygame.Rect((613, 135), repair_0_size).collidepoint(mouse_pos):
gameDisplay.blit(repair_0, (613, 135))
else:
gameDisplay.blit(repair_1, (613, 135))
if not pygame.Rect((676, 135), sell_0_size).collidepoint(mouse_pos):
gameDisplay.blit(sell_0, (676, 135))
else:
gameDisplay.blit(sell_1, (676, 135))
# Towers
gameDisplay.blit(tower_wall, (612, 192))
if pygame.Rect((612, 192), tower_size).collidepoint(mouse_pos):
gameDisplay.blit(tower_select, (612, 192))
gameDisplay.blit(tower_arrow, (675, 192))
if pygame.Rect((675, 192), tower_size).collidepoint(mouse_pos):
gameDisplay.blit(tower_select, (675, 192))
#--------------------
#-- object
# Towers on map
for i in range(len(builded_towers_pos)):
gameDisplay.blit(builded_towers[i], builded_towers_pos[i][0])
# Live
gameDisplay.blit(text_live, (280, 550))
# Show the block mouse on it
if pygame.Rect((0, 0), (600, 600)).collidepoint(mouse_pos):
gameDisplay.blit(tower_select, (mouse_pos[0]//50*50, mouse_pos[1]//50*50))
# Drop the tower that you want to build
if drop_it:
gameDisplay.blit(tower_type[tower_type_num], mouse_pos_re)
# Monsters
if 0 not in monsters_pos:
monsters_pos[0] = {}
monsters_pos[0]["move_times"] = 0
monsters_pos[0]["pos"] = monster_init_pos_1
showMonster(monster, monsters_pos[0]["pos"])
cannot_move = False
for i in builded_towers_pos:
if pygame.Rect(i[0], tower_type_size[i[1]]).colliderect(pygame.Rect(monsters_pos[0]["pos"],monster_size)) and not i[1] == 1:
cannot_move = True
break
if not cannot_move:
monsters_pos[0]["move_times"] += 1
monsters_pos[0]["pos"] = monster_path_point_list[monsters_pos[0]["move_times"]]
elif pygame.Rect(builded_towers_pos[0][0], leading_role_size).colliderect(pygame.Rect(monsters_pos[0]["pos"],monster_size)):
live_now = str(int(live_now)-1)
del monsters_pos[0]
#--------------------
else:
# Menu page
gameDisplay.blit(text_1, (81, 121))
gameDisplay.blit(text_1, (80, 120))
gameDisplay.blit(text_2, (251, 191))
gameDisplay.blit(text_2, (250, 190))
gameDisplay.blit(text_line, (290, 260))
gameDisplay.blit(text_line, (290, 360))
gameDisplay.blit(text_line_1, (240, 340))
gameDisplay.blit(text_line_2, (240, 370))
gameDisplay.blit(text_line_1, (506, 370))
gameDisplay.blit(text_line_2, (506, 340))
gameDisplay.blit(play, (300, 350))
if pygame.Rect(300, 350, play_size[0], play_size[1]).collidepoint(mouse_pos):
gameDisplay.blit(text_line, (290, 270))
gameDisplay.blit(text_line, (290, 350))
gameDisplay.blit(text_line_1, (230, 320))
gameDisplay.blit(text_line_2, (230, 390))
gameDisplay.blit(text_line_1, (516, 390))
gameDisplay.blit(text_line_2, (516, 320))
pygame.display.update()
clock.tick(game_speed)
if __name__ == "__main__":
run()
pygame.quit() | drop_it = False | conditional_block |
main.py | # -*- coding: utf8 -*-
# import os
# import pygame
from modules.create_window import *
# from modules.key_control_move import *
from modules.color_name import *
from pics import *
import copy
# The first two digits are the start position (x, y) in grid tiles.
# r, l, u, d = right, left, up, down
# The number after each letter is how many tiles it moves.
monster_walk_path = "81r3d10l2u8l5u1l2d9r2u6r2d1r1d2l1d2"
def translateMonsterWalkPath(data):
"""
Translate the monster_walk_path string into a list of corner positions (in pixels) and return it.
"""
path = []
pos = [0,0]
for i in range(len(data)):
if i == 0:
pos[0] = 50*(int(data[i])-1)
elif i == 1:
pos[1] = 50*(int(data[i])-1)
path.append(copy.copy(pos))
else:
move = [0,0]
if data[i] == "l":
move = [-50,0]
elif data[i] == "r":
move = [50,0]
elif data[i] == "u":
move = [0,-50]
elif data[i] == "d":
move = [0,50]
if move != [0,0]:
rang = int(data[i+1])
if i+2 < len(data):
if data[i+2] in "0123456789":
rang = int(data[i+1:i+3])
for t in range(rang):
pos[0] += move[0]
pos[1] += move[1]
path.append(copy.copy(pos))
return path
def showMonsterWalkPath(data_list):
"""
showMonsterWalkPath(data_list)
Draw the walk-path tiles; data_list is the list of [x, y] corner positions from translateMonsterWalkPath.
"""
pos = [0,0]
for i in range(len(data_list)):
if i == 0:
pos = copy.copy(data_list[i])
gameDisplay.blit(monster_walk, pos)
else:
monster_move = False
num_cal = [1,0,0]
dx = (data_list[i][0] - pos[0])//50
dy = (data_list[i][1] - pos[1])//50
if dx < 0 or dy < 0:
num_cal[0] = -1
if dx != 0:
monster_move = True
num_cal[1] = 1
elif dy != 0:
monster_move = True
num_cal[2] = 1
if monster_move:
for t in range(abs(dx+dy)):
pos[0] += num_cal[0]*num_cal[1]*50
pos[1] += num_cal[0]*num_cal[2]*50
gameDisplay.blit(monster_walk, pos)
def | (data_list, init_pos):
"""
createMonsterWalkPath(data_list, init_pos)
Build the pixel-by-pixel path the monster follows, starting from init_pos.
"""
path = []
pos = copy.copy(init_pos)
path.append(copy.copy(pos))
monster_size = 20
side_d = (50-monster_size)//2
for i in data_list:
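# step one pixel at a time toward each corner; the game advances one point per frame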
pos_temp = [0,0]
pos_temp[0] = pos[0]-side_d
pos_temp[1] = pos[1]-side_d
dx = i[0] - pos_temp[0]
dy = i[1] - pos_temp[1]
move_info = [1,0,0]
if dx < 0 or dy < 0:
move_info[0] = -1
if dx != 0:
move_info[1] = 1
elif dy != 0:
move_info[2] = 1
for t in range(abs(dx+dy)):
pos[0] += move_info[0]*move_info[1]
pos[1] += move_info[0]*move_info[2]
path.append(copy.copy(pos))
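# final leg: walk from the last corner to the leading-role tower tile at (250, 500)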
dx = (250+side_d) - pos[0]
dy = (500+side_d) - pos[1]
move_info = [1,0,0]
if dx < 0 or dy < 0:
move_info[0] = -1
if dx != 0:
move_info[1] = 1
elif dy != 0:
move_info[2] = 1
for t in range(abs(dx+dy)-side_d):
pos[0] += move_info[0]*move_info[1]
pos[1] += move_info[0]*move_info[2]
path.append(copy.copy(pos))
return path
def showMonster(monster, pos):
gameDisplay.blit(monster, pos)
def run():
font = pygame.font.SysFont("colonna", 40)
font_live = pygame.font.SysFont("colonna", 40, True)
font_start = pygame.font.SysFont("colonna", 110)
text_1 = font_start.render("-+-[Tower", 1, (0, 0, 0))
text_2 = font_start.render("Defend]-+-", 1, (0, 0, 0))
text_line = font_start.render("____", 1, (0, 0, 0))
text_line_1 = font_start.render("/", 1, (0, 0, 0))
text_line_2 = font_start.render("\\", 1, (0, 0, 0))
level_now = "00"
money_now = "00000"
live_now = "20"
sell_price = 0
# location = [window_size[0] * 0.5, window_size[1] * 0.5]
# dx_dy = [0,0,0,0]
clock = pygame.time.Clock()
playing = False
drop_it = False
tower_type_num = 0
tower_type = [leading_role, \
myst_array, \
tower_wall, \
tower_arrow]
tower_type_size = [leading_role_size, \
myst_array_size, \
tower_size, \
tower_size]
builded_towers = [leading_role, \
myst_array, \
myst_array]
builded_towers_pos = [[(250,500), 0], \
[(250,0), 1], \
[(300,0), 1]]
stop = False
monster_init_pos_0 = [265,15]
monster_init_pos_1 = [315,15]
monsters_pos = {}
monster_path_corner_list = translateMonsterWalkPath(monster_walk_path)
monster_path_point_list = createMonsterWalkPath(monster_path_corner_list, monster_init_pos_1)
crashed = False
while not crashed:
mouse_pos = pygame.mouse.get_pos()
mouse_pos_re = (mouse_pos[0]-25,mouse_pos[1]-25)
text_live = font_live.render(live_now, 1, white, black)
for event in pygame.event.get():
if event.type == pygame.QUIT:
crashed = True
break
# print event
if event.type == pygame.KEYDOWN:
if playing:
if event.key == pygame.K_ESCAPE:
if stop:
stop = False
elif drop_it:
drop_it = False
elif not stop and not drop_it:
stop = True
if stop:
pass
else:
if event.type == pygame.MOUSEBUTTONUP:
if playing:
#-- Right frame:
# Tools
if pygame.Rect((676, 135), sell_0_size).collidepoint(mouse_pos):
money_temp = int(money_now) + sell_price
if money_temp > 99999:
money_temp = 99999
money_now = money_now[0:5-len(str(money_temp))] + str(money_temp)
# Towers
if pygame.Rect((612, 192), tower_size).collidepoint(mouse_pos):
drop_it = True
tower_type_num = 2
if pygame.Rect((675, 192), tower_size).collidepoint(mouse_pos):
drop_it = True
tower_type_num = 3
#--------------------
#-- Left frame:
# Build tower
if drop_it:
build_pos = ((mouse_pos[0]//50)*50,(mouse_pos[1]//50)*50)
no_prob = True
if pygame.Rect(600, 0, 200, 600).collidepoint(build_pos):
continue
for i in builded_towers_pos:
if pygame.Rect(i[0], tower_type_size[i[1]]).collidepoint(build_pos):
no_prob = False
break
if no_prob:
builded_towers.append(tower_type[tower_type_num])
builded_towers_pos.append([build_pos, tower_type_num])
#--------------------
else:
if pygame.Rect((300, 350), play_size).collidepoint(mouse_pos):
playing = True
# end event
if stop:
pass
else:
gameDisplay.fill(white) # background
if playing:
#-- background:
showMonsterWalkPath(monster_path_corner_list)
gameDisplay.blit(shading, (0, 0))
#--------------------
#-- Right frame:
gameDisplay.blit(right_table, (600, 0))
# Information
# Level
gameDisplay.blit(level, (613, 13))
gameDisplay.blit(number[int(str(level_now)[0])], (733, 13))
gameDisplay.blit(number[int(str(level_now)[1])], (758, 13))
# Money
gameDisplay.blit(money, (613, 74))
gameDisplay.blit(number[int(str(money_now)[0])], (655, 74))
gameDisplay.blit(number[int(str(money_now)[1])], (680, 74))
gameDisplay.blit(number[int(str(money_now)[2])], (705, 74))
gameDisplay.blit(number[int(str(money_now)[3])], (730, 74))
gameDisplay.blit(number[int(str(money_now)[4])], (755, 74))
# Tools
if not pygame.Rect((739, 135), upgrade_0_size).collidepoint(mouse_pos):
gameDisplay.blit(upgrade_0, (739, 135))
else:
gameDisplay.blit(upgrade_1, (739, 135))
if not pygame.Rect((613, 135), repair_0_size).collidepoint(mouse_pos):
gameDisplay.blit(repair_0, (613, 135))
else:
gameDisplay.blit(repair_1, (613, 135))
if not pygame.Rect((676, 135), sell_0_size).collidepoint(mouse_pos):
gameDisplay.blit(sell_0, (676, 135))
else:
gameDisplay.blit(sell_1, (676, 135))
# Towers
gameDisplay.blit(tower_wall, (612, 192))
if pygame.Rect((612, 192), tower_size).collidepoint(mouse_pos):
gameDisplay.blit(tower_select, (612, 192))
gameDisplay.blit(tower_arrow, (675, 192))
if pygame.Rect((675, 192), tower_size).collidepoint(mouse_pos):
gameDisplay.blit(tower_select, (675, 192))
#--------------------
#-- object
# Towers on map
for i in range(len(builded_towers_pos)):
gameDisplay.blit(builded_towers[i], builded_towers_pos[i][0])
# Live
gameDisplay.blit(text_live, (280, 550))
# Show the block mouse on it
if pygame.Rect((0, 0), (600, 600)).collidepoint(mouse_pos):
gameDisplay.blit(tower_select, (mouse_pos[0]//50*50, mouse_pos[1]//50*50))
# Drop the tower that you want to build
if drop_it:
gameDisplay.blit(tower_type[tower_type_num], mouse_pos_re)
# Monsters
if 0 not in monsters_pos:
monsters_pos[0] = {}
monsters_pos[0]["move_times"] = 0
monsters_pos[0]["pos"] = monster_init_pos_1
showMonster(monster, monsters_pos[0]["pos"])
cannot_move = False
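# every tower except a myst array (type 1) blocks the monster's next step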
for i in builded_towers_pos:
if pygame.Rect(i[0], tower_type_size[i[1]]).colliderect(pygame.Rect(monsters_pos[0]["pos"],monster_size)) and not i[1] == 1:
cannot_move = True
break
if not cannot_move:
monsters_pos[0]["move_times"] += 1
monsters_pos[0]["pos"] = monster_path_point_list[monsters_pos[0]["move_times"]]
elif pygame.Rect(builded_towers_pos[0][0], leading_role_size).colliderect(pygame.Rect(monsters_pos[0]["pos"],monster_size)):
live_now = str(int(live_now)-1)
del monsters_pos[0]
#--------------------
else:
# Menu page
gameDisplay.blit(text_1, (81, 121))
gameDisplay.blit(text_1, (80, 120))
gameDisplay.blit(text_2, (251, 191))
gameDisplay.blit(text_2, (250, 190))
gameDisplay.blit(text_line, (290, 260))
gameDisplay.blit(text_line, (290, 360))
gameDisplay.blit(text_line_1, (240, 340))
gameDisplay.blit(text_line_2, (240, 370))
gameDisplay.blit(text_line_1, (506, 370))
gameDisplay.blit(text_line_2, (506, 340))
gameDisplay.blit(play, (300, 350))
if pygame.Rect(300, 350, play_size[0], play_size[1]).collidepoint(mouse_pos):
gameDisplay.blit(text_line, (290, 270))
gameDisplay.blit(text_line, (290, 350))
gameDisplay.blit(text_line_1, (230, 320))
gameDisplay.blit(text_line_2, (230, 390))
gameDisplay.blit(text_line_1, (516, 390))
gameDisplay.blit(text_line_2, (516, 320))
pygame.display.update()
clock.tick(game_speed)
if __name__ == "__main__":
run()
pygame.quit() | createMonsterWalkPath | identifier_name |
Clout.js | /*!
* clout-js
* Copyright(c) 2015 - 2016 Muhammad Dadu
* MIT Licensed
*/
/**
* Clout
* @module clout-js/lib/Clout
*/
const path = require('path');
const fs = require('fs-extra');
const { EventEmitter } = require('events');
const debug = require('debug')('clout:core');
const async = require('async');
const _ = require('lodash');
const utils = require('./utils');
const Logger = require('./Logger');
const Config = require('./Config');
const CloutApiRoute = require('../hookslib/CloutApiRoute');
/**
* Priority for core hooks
* @typedef {(number|string)} priority
* @property {number} CONFIG 5
* @property {number} MIDDLEWARE 10
* @property {number} MODEL 15
* @property {number} API 20
* @property {number} CONTROLLER 25
* @const
*/
const CORE_PRIORITY = {
CONFIG: 5,
MIDDLEWARE: 10,
MODEL: 15,
API: 20,
CONTROLLER: 25,
};
const CLOUT_MODULE_PATH = path.join(__dirname, '..');
/**
* Clout application
* @class
*/
class Clout extends EventEmitter {
/**
* @constructor
* @param {path} rootDirectory application directory
*/
constructor(rootDirectory) {
super();
this.process = process;
this.handleProcess();
this.rootDirectory = null;
this.package = {};
this.applicationPackage = {};
this.config = {};
this.logger = { debug };
this.app = null;
this.server = {};
this.modules = [];
this.moduleCache = [];
// expose core libraries
this.utils = utils;
this.async = async;
this._ = _;
this.fs = fs;
// allow application hooks (Synchronous)
this.CORE_PRIORITY = CORE_PRIORITY;
this.hooks = {
start: [],
stop: [],
reload: [],
};
// Load clout configuration
this.config = new Config();
this.config.loadFromDir(path.join(__dirname, '../resources/conf'));
this.applicationPackage = {};
// load clout package.json
this.package = require(path.join(__dirname, '../package.json'));
// load clout modules
if (this.package.modules) {
this.addModules(this.package.modules);
}
if (rootDirectory) {
// set application root directory
this.rootDirectory = path.resolve(rootDirectory);
// load application manifest
['package.json', 'clout.json'].forEach((fileName) => {
const filePath = path.resolve(this.rootDirectory, fileName);
if (!fs.existsSync(filePath)) {
return debug(`${fileName} not found`);
}
return _.merge(this.applicationPackage, require(filePath));
});
this.process.title = `[clout-js v${this.package.version}] ${this.applicationPackage.name}`;
// add rootdir to node_modules
module.paths.unshift(path.join(this.rootDirectory, 'node_modules'));
// load modules from application manifest
if (this.applicationPackage.modules) {
this.addModules(this.applicationPackage.modules);
}
}
// append module configuration
this.modules.forEach(module => this.config.loadFromDir(path.join(module.path, 'conf')));
// append application configuration (Overrides module conf)
this.config.loadFromDir(path.join(this.rootDirectory, 'conf'));
// initialize logger
this.logger = new Logger(this);
// 1) load core hooks
// 2) load application hooks
// 3) load module hooks
this.loadHooksFromDir(CLOUT_MODULE_PATH)
.then(() => this.loadHooksFromDir(this.rootDirectory))
.then(() => Promise.all(this.modules.map(module => this.loadHooksFromDir(module.path))))
.then((moduleHooks) => {
this.initialized = true;
return moduleHooks;
})
.catch(err => console.error(err));
}
/**
* hook into clout runtime
* @param {string} event event name
* @param {Function} fn function to execute
* @param {String} fn._name hook name
* @param {String} fn.group hook group
* @param {Number} priority function priority
* @param {Boolean} override override existing
* @example
* // register a function to the hook
* clout.registerHook('start', function (next) {
* next();
* });
* // invoking an error in clout runtime
* clout.registerHook('start', function (next) {
* next(new Error('Error executing function'));
* });
*/
registerHook(event, fn, priority, override) {
const hasPriority = typeof priority !== 'undefined';
const hasEvent = this.hooks[event];
debug('registerHook:event=%s:fn=%s:priority=%s', event, hasEvent, priority);
if (!hasEvent) {
throw new Error('Invalid Hook Event');
}
if (hasPriority) {
fn.priority = priority;
}
// find existing, override
if (override === true) {
debug('override');
for (let i = 0, l = this.hooks[event].length; i < l; i += 1) {
const hook = this.hooks[event][i];
if (hook._name !== null && hook._name === fn._name && hook.group === fn.group) {
debug('match found, overridden');
this.hooks[event][i] = fn;
return;
}
}
}
// push if no priority
if (!fn.priority) {
debug('push hook (no priority)');
this.hooks[event].push(fn);
return;
}
// find the correct place to register hook
for (let i = 0, l = this.hooks[event].length; i < l; i += 1) {
const tmpPriority = this.hooks[event][i].priority || 99999;
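// a hook without a priority is treated as 99999 so it sorts last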
if (fn.priority < tmpPriority) |
}
debug('push hook (lowest priority yet)');
this.hooks[event].push(fn);
}
/**
* Loads hooks from directory
* @param {Path} dir directory
* @return {Promise} promise
*/
loadHooksFromDir(dir) {
const glob = path.join(dir, '/hooks/**/*.js');
const files = utils.getGlobbedFiles(glob);
debug('loadHooksFromDir: %s', dir);
return new Promise((resolve, reject) => {
async.each(files, (file, next) => {
debug('loading hooks from file: %s', String(file));
const hooks = require(file);
const keys = Object.keys(hooks);
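// each exported key is a hook definition: { event, fn, priority?, override? }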
keys.forEach((key) => {
const hook = hooks[key];
const args = [];
debug('Loading hook: %s', key);
// create args
if (!hook.event || !hook.fn) {
throw new Error('Hook missing attributes');
}
hook.fn.group = file.split('hooks/')[1].replace('.js', '');
hook.fn._name = key;
args.push(hook.event);
args.push(hook.fn);
if (typeof hook.priority !== 'undefined') {
if (typeof hook.priority === 'string') {
if (!this.CORE_PRIORITY[hook.priority]) {
throw new Error('Invalid priority type');
}
hook.priority = this.CORE_PRIORITY[hook.priority];
}
args.push(hook.priority);
} else {
args.push(null);
}
if (hook.override) {
args.push(true);
}
this.registerHook(...args);
});
next();
}, (err) => {
if (err) {
debug(err);
return reject(err);
}
debug('all hooks loaded from %s', dir);
resolve();
});
});
}
addModules(modules) {
debug('loading modules', JSON.stringify(modules));
modules.forEach(moduleName => this.addModule(moduleName));
}
/**
* Load clout-js node module
* @param {string} moduleName clout node module name
*/
addModule(moduleName) {
if (this.moduleCache.includes(moduleName)) {
debug('module: %s already loaded', moduleName);
return;
}
this.logger.debug('loading module: %s', moduleName);
this.moduleCache.push(moduleName);
const cloutModule = {
name: moduleName,
path: path.dirname(require.resolve(moduleName)),
manifest: {},
};
this.modules.push(cloutModule);
debug(cloutModule);
// load module manifest
['package.json', 'clout.json'].forEach((fileName) => {
const filePath = path.resolve(cloutModule.path, fileName);
if (!fs.existsSync(filePath)) {
return debug(`${fileName} not found`);
}
_.merge(cloutModule.manifest, require(filePath));
});
// load module modules
if (cloutModule.manifest.modules) {
debug('%s loading modules %s', moduleName, cloutModule.manifest.modules);
this.addModules(cloutModule.manifest.modules);
}
}
/**
* Start clout
* @return {Promise} returns a promise
*/
start() {
this.emit('initialized');
if (!this.initialized) {
return new Promise((resolve) => {
setTimeout(() => resolve(this.start()), 100);
});
}
this.emit('start');
return new Promise((resolve, reject) => {
process.nextTick(() => {
async.eachLimit(this.hooks.start, 1, (hook, next) => {
debug('executing', hook.name || hook._name, hook.group);
const hookResponse = hook.apply(this, [next]);
// support promises
if (hookResponse && typeof hookResponse.then === 'function') {
hookResponse.then(() => next(), err => next(err));
}
}, (err) => {
if (err) {
debug(err);
return reject(err);
}
resolve();
this.emit('started');
});
});
});
}
// TODO:- investigate if we still need this?
/**
* Add API
* @param {string} path api path
* @param {function} fn express function
*/
addApi(apiPath, fn) {
if (this.core.api) {
this.core.addRoute(new CloutApiRoute({
path: apiPath,
fn,
}));
}
}
/**
* Stop clout
* @return {Promise} returns a promise
*/
stop() {
this.emit('stop');
return new Promise((resolve, reject) => {
async.eachLimit(this.hooks.stop, 1, (hook, next) => {
hook.apply(this, [next]);
}, (err) => {
if (err) {
debug(err);
return reject(err);
}
resolve();
this.emit('stopped');
});
});
}
/**
* Reload clout
* @return {Promise} returns a promise
*/
reload() {
this.emit('reload');
return this.stop()
.then(this.start)
.then(() => this.emit('reloaded'));
}
handleProcess() {
this.process.on('unhandledRejection', err => console.error(err));
this.process.on('uncaughtException', (err) => {
console.error(err);
process.exit(0);
});
}
}
module.exports = Clout;
module.exports.PRIORITY = CORE_PRIORITY;
| {
debug('push hook at index %s', String(i));
this.hooks[event].splice(i, 0, fn);
return;
} | conditional_block |
Clout.js | /*!
* clout-js
* Copyright(c) 2015 - 2016 Muhammad Dadu
* MIT Licensed
*/
/**
* Clout
* @module clout-js/lib/Clout
*/
const path = require('path');
const fs = require('fs-extra');
const { EventEmitter } = require('events');
const debug = require('debug')('clout:core');
const async = require('async');
const _ = require('lodash');
const utils = require('./utils');
const Logger = require('./Logger');
const Config = require('./Config');
const CloutApiRoute = require('../hookslib/CloutApiRoute');
/**
* Priority for core hooks
* @typedef {(number|string)} priority
* @property {number} CONFIG 5
* @property {number} MIDDLEWARE 10
* @property {number} MODEL 15
* @property {number} API 20
* @property {number} CONTROLLER 25
* @const
*/
const CORE_PRIORITY = {
CONFIG: 5,
MIDDLEWARE: 10,
MODEL: 15,
API: 20,
CONTROLLER: 25,
};
const CLOUT_MODULE_PATH = path.join(__dirname, '..');
/**
* Clout application
* @class
*/
class Clout extends EventEmitter {
/**
* @constructor
* @param {path} rootDirectory application directory
*/
constructor(rootDirectory) {
super();
this.process = process;
this.handleProcess();
this.rootDirectory = null;
this.package = {};
this.applicationPackage = {};
this.config = {};
this.logger = { debug };
this.app = null;
this.server = {};
this.modules = [];
this.moduleCache = [];
// expose core libraries
this.utils = utils;
this.async = async;
this._ = _;
this.fs = fs;
// allow application hooks (Synchronous)
this.CORE_PRIORITY = CORE_PRIORITY;
this.hooks = {
start: [],
stop: [],
reload: [],
};
// Load clout configuration
this.config = new Config();
this.config.loadFromDir(path.join(__dirname, '../resources/conf'));
this.applicationPackage = {};
// load clout package.json
this.package = require(path.join(__dirname, '../package.json'));
// load clout modules
if (this.package.modules) {
this.addModules(this.package.modules);
}
if (rootDirectory) {
// set application root directory
this.rootDirectory = path.resolve(rootDirectory);
// load application manifest
['package.json', 'clout.json'].forEach((fileName) => {
const filePath = path.resolve(this.rootDirectory, fileName);
if (!fs.existsSync(filePath)) {
return debug(`${fileName} not found`);
}
return _.merge(this.applicationPackage, require(filePath));
});
this.process.title = `[clout-js v${this.package.version}] ${this.applicationPackage.name}`;
// add rootdir to node_modules
module.paths.unshift(path.join(this.rootDirectory, 'node_modules'));
// load modules from application manifest
if (this.applicationPackage.modules) {
this.addModules(this.applicationPackage.modules);
}
}
// append module configuration
this.modules.forEach(module => this.config.loadFromDir(path.join(module.path, 'conf')));
// append application configuration (Overrides module conf)
this.config.loadFromDir(path.join(this.rootDirectory, 'conf'));
// initialize logger
this.logger = new Logger(this);
// 1) load core hooks
// 2) load application hooks
// 3) load module hooks
this.loadHooksFromDir(CLOUT_MODULE_PATH)
.then(() => this.loadHooksFromDir(this.rootDirectory))
.then(() => Promise.all(this.modules.map(module => this.loadHooksFromDir(module.path))))
.then((moduleHooks) => {
this.initialized = true;
return moduleHooks;
})
.catch(err => console.error(err));
}
/**
* hook into clout runtime
* @param {string} event event name
* @param {Function} fn function to execute
* @param {String} fn._name hook name
* @param {String} fn.group hook group
* @param {Number} priority function priority
* @param {Boolean} override override existing
* @example
* // register a function to the hook
* clout.registerHook('start', function (next) {
* next();
* });
* // invoking an error in clout runtime
* clout.registerHook('start', function (next) {
* next(new Error('Error executing function'));
* });
*/
registerHook(event, fn, priority, override) {
const hasPriority = typeof priority !== 'undefined';
const hasEvent = this.hooks[event];
debug('registerHook:event=%s:fn=%s:priority=%s', event, hasEvent, priority);
if (!hasEvent) {
throw new Error('Invalid Hook Event');
}
if (hasPriority) {
fn.priority = priority;
}
// find existing, override
if (override === true) {
debug('override');
for (let i = 0, l = this.hooks[event].length; i < l; i += 1) {
const hook = this.hooks[event][i];
if (hook._name !== null && hook._name === fn._name && hook.group === fn.group) {
debug('match found, overridden');
this.hooks[event][i] = fn;
return;
}
}
}
// push if no priority
if (!fn.priority) {
debug('push hook (no priority)');
this.hooks[event].push(fn);
return;
}
// find the correct place to register hook
for (let i = 0, l = this.hooks[event].length; i < l; i += 1) {
const tmpPriority = this.hooks[event][i].priority || 99999;
if (fn.priority < tmpPriority) {
debug('push hook at index %s', String(i));
this.hooks[event].splice(i, 0, fn);
return;
}
}
debug('push hook (lowest priority yet)');
this.hooks[event].push(fn); | * @param {Path} dir directory
* @return {Promise} promise
*/
loadHooksFromDir(dir) {
const glob = path.join(dir, '/hooks/**/*.js');
const files = utils.getGlobbedFiles(glob);
debug('loadHooksFromDir: %s', dir);
return new Promise((resolve, reject) => {
async.each(files, (file, next) => {
debug('loading hooks from file: %s', String(file));
const hooks = require(file);
const keys = Object.keys(hooks);
keys.forEach((key) => {
const hook = hooks[key];
const args = [];
debug('Loading hook: %s', key);
// create args
if (!hook.event || !hook.fn) {
throw new Error('Hook missing attributes');
}
hook.fn.group = file.split('hooks/')[1].replace('.js', '');
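// the group is the hook's path under hooks/; overrides are matched on _name + group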
hook.fn._name = key;
args.push(hook.event);
args.push(hook.fn);
if (typeof hook.priority !== 'undefined') {
if (typeof hook.priority === 'string') {
if (!this.CORE_PRIORITY[hook.priority]) {
throw new Error('Invalid priority type');
}
hook.priority = this.CORE_PRIORITY[hook.priority];
}
args.push(hook.priority);
} else {
args.push(null);
}
if (hook.override) {
args.push(true);
}
this.registerHook(...args);
});
next();
}, (err) => {
if (err) {
debug(err);
return reject(err);
}
debug('all hooks loaded from %s', dir);
resolve();
});
});
}
addModules(modules) {
debug('loading modules', JSON.stringify(modules));
modules.forEach(moduleName => this.addModule(moduleName));
}
/**
* Load clout-js node module
* @param {string} moduleName clout node module name
*/
addModule(moduleName) {
if (this.moduleCache.includes(moduleName)) {
debug('module: %s already loaded', moduleName);
return;
}
this.logger.debug('loading module: %s', moduleName);
this.moduleCache.push(moduleName);
const cloutModule = {
name: moduleName,
path: path.dirname(require.resolve(moduleName)),
manifest: {},
};
this.modules.push(cloutModule);
debug(cloutModule);
// load module manifest
['package.json', 'clout.json'].forEach((fileName) => {
const filePath = path.resolve(cloutModule.path, fileName);
if (!fs.existsSync(filePath)) {
return debug(`${fileName} not found`);
}
_.merge(cloutModule.manifest, require(filePath));
});
// load module modules
if (cloutModule.manifest.modules) {
debug('%s loading modules %s', moduleName, cloutModule.manifest.modules);
this.addModules(cloutModule.manifest.modules);
}
}
/**
* Start clout
* @return {Promise} returns a promise
*/
start() {
this.emit('initialized');
if (!this.initialized) {
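// hook loading is asynchronous; poll every 100ms until initialisation completes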
return new Promise((resolve) => {
setTimeout(() => resolve(this.start()), 100);
});
}
this.emit('start');
return new Promise((resolve, reject) => {
process.nextTick(() => {
async.eachLimit(this.hooks.start, 1, (hook, next) => {
debug('executing', hook.name || hook._name, hook.group);
const hookResponse = hook.apply(this, [next]);
// support promises
if (hookResponse && typeof hookResponse.then === 'function') {
hookResponse.then(() => next(), err => next(err));
}
}, (err) => {
if (err) {
debug(err);
return reject(err);
}
resolve();
this.emit('started');
});
});
});
}
// TODO:- investigate if we still need this?
/**
* Add API
* @param {string} path api path
* @param {function} fn express function
*/
addApi(apiPath, fn) {
if (this.core.api) {
this.core.addRoute(new CloutApiRoute({
path: apiPath,
fn,
}));
}
}
/**
* Stop clout
* @return {Promise} returns a promise
*/
stop() {
this.emit('stop');
return new Promise((resolve, reject) => {
async.eachLimit(this.hooks.stop, 1, (hook, next) => {
hook.apply(this, [next]);
}, (err) => {
if (err) {
debug(err);
return reject(err);
}
resolve();
this.emit('stopped');
});
});
}
/**
* Reload clout
* @return {Promise} returns a promise
*/
reload() {
this.emit('reload');
return this.stop()
.then(this.start)
.then(() => this.emit('reloaded'));
}
handleProcess() {
this.process.on('unhandledRejection', err => console.error(err));
this.process.on('uncaughtException', (err) => {
console.error(err);
process.exit(0);
});
}
}
module.exports = Clout;
module.exports.PRIORITY = CORE_PRIORITY; | }
/**
* Loads hooks from directory | random_line_split |
Clout.js | /*!
* clout-js
* Copyright(c) 2015 - 2016 Muhammad Dadu
* MIT Licensed
*/
/**
* Clout
* @module clout-js/lib/Clout
*/
const path = require('path');
const fs = require('fs-extra');
const { EventEmitter } = require('events');
const debug = require('debug')('clout:core');
const async = require('async');
const _ = require('lodash');
const utils = require('./utils');
const Logger = require('./Logger');
const Config = require('./Config');
const CloutApiRoute = require('../hookslib/CloutApiRoute');
/**
* Priority for core hooks
* @typedef {(number|string)} priority
* @property {number} CONFIG 5
* @property {number} MIDDLEWARE 10
* @property {number} MODEL 15
* @property {number} API 20
* @property {number} CONTROLLER 25
* @const
*/
const CORE_PRIORITY = {
CONFIG: 5,
MIDDLEWARE: 10,
MODEL: 15,
API: 20,
CONTROLLER: 25,
};
const CLOUT_MODULE_PATH = path.join(__dirname, '..');
/**
* Clout application
* @class
*/
class Clout extends EventEmitter {
/**
* @constructor
* @param {path} rootDirectory application directory
*/
constructor(rootDirectory) {
super();
this.process = process;
this.handleProcess();
this.rootDirectory = null;
this.package = {};
this.applicationPackage = {};
this.config = {};
this.logger = { debug };
this.app = null;
this.server = {};
this.modules = [];
this.moduleCache = [];
// expose core libraries
this.utils = utils;
this.async = async;
this._ = _;
this.fs = fs;
// allow application hooks (Synchronous)
this.CORE_PRIORITY = CORE_PRIORITY;
this.hooks = {
start: [],
stop: [],
reload: [],
};
// Load clout configuration
this.config = new Config();
this.config.loadFromDir(path.join(__dirname, '../resources/conf'));
this.applicationPackage = {};
// load clout package.json
this.package = require(path.join(__dirname, '../package.json'));
// load clout modules
if (this.package.modules) {
this.addModules(this.package.modules);
}
if (rootDirectory) {
// set application root directory
this.rootDirectory = path.resolve(rootDirectory);
// load application manifest
['package.json', 'clout.json'].forEach((fileName) => {
const filePath = path.resolve(this.rootDirectory, fileName);
if (!fs.existsSync(filePath)) {
return debug(`${fileName} not found`);
}
return _.merge(this.applicationPackage, require(filePath));
});
this.process.title = `[clout-js v${this.package.version}] ${this.applicationPackage.name}`;
// add rootdir to node_modules
module.paths.unshift(path.join(this.rootDirectory, 'node_modules'));
// load modules from application manifest
if (this.applicationPackage.modules) {
this.addModules(this.applicationPackage.modules);
}
}
// append module configuration
this.modules.forEach(module => this.config.loadFromDir(path.join(module.path, 'conf')));
// append application configuration (Overrides module conf)
this.config.loadFromDir(path.join(this.rootDirectory, 'conf'));
// initialize logger
this.logger = new Logger(this);
// 1) load core hooks
// 2) load application hooks
// 3) load module hooks
this.loadHooksFromDir(CLOUT_MODULE_PATH)
.then(() => this.loadHooksFromDir(this.rootDirectory))
.then(() => Promise.all(this.modules.map(module => this.loadHooksFromDir(module.path))))
.then((moduleHooks) => {
this.initialized = true;
return moduleHooks;
})
.catch(err => console.error(err));
}
/**
* hook into clout runtime
* @param {string} event event name
* @param {Function} fn function to execute
* @param {String} fn._name hook name
* @param {String} fn.group hook group
* @param {Number} priority function priority
* @param {Boolean} override override existing
* @example
* // register a function to the hook
* clout.registerHook('start', function (next) {
* next();
* });
* // invoking an error in clout runtime
* clout.registerHook('start', function (next) {
* next(new Error('Error executing function'));
* });
*/
registerHook(event, fn, priority, override) {
const hasPriority = typeof priority !== 'undefined';
const hasEvent = this.hooks[event];
debug('registerHook:event=%s:fn=%s:priority=%s', event, hasEvent, priority);
if (!hasEvent) {
throw new Error('Invalid Hook Event');
}
if (hasPriority) {
fn.priority = priority;
}
// find existing, override
if (override === true) {
debug('override');
for (let i = 0, l = this.hooks[event].length; i < l; i += 1) {
const hook = this.hooks[event][i];
if (hook._name !== null && hook._name === fn._name && hook.group === fn.group) {
debug('match found, overridden');
this.hooks[event][i] = fn;
return;
}
}
}
// push if no priority
if (!fn.priority) {
debug('push hook (no priority)');
this.hooks[event].push(fn);
return;
}
// find the correct place to register hook
for (let i = 0, l = this.hooks[event].length; i < l; i += 1) {
const tmpPriority = this.hooks[event][i].priority || 99999;
if (fn.priority < tmpPriority) {
debug('push hook at index %s', String(i));
this.hooks[event].splice(i, 0, fn);
return;
}
}
debug('push hook (lowest priority yet)');
this.hooks[event].push(fn);
}
/**
* Loads hooks from directory
* @param {Path} dir directory
* @return {Promise} promise
*/
loadHooksFromDir(dir) {
const glob = path.join(dir, '/hooks/**/*.js');
const files = utils.getGlobbedFiles(glob);
debug('loadHooksFromDir: %s', dir);
return new Promise((resolve, reject) => {
async.each(files, (file, next) => {
debug('loading hooks from file: %s', String(file));
const hooks = require(file);
const keys = Object.keys(hooks);
keys.forEach((key) => {
const hook = hooks[key];
const args = [];
debug('Loading hook: %s', key);
// create args
if (!hook.event || !hook.fn) {
throw new Error('Hook missing attributes');
}
hook.fn.group = file.split('hooks/')[1].replace('.js', '');
hook.fn._name = key;
args.push(hook.event);
args.push(hook.fn);
if (typeof hook.priority !== 'undefined') {
if (typeof hook.priority === 'string') {
if (!this.CORE_PRIORITY[hook.priority]) {
throw new Error('Invalid priority type');
}
hook.priority = this.CORE_PRIORITY[hook.priority];
}
args.push(hook.priority);
} else {
args.push(null);
}
if (hook.override) {
args.push(true);
}
this.registerHook(...args);
});
next();
}, (err) => {
if (err) {
debug(err);
return reject(err);
}
debug('all hooks loaded from %s', dir);
resolve();
});
});
}
addModules(modules) |
/**
* Load clout-js node module
* @param {string} moduleName clout node module name
*/
addModule(moduleName) {
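// moduleCache prevents the same module being loaded twice via nested manifests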
if (this.moduleCache.includes(moduleName)) {
debug('module: %s already loaded', moduleName);
return;
}
this.logger.debug('loading module: %s', moduleName);
this.moduleCache.push(moduleName);
const cloutModule = {
name: moduleName,
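// resolve the module's on-disk directory so its conf/ and hooks/ can be found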
path: path.dirname(require.resolve(moduleName)),
manifest: {},
};
this.modules.push(cloutModule);
debug(cloutModule);
// load module manifest
['package.json', 'clout.json'].forEach((fileName) => {
const filePath = path.resolve(cloutModule.path, fileName);
if (!fs.existsSync(filePath)) {
return debug(`${fileName} not found`);
}
_.merge(cloutModule.manifest, require(filePath));
});
// load module modules
if (cloutModule.manifest.modules) {
debug('%s loading modules %s', moduleName, cloutModule.manifest.modules);
this.addModules(cloutModule.manifest.modules);
}
}
/**
* Start clout
* @return {Promise} returns a promise
*/
start() {
this.emit('initialized');
if (!this.initialized) {
return new Promise((resolve) => {
setTimeout(() => resolve(this.start()), 100);
});
}
this.emit('start');
return new Promise((resolve, reject) => {
process.nextTick(() => {
async.eachLimit(this.hooks.start, 1, (hook, next) => {
debug('executing', hook.name || hook._name, hook.group);
const hookResponse = hook.apply(this, [next]);
// support promises
if (hookResponse && typeof hookResponse.then === 'function') {
hookResponse.then(() => next(), err => next(err));
}
}, (err) => {
if (err) {
debug(err);
return reject(err);
}
resolve();
this.emit('started');
});
});
});
}
// TODO:- investigate if we still need this?
/**
* Add API
* @param {string} path api path
* @param {function} fn express function
*/
addApi(apiPath, fn) {
if (this.core.api) {
this.core.addRoute(new CloutApiRoute({
path: apiPath,
fn,
}));
}
}
/**
* Stop clout
* @return {Promise} returns a promise
*/
stop() {
this.emit('stop');
return new Promise((resolve, reject) => {
async.eachLimit(this.hooks.stop, 1, (hook, next) => {
hook.apply(this, [next]);
}, (err) => {
if (err) {
debug(err);
return reject(err);
}
resolve();
this.emit('stopped');
});
});
}
/**
* Reload clout
* @return {Promise} returns a promise
*/
reload() {
this.emit('reload');
return this.stop()
.then(this.start)
.then(() => this.emit('reloaded'));
}
handleProcess() {
this.process.on('unhandledRejection', err => console.error(err));
this.process.on('uncaughtException', (err) => {
console.error(err);
process.exit(0);
});
}
}
module.exports = Clout;
module.exports.PRIORITY = CORE_PRIORITY;
| {
debug('loading modules', JSON.stringify(modules));
modules.forEach(moduleName => this.addModule(moduleName));
} | identifier_body |
Clout.js | /*!
* clout-js
* Copyright(c) 2015 - 2016 Muhammad Dadu
* MIT Licensed
*/
/**
* Clout
* @module clout-js/lib/Clout
*/
const path = require('path');
const fs = require('fs-extra');
const { EventEmitter } = require('events');
const debug = require('debug')('clout:core');
const async = require('async');
const _ = require('lodash');
const utils = require('./utils');
const Logger = require('./Logger');
const Config = require('./Config');
const CloutApiRoute = require('../hookslib/CloutApiRoute');
/**
* Priority for core hooks
* @typedef {(number|string)} priority
* @property {number} CONFIG 5
* @property {number} MIDDLEWARE 10
* @property {number} MODEL 15
* @property {number} API 20
* @property {number} CONTROLLER 25
* @const
*/
const CORE_PRIORITY = {
CONFIG: 5,
MIDDLEWARE: 10,
MODEL: 15,
API: 20,
CONTROLLER: 25,
};
const CLOUT_MODULE_PATH = path.join(__dirname, '..');
/**
* Clout application
* @class
*/
class Clout extends EventEmitter {
/**
* @constructor
* @param {path} rootDirectory application directory
*/
constructor(rootDirectory) {
super();
this.process = process;
this.handleProcess();
this.rootDirectory = null;
this.package = {};
this.applicationPackage = {};
this.config = {};
this.logger = { debug };
this.app = null;
this.server = {};
this.modules = [];
this.moduleCache = [];
// expose core libraries
this.utils = utils;
this.async = async;
this._ = _;
this.fs = fs;
// allow application hooks (Synchronous)
this.CORE_PRIORITY = CORE_PRIORITY;
this.hooks = {
start: [],
stop: [],
reload: [],
};
// Load clout configuration
this.config = new Config();
this.config.loadFromDir(path.join(__dirname, '../resources/conf'));
this.applicationPackage = {};
// load clout package.json
this.package = require(path.join(__dirname, '../package.json'));
// load clout modules
if (this.package.modules) {
this.addModules(this.package.modules);
}
if (rootDirectory) {
// set application root directory
this.rootDirectory = path.resolve(rootDirectory);
// load application manifest
['package.json', 'clout.json'].forEach((fileName) => {
const filePath = path.resolve(this.rootDirectory, fileName);
if (!fs.existsSync(filePath)) {
return debug(`${fileName} not found`);
}
return _.merge(this.applicationPackage, require(filePath));
});
this.process.title = `[clout-js v${this.package.version}] ${this.applicationPackage.name}`;
// add rootdir to node_modules
module.paths.unshift(path.join(this.rootDirectory, 'node_modules'));
// load modules from application manifest
if (this.applicationPackage.modules) {
this.addModules(this.applicationPackage.modules);
}
}
// append module configuration
this.modules.forEach(module => this.config.loadFromDir(path.join(module.path, 'conf')));
// append application configuration (Overrides module conf)
this.config.loadFromDir(path.join(this.rootDirectory, 'conf'));
// initialize logger
this.logger = new Logger(this);
// 1) load core hooks
// 2) load application hooks
// 3) load module hooks
this.loadHooksFromDir(CLOUT_MODULE_PATH)
.then(() => this.loadHooksFromDir(this.rootDirectory))
.then(() => Promise.all(this.modules.map(module => this.loadHooksFromDir(module.path))))
.then((moduleHooks) => {
this.initialized = true;
return moduleHooks;
})
.catch(err => console.error(err));
}
/**
* hook into clout runtime
* @param {string} event event name
* @param {Function} fn function to execute
* @param {String} fn._name hook name
* @param {String} fn.group hook group
* @param {Number} priority function priority
* @param {Boolean} override override existing
* @example
* // register a function to the hook
* clout.registerHook('start', function (next) {
* next();
* });
* // invoking an error in clout runtime
* clout.registerHook('start', function (next) {
* next(new Error('Error executing function'));
* });
*/
registerHook(event, fn, priority, override) {
const hasPriority = typeof priority !== 'undefined';
const hasEvent = this.hooks[event];
debug('registerHook:event=%s:fn=%s:priority=%s', event, hasEvent, priority);
if (!hasEvent) {
throw new Error('Invalid Hook Event');
}
if (hasPriority) {
fn.priority = priority;
}
// find existing, override
if (override === true) {
debug('override');
for (let i = 0, l = this.hooks[event].length; i < l; i += 1) {
const hook = this.hooks[event][i];
if (hook._name !== null && hook._name === fn._name && hook.group === fn.group) {
debug('match found, overridden');
this.hooks[event][i] = fn;
return;
}
}
}
// push if no priority
if (!fn.priority) {
debug('push hook (no priority)');
this.hooks[event].push(fn);
return;
}
// find the correct place to register hook
for (let i = 0, l = this.hooks[event].length; i < l; i += 1) {
const tmpPriority = this.hooks[event][i].priority || 99999;
if (fn.priority < tmpPriority) {
debug('push hook at index %s', String(i));
this.hooks[event].splice(i, 0, fn);
return;
}
}
debug('push hook (lowest priority yet)');
this.hooks[event].push(fn);
}
/**
* Loads hooks from directory
* @param {Path} dir directory
* @return {Promise} promise
*/
| (dir) {
const glob = path.join(dir, '/hooks/**/*.js');
const files = utils.getGlobbedFiles(glob);
debug('loadHooksFromDir: %s', dir);
return new Promise((resolve, reject) => {
async.each(files, (file, next) => {
debug('loading hooks from file: %s', String(file));
const hooks = require(file);
const keys = Object.keys(hooks);
keys.forEach((key) => {
const hook = hooks[key];
const args = [];
debug('Loading hook: %s', key);
// create args
if (!hook.event || !hook.fn) {
throw new Error('Hook missing attributes');
}
hook.fn.group = file.split('hooks/')[1].replace('.js', '');
hook.fn._name = key;
args.push(hook.event);
args.push(hook.fn);
if (typeof hook.priority !== 'undefined') {
if (typeof hook.priority === 'string') {
if (!this.CORE_PRIORITY[hook.priority]) {
throw new Error('Invalid priority type');
}
hook.priority = this.CORE_PRIORITY[hook.priority];
}
args.push(hook.priority);
} else {
args.push(null);
}
if (hook.override) {
args.push(true);
}
this.registerHook(...args);
});
next();
}, (err) => {
if (err) {
debug(err);
return reject(err);
}
debug('all hooks loaded from %s', dir);
resolve();
});
});
}
addModules(modules) {
debug('loading modules', JSON.stringify(modules));
modules.forEach(moduleName => this.addModule(moduleName));
}
/**
* Load clout-js node module
* @param {string} moduleName clout node module name
*/
addModule(moduleName) {
if (this.moduleCache.includes(moduleName)) {
debug('module: %s already loaded', moduleName);
return;
}
this.logger.debug('loading module: %s', moduleName);
this.moduleCache.push(moduleName);
const cloutModule = {
name: moduleName,
path: path.dirname(require.resolve(moduleName)),
manifest: {},
};
this.modules.push(cloutModule);
debug(cloutModule);
// load module manifest
['package.json', 'clout.json'].forEach((fileName) => {
const filePath = path.resolve(cloutModule.path, fileName);
if (!fs.existsSync(filePath)) {
return debug(`${fileName} not found`);
}
_.merge(cloutModule.manifest, require(filePath));
});
// load module modules
if (cloutModule.manifest.modules) {
debug('%s loading modules %s', moduleName, cloutModule.manifest.modules);
this.addModules(cloutModule.manifest.modules);
}
}
/**
* Start clout
* @return {Promise} returns a promise
*/
start() {
this.emit('initialized');
if (!this.initialized) {
return new Promise((resolve) => {
setTimeout(() => resolve(this.start()), 100);
});
}
this.emit('start');
return new Promise((resolve, reject) => {
process.nextTick(() => {
async.eachLimit(this.hooks.start, 1, (hook, next) => {
debug('executing', hook.name || hook._name, hook.group);
const hookResponse = hook.apply(this, [next]);
// support promises
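// a hook may return a promise instead of calling next(); resolution continues, rejection aborts startup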
if (hookResponse && typeof hookResponse.then === 'function') {
hookResponse.then(() => next(), err => next(err));
}
}, (err) => {
if (err) {
debug(err);
return reject(err);
}
resolve();
this.emit('started');
});
});
});
}
// TODO:- investigate if we still need this?
/**
* Add API
* @param {string} path api path
* @param {function} fn express function
*/
addApi(apiPath, fn) {
if (this.core.api) {
this.core.addRoute(new CloutApiRoute({
path: apiPath,
fn,
}));
}
}
/**
* Stop clout
* @return {Promise} returns a promise
*/
stop() {
this.emit('stop');
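// run stop hooks one at a time (limit 1) so teardown preserves registration order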
return new Promise((resolve, reject) => {
async.eachLimit(this.hooks.stop, 1, (hook, next) => {
hook.apply(this, [next]);
}, (err) => {
if (err) {
debug(err);
return reject(err);
}
resolve();
this.emit('stopped');
});
});
}
/**
* Reload clout
* @return {Promise} returns a promise
*/
reload() {
this.emit('reload');
return this.stop()
.then(this.start)
.then(() => this.emit('reloaded'));
}
handleProcess() {
this.process.on('unhandledRejection', err => console.error(err));
this.process.on('uncaughtException', (err) => {
console.error(err);
process.exit(0);
});
}
}
module.exports = Clout;
module.exports.PRIORITY = CORE_PRIORITY;
| loadHooksFromDir | identifier_name |
htp1.py | import json
import logging
from typing import Optional, List
import semver
from autobahn.exception import Disconnected
from autobahn.twisted.websocket import WebSocketClientFactory, connectWS, WebSocketClientProtocol
from twisted.internet.protocol import ReconnectingClientFactory
from ezbeq.apis.ws import WsServer
from ezbeq.catalogue import CatalogueEntry, CatalogueProvider
from ezbeq.device import DeviceState, SlotState, PersistentDevice
logger = logging.getLogger('ezbeq.htp1')
class Htp1SlotState(SlotState):
def __init__(self):
super().__init__('HTP1')
class Htp1State(DeviceState):
def __init__(self, name: str):
self.__name = name
self.slot = Htp1SlotState()
self.slot.active = True
def serialise(self) -> dict:
return {
'name': self.__name,
'slots': [self.slot.as_dict()]
}
class Htp1(PersistentDevice[Htp1State]):
def __init__(self, name: str, config_path: str, cfg: dict, ws_server: WsServer, catalogue: CatalogueProvider):
super().__init__(config_path, name, ws_server)
self.__name = name
self.__catalogue = catalogue
self.__ip = cfg['ip']
self.__channels = cfg['channels']
self.__peq = {}
self.__supports_shelf = True
if not self.__channels:
raise ValueError('No channels supplied for HTP-1')
self.__client = Htp1Client(self.__ip, self)
def _load_initial_state(self) -> Htp1State:
return Htp1State(self.name)
def _merge_state(self, loaded: Htp1State, cached: dict) -> Htp1State:
if 'slots' in cached:
for slot in cached['slots']:
if 'id' in slot:
if slot['id'] == 'HTP1':
if slot['last']:
loaded.slot.last = slot['last']
return loaded
@property
def device_type(self) -> str:
return self.__class__.__name__.lower()
def update(self, params: dict) -> bool:
any_update = False
if 'slots' in params:
for slot in params['slots']:
if slot['id'] == 'HTP1':
if 'entry' in slot:
if slot['entry']:
match = self.__catalogue.find(slot['entry'])
if match:
self.load_filter('HTP1', match)
any_update = True
else:
self.clear_filter('HTP1')
any_update = True
return any_update
def __send(self, to_load: List['PEQ']):
logger.info(f"Sending {len(to_load)} filters")
while len(to_load) < 16:
peq = PEQ(len(to_load), fc=100, q=1, gain=0, filter_type_name='PeakingEQ')
to_load.append(peq)
ops = [peq.as_ops(c, use_shelf=self.__supports_shelf) for peq in to_load for c in self.__peq.keys()]
ops = [op for slot_ops in ops for op in slot_ops if op]
if ops:
self.__client.send('changemso [{"op":"replace","path":"/peq/peqsw","value":true}]')
self.__client.send(f"changemso {json.dumps(ops)}")
else:
logger.warning(f"Nothing to send")
def activate(self, slot: str) -> None:
def __do_it():
self._current_state.slot.active = True
self._hydrate_cache_broadcast(__do_it)
def load_filter(self, slot: str, entry: CatalogueEntry) -> None:
to_load = [PEQ(idx, fc=f['freq'], q=f['q'], gain=f['gain'], filter_type_name=f['type'])
for idx, f in enumerate(entry.filters)]
self._hydrate_cache_broadcast(lambda: self.__do_it(to_load, entry.formatted_title))
def __do_it(self, to_load: List['PEQ'], title: str):
try:
self.__send(to_load)
self._current_state.slot.last = title
except Exception as e:
            self._current_state.slot.last = 'ERROR'
raise e
def clear_filter(self, slot: str) -> None:
self._hydrate_cache_broadcast(lambda: self.__do_it([], 'Empty'))
def mute(self, slot: Optional[str], channel: Optional[int]) -> None:
raise NotImplementedError()
def unmute(self, slot: Optional[str], channel: Optional[int]) -> None:
raise NotImplementedError()
def set_gain(self, slot: Optional[str], channel: Optional[int], gain: float) -> None:
raise NotImplementedError()
def on_mso(self, mso: dict):
logger.info(f"Received {mso}")
version = mso['versions']['swVer']
version = version[1:] if version[0] == 'v' or version[0] == 'V' else version
try:
self.__supports_shelf = semver.parse_version_info(version) > semver.parse_version_info('1.4.0')
        except ValueError:
logger.error(f"Unable to parse version {mso['versions']['swVer']}, will not send shelf filters")
self.__supports_shelf = False
if not self.__supports_shelf:
logger.error(f"Device version {mso['versions']['swVer']} too old, lacks shelf filter support")
speakers = mso['speakers']['groups']
channels = ['lf', 'rf']
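        # speaker groups named 'lrX' describe left/right pairs and expand to two channels;
        # all other present groups map to a single channel name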
for group in [s for s, v in speakers.items() if 'present' in v and v['present'] is True]:
if group[0:2] == 'lr' and len(group) > 2:
channels.append('l' + group[2:])
channels.append('r' + group[2:])
else:
channels.append(group)
peq_slots = mso['peq']['slots']
filters = {c: [] for c in channels}
unknown_channels = set()
for idx, s in enumerate(peq_slots):
for c in channels:
if c in s['channels']:
filters[c].append(PEQ(idx, s['channels'][c]))
else:
unknown_channels.add(c)
if unknown_channels:
peq_channels = peq_slots[0]['channels'].keys()
logger.error(f"Unknown channels encountered [peq channels: {peq_channels}, unknown: {unknown_channels}]")
for c in filters.keys():
if c in self.__channels:
logger.info(f"Updating PEQ channel {c} with {filters[c]}")
self.__peq[c] = filters[c]
else:
logger.info(f"Discarding filter channel {c} - {filters[c]}")
def on_msoupdate(self, msoupdate: dict):
logger.info(f"Received {msoupdate}")
class Htp1Client:
def __init__(self, ip, listener):
self.__factory = Htp1ClientFactory(listener, f"ws://{ip}/ws/controller")
self.__connector = connectWS(self.__factory)
def send(self, msg: str):
self.__factory.broadcast(msg)
class Htp1Protocol(WebSocketClientProtocol):
def onConnecting(self, transport_details):
logger.info(f"Connecting to {transport_details}")
def onConnect(self, response):
logger.info(f"Connected to {response.peer}") | self.sendMessage('getmso'.encode('utf-8'), isBinary=False)
def onOpen(self):
logger.info("Connected to HTP1")
self.factory.register(self)
def onClose(self, was_clean, code, reason):
if was_clean:
logger.info(f"Disconnected code: {code} reason: {reason}")
else:
logger.warning(f"UNCLEAN! Disconnected code: {code} reason: {reason}")
def onMessage(self, payload, is_binary):
if is_binary:
logger.warning(f"Received {len(payload)} bytes in binary payload, ignoring")
else:
msg = payload.decode('utf8')
if msg.startswith('mso '):
logger.debug(f"Processing mso {msg}")
self.factory.listener.on_mso(json.loads(msg[4:]))
elif msg.startswith('msoupdate '):
logger.debug(f"Processing msoupdate {msg}")
self.factory.listener.on_msoupdate(json.loads(msg[10:]))
else:
logger.info(f"Received unknown payload {msg}")
class Htp1ClientFactory(WebSocketClientFactory, ReconnectingClientFactory):
protocol = Htp1Protocol
maxDelay = 5
initialDelay = 0.5
def __init__(self, listener, *args, **kwargs):
super(Htp1ClientFactory, self).__init__(*args, **kwargs)
self.__clients: List[Htp1Protocol] = []
self.listener = listener
self.setProtocolOptions(version=13)
def clientConnectionFailed(self, connector, reason):
logger.warning(f"Client connection failed {reason} .. retrying ..")
super().clientConnectionFailed(connector, reason)
def clientConnectionLost(self, connector, reason):
logger.warning(f"Client connection failed {reason} .. retrying ..")
super().clientConnectionLost(connector, reason)
def register(self, client: Htp1Protocol):
if client not in self.__clients:
logger.info(f"Registered device {client.peer}")
self.__clients.append(client)
else:
logger.info(f"Ignoring duplicate device {client.peer}")
def unregister(self, client: Htp1Protocol):
if client in self.__clients:
logger.info(f"Unregistering device {client.peer}")
self.__clients.remove(client)
else:
logger.info(f"Ignoring unregistered device {client.peer}")
def broadcast(self, msg):
if self.__clients:
disconnected_clients = []
for c in self.__clients:
logger.info(f"Sending to {c.peer} - {msg}")
try:
c.sendMessage(msg.encode('utf8'))
except Disconnected as e:
logger.exception(f"Failed to send to {c.peer}, discarding")
disconnected_clients.append(c)
for c in disconnected_clients:
self.unregister(c)
else:
raise ValueError(f"No devices connected, ignoring {msg}")
class PEQ:
def __init__(self, slot, params=None, fc=None, q=None, gain=None, filter_type_name=None):
self.slot = slot
if params is not None:
self.fc = params['Fc']
self.q = params['Q']
self.gain = params['gaindB']
self.filter_type = params.get('FilterType', 0)
self.filter_type_name = 'PeakingEQ' if self.filter_type == 0 else 'LowShelf' if self.filter_type == 1 else 'HighShelf'
else:
self.fc = fc
self.q = q
self.gain = gain
self.filter_type = 0 if filter_type_name == 'PeakingEQ' else 1 if filter_type_name == 'LowShelf' else 2
self.filter_type_name = filter_type_name
def as_ops(self, channel: str, use_shelf: bool = True):
if self.filter_type == 0 or use_shelf:
prefix = f"/peq/slots/{self.slot}/channels/{channel}"
ops = [
{
'op': 'replace',
'path': f"{prefix}/Fc",
'value': self.fc
},
{
'op': 'replace',
'path': f"{prefix}/Q",
'value': self.q
},
{
'op': 'replace',
'path': f"{prefix}/gaindB",
'value': self.gain
}
]
if use_shelf:
ops.append(
{
'op': 'replace',
'path': f"{prefix}/FilterType",
'value': self.filter_type
}
)
return ops
else:
return []
def __repr__(self):
return f"{self.slot}: {self.filter_type_name} {self.fc} Hz {self.gain} dB {self.q}" | random_line_split |
|
htp1.py | import json
import logging
from typing import Optional, List
import semver
from autobahn.exception import Disconnected
from autobahn.twisted.websocket import WebSocketClientFactory, connectWS, WebSocketClientProtocol
from twisted.internet.protocol import ReconnectingClientFactory
from ezbeq.apis.ws import WsServer
from ezbeq.catalogue import CatalogueEntry, CatalogueProvider
from ezbeq.device import DeviceState, SlotState, PersistentDevice
logger = logging.getLogger('ezbeq.htp1')
class Htp1SlotState(SlotState):
def __init__(self):
super().__init__('HTP1')
class Htp1State(DeviceState):
def __init__(self, name: str):
self.__name = name
self.slot = Htp1SlotState()
self.slot.active = True
def serialise(self) -> dict:
return {
'name': self.__name,
'slots': [self.slot.as_dict()]
}
class Htp1(PersistentDevice[Htp1State]):
def __init__(self, name: str, config_path: str, cfg: dict, ws_server: WsServer, catalogue: CatalogueProvider):
super().__init__(config_path, name, ws_server)
self.__name = name
self.__catalogue = catalogue
self.__ip = cfg['ip']
self.__channels = cfg['channels']
self.__peq = {}
self.__supports_shelf = True
if not self.__channels:
raise ValueError('No channels supplied for HTP-1')
self.__client = Htp1Client(self.__ip, self)
def _load_initial_state(self) -> Htp1State:
return Htp1State(self.name)
def _merge_state(self, loaded: Htp1State, cached: dict) -> Htp1State:
if 'slots' in cached:
for slot in cached['slots']:
if 'id' in slot:
if slot['id'] == 'HTP1':
if slot['last']:
loaded.slot.last = slot['last']
return loaded
@property
def device_type(self) -> str:
return self.__class__.__name__.lower()
def update(self, params: dict) -> bool:
any_update = False
if 'slots' in params:
for slot in params['slots']:
if slot['id'] == 'HTP1':
if 'entry' in slot:
if slot['entry']:
match = self.__catalogue.find(slot['entry'])
if match:
self.load_filter('HTP1', match)
any_update = True
else:
self.clear_filter('HTP1')
any_update = True
return any_update
def __send(self, to_load: List['PEQ']):
logger.info(f"Sending {len(to_load)} filters")
while len(to_load) < 16:
|
ops = [peq.as_ops(c, use_shelf=self.__supports_shelf) for peq in to_load for c in self.__peq.keys()]
ops = [op for slot_ops in ops for op in slot_ops if op]
if ops:
self.__client.send('changemso [{"op":"replace","path":"/peq/peqsw","value":true}]')
self.__client.send(f"changemso {json.dumps(ops)}")
else:
logger.warning(f"Nothing to send")
def activate(self, slot: str) -> None:
def __do_it():
self._current_state.slot.active = True
self._hydrate_cache_broadcast(__do_it)
def load_filter(self, slot: str, entry: CatalogueEntry) -> None:
to_load = [PEQ(idx, fc=f['freq'], q=f['q'], gain=f['gain'], filter_type_name=f['type'])
for idx, f in enumerate(entry.filters)]
self._hydrate_cache_broadcast(lambda: self.__do_it(to_load, entry.formatted_title))
def __do_it(self, to_load: List['PEQ'], title: str):
try:
self.__send(to_load)
self._current_state.slot.last = title
except Exception as e:
self._current_state.slot.last = 'ERRUR'
raise e
def clear_filter(self, slot: str) -> None:
self._hydrate_cache_broadcast(lambda: self.__do_it([], 'Empty'))
def mute(self, slot: Optional[str], channel: Optional[int]) -> None:
raise NotImplementedError()
def unmute(self, slot: Optional[str], channel: Optional[int]) -> None:
raise NotImplementedError()
def set_gain(self, slot: Optional[str], channel: Optional[int], gain: float) -> None:
raise NotImplementedError()
def on_mso(self, mso: dict):
logger.info(f"Received {mso}")
version = mso['versions']['swVer']
version = version[1:] if version[0] == 'v' or version[0] == 'V' else version
try:
self.__supports_shelf = semver.parse_version_info(version) > semver.parse_version_info('1.4.0')
except:
logger.error(f"Unable to parse version {mso['versions']['swVer']}, will not send shelf filters")
self.__supports_shelf = False
if not self.__supports_shelf:
logger.error(f"Device version {mso['versions']['swVer']} too old, lacks shelf filter support")
speakers = mso['speakers']['groups']
channels = ['lf', 'rf']
for group in [s for s, v in speakers.items() if 'present' in v and v['present'] is True]:
if group[0:2] == 'lr' and len(group) > 2:
channels.append('l' + group[2:])
channels.append('r' + group[2:])
else:
channels.append(group)
peq_slots = mso['peq']['slots']
filters = {c: [] for c in channels}
unknown_channels = set()
for idx, s in enumerate(peq_slots):
for c in channels:
if c in s['channels']:
filters[c].append(PEQ(idx, s['channels'][c]))
else:
unknown_channels.add(c)
if unknown_channels:
peq_channels = peq_slots[0]['channels'].keys()
logger.error(f"Unknown channels encountered [peq channels: {peq_channels}, unknown: {unknown_channels}]")
for c in filters.keys():
if c in self.__channels:
logger.info(f"Updating PEQ channel {c} with {filters[c]}")
self.__peq[c] = filters[c]
else:
logger.info(f"Discarding filter channel {c} - {filters[c]}")
def on_msoupdate(self, msoupdate: dict):
logger.info(f"Received {msoupdate}")
class Htp1Client:
def __init__(self, ip, listener):
self.__factory = Htp1ClientFactory(listener, f"ws://{ip}/ws/controller")
self.__connector = connectWS(self.__factory)
def send(self, msg: str):
self.__factory.broadcast(msg)
class Htp1Protocol(WebSocketClientProtocol):
def onConnecting(self, transport_details):
logger.info(f"Connecting to {transport_details}")
def onConnect(self, response):
logger.info(f"Connected to {response.peer}")
self.sendMessage('getmso'.encode('utf-8'), isBinary=False)
def onOpen(self):
logger.info("Connected to HTP1")
self.factory.register(self)
def onClose(self, was_clean, code, reason):
if was_clean:
logger.info(f"Disconnected code: {code} reason: {reason}")
else:
logger.warning(f"UNCLEAN! Disconnected code: {code} reason: {reason}")
def onMessage(self, payload, is_binary):
if is_binary:
logger.warning(f"Received {len(payload)} bytes in binary payload, ignoring")
else:
msg = payload.decode('utf8')
if msg.startswith('mso '):
logger.debug(f"Processing mso {msg}")
self.factory.listener.on_mso(json.loads(msg[4:]))
elif msg.startswith('msoupdate '):
logger.debug(f"Processing msoupdate {msg}")
self.factory.listener.on_msoupdate(json.loads(msg[10:]))
else:
logger.info(f"Received unknown payload {msg}")
class Htp1ClientFactory(WebSocketClientFactory, ReconnectingClientFactory):
protocol = Htp1Protocol
maxDelay = 5
initialDelay = 0.5
def __init__(self, listener, *args, **kwargs):
super(Htp1ClientFactory, self).__init__(*args, **kwargs)
self.__clients: List[Htp1Protocol] = []
self.listener = listener
self.setProtocolOptions(version=13)
def clientConnectionFailed(self, connector, reason):
logger.warning(f"Client connection failed {reason} .. retrying ..")
super().clientConnectionFailed(connector, reason)
def clientConnectionLost(self, connector, reason):
logger.warning(f"Client connection failed {reason} .. retrying ..")
super().clientConnectionLost(connector, reason)
def register(self, client: Htp1Protocol):
if client not in self.__clients:
logger.info(f"Registered device {client.peer}")
self.__clients.append(client)
else:
logger.info(f"Ignoring duplicate device {client.peer}")
def unregister(self, client: Htp1Protocol):
if client in self.__clients:
logger.info(f"Unregistering device {client.peer}")
self.__clients.remove(client)
else:
logger.info(f"Ignoring unregistered device {client.peer}")
def broadcast(self, msg):
if self.__clients:
disconnected_clients = []
for c in self.__clients:
logger.info(f"Sending to {c.peer} - {msg}")
try:
c.sendMessage(msg.encode('utf8'))
except Disconnected as e:
logger.exception(f"Failed to send to {c.peer}, discarding")
disconnected_clients.append(c)
for c in disconnected_clients:
self.unregister(c)
else:
raise ValueError(f"No devices connected, ignoring {msg}")
class PEQ:
def __init__(self, slot, params=None, fc=None, q=None, gain=None, filter_type_name=None):
self.slot = slot
if params is not None:
self.fc = params['Fc']
self.q = params['Q']
self.gain = params['gaindB']
self.filter_type = params.get('FilterType', 0)
self.filter_type_name = 'PeakingEQ' if self.filter_type == 0 else 'LowShelf' if self.filter_type == 1 else 'HighShelf'
else:
self.fc = fc
self.q = q
self.gain = gain
self.filter_type = 0 if filter_type_name == 'PeakingEQ' else 1 if filter_type_name == 'LowShelf' else 2
self.filter_type_name = filter_type_name
def as_ops(self, channel: str, use_shelf: bool = True):
if self.filter_type == 0 or use_shelf:
prefix = f"/peq/slots/{self.slot}/channels/{channel}"
ops = [
{
'op': 'replace',
'path': f"{prefix}/Fc",
'value': self.fc
},
{
'op': 'replace',
'path': f"{prefix}/Q",
'value': self.q
},
{
'op': 'replace',
'path': f"{prefix}/gaindB",
'value': self.gain
}
]
if use_shelf:
ops.append(
{
'op': 'replace',
'path': f"{prefix}/FilterType",
'value': self.filter_type
}
)
return ops
else:
return []
def __repr__(self):
return f"{self.slot}: {self.filter_type_name} {self.fc} Hz {self.gain} dB {self.q}"
| peq = PEQ(len(to_load), fc=100, q=1, gain=0, filter_type_name='PeakingEQ')
to_load.append(peq) | conditional_block |
htp1.py | import json
import logging
from typing import Optional, List
import semver
from autobahn.exception import Disconnected
from autobahn.twisted.websocket import WebSocketClientFactory, connectWS, WebSocketClientProtocol
from twisted.internet.protocol import ReconnectingClientFactory
from ezbeq.apis.ws import WsServer
from ezbeq.catalogue import CatalogueEntry, CatalogueProvider
from ezbeq.device import DeviceState, SlotState, PersistentDevice
logger = logging.getLogger('ezbeq.htp1')
class Htp1SlotState(SlotState):
def __init__(self):
super().__init__('HTP1')
class Htp1State(DeviceState):
def __init__(self, name: str):
self.__name = name
self.slot = Htp1SlotState()
self.slot.active = True
def serialise(self) -> dict:
return {
'name': self.__name,
'slots': [self.slot.as_dict()]
}
class Htp1(PersistentDevice[Htp1State]):
def __init__(self, name: str, config_path: str, cfg: dict, ws_server: WsServer, catalogue: CatalogueProvider):
super().__init__(config_path, name, ws_server)
self.__name = name
self.__catalogue = catalogue
self.__ip = cfg['ip']
self.__channels = cfg['channels']
self.__peq = {}
self.__supports_shelf = True
if not self.__channels:
raise ValueError('No channels supplied for HTP-1')
self.__client = Htp1Client(self.__ip, self)
def _load_initial_state(self) -> Htp1State:
return Htp1State(self.name)
def _merge_state(self, loaded: Htp1State, cached: dict) -> Htp1State:
if 'slots' in cached:
for slot in cached['slots']:
if 'id' in slot:
if slot['id'] == 'HTP1':
if slot['last']:
loaded.slot.last = slot['last']
return loaded
@property
def device_type(self) -> str:
return self.__class__.__name__.lower()
def update(self, params: dict) -> bool:
any_update = False
if 'slots' in params:
for slot in params['slots']:
if slot['id'] == 'HTP1':
if 'entry' in slot:
if slot['entry']:
match = self.__catalogue.find(slot['entry'])
if match:
self.load_filter('HTP1', match)
any_update = True
else:
self.clear_filter('HTP1')
any_update = True
return any_update
def __send(self, to_load: List['PEQ']):
logger.info(f"Sending {len(to_load)} filters")
while len(to_load) < 16:
peq = PEQ(len(to_load), fc=100, q=1, gain=0, filter_type_name='PeakingEQ')
to_load.append(peq)
ops = [peq.as_ops(c, use_shelf=self.__supports_shelf) for peq in to_load for c in self.__peq.keys()]
ops = [op for slot_ops in ops for op in slot_ops if op]
if ops:
self.__client.send('changemso [{"op":"replace","path":"/peq/peqsw","value":true}]')
self.__client.send(f"changemso {json.dumps(ops)}")
else:
logger.warning(f"Nothing to send")
def activate(self, slot: str) -> None:
def __do_it():
self._current_state.slot.active = True
self._hydrate_cache_broadcast(__do_it)
def load_filter(self, slot: str, entry: CatalogueEntry) -> None:
to_load = [PEQ(idx, fc=f['freq'], q=f['q'], gain=f['gain'], filter_type_name=f['type'])
for idx, f in enumerate(entry.filters)]
self._hydrate_cache_broadcast(lambda: self.__do_it(to_load, entry.formatted_title))
def __do_it(self, to_load: List['PEQ'], title: str):
try:
self.__send(to_load)
self._current_state.slot.last = title
except Exception as e:
self._current_state.slot.last = 'ERRUR'
raise e
def clear_filter(self, slot: str) -> None:
self._hydrate_cache_broadcast(lambda: self.__do_it([], 'Empty'))
def mute(self, slot: Optional[str], channel: Optional[int]) -> None:
raise NotImplementedError()
def unmute(self, slot: Optional[str], channel: Optional[int]) -> None:
raise NotImplementedError()
def set_gain(self, slot: Optional[str], channel: Optional[int], gain: float) -> None:
raise NotImplementedError()
def on_mso(self, mso: dict):
logger.info(f"Received {mso}")
version = mso['versions']['swVer']
version = version[1:] if version[0] == 'v' or version[0] == 'V' else version
try:
self.__supports_shelf = semver.parse_version_info(version) > semver.parse_version_info('1.4.0')
except:
logger.error(f"Unable to parse version {mso['versions']['swVer']}, will not send shelf filters")
self.__supports_shelf = False
if not self.__supports_shelf:
logger.error(f"Device version {mso['versions']['swVer']} too old, lacks shelf filter support")
speakers = mso['speakers']['groups']
channels = ['lf', 'rf']
for group in [s for s, v in speakers.items() if 'present' in v and v['present'] is True]:
if group[0:2] == 'lr' and len(group) > 2:
channels.append('l' + group[2:])
channels.append('r' + group[2:])
else:
channels.append(group)
peq_slots = mso['peq']['slots']
filters = {c: [] for c in channels}
unknown_channels = set()
for idx, s in enumerate(peq_slots):
for c in channels:
if c in s['channels']:
filters[c].append(PEQ(idx, s['channels'][c]))
else:
unknown_channels.add(c)
if unknown_channels:
peq_channels = peq_slots[0]['channels'].keys()
logger.error(f"Unknown channels encountered [peq channels: {peq_channels}, unknown: {unknown_channels}]")
for c in filters.keys():
if c in self.__channels:
logger.info(f"Updating PEQ channel {c} with {filters[c]}")
self.__peq[c] = filters[c]
else:
logger.info(f"Discarding filter channel {c} - {filters[c]}")
def on_msoupdate(self, msoupdate: dict):
logger.info(f"Received {msoupdate}")
class Htp1Client:
def __init__(self, ip, listener):
self.__factory = Htp1ClientFactory(listener, f"ws://{ip}/ws/controller")
self.__connector = connectWS(self.__factory)
def send(self, msg: str):
self.__factory.broadcast(msg)
class Htp1Protocol(WebSocketClientProtocol):
def onConnecting(self, transport_details):
logger.info(f"Connecting to {transport_details}")
def onConnect(self, response):
logger.info(f"Connected to {response.peer}")
self.sendMessage('getmso'.encode('utf-8'), isBinary=False)
def onOpen(self):
logger.info("Connected to HTP1")
self.factory.register(self)
def onClose(self, was_clean, code, reason):
if was_clean:
logger.info(f"Disconnected code: {code} reason: {reason}")
else:
logger.warning(f"UNCLEAN! Disconnected code: {code} reason: {reason}")
def onMessage(self, payload, is_binary):
if is_binary:
logger.warning(f"Received {len(payload)} bytes in binary payload, ignoring")
else:
msg = payload.decode('utf8')
if msg.startswith('mso '):
logger.debug(f"Processing mso {msg}")
self.factory.listener.on_mso(json.loads(msg[4:]))
elif msg.startswith('msoupdate '):
logger.debug(f"Processing msoupdate {msg}")
self.factory.listener.on_msoupdate(json.loads(msg[10:]))
else:
logger.info(f"Received unknown payload {msg}")
class Htp1ClientFactory(WebSocketClientFactory, ReconnectingClientFactory):
protocol = Htp1Protocol
maxDelay = 5
initialDelay = 0.5
def __init__(self, listener, *args, **kwargs):
|
def clientConnectionFailed(self, connector, reason):
logger.warning(f"Client connection failed {reason} .. retrying ..")
super().clientConnectionFailed(connector, reason)
def clientConnectionLost(self, connector, reason):
logger.warning(f"Client connection failed {reason} .. retrying ..")
super().clientConnectionLost(connector, reason)
def register(self, client: Htp1Protocol):
if client not in self.__clients:
logger.info(f"Registered device {client.peer}")
self.__clients.append(client)
else:
logger.info(f"Ignoring duplicate device {client.peer}")
def unregister(self, client: Htp1Protocol):
if client in self.__clients:
logger.info(f"Unregistering device {client.peer}")
self.__clients.remove(client)
else:
logger.info(f"Ignoring unregistered device {client.peer}")
def broadcast(self, msg):
if self.__clients:
disconnected_clients = []
for c in self.__clients:
logger.info(f"Sending to {c.peer} - {msg}")
try:
c.sendMessage(msg.encode('utf8'))
except Disconnected as e:
logger.exception(f"Failed to send to {c.peer}, discarding")
disconnected_clients.append(c)
for c in disconnected_clients:
self.unregister(c)
else:
raise ValueError(f"No devices connected, ignoring {msg}")
class PEQ:
def __init__(self, slot, params=None, fc=None, q=None, gain=None, filter_type_name=None):
self.slot = slot
if params is not None:
self.fc = params['Fc']
self.q = params['Q']
self.gain = params['gaindB']
self.filter_type = params.get('FilterType', 0)
self.filter_type_name = 'PeakingEQ' if self.filter_type == 0 else 'LowShelf' if self.filter_type == 1 else 'HighShelf'
else:
self.fc = fc
self.q = q
self.gain = gain
self.filter_type = 0 if filter_type_name == 'PeakingEQ' else 1 if filter_type_name == 'LowShelf' else 2
self.filter_type_name = filter_type_name
def as_ops(self, channel: str, use_shelf: bool = True):
if self.filter_type == 0 or use_shelf:
prefix = f"/peq/slots/{self.slot}/channels/{channel}"
ops = [
{
'op': 'replace',
'path': f"{prefix}/Fc",
'value': self.fc
},
{
'op': 'replace',
'path': f"{prefix}/Q",
'value': self.q
},
{
'op': 'replace',
'path': f"{prefix}/gaindB",
'value': self.gain
}
]
if use_shelf:
ops.append(
{
'op': 'replace',
'path': f"{prefix}/FilterType",
'value': self.filter_type
}
)
return ops
else:
return []
def __repr__(self):
return f"{self.slot}: {self.filter_type_name} {self.fc} Hz {self.gain} dB {self.q}"
| super(Htp1ClientFactory, self).__init__(*args, **kwargs)
self.__clients: List[Htp1Protocol] = []
self.listener = listener
self.setProtocolOptions(version=13) | identifier_body |
htp1.py | import json
import logging
from typing import Optional, List
import semver
from autobahn.exception import Disconnected
from autobahn.twisted.websocket import WebSocketClientFactory, connectWS, WebSocketClientProtocol
from twisted.internet.protocol import ReconnectingClientFactory
from ezbeq.apis.ws import WsServer
from ezbeq.catalogue import CatalogueEntry, CatalogueProvider
from ezbeq.device import DeviceState, SlotState, PersistentDevice
logger = logging.getLogger('ezbeq.htp1')
class Htp1SlotState(SlotState):
def __init__(self):
super().__init__('HTP1')
class Htp1State(DeviceState):
def __init__(self, name: str):
self.__name = name
self.slot = Htp1SlotState()
self.slot.active = True
def serialise(self) -> dict:
return {
'name': self.__name,
'slots': [self.slot.as_dict()]
}
class Htp1(PersistentDevice[Htp1State]):
def __init__(self, name: str, config_path: str, cfg: dict, ws_server: WsServer, catalogue: CatalogueProvider):
super().__init__(config_path, name, ws_server)
self.__name = name
self.__catalogue = catalogue
self.__ip = cfg['ip']
self.__channels = cfg['channels']
self.__peq = {}
self.__supports_shelf = True
if not self.__channels:
raise ValueError('No channels supplied for HTP-1')
self.__client = Htp1Client(self.__ip, self)
def _load_initial_state(self) -> Htp1State:
return Htp1State(self.name)
def _merge_state(self, loaded: Htp1State, cached: dict) -> Htp1State:
if 'slots' in cached:
for slot in cached['slots']:
if 'id' in slot:
if slot['id'] == 'HTP1':
if slot['last']:
loaded.slot.last = slot['last']
return loaded
@property
def device_type(self) -> str:
return self.__class__.__name__.lower()
def update(self, params: dict) -> bool:
any_update = False
if 'slots' in params:
for slot in params['slots']:
if slot['id'] == 'HTP1':
if 'entry' in slot:
if slot['entry']:
match = self.__catalogue.find(slot['entry'])
if match:
self.load_filter('HTP1', match)
any_update = True
else:
self.clear_filter('HTP1')
any_update = True
return any_update
def __send(self, to_load: List['PEQ']):
logger.info(f"Sending {len(to_load)} filters")
while len(to_load) < 16:
peq = PEQ(len(to_load), fc=100, q=1, gain=0, filter_type_name='PeakingEQ')
to_load.append(peq)
ops = [peq.as_ops(c, use_shelf=self.__supports_shelf) for peq in to_load for c in self.__peq.keys()]
ops = [op for slot_ops in ops for op in slot_ops if op]
if ops:
self.__client.send('changemso [{"op":"replace","path":"/peq/peqsw","value":true}]')
self.__client.send(f"changemso {json.dumps(ops)}")
else:
logger.warning(f"Nothing to send")
def activate(self, slot: str) -> None:
def __do_it():
self._current_state.slot.active = True
self._hydrate_cache_broadcast(__do_it)
def load_filter(self, slot: str, entry: CatalogueEntry) -> None:
to_load = [PEQ(idx, fc=f['freq'], q=f['q'], gain=f['gain'], filter_type_name=f['type'])
for idx, f in enumerate(entry.filters)]
self._hydrate_cache_broadcast(lambda: self.__do_it(to_load, entry.formatted_title))
def __do_it(self, to_load: List['PEQ'], title: str):
try:
self.__send(to_load)
self._current_state.slot.last = title
except Exception as e:
self._current_state.slot.last = 'ERRUR'
raise e
def clear_filter(self, slot: str) -> None:
self._hydrate_cache_broadcast(lambda: self.__do_it([], 'Empty'))
def mute(self, slot: Optional[str], channel: Optional[int]) -> None:
raise NotImplementedError()
def unmute(self, slot: Optional[str], channel: Optional[int]) -> None:
raise NotImplementedError()
def set_gain(self, slot: Optional[str], channel: Optional[int], gain: float) -> None:
raise NotImplementedError()
def on_mso(self, mso: dict):
logger.info(f"Received {mso}")
version = mso['versions']['swVer']
version = version[1:] if version[0] == 'v' or version[0] == 'V' else version
try:
self.__supports_shelf = semver.parse_version_info(version) > semver.parse_version_info('1.4.0')
except:
logger.error(f"Unable to parse version {mso['versions']['swVer']}, will not send shelf filters")
self.__supports_shelf = False
if not self.__supports_shelf:
logger.error(f"Device version {mso['versions']['swVer']} too old, lacks shelf filter support")
speakers = mso['speakers']['groups']
channels = ['lf', 'rf']
for group in [s for s, v in speakers.items() if 'present' in v and v['present'] is True]:
if group[0:2] == 'lr' and len(group) > 2:
channels.append('l' + group[2:])
channels.append('r' + group[2:])
else:
channels.append(group)
peq_slots = mso['peq']['slots']
filters = {c: [] for c in channels}
unknown_channels = set()
for idx, s in enumerate(peq_slots):
for c in channels:
if c in s['channels']:
filters[c].append(PEQ(idx, s['channels'][c]))
else:
unknown_channels.add(c)
if unknown_channels:
peq_channels = peq_slots[0]['channels'].keys()
logger.error(f"Unknown channels encountered [peq channels: {peq_channels}, unknown: {unknown_channels}]")
for c in filters.keys():
if c in self.__channels:
logger.info(f"Updating PEQ channel {c} with {filters[c]}")
self.__peq[c] = filters[c]
else:
logger.info(f"Discarding filter channel {c} - {filters[c]}")
def on_msoupdate(self, msoupdate: dict):
logger.info(f"Received {msoupdate}")
class Htp1Client:
def __init__(self, ip, listener):
self.__factory = Htp1ClientFactory(listener, f"ws://{ip}/ws/controller")
self.__connector = connectWS(self.__factory)
def send(self, msg: str):
self.__factory.broadcast(msg)
class Htp1Protocol(WebSocketClientProtocol):
def onConnecting(self, transport_details):
logger.info(f"Connecting to {transport_details}")
def onConnect(self, response):
logger.info(f"Connected to {response.peer}")
self.sendMessage('getmso'.encode('utf-8'), isBinary=False)
def onOpen(self):
logger.info("Connected to HTP1")
self.factory.register(self)
def onClose(self, was_clean, code, reason):
if was_clean:
logger.info(f"Disconnected code: {code} reason: {reason}")
else:
logger.warning(f"UNCLEAN! Disconnected code: {code} reason: {reason}")
def onMessage(self, payload, is_binary):
if is_binary:
logger.warning(f"Received {len(payload)} bytes in binary payload, ignoring")
else:
msg = payload.decode('utf8')
if msg.startswith('mso '):
logger.debug(f"Processing mso {msg}")
self.factory.listener.on_mso(json.loads(msg[4:]))
elif msg.startswith('msoupdate '):
logger.debug(f"Processing msoupdate {msg}")
self.factory.listener.on_msoupdate(json.loads(msg[10:]))
else:
logger.info(f"Received unknown payload {msg}")
class Htp1ClientFactory(WebSocketClientFactory, ReconnectingClientFactory):
protocol = Htp1Protocol
maxDelay = 5
initialDelay = 0.5
def __init__(self, listener, *args, **kwargs):
super(Htp1ClientFactory, self).__init__(*args, **kwargs)
self.__clients: List[Htp1Protocol] = []
self.listener = listener
self.setProtocolOptions(version=13)
def clientConnectionFailed(self, connector, reason):
logger.warning(f"Client connection failed {reason} .. retrying ..")
super().clientConnectionFailed(connector, reason)
def clientConnectionLost(self, connector, reason):
logger.warning(f"Client connection failed {reason} .. retrying ..")
super().clientConnectionLost(connector, reason)
def register(self, client: Htp1Protocol):
if client not in self.__clients:
logger.info(f"Registered device {client.peer}")
self.__clients.append(client)
else:
logger.info(f"Ignoring duplicate device {client.peer}")
def | (self, client: Htp1Protocol):
if client in self.__clients:
logger.info(f"Unregistering device {client.peer}")
self.__clients.remove(client)
else:
logger.info(f"Ignoring unregistered device {client.peer}")
def broadcast(self, msg):
if self.__clients:
disconnected_clients = []
for c in self.__clients:
logger.info(f"Sending to {c.peer} - {msg}")
try:
c.sendMessage(msg.encode('utf8'))
except Disconnected as e:
logger.exception(f"Failed to send to {c.peer}, discarding")
disconnected_clients.append(c)
for c in disconnected_clients:
self.unregister(c)
else:
raise ValueError(f"No devices connected, ignoring {msg}")
class PEQ:
def __init__(self, slot, params=None, fc=None, q=None, gain=None, filter_type_name=None):
self.slot = slot
if params is not None:
self.fc = params['Fc']
self.q = params['Q']
self.gain = params['gaindB']
self.filter_type = params.get('FilterType', 0)
self.filter_type_name = 'PeakingEQ' if self.filter_type == 0 else 'LowShelf' if self.filter_type == 1 else 'HighShelf'
else:
self.fc = fc
self.q = q
self.gain = gain
self.filter_type = 0 if filter_type_name == 'PeakingEQ' else 1 if filter_type_name == 'LowShelf' else 2
self.filter_type_name = filter_type_name
def as_ops(self, channel: str, use_shelf: bool = True):
if self.filter_type == 0 or use_shelf:
prefix = f"/peq/slots/{self.slot}/channels/{channel}"
ops = [
{
'op': 'replace',
'path': f"{prefix}/Fc",
'value': self.fc
},
{
'op': 'replace',
'path': f"{prefix}/Q",
'value': self.q
},
{
'op': 'replace',
'path': f"{prefix}/gaindB",
'value': self.gain
}
]
if use_shelf:
ops.append(
{
'op': 'replace',
'path': f"{prefix}/FilterType",
'value': self.filter_type
}
)
return ops
else:
return []
def __repr__(self):
return f"{self.slot}: {self.filter_type_name} {self.fc} Hz {self.gain} dB {self.q}"
| unregister | identifier_name |
# # INTRODUCTION
# This project covers the design of a junk-yard magnet, which is a type of electromagnet. Throughout this project, electromagnet operating principles and design techniques are covered for different load values and electromagnet types. The electromagnet should be capable of lifting the standard-sized metal sheets and plates of Parker Steel Company.
# First, electromagnet operation, its types and some important losses are explained. Then the design code, written in Python in an IPython notebook with Anaconda, is explained in detail.
#
# ## Electromagnet
# An electromagnet is a type of magnet in which the magnetic field is produced by an electric current. The magnetic field disappears when the current is turned off. The main advantage of an electromagnet over a permanent magnet is that the magnetic field can be rapidly manipulated over a wide range by controlling the amount of electric current. However, a continuous supply of electrical energy is required to maintain the field.
#
# 
#
# Electromagnets are widely used as components of other electrical devices, such as motors, generators, relays, loudspeakers, hard disks, MRI machines, scientific instruments, and magnetic separation equipment. Electromagnets are also employed in industry for picking up and moving heavy iron objects such as scrap iron and steel. A junk-yard electromagnet is shown above.
#
# There are many types of electromagnets for different industrial purposes. The common types are shown below (in the Dropbox link).
# 
#
# This electromagnet design is applicable to any type of electromagnet, especially the U-shaped one. It has re-adjustable functions and variable parameters, and it includes informational steps for user-friendly development.
#
# In[1]:
print "Welcome to the Electromagnet Design Code"
# In[2]:
print "B = Magnetic Field Density"
print "A = Cross Sectional Area"
print "uo = Permeability of Free Space"
print "L = Pitch or Length of Former"
from math import pi, exp, sqrt
uo = float(4 * pi * 10**-7)
print uo
# ## Calculation of the load
#
# The main goal of this design is to operate with the standard-sized metal sheets and plates of Parker Steel Company.
# For this purpose, there are five material types plus an optional step provided for unlisted materials. In order to
# calculate the weight of the load, the volume and corresponding density of the load are required. The length, width and the
# thickness of the load are requested and the corresponding density for the selected material is written automatically. The densities are obtained from http://www.psyclops.com/tools/technotes/materials/density.html.
# In[3]:
# CALCULATION OF LOAD FORCE in N
print "Enter the diameters of the load"
print " For material type: \n For Stainless Steel Enter 1 \n For Carbon Steel Enter 2 \n For Aluminium Enter 3 \n For Titanium Enter 4 \n For Brass Enter 5"
print " For unlisted material Enter 10" | tt = float(raw_input("Enter the thickness of the load in mm :"))
llcm = ll * float(2.54)
wwcm = ww * float(2.54)
ttcm = tt / float(10)
VV = float(llcm * wwcm * ttcm)
density = float(7.81)
mgg = VV * density
mg = mgg / float(1000)
elif material == 2:
ll = float(raw_input("Enter the length of the load in inches :"))
ww = float(raw_input("Enter the width of the load in inches :"))
tt = float(raw_input("Enter the thickness of the load in mm :"))
llcm = ll * float(2.54)
wwcm = ww * float(2.54)
ttcm = tt / float(10)
VV = float(llcm * wwcm * ttcm)
density = float(7.83)
mgg = VV * density
mg = mgg / float(1000)
elif material == 3:
ll = float(raw_input("Enter the length of the load in inches :"))
ww = float(raw_input("Enter the width of the load in inches :"))
tt = float(raw_input("Enter the thickness of the load in mm :"))
llcm = ll * float(2.54)
wwcm = ww * float(2.54)
ttcm = tt / float(10)
VV = float(llcm * wwcm * ttcm)
density = float(2.7)
mgg = VV * density
mg = mgg / float(1000)
elif material == 4:
ll = float(raw_input("Enter the length of the load in inches :"))
ww = float(raw_input("Enter the width of the load in inches :"))
tt = float(raw_input("Enter the thickness of the load in mm :"))
llcm = ll * float(2.54)
wwcm = ww * float(2.54)
ttcm = tt / float(10)
VV = float(llcm * wwcm * ttcm)
density = float(4.51)
mgg = VV * density
mg = mgg / float(1000)
elif material == 5:
ll = float(raw_input("Enter the length of the load in inches :"))
ww = float(raw_input("Enter the width of the load in inches :"))
tt = float(raw_input("Enter the thickness of the load in mm :"))
llcm = ll * float(2.54)
wwcm = ww * float(2.54)
ttcm = tt / float(10)
VV = float(llcm * wwcm * ttcm)
density = float(8.70)
mgg = VV * density
mg = mgg / float(1000)
elif material == 10:
ll = float(raw_input("Enter the length of the load in inches :"))
ww = float(raw_input("Enter the width of the load in inches :"))
tt = float(raw_input("Enter the thickness of the load in mm :"))
llcm = ll * float(2.54)
wwcm = ww * float(2.54)
ttcm = tt / float(10)
VV = float(llcm * wwcm * ttcm)
density = float(raw_input("Enter the density of the material in g/cm^3 :"))
mgg = VV * density
mg = mgg / float(1000)
else:
print " Wrong Entered Number, Please Enter again"
print "The weight of the load is %f in kg" %mg
print "Fmg = The gravitational force produced by load "
Fmg = float(9.8 * mg)
print "Fmg is %f in N" %(Fmg)
# In this step, the calculation of the magnetic force is covered. The magnetic flux density is limited to around 1.5 T due to saturation. The cross-sectional area of the core is calculated based on the weight of the load, and an optional re-entry step allows the area to be adjusted. The design is based on a circular cross-section because of the better stacking factor and the lower core losses.
#
# The force exerted by an electromagnet on a section of core material is:
# $$F = \frac{B^2 A}{2\mu_0}$$
#
# In[4]:
B = float(raw_input ("Enter an appropriate B value in T : ")) # Nominal 0.6 - 1.5 T
A = Fmg * 2* uo / (B * B)
print "Cross Sectional Area (min) in m^2 : %f" %A
A = float(raw_input ("Enter Cross Sectional Area (A) in m^2 : ")) # TO GET BETTER APPROXIMATION OF AREA
dia_core = sqrt( A / pi )
dia_core = dia_core * float(1000)
print "For circular design ---> diameter of core : %f mm" %dia_core # CIRCULAR DESIGN
F = B * B * A / (2 * uo)
print "Force is %f in N and Fmg is %f in N" %(F,Fmg)
while True:
if F < Fmg:
print ("Not sufficient MMF, Enter new design values")
A = float(raw_input("Enter Cross Sectional Area (A) in m^2 : "))
F = B * B * A / (2 * uo)
else:
break
# Throughout this part,
# the lifting distance and a rough pre-design core length are requested (an insufficient core length produces an error at the end). From the flux path through both the core and the air gap, the required MMF is calculated. After the number of turns is entered for the given MMF value, the current in the wire is obtained.
# ### Selection of the material
# Solid steel is generally best, in terms of economics, for the yoke, or frame, of static field devices. The mass of material required to efficiently carry flux between the far poles of an assembly make anything else impossible to justify. The reluctance of this part of the magnetic circuit is relatively low, even without annealing, compared to that of the working gap, so the losses associated with driving flux through the material are a small portion of overall losses
#
# ### Losses of Electromagnet by choosing current
# The only power consumed in a DC electromagnet is due to the resistance of the windings, and is dissipated as heat. Since the MMF is proportional to the product NI, the number of turns in the windings N and the current I can be chosen to minimize heat losses, as long as their product is constant. Since the power dissipation,
#
# $$P = I^2R$$
#
# increases with the square of the current but only approximately linearly with the number of windings, the power lost in the windings can be minimized by reducing I and increasing the number of turns N proportionally, or by using thicker wire to reduce the resistance. However, the limit to increasing N or lowering the resistance is that the windings take up more room between the magnet’s core pieces. If the area available for the windings is filled up, more turns require a smaller wire diameter, which has higher resistance and cancels the advantage of using more turns.
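#
# A minimal numeric sketch of this trade-off (the required MMF and the per-turn
# resistance below are illustrative assumptions, not design values):
mmf_required = 2000.0 # A-turns, assumed
r_per_turn = 0.01 # ohm per turn, assumed fixed wire gauge
for N_turns in (100, 200, 400):
    I_coil = mmf_required / N_turns # same MMF, less current as turns increase
    P_loss = I_coil * I_coil * (r_per_turn * N_turns) # I^2 R, with R growing linearly in N
    print("N = %d -> I = %.1f A, copper loss = %.0f W" % (N_turns, I_coil, P_loss))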
#
# ## Electromagnet Design Procedure (for U shaped)
# The U-shaped electromagnet is shown below. It has two air gaps and two winding areas, and the core has a circular cross-section.
#
# 
#
# Applying Ampere's Law to the circuit,
#
# $$\sum i = 2Ni = \oint_{S} H \, dl = H_{s}l_{s} + 2H_{g}l_{g}$$
#
# where Hs is the value of H in the core path, which is assumed to be constant and ls is the total length of the core path. At the boundary the magnetic flux densities are equal.
# $$ B_{s} = B_{g}$$
#
# Actually,
#
# $$H_{s}l_{s} << 2H_{g}l_{g}$$
#
# However, nothing is neglected, in order to get a more accurate result. Therefore,
#
# $$F = 2Ni = \phi(R_{s} + 2R_{g})$$
#
# where:
# $$R_{s} = \frac{l_{s}}{\mu \mu_0 A_{s}}$$
# $$R_{g} = \frac{l_{g}}{\mu_0 A_{g}}$$
#
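#
# A minimal sketch of the series magnetic circuit above (SI units; it uses the uo
# defined earlier, and mu_r is the relative permeability of the core material):
def gap_flux(N, i, l_s, l_g, mu_r, A_core):
    R_s = l_s / (mu_r * uo * A_core) # core reluctance
    R_g = l_g / (uo * A_core) # reluctance of one air gap
    return 2.0 * N * i / (R_s + 2.0 * R_g) # phi = MMF / total reluctance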
# In[5]:
# CALCULATION OF CURRENT
L_gap = float(raw_input("Enter the lifting distance in mm :")) # FOR CONTACTED LIFTING => L_gap = 0.1 mm
L_core = float(raw_input("Enter the length of the core in cm :")) # LATER, it will be CHECKED
L_core = L_core * 10
print "Length of the core is %f in mm" %L_core
L_path = 2 * L_core + 2 * L_gap
print "Path of Flux is %f in mm" %L_path
L_path = L_path / float(1000) #ADJUSTED THE LENGTHS in m
L_core = L_core / float(1000)
L_gap = L_gap / float(1000)
print " L_core = %f / L_gap = %f / L_path = %f in m" %(L_core, L_gap, L_path)
print " Permeability : For Iron (99.8% pure) is 6.3 * 10^-3 H/m"
print " Permeability : For Electrical steel is 5 * 10^-3 H/m"
print " Permeability : For Platinum is 1.256970 * 10^-6 H/m"
print " Permeability : For Aluminum is 1.256665 * 10^-6 H/m"
u = float(raw_input("Enter the selected material's permeability in H/m : "))
MMF = B * (L_core / u + 2 * L_gap / uo) # H*l along the path: B/u in the core, B/uo in each air gap; change the factor 2 to match the number of air gaps in the design
print "MMF is %f (A/m)" %MMF
N = raw_input("Enter the total number of turns : ")
#offset = float(raw_input("Determine an offset for MMF : ")) #OPTINAL FOR SAFETY
#I = (MMF + offset) / float(N)
I = MMF / float(N)
print " Required Current is %f (A)" %I
# According to the calculated current value, an appropriate wire is selected in this step. The list is obtained from http://en.wikipedia.org/wiki/American_wire_gauge and http://www.powerstream.com/Wire_Size.htm
# A suitable wire selection improves performance by reducing the losses in the wire and around the core.
#
# ### Attention !!
# The excel table of the wire is not embedded in the code. Therefore, you have to change the line, which indicates the location of the excel file.
# The excel file can be downloaded from https://www.dropbox.com/s/e46f0idnv303z25/Copper_wires.xlsx?dl=0.
#
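#
# If the spreadsheet is not available, a small hard-coded excerpt of the AWG table
# can stand in for it. The rows below (gauge, diameter in mm, resistance in ohm/km,
# max current in A for power transmission) are a sketch based on standard AWG data
# and cover only a few gauges:
awg_excerpt = [
    (8, 3.264, 2.061, 24.0),
    (10, 2.588, 3.277, 15.0),
    (12, 2.053, 5.211, 9.3),
    (14, 1.628, 8.286, 5.9),
]
def pick_wire(current_a, table=awg_excerpt):
    # rows run from thick to thin; take the thinnest wire that still carries the current
    fitting = [row for row in table if row[3] >= current_a]
    return fitting[-1] if fitting else table[0]
# example usage: gauge, dia_mm, ohm_per_km, max_a = pick_wire(I)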
# In[6]:
# SELECTION OF WIRE TYPE FROM COPPER WIRES
import xlrd
file_location = "C:\Users\ASUS\Desktop\Copper_wires.xlsx"
workbook = xlrd.open_workbook(file_location, "rb")
sheet = workbook.sheet_by_index(0)
#print sheet.nrows
#print sheet.ncols
for row in range(sheet.nrows):
if I > sheet.cell_value(row,4):
print "AWG gauge for the wire is %d " %sheet.cell_value(row-1,0)
diameter_wire = sheet.cell_value(row-1,1)
print "Diameter of selected wire is %f mm" %sheet.cell_value(row-1,1)
resistance_wire = sheet.cell_value(row-1,2)/ float(1000)
print "Resistances per m of selected wire is %f ohm/m" %resistance_wire
break
# In the last part, all calculations are carried out considering both a single layer of wire turns and multiple layers. Multiple
# layers are needed when the core length is not enough to contain all the wire turns.
# Then the total resistance of the wire, the flux, the weight of the wire and the required voltage are calculated, respectively.
#
# Total length of wire is calculated as:
# $$ T = L_{former} * stacking factor/d$$
# where:
# - T is the maximum number of turns in the first layer
# - L_former is the length of core which the wire is wrapped. It should be smaller than length of the core.
# - d = diamater of the selected wire.
# $$ n = N / T$$
# where:
# - n is the total number of layers
# - N is the total number of turns
# $$ \text{first layer length} = \pi D T$$
# where:
# - D is the diameter of the core
# $$Sum_{length\,of\,wire} = (n/2) (2 \pi D + (n-1)d)$$
# $$Total\_Sum = Sum_{length\,of\,wire} \cdot T$$
#
# The total resistance of the wire is calculated by using the AWG table.
# $$ R_{total} = Total\_Sum \cdot R_{\Omega/m}$$
#
# The inductance of the coil is obtained as:
# $$ L = N \phi / I$$
# where:
# - L is the total inductance of the coil
# - I is the current on the wire
#
# The total weight of the wire is calculated by using the diameter and density of the selected material. Material is mostly selected copper, which has the density 8.96 g/cm^3.
# $$ Volume_{wire} = \pi * (d/2)^2 * Total_sum$$
# $$ Weight_{wire} = Volume_{wire} * density_{material}$$
#
# The required voltage is obtained as:
# $$ V_{required} = I * R_{total}$$
# In[7]:
stacking = float(raw_input ("Enter the stacking factor : %f")) #roughly 0.9
# dia_meter is obtained
# dia_wire is obtained
# turn number is obtained
print "The entered length of the core is %f m" %L_core
L_former = float(raw_input("Enter the Length of Former in cm :"))
L_former = float (L_former * 10)
L_coremm = L_core * 1000 # Turn Lcore from m to mm
if L_former > L_coremm:
print " L_former is longer than L_core, Please re-enter the L_core values"
print "L_core value is %f" %L_core
L_former = float(raw_input("Enter the Length of Former in cm :"))
L_former = float (L_former * 10)
first = int (L_former * stacking / diameter_wire)
print " The maximum number of winding in the first layer is %f" %first
n = float(N) / first
n = int(n) + 1
print "the total number of layers required to give the total number of turns : %f" %n
Sum_length_wire = (float(n) / 2) * (2 * pi * dia_core + (n - 1) * diameter_wire) # mean turn circumference per layer, with D the core diameter in mm (per the formula above)
Sum_length_wire = Sum_length_wire / float(10) # in cm
if N < first :
Sum_length_wire = Sum_length_wire * float (N)
else:
Sum_length_wire = Sum_length_wire * float (first)
print "Total Length of wire is %f (cm)" %Sum_length_wire
Sum_length_wire = Sum_length_wire / float(100) #in m
R_total = resistance_wire * Sum_length_wire
print " The resistance of the wire is %f ohm " %R_total # TOO LOW VALUE OF RESISTANCES !!!!!!!!!
flux = float( B * A)
Inductance = float(N) * flux / I
print " The inductance of the coil is %f H" %Inductance
dia_wiree = diameter_wire / float(10) # wire diameter in cm
radius_wire = dia_wiree / float(2) # wire radius in cm
density_copper = 8.96 # g/cm^3
weight = pi * radius_wire * radius_wire * (Sum_length_wire * 100) * density_copper # wire length converted from m back to cm
print " The weight of the wire is %f g" %weight
V_required = I * R_total
print " Required voltage for the electromagnet is %f V" %V_required
# # U-shaped Electromagnet Design Procedure !!More Accurate!!
# This part is an optional section for users designing a U-shaped electromagnet far more accurately. It requires more detailed entries describing the core.
# The U-shaped electromagnet is shown below.
# 
#
# The user applies the results and parameters obtained above for a more accurate check.
# All reluctances are included in the design below.
# The magnetic circuit of the U-shaped electromagnet is shown below.
#
# 
#
# Due to the U-shape, there are two air gaps. The MMF across each air gap, the air gap reluctance, and the total force of attraction are expressed, respectively:
# $$M_{airgap} = NI/2$$
# $$R_{airgap} = g / \mu_0 A_{pole}$$
#
# $$F_{magnet} = \frac{\mu_0 (NI)^2 A_{pole}}{4g^2}$$
#
# where,
# - M is the magneto-motive force across the each air gap
# - g is the length of the airgap
# - R_airgap is the reluctance of the each airgap
# The F_magnet equation shows that the magnetic force is a nonlinear function of both the current and the air gap length. Also, for a constant current the force decreases with increasing air gap, hence it has a negative stiffness coefficient. There is therefore no point of equilibrium between two magnetised bodies, and so the open-loop force-air gap characteristic of an electromagnet is unstable.
# The assumption of uniform air gap flux distribution is valid only for air gaps that are much smaller than the pole width. At larger air gaps, flux fringing increases the effective air gap flux area and hence decreases the air gap reluctance.
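#
# A quick numeric illustration of the negative stiffness (the coil and pole values
# below are illustrative assumptions, not design outputs): with F ~ uo (NI)^2 A / (4 g^2),
# doubling the air gap quarters the force.
for g_mm in (1.0, 2.0, 4.0):
    g_m = g_mm / 1000.0
    F_g = uo * (500.0 * 4.0)**2 * 0.0025 / (4.0 * g_m * g_m) # N = 500, I = 4 A, A = 25 cm^2
    print("g = %.0f mm -> F = %.0f N" % (g_mm, F_g))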
#
# The lifting force is expressed directly below. For the loads in the Parker Steel Company catalogue, the lateral offset (y) is zero.
# $$F_{lift} = F_{magnet}\left(1+\frac{2g}{\pi p}\left(1-\frac{y}{g}\tan^{-1}\frac{y}{g}\right)\right)$$
# where,
# - p is the pole width
# - y is the lateral offset between the electromagnet and track poles. It is set to zero.
#
# By considering zero lateral offset,
# $$R_{airgap} = g / (\mu_0 l (p + 2g/\pi))$$
#
# Magnet and track path reluctance is calculated by summing the exact dimension entries.
# $$R_{magnet} = (2h + w + 2p) / \mu_M \mu_0 lp$$
# $$R_{track} = (t + 4p)/ \mu_T \mu_0 lp$$
# where,
# - w is the width between the electromagnet poles
# - p is the pole width
# - h is the pole height above the yoke
# - l is the length of the electromagnet
# - t is the width between the track poles
# - μM is the relavite permeability of electromagnet
# - μT is the relavite permeability of track
#
# Leakage flux is calculated while neglecting fringing flux, because the air gap is too small for fringing flux to matter.
# $$R_{leakage} = \frac{w}{\mu_0 \, l \, (h/2)}$$
#
# All the parameters in the U-shaped magnetic circuit shown above are now available. After analysing the circuit,
# $$ M_{airgap} = \frac{R_{airgap}R_{leakage}}{(R_{track}+2R_{airgap})(R_{leakage}+R_{magnet})+R_{magnet}R_{leakage}} \, M_{coils}$$
# where,
# $$M_{coils} = NI$$
#
# By substituting M_airgap into the function;
# $$F_{magnet} = \frac{M_{airgap}^2}{g \, R_{airgap}}$$
#
# $$F_{magnet} = \frac{R_{airgap}R_{leakage}^2}{g\left((R_{track}+2R_{airgap})(R_{leakage}+R_{magnet})+R_{magnet}R_{leakage}\right)^2} \, M_{coils}^2$$
#
# F_lift is calculated by substituting F_magnet into F_lift function.
#
# In[ ]:
print "Welcome to the Electromagnet Design Code"
print " U-SHAPE Electromagnet is covered in detailed "
# In[ ]:
while True:
l = float(raw_input("Enter the length of Electromagnet in mm "))
w = float(raw_input("Enter the Width between the electromagnet pole pieces in mm "))
h = float(raw_input("Enter the Height of the pole pieces above the yoke in mm "))
p = float(raw_input("Enter the Width of the pole pieces in mm "))
if l >= (2 * h + w + 2 * p):
print "The length of Electromagnet ERROR"
print "l should be equal to (2 * h + w + 2 * p)"
l = float(raw_input("Enter the length of Electromagnet in mm "))
w = float(raw_input("Enter the Width between the electromagnet pole pieces in mm "))
h = float(raw_input("Enter the Height of the pole pieces above the yoke in mm "))
p = float(raw_input("Enter the Width of the pole pieces in mm "))
else:
break
break
g = float(raw_input("Enter the length of Air gap in mm ")) # 0.1 mm for Contacted Lifting
t = 0 # the width between the track poles
from math import pi, exp, sqrt, atan
uo = float(4 * pi * 10**-7)
print " Permeability : For Iron (99.8% pure) is 6.3 * 10^-3 H/m"
print " Permeability : For Electrical steel is 5 * 10^-3 H/m"
print " Permeability : For Platinum is 1.256970 * 10^-6 H/m"
print " Permeability : For Aluminum is 1.256665 * 10^-6 H/m"
u = float(raw_input("Enter the selected material's permeability in H/m : "))
# In[ ]:
# RELUCTANCES
R_air = g / (uo * l * (p + 2 * g / pi)) #Air gap Reluctance
R_magnet = (2 * h + w + 2 * p) / (u * uo * l * p) # Core path Reluctance
R_leakage = w / (uo * l * h / 2) # Leakage Flux Reluctance *By neglecting fringing flux due to plate lifting*
R_track = (t + 4* p) / (u * uo * l * p)
print " R_air = %f" %R_air
print "R_magnet = %f" %R_magnet
print "R_leakage = %f" %R_leakage
print "R_track = %f" %R_track
# In[ ]:
N = raw_input("Enter the number of Turns : ")
I = raw_input("Enter the coil current in A : ")
M_coil = float(N) * float(I)
M_air = ((R_air * R_leakage) / ((R_track + 2 * R_air) * (R_leakage + R_magnet) + R_magnet * R_leakage)) * M_coil
print "M_air is %f" %M_air
F_magnet = M_air * M_air / (g * R_air)
y = 0 # Lateral offset
F_lift = F_magnet * (1 - ((y / g) * atan(y / g)) / (1 + pi * p / (2 * g) ))
print " Lifting Force is %f in N" %F_lift
# CALCULATION OF LOAD FORCE in N
print "Fmg = The gravitational force produced by load "
mg = float(raw_input("Enter the weight of the load in kg : "))
Fmg = float(9.8 * mg)
print "Fmg is %f in N" %(Fmg)
if F_lift < Fmg:
print "ERROR"
else:
print "SUCCESSFUL DESIGN :)"
# #ADDITIONAL PART
# ## Determining the magnetic flux density at a distance X from the magnet at the center line, B(X)
# For a cylindrical magnet, when the air gap is significantly large, this equation for B(x) is helpful to get better results.
# The cylindrical shaped electromagnet and the dimensions are shown below,
# 
#
# The equation is;
# $$B(x) = \frac{B_r}{2}\left(\frac{L + x}{\sqrt{R^2+(L+x)^2}} - \frac{x}{\sqrt{R^2+x^2}}\right)$$
# where,
# - Br is the residual flux density of the magnet
# - x is the distance from the surface of the magnet
# - L is the length of the magnet
# - R is the radius of the magnet
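#
# A minimal sketch of this axial-field formula (assuming SI units: Br in T, and
# x, L and R in m; the numbers in the example call are illustrative assumptions):
def b_axial(x, Br, L, R):
    # flux density on the axis at distance x from the pole face of a cylindrical magnet
    return (Br / 2.0) * ((L + x) / sqrt(R**2 + (L + x)**2) - x / sqrt(R**2 + x**2))
# example: field 5 mm from a magnet with Br = 1.2 T, L = 20 mm, R = 10 mm
print("B(5 mm) = %f T" % b_axial(0.005, 1.2, 0.02, 0.01))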
# | material = float(raw_input("The load material: "))
if material == 1:
ll = float(raw_input("Enter the length of the load in inches :"))
ww = float(raw_input("Enter the width of the load in inches :")) | random_line_split |
Final of the Electromagnet Project.py |
# coding: utf-8
# # INTRODUCTION
# This project is about designing a junk-yard magnet, which is a type of electromagnet. Throughout this project, electromagnet operating principles and design techniques are covered for different load values and electromagnet types. The electromagnet should be capable of lifting the standard sized metal sheets and plates of Parker Steel Company.
# Firstly, electromagnet operation, its types and some important losses are explained. Then the design code, which is written in the Python language using an IPython notebook with Anaconda, is explained in detail.
#
# ## Electromagnet
# An electromagnet is a type of magnet in which the magnetic field is produced by an electric current. The magnetic field disappears when the current is turned off. The main advantage of an electromagnet over a permanent magnet is that the magnetic field can be rapidly manipulated over a wide range by controlling the amount of electric current. However, a continuous supply of electrical energy is required to maintain the field.
#
# 
#
# Electromagnets are widely used as components of other electrical devices, such as motors, generators, relays, loudspeakers, hard disks, MRI machines, scientific instruments, and magnetic separation equipment. Electromagnets are also employed in industry for picking up and moving heavy iron objects such as scrap iron and steel. A junk-yard electromagnet is shown above.
#
# There are many types of electromagnets for different industrial purposes. The common types of electromagnet are shown below (in the Dropbox link).
# 
#
# My electromagnet design is applicable to any type of electromagnet, especially the U-shaped one. It has re-adjustable functions and variable areas, and there are informational steps for user-friendly development.
#
# In[1]:
print "Welcome to the Electromagnet Design Code"
# In[2]:
print "B = Magnetic Field Density"
print "A = Cross Sectional Area"
print "uo = Permeability of Free Space"
print "L = Pitch or Length of Former"
from math import pi, exp, sqrt
uo = float(4 * pi * 10**-7)
print uo
# ## Calculation of the load
#
# The main goal of this design is to operate with the standard sized metal sheets and plates of Parker Steel Company.
# For this purpose, there are five types of material and an optional step for unlisted materials. In order to
# calculate the weight of the load, the volume and the corresponding density of the load are required. The length, width and
# thickness of the load are requested, and the corresponding density for the selected material is filled in automatically. The densities are obtained from http://www.psyclops.com/tools/technotes/materials/density.html.
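# The five material branches below differ only in the density constant; the same
# computation can be expressed once as a helper (a sketch, not part of the
# original code -- the function name and the sample call are assumptions):
# In[ ]:
def load_weight_kg(length_in, width_in, thickness_mm, density_g_cm3):
    # inches -> cm and mm -> cm, then volume (cm^3) * density (g/cm^3) -> g -> kg
    l_cm = length_in * 2.54
    w_cm = width_in * 2.54
    t_cm = thickness_mm / 10.0
    return l_cm * w_cm * t_cm * density_g_cm3 / 1000.0
# e.g. a 48 in x 24 in x 3 mm stainless steel sheet (7.81 g/cm^3):
print "%f kg" % load_weight_kg(48, 24, 3, 7.81)   # about 17.4 kg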
# In[3]:
# CALCULATION OF LOAD FORCE in N
print "Enter the diameters of the load"
print " For material type: \n For Stainless Steel Enter 1 \n For Carbon Steel Enter 2 \n For Aluminium Enter 3 \n For Titanium Enter 4 \n For Brass Enter 5"
print " For unlisted material Enter 10"
material = float(raw_input("The load material: "))
if material == 1:
ll = float(raw_input("Enter the length of the load in inches :"))
ww = float(raw_input("Enter the width of the load in inches :"))
tt = float(raw_input("Enter the thickness of the load in mm :"))
llcm = ll * float(2.54)
wwcm = ww * float(2.54)
ttcm = tt / float(10)
VV = float(llcm * wwcm * ttcm)
density = float(7.81)
mgg = VV * density
mg = mgg / float(1000)
elif material == 2:
ll = float(raw_input("Enter the length of the load in inches :"))
ww = float(raw_input("Enter the width of the load in inches :"))
tt = float(raw_input("Enter the thickness of the load in mm :"))
llcm = ll * float(2.54)
wwcm = ww * float(2.54)
ttcm = tt / float(10)
VV = float(llcm * wwcm * ttcm)
density = float(7.83)
mgg = VV * density
mg = mgg / float(1000)
elif material == 3:
ll = float(raw_input("Enter the length of the load in inches :"))
ww = float(raw_input("Enter the width of the load in inches :"))
tt = float(raw_input("Enter the thickness of the load in mm :"))
llcm = ll * float(2.54)
wwcm = ww * float(2.54)
ttcm = tt / float(10)
VV = float(llcm * wwcm * ttcm)
density = float(2.7)
mgg = VV * density
mg = mgg / float(1000)
elif material == 4:
ll = float(raw_input("Enter the length of the load in inches :"))
ww = float(raw_input("Enter the width of the load in inches :"))
tt = float(raw_input("Enter the thickness of the load in mm :"))
llcm = ll * float(2.54)
wwcm = ww * float(2.54)
ttcm = tt / float(10)
VV = float(llcm * wwcm * ttcm)
density = float(4.51)
mgg = VV * density
mg = mgg / float(1000)
elif material == 5:
ll = float(raw_input("Enter the length of the load in inches :"))
ww = float(raw_input("Enter the width of the load in inches :"))
tt = float(raw_input("Enter the thickness of the load in mm :"))
llcm = ll * float(2.54)
wwcm = ww * float(2.54)
ttcm = tt / float(10)
VV = float(llcm * wwcm * ttcm)
density = float(8.70)
mgg = VV * density
mg = mgg / float(1000)
elif material == 10:
ll = float(raw_input("Enter the length of the load in inches :"))
ww = float(raw_input("Enter the width of the load in inches :"))
tt = float(raw_input("Enter the thickness of the load in mm :"))
llcm = ll * float(2.54)
wwcm = ww * float(2.54)
ttcm = tt / float(10)
VV = float(llcm * wwcm * ttcm)
density = float(raw_input("Enter the density of the material in g/cm^3 :"))
mgg = VV * density
mg = mgg / float(1000)
else:
print " Wrong Entered Number, Please Enter again"
print "The weight of the load is %f in kg" %mg
print "Fmg = The gravitational force produced by load "
Fmg = float(9.8 * mg)
print "Fmg is %f in N" %(Fmg)
# In this step, the calculation of the magnetic force is covered. The magnetic flux density is limited to about 1.5 T due to saturation. The cross-sectional area of the core is calculated from the weight of the load, and an optional re-entry step allows adjusting the area. The design uses a circular cross-section because of the stacking factor improvement and the lower losses of the core.
#
# The force exerted by an electromagnet on a section of core material is:
# $$F = B^2A/(2\mu_0)$$
#
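# As a quick illustrative check (the load and B are assumed numbers, not values
# produced by the code): for a 100 kg load, Fmg = 9.8 * 100 = 980 N, and with
# B = 1.2 T the minimum area is
# $$A = 2\mu_0 F/B^2 = 2 \cdot 4\pi \times 10^{-7} \cdot 980 / 1.2^2 \approx 1.71 \times 10^{-3}\ m^2$$
# which corresponds to a circular core diameter of about 46.7 mm.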
# In[4]:
B = float(raw_input ("Enter an appropriate B value in T : ")) # Nominal 0.6 - 1.5 T
A = Fmg * 2* uo / (B * B)
print "Cross Sectional Area (min) in m^2 : %f" %A
A = float(raw_input ("Enter Cross Sectional Area (A) in m^2 : ")) # TO GET BETTER APPROXIMATION OF AREA
dia_core = 2 * sqrt(A / pi) # diameter = 2 * sqrt(A/pi); the original line computed only the radius
dia_core = dia_core * float(1000)
print "For circular design ---> diameter of core : %f mm" %dia_core # CIRCULAR DESIGN
F = B * B * A / (2 * uo)
print "Force is %f in N and Fmg is %f in N" %(F,Fmg)
while True:
if F < Fmg:
print ("Not sufficient MMF, Enter new design values")
A = float(raw_input("Enter Cross Sectional Area (A) in m^2 : "))
F = B * B * A / (2 * uo)
else:
break
# Throughout this part,
# Firstly, the lifting distance and a rough pre-design core length are requested. For an insufficient core length, the code gives an error at the end. The MMF is calculated from the flux path in both the core and the air gap. After entering the number of turns for the given MMF value, the current in the wire is obtained.
# ### Selection of the material
# Solid steel is generally best, in terms of economics, for the yoke, or frame, of static field devices. The mass of material required to efficiently carry flux between the far poles of an assembly makes anything else impossible to justify. The reluctance of this part of the magnetic circuit is relatively low, even without annealing, compared to that of the working gap, so the losses associated with driving flux through the material are a small portion of the overall losses.
#
# ### Losses of Electromagnet by choosing current
# The only power consumed in a DC electromagnet is due to the resistance of the windings, and is dissipated as heat. Since the MMF is proportional to the product NI, the number of turns in the windings N and the current I can be chosen to minimize heat losses, as long as their product is constant. Since the power dissipation,
#
# $$P = I^2R$$
#
# increases with the square of the current but only approximately linearly with the number of windings, the power lost in the windings can be minimized by reducing I and increasing the number of turns N proportionally, or by using thicker wire to reduce the resistance. However, the limit to increasing N or lowering the resistance is that the windings take up more room between the magnet’s core pieces. If the area available for the windings is filled up, more turns require going to a smaller diameter of wire, which has higher resistance, which cancels the advantage of using more turns.
#
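# A small sketch of this trade-off (the fill assumption is ours: the same wire
# gauge still fits in the winding window, so the resistance of one mean turn
# stays constant). For a fixed MMF = N*I, the copper loss then scales as 1/N:
# In[ ]:
def copper_loss(mmf, n_turns, r_per_turn):
    current = mmf / float(n_turns)       # I = MMF / N
    resistance = n_turns * r_per_turn    # wire length, hence R, grows with N
    return current**2 * resistance       # = mmf**2 * r_per_turn / n_turns
for n in (100, 200, 400):
    print "N = %d -> P = %.1f W" % (n, copper_loss(1000.0, n, 0.01))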
# ## Electromagnet Design Procedure (for U shaped)
# U shaped electromagnet is shown below. It has two airgap and two wire turn area. The core is in a circular form.
#
# 
#
# Applying Ampere's Law to the circuit,
#
# $$\mathcal{F} = 2Ni = \oint_{S} H \, dl = H_{s}l_{s} + 2H_{g}l_{g}$$
#
# where Hs is the value of H in the core path, which is assumed to be constant and ls is the total length of the core path. At the boundary the magnetic flux densities are equal.
# $$ B_{s} = B_{g}$$
#
# Actually,
#
# $$H_{s}l_{s} << 2H_{g}l_{g}$$
#
# However, nothing is neglected to get accurate result. Therefore,
#
# $$F = 2Ni = \phi(R_{s} + 2R_{g})$$
#
# where:
# $$R_{s} = l_{s}/(\mu\mu_0A_{s})$$
# $$R_{g} = l_{g}/(\mu_0A_{g})$$
#
# In[5]:
# CALCULATION OF CURRENT
L_gap = float(raw_input("Enter the lifting distance in mm :")) # FOR CONTACTED LIFTING => L_gap = 0.1 mm
L_core = float(raw_input("Enter the length of the core in cm :")) # LATER, it will be CHECKED
L_core = L_core * 10
print "Length of the core is %f in mm" %L_core
L_path = 2 * L_core + 2 * L_gap
print "Path of Flux is %f in mm" %L_path
L_path = L_path / float(1000) #ADJUSTED THE LENGTHS in m
L_core = L_core / float(1000)
L_gap = L_gap / float(1000)
print " L_core = %f / L_gap = %f / L_path = %f in m" %(L_core, L_gap, L_path)
print " Permeability : For Iron (99.8% pure) is 6.3 * 10^-3 H/m"
print " Permeability : For Electrical steel is 5 * 10^-3 H/m"
print " Permeability : For Platinum is 1.256970 * 10^-6 H/m"
print " Permeability : For Aluminum is 1.256665 * 10^-6 H/m"
u = float(raw_input("Enter the selected material's permeability in H/m : "))
MMF = B * (L_core / (u * uo) + 2 * L_gap / uo) # H_core = B/(u*uo); multiply L_gap by the number of air gaps in the design
print "MMF is %f (ampere-turns)" %MMF
N = raw_input("Enter the total number of turns : ")
#offset = float(raw_input("Determine an offset for MMF : ")) #OPTINAL FOR SAFETY
#I = (MMF + offset) / float(N)
I = MMF / float(N)
print " Required Current is %f (A)" %I
# According to the calculated current value, an appropriate wire is selected in this step. The list is obtained from http://en.wikipedia.org/wiki/American_wire_gauge and http://www.powerstream.com/Wire_Size.htm
# A suitable wire selection improves performance by reducing the losses in the wire and around the core.
#
# ### Attention !!
# The Excel table of wire data is not embedded in the code. Therefore, you have to change the line that indicates the location of the Excel file.
# The Excel file can be downloaded from https://www.dropbox.com/s/e46f0idnv303z25/Copper_wires.xlsx?dl=0.
#
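# If the Excel file is not available, a small hard-coded table can stand in for
# it (a sketch; the diameters and ohm/km figures are standard AWG values, the
# ampacities are rough and depend on the rating standard used):
# In[ ]:
awg_table = [
    # (AWG, diameter in mm, ohm per km, approx. max current in A)
    (10, 2.588, 3.277, 15.0),
    (14, 1.628, 8.282, 5.9),
    (18, 1.024, 20.95, 2.3),
    (20, 0.812, 33.31, 1.5),
]
def pick_wire(current):
    # walk from the thinnest gauge up and return the first one that can carry I
    for awg, dia_mm, ohm_km, amps in reversed(awg_table):
        if amps >= current:
            return awg, dia_mm, ohm_km / 1000.0   # ohm/m, as used below
    return None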
# In[6]:
# SELECTION OF WİRE TYPE FROM COPPER WIRES
import xlrd
file_location = "C:\Users\ASUS\Desktop\Copper_wires.xlsx"
workbook = xlrd.open_workbook(file_location, "rb")
sheet = workbook.sheet_by_index(0)
#print sheet.nrows
#print sheet.ncols
for row in range(sheet.nrows):
if I > sheet.cell_value(row,4):
print "AWG gauge for the wire is %d " %sheet.cell_value(row-1,0)
diameter_wire = sheet.cell_value(row-1,1)
print "Diameter of selected wire is %f mm" %sheet.cell_value(row-1,1)
resistance_wire = sheet.cell_value(row-1,2)/ float(1000)
print "Resistances per m of selected wire is %f ohm/m" %resistance_wire
break
# In the last part, all calculations consider both a single layer of wire turns and multiple layers. Multiple
# layers are needed when the core length is not enough to contain all the wire turns.
# Then,
# the total resistance of the wire, the flux, the weight of the wire and the required voltage are calculated, respectively.
#
# Total length of wire is calculated as:
# $$ T = L_{former} \cdot \text{stacking factor}/d$$
# where:
# - T is the maximum number of turns in the first layer
# - L_former is the length of the core on which the wire is wrapped. It should be smaller than the length of the core.
# - d is the diameter of the selected wire.
# $$ n = N / T$$
# where:
# - n is the total number of layers
# - N is the total number of turns
# $$ \text{first layer length} = \pi D T$$
# where:
# - D is the diameter of the core
# $$\text{Sum}_{\text{wire length}} = (n/2) (2 \pi D + (n-1)d)$$
# $$\text{Total} = \text{Sum}_{\text{wire length}} \cdot T$$
#
# The total resistance of the wire is calculated by using the AWG table.
# $$ R_{total} = \text{Total} \cdot R_{ohm/m}$$
#
# The inductance of the coil is obtained as:
# $$ L = N \phi / I$$
# where:
# - L is the total inductance of the coil
# - I is the current on the wire
#
# The total weight of the wire is calculated by using the diameter and density of the selected material. Material is mostly selected copper, which has the density 8.96 g/cm^3.
# $$ Volume_{wire} = \pi (d/2)^2 \cdot \text{Total}$$
# $$ Weight_{wire} = Volume_{wire} \cdot density_{material}$$
#
# The required voltage is obtained as:
# $$ V_{required} = I * R_{total}$$
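# Illustrative numbers (assumed, not produced by the code): with N = 500 turns,
# B = 1.2 T and A = 1.71e-3 m^2 the flux is phi = B*A = 2.05e-3 Wb, and at
# I = 5 A the coil inductance is L = N*phi/I = 500 * 2.05e-3 / 5 = 0.205 H.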
# In[7]:
stacking = float(raw_input ("Enter the stacking factor : ")) #roughly 0.9
# dia_meter is obtained
# dia_wire is obtained
# turn number is obtained
print "The entered length of the core is %f m" %L_core
L_former = float(raw_input("Enter the Length of Former in cm :"))
L_former = float (L_former * 10)
L_coremm = L_core * 1000 # Turn Lcore from m to mm
if L_former > L_coremm:
print " L_former is longer than L_core, Please re-enter the L_core values"
print "L_core value is %f" %L_core
L_former = float(raw_input("Enter the Length of Former in cm :"))
L_former = float (L_former * 10)
first = int (L_former * stacking / diameter_wire)
print " The maximum number of winding in the first layer is %f" %first
n = float(N) / first
n = int(n) + 1
print "the total number of layers required to give the total number of turns : %f" %n
Sum_length_wire = (float(n) / 2) * (2 * pi * dia_core + (n - 1) * diameter_wire) # mean turn circumference uses the core diameter D (mm), per the formula above
Sum_length_wire = Sum_length_wire / float(10) # in cm
if N < first :
Sum_length_wire = Sum_length_wire * float (N)
else:
Sum_length_wire = Sum_length_wire * float (first)
print "Total Length of wire is %f (cm)" %Sum_length_wire
Sum_length_wire = Sum_length_wire / float(100) #in m
R_total = resistance_wire * Sum_length_wire
print " The resistance of the wire is %f ohm " %R_total # TOO LOW VALUE OF RESISTANCES !!!!!!!!!
flux = float( B * A)
Inductance = float(N) * flux / I
print " The inductance of the coil is %f H" %Inductance
dia_wiree = diameter_wire / float(10)
dia_wire = dia_wiree / float(2)
density_copper = 8.96 # g / cm^-3
weight = pi * dia_wire * dia_wire * (Sum_length_wire * 100) * density_copper # radius in cm, wire length converted from m to cm, so the volume is in cm^3
print " The weight of the wire is %f g" %weight
V_required = I * R_total
print " Required voltage for the electromagnet is %f V" %V_required
# # U-shaped Electromagnet Design Procedure !!More Accurate!!
# This part is an optional area for designing a U-shaped electromagnet far more accurately. It needs more entries for the core dimensions.
# U-shaped electromagnet is shown below.
# 
#
# The user uses the results and parameters obtained above to check the design more accurately.
# All reluctances are included into the design below.
# The magnetic circuit of the U-shaped electromagnet is shown below.
#
# 
#
# Due to the U-shape, there are two air gaps. The force of attraction across each air gap between the electromagnet and the track, and the total force, are expressed below, respectively.
# $$M_{airgap} = NI/2$$
# $$R_{airgap} = g / (\mu_0 A_{pole})$$
#
# $$F_{magnet} = \mu_0 (NI)^2 A_{pole} / (4g^2)$$
#
# where,
# - M is the magneto-motive force across each air gap
# - g is the length of the airgap
# - R_airgap is the reluctance of the each airgap
# The F_magnet equation shows that the magnetic force is a nonlinear function of both current and air gap length. Also, for a constant current the force decreases with increasing air gap, hence it has a negative stiffness coefficient. There is therefore no point of equilibrium between two magnetised bodies, and so the open-loop force-air gap characteristic of an electromagnet is unstable.
# The assumption of uniform air gap flux distribution is valid only for air gaps that are much smaller than the pole width. At larger air gaps, flux fringing increases the effective air gap flux area and hence decreases the air gap reluctance.
#
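# A small sketch of that effect (the geometry numbers are assumed): comparing the
# air-gap reluctance with and without the fringing term 2g/pi of the equation below.
# In[ ]:
def r_gap(g, l, p, fringing=True):
    from math import pi
    uo = 4 * pi * 10**-7
    width = (p + 2 * g / pi) if fringing else p   # effective pole width
    return g / (uo * l * width)
g_, l_, p_ = 0.005, 0.1, 0.02   # 5 mm gap, 100 mm magnet length, 20 mm pole width
print "with fringing    : %e A/Wb" % r_gap(g_, l_, p_)
print "without fringing : %e A/Wb" % r_gap(g_, l_, p_, fringing=False)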
# Directly lifting force is expressed below. For the loads in Parker Steel Company, the lateral offset (y) is zero.
# $$F_{lift} = F_{magnet}\left(1+(2g/\pi p)\left(1-(y/g)\tan^{-1}(y/g)\right)\right)$$
# where,
# - p is the pole width
# - y is the lateral offset between the electromagnet and track poles. It is set to zero.
#
# By considering zero lateral offset,
# $$R_{airgap} = g / (\mu_0 l (p + 2g/\pi))$$
#
# Magnet and track path reluctances are calculated from the exact dimension entries.
# $$R_{magnet} = (2h + w + 2p) / (\mu_M \mu_0 lp)$$
# $$R_{track} = (t + 4p)/ (\mu_T \mu_0 lp)$$
# where,
# - w is the width between the electromagnet poles
# - p is the pole width
# - h is the pole height above the yoke
# - l is the length of the electromagnet
# - t is the width between the track poles
# - μM is the relative permeability of the electromagnet
# - μT is the relative permeability of the track
#
# Leakage flux is calculated by neglecting fringing flux, because the air gap is too small for fringing flux to matter.
# $$R_{leakage} = w / (\mu_0 l (h/2)) $$
#
# All the parameters are obtained in the U-shaped magnetic circuit shown above. After analyzing the circuit,
# $$ M_{airgap} = \frac{R_{airgap}R_{leakage}}{(R_{track}+2R_{airgap})(R_{leakage}+R_{magnet})+R_{magnet}R_{leakage}} M_{coils}$$
# where,
# $$M_{coils} = NI$$
#
# By substituting M_airgap into the force expression:
# $$F_{magnet} = M_{airgap}^2/(g R_{airgap})$$
#
# $$F_{magnet} = \frac{R_{airgap}R_{leakage}^2}{g\left((R_{track}+2R_{airgap})(R_{leakage}+R_{magnet})+R_{magnet}R_{leakage}\right)^2}M_{coils}^2$$
#
# F_lift is calculated by substituting F_magnet into the F_lift function.
#
# In[ ]:
print "Welcome to the Electromagnet Design Code"
print " U-SHAPE Electromagnet is covered in detailed "
# In[ ]:
while True:
l = f |
g = float(raw_input("Enter the length of Air gap in mm ")) # 0.1 mm for Contacted Lifting
t = 0 # the width between the track poles
from math import pi, exp, sqrt, atan
uo = float(4 * pi * 10**-7)
print " Permeability : For Iron (99.8% pure) is 6.3 * 10^-3 H/m"
print " Permeability : For Electrical steel is 5 * 10^-3 H/m"
print " Permeability : For Platinum is 1.256970 * 10^-6 H/m"
print " Permeability : For Aluminum is 1.256665 * 10^-6 H/m"
u = float(raw_input("Enter the selected material's permeability in H/m : "))
# In[ ]:
# RELUCTANCES
R_air = g / (uo * l * (p + 2 * g / pi)) #Air gap Reluctance
R_magnet = (2 * h + w + 2 * p) / (u * uo * l * p) # Core path Reluctance
R_leakage = w / (uo * l * h / 2) # Leakage Flux Reluctance *By neglecting fringing flux due to plate lifting*
R_track = (t + 4* p) / (u * uo * l * p)
print " R_air = %f" %R_air
print "R_magnet = %f" %R_magnet
print "R_leakage = %f" %R_leakage
print "R_track = %f" %R_track
# In[ ]:
N = raw_input("Enter the number of Turns : ")
I = raw_input("Enter the coil current in A : ")
M_coil = float(N) * float(I)
M_air = ((R_air * R_leakage) / ((R_track + 2 * R_air) * (R_leakage + R_magnet) + R_magnet * R_leakage)) * M_coil
print "M_air is %f" %M_air
F_magnet = M_air * M_air / (g * R_air)
y = 0 # Lateral offset
F_lift = F_magnet * (1 - ((y / g) * atan(y / g)) / (1 + pi * p / (2 * g) ))
print " Lifting Force is %f in N" %F_lift
# CALCULATION OF LOAD FORCE in N
print "Fmg = The gravitational force produced by load "
mg = float(raw_input("Enter the weight of the load in kg : "))
Fmg = float(9.8 * mg)
print "Fmg is %f in N" %(Fmg)
if F_lift < Fmg:
print "ERROR"
else:
print "SUCCESSFUL DESIGN :)"
# # ADDITIONAL PART
# ## Determining the magnetic flux density at a distance X from the magnet at the center line, B(X)
# For a cylindrical magnet, if the air gap is significantly large, this equation for B(x) helps obtain better results.
# The cylindrical electromagnet and its dimensions are shown below,
# 
#
# The equation is;
# $$B(x) = (B_r/2)\left((L + x)/\sqrt{R^2+(L+x)^2} - x/\sqrt{R^2+x^2}\right)$$
# where,
# - Br is the residual flux density of the magnet
# - x is the distance from the surface of the magnet
# - L is the length of the magnet
# - R is the radius of the magnet
#
| loat(raw_input("Enter the length of Electromagnet in mm "))
w = float(raw_input("Enter the Width between the electromagnet pole pieces in mm "))
h = float(raw_input("Enter the Height of the pole pieces above the yoke in mm "))
p = float(raw_input("Enter the Width of the pole pieces in mm "))
if l >= (2 * h + w + 2 * p):
print "The length of Electromagnet ERROR"
print "l should be equal to (2 * h + w + 2 * p)"
l = float(raw_input("Enter the length of Electromagnet in mm "))
w = float(raw_input("Enter the Width between the electromagnet pole pieces in mm "))
h = float(raw_input("Enter the Height of the pole pieces above the yoke in mm "))
p = float(raw_input("Enter the Width of the pole pieces in mm "))
else:
break
| conditional_block |
aa_changes.rs | use crate::alphabet::aa::Aa;
use crate::alphabet::letter::Letter;
use crate::alphabet::letter::{serde_deserialize_seq, serde_serialize_seq};
use crate::alphabet::nuc::Nuc;
use crate::analyze::aa_del::AaDel;
use crate::analyze::aa_sub::AaSub;
use crate::analyze::nuc_del::NucDelRange;
use crate::analyze::nuc_sub::NucSub;
use crate::coord::coord_map_cds_to_global::cds_codon_pos_to_ref_range;
use crate::coord::position::{AaRefPosition, NucRefGlobalPosition, PositionLike};
use crate::coord::range::{have_intersection, AaRefRange, NucRefGlobalRange};
use crate::gene::cds::Cds;
use crate::gene::gene::GeneStrand;
use crate::gene::gene_map::GeneMap;
use crate::translate::complement::reverse_complement_in_place;
use crate::translate::translate_genes::{CdsTranslation, Translation};
use crate::utils::collections::extend_map_of_vecs;
use either::Either;
use eyre::Report;
use itertools::{Itertools, MinMaxResult};
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
#[derive(Clone, Debug, Default, Serialize, Deserialize, schemars::JsonSchema)]
#[serde(rename_all = "camelCase")]
pub struct AaChangeWithContext {
pub cds_name: String,
pub pos: AaRefPosition,
pub ref_aa: Aa,
pub qry_aa: Aa,
pub nuc_pos: NucRefGlobalPosition,
#[schemars(with = "String")]
#[serde(serialize_with = "serde_serialize_seq")]
#[serde(deserialize_with = "serde_deserialize_seq")]
pub ref_triplet: Vec<Nuc>,
#[schemars(with = "String")]
#[serde(serialize_with = "serde_serialize_seq")]
#[serde(deserialize_with = "serde_deserialize_seq")]
pub qry_triplet: Vec<Nuc>,
pub nuc_ranges: Vec<NucRefGlobalRange>,
}
impl AaChangeWithContext {
pub fn new(
cds: &Cds,
pos: AaRefPosition,
qry_seq: &[Nuc],
ref_seq: &[Nuc],
ref_tr: &CdsTranslation,
qry_tr: &CdsTranslation,
) -> Self {
let ref_aa = ref_tr.seq[pos.as_usize()];
let qry_aa = qry_tr.seq[pos.as_usize()];
let nuc_ranges = cds_codon_pos_to_ref_range(cds, pos);
let ref_triplet = nuc_ranges
.iter()
.flat_map(|(range, strand)| {
let mut nucs = ref_seq[range.to_std()].to_vec();
if strand == &GeneStrand::Reverse {
reverse_complement_in_place(&mut nucs);
}
nucs
})
.collect_vec();
let qry_triplet = nuc_ranges
.iter()
.flat_map(|(range, strand)| {
let mut nucs = qry_seq[range.clamp_range(0, qry_seq.len()).to_std()].to_vec();
if strand == &GeneStrand::Reverse {
reverse_complement_in_place(&mut nucs);
}
nucs
})
.collect_vec();
let nuc_ranges = nuc_ranges.into_iter().map(|(range, _)| range).collect_vec();
Self {
cds_name: cds.name.clone(),
pos,
ref_aa,
qry_aa,
nuc_pos: nuc_ranges[0].begin,
nuc_ranges,
ref_triplet,
qry_triplet,
}
}
#[inline]
pub fn is_mutated_or_deleted(&self) -> bool {
is_aa_mutated_or_deleted(self.ref_aa, self.qry_aa)
}
}
#[derive(Clone, Debug, Default, Serialize, Deserialize, schemars::JsonSchema)]
#[serde(rename_all = "camelCase")]
pub struct AaChangesGroup {
name: String,
range: AaRefRange,
changes: Vec<AaChangeWithContext>,
nuc_subs: Vec<NucSub>,
nuc_dels: Vec<NucDelRange>,
}
impl AaChangesGroup {
pub fn new(name: impl AsRef<str>) -> Self {
Self::with_changes(name, vec![])
}
pub fn with_changes(name: impl AsRef<str>, changes: Vec<AaChangeWithContext>) -> Self {
Self {
name: name.as_ref().to_owned(),
range: Self::find_codon_range(&changes),
changes,
nuc_subs: vec![],
nuc_dels: vec![],
}
}
pub fn push(&mut self, change: AaChangeWithContext) {
self.changes.push(change);
self.range = Self::find_codon_range(&self.changes);
}
pub fn last(&self) -> Option<&AaChangeWithContext> {
self.changes.last()
}
fn find_codon_range(changes: &[AaChangeWithContext]) -> AaRefRange {
match changes.iter().minmax_by_key(|change| change.pos) {
MinMaxResult::NoElements => AaRefRange::from_isize(0, 0),
MinMaxResult::OneElement(one) => AaRefRange::new(one.pos, one.pos + 1),
MinMaxResult::MinMax(first, last) => AaRefRange::new(first.pos, last.pos + 1),
}
}
}
#[derive(Clone, Debug, Default, Serialize, Deserialize, schemars::JsonSchema)]
#[serde(rename_all = "camelCase")]
pub struct FindAaChangesOutput {
pub aa_changes_groups: Vec<AaChangesGroup>,
pub aa_substitutions: Vec<AaSub>,
pub aa_deletions: Vec<AaDel>,
pub nuc_to_aa_muts: BTreeMap<String, Vec<AaSub>>,
}
/// Finds aminoacid substitutions and deletions in query peptides relative to reference peptides, in all genes
///
/// ## Precondition
/// Nucleotide sequences and peptides are required to be stripped of insertions
pub fn find_aa_changes(
ref_seq: &[Nuc],
qry_seq: &[Nuc],
ref_translation: &Translation,
qry_translation: &Translation,
gene_map: &GeneMap,
nuc_subs: &[NucSub],
nuc_dels: &[NucDelRange],
) -> Result<FindAaChangesOutput, Report> |
/// Finds aminoacid substitutions and deletions in query peptides relative to reference peptides, in one gene
///
/// ## Precondition
/// Nucleotide sequences and peptides are required to be stripped of insertions
///
///
/// ## Implementation details
/// We compare reference and query peptides (extracted by the preceding call to Nextalign),
/// one aminoacid at a time, and deduce changes. We then report the change and relevant nucleotide context surrounding
/// this change.
/// Previously we reported one-to-one mapping of aminoacid changes to corresponding nucleotide changes. However, it
/// was not always accurate, because if there are multiple nucleotide changes in a codon, the direct correspondence
/// might not always be established without knowing the order in which nucleotide changes have occurred. And in the
/// context of Nextclade we don't have this information.
fn find_aa_changes_for_cds(
cds: &Cds,
qry_seq: &[Nuc],
ref_seq: &[Nuc],
ref_tr: &CdsTranslation,
qry_tr: &CdsTranslation,
nuc_subs: &[NucSub],
nuc_dels: &[NucDelRange],
) -> FindAaChangesOutput {
assert_eq!(ref_tr.seq.len(), qry_tr.seq.len());
assert_eq!(qry_seq.len(), ref_seq.len());
let aa_alignment_ranges = &qry_tr.alignment_ranges;
let mut aa_changes_groups = vec![AaChangesGroup::new(&cds.name)];
let mut curr_group = aa_changes_groups.last_mut().unwrap();
for codon in AaRefRange::from_usize(0, qry_tr.seq.len()).iter() {
if !is_codon_sequenced(aa_alignment_ranges, codon) {
continue;
}
let ref_aa = ref_tr.seq[codon.as_usize()];
let qry_aa = qry_tr.seq[codon.as_usize()];
if is_aa_mutated_or_deleted(ref_aa, qry_aa) {
match curr_group.last() {
// If current group is empty, then we are about to insert the first codon into the first group.
None => {
if codon > 0 && is_codon_sequenced(aa_alignment_ranges, codon - 1) {
// Also prepend one codon to the left, for additional context, to start the group
curr_group.push(AaChangeWithContext::new(
cds,
codon - 1,
qry_seq,
ref_seq,
ref_tr,
qry_tr,
));
}
// The current codon itself
curr_group.push(AaChangeWithContext::new(cds, codon, qry_seq, ref_seq, ref_tr, qry_tr));
}
// Current group is not empty
Some(prev) => {
// If previous codon in the group is adjacent or almost adjacent (there is 1 item in between),
// then append to the group.
if codon <= prev.pos + 2 {
// If previous codon in the group is not exactly adjacent, there is 1 item in between,
// then cover the hole by inserting previous codon.
if codon == prev.pos + 2 && is_codon_sequenced(aa_alignment_ranges, codon - 1) {
curr_group.push(AaChangeWithContext::new(
cds,
codon - 1,
qry_seq,
ref_seq,
ref_tr,
qry_tr,
));
}
// And insert the current codon
curr_group.push(AaChangeWithContext::new(cds, codon, qry_seq, ref_seq, ref_tr, qry_tr));
}
// If previous codon in the group is not adjacent, then terminate the current group and start a new group.
else {
// Add one codon to the right, for additional context, to finalize the current group
if is_codon_sequenced(aa_alignment_ranges, prev.pos + 1) {
curr_group.push(AaChangeWithContext::new(
cds,
prev.pos + 1,
qry_seq,
ref_seq,
ref_tr,
qry_tr,
));
}
let mut new_group = AaChangesGroup::new(&cds.name);
// Start a new group and push the current codon into it.
if is_codon_sequenced(aa_alignment_ranges, codon - 1) {
// Also prepend one codon to the left, for additional context, to start the new group.
new_group.push(AaChangeWithContext::new(
cds,
codon - 1,
qry_seq,
ref_seq,
ref_tr,
qry_tr,
));
}
// Push the current codon to the new group
new_group.push(AaChangeWithContext::new(cds, codon, qry_seq, ref_seq, ref_tr, qry_tr));
aa_changes_groups.push(new_group);
curr_group = aa_changes_groups.last_mut().unwrap();
}
}
}
}
}
// Add one codon to the right, for additional context, to finalize the last group
if let Some(last) = curr_group.last() {
if is_codon_sequenced(aa_alignment_ranges, last.pos + 1) {
curr_group.push(AaChangeWithContext::new(
cds,
last.pos + 1,
qry_seq,
ref_seq,
ref_tr,
qry_tr,
));
}
}
// Keep only non-empty groups
aa_changes_groups.retain(|group| !group.range.is_empty() && !group.changes.is_empty());
aa_changes_groups.iter_mut().for_each(|group| {
let ranges = group
.range
.iter()
.flat_map(|codon| {
cds_codon_pos_to_ref_range(cds, codon)
.into_iter()
.map(|(range, _)| range)
})
.collect_vec();
group.nuc_subs = nuc_subs
.iter()
.filter(|nuc_sub| ranges.iter().any(|range| range.contains(nuc_sub.pos)))
.cloned()
.collect_vec();
group.nuc_dels = nuc_dels
.iter()
.filter(|nuc_del| ranges.iter().any(|range| have_intersection(range, nuc_del.range())))
.cloned()
.collect_vec();
});
let (aa_substitutions, aa_deletions): (Vec<AaSub>, Vec<AaDel>) = aa_changes_groups
.iter()
.flat_map(|aa_changes_group| &aa_changes_group.changes)
.filter(|change| is_aa_mutated_or_deleted(change.ref_aa, change.qry_aa))
.partition_map(|change| {
if change.qry_aa.is_gap() {
Either::Right(AaDel {
cds_name: cds.name.clone(),
ref_aa: change.ref_aa,
pos: change.pos,
})
} else {
Either::Left(AaSub {
cds_name: cds.name.clone(),
ref_aa: change.ref_aa,
pos: change.pos,
qry_aa: change.qry_aa,
})
}
});
// Associate nuc positions with aa mutations.
let nuc_to_aa_muts: BTreeMap<String, Vec<AaSub>> = aa_changes_groups
.iter()
.flat_map(|group| {
group
.changes
.iter()
.filter(|change| AaChangeWithContext::is_mutated_or_deleted(change))
.flat_map(|change| {
change.nuc_ranges.iter().flat_map(move |range| {
range.iter()
// TODO: We convert position to string here, because when communicating with WASM we will pass through
// JSON schema, and JSON object keys must be strings. Maybe there is a way to keep the keys as numbers?
.map(move |pos| (pos.to_string(), AaSub::from(change)))
})
})
})
.into_group_map()
.into_iter()
.map(|(pos, mut aa_muts)| {
aa_muts.sort();
aa_muts.dedup();
(pos, aa_muts)
})
.collect();
FindAaChangesOutput {
aa_changes_groups,
aa_substitutions,
aa_deletions,
nuc_to_aa_muts,
}
}
/// Check whether a given pair of reference and query aminoacids constitutes a mutation or deletion
#[inline]
fn is_aa_mutated_or_deleted(ref_aa: Aa, qry_aa: Aa) -> bool {
// NOTE: We chose to ignore mutations to `X`.
qry_aa != ref_aa && qry_aa != Aa::X
}
/// Check whether a given codon position corresponds to a sequenced aminoacid
fn is_codon_sequenced(aa_alignment_ranges: &[AaRefRange], codon: AaRefPosition) -> bool {
aa_alignment_ranges
.iter()
.any(|aa_alignment_range| aa_alignment_range.contains(codon))
}
| {
let mut changes = qry_translation
.iter_cdses()
.map(|(qry_name, qry_cds_tr)| {
let ref_cds_tr = ref_translation.get_cds(qry_name)?;
let cds = gene_map.get_cds(&qry_cds_tr.name)?;
Ok(find_aa_changes_for_cds(
cds, qry_seq, ref_seq, ref_cds_tr, qry_cds_tr, nuc_subs, nuc_dels,
))
})
.collect::<Result<Vec<FindAaChangesOutput>, Report>>()?
.into_iter()
// Merge changes from all CDSes into one struct
.fold(FindAaChangesOutput::default(), |mut output, changes| {
output.aa_changes_groups.extend(changes.aa_changes_groups);
output.aa_substitutions.extend(changes.aa_substitutions);
output.aa_deletions.extend(changes.aa_deletions);
extend_map_of_vecs(&mut output.nuc_to_aa_muts, changes.nuc_to_aa_muts);
output
});
changes.aa_substitutions.sort();
changes.aa_deletions.sort();
changes.nuc_to_aa_muts.iter_mut().for_each(|(_, vals)| {
vals.sort();
vals.dedup();
});
Ok(changes)
} | identifier_body |
aa_changes.rs | use crate::alphabet::aa::Aa;
use crate::alphabet::letter::Letter;
use crate::alphabet::letter::{serde_deserialize_seq, serde_serialize_seq};
use crate::alphabet::nuc::Nuc;
use crate::analyze::aa_del::AaDel;
use crate::analyze::aa_sub::AaSub;
use crate::analyze::nuc_del::NucDelRange;
use crate::analyze::nuc_sub::NucSub;
use crate::coord::coord_map_cds_to_global::cds_codon_pos_to_ref_range;
use crate::coord::position::{AaRefPosition, NucRefGlobalPosition, PositionLike};
use crate::coord::range::{have_intersection, AaRefRange, NucRefGlobalRange};
use crate::gene::cds::Cds;
use crate::gene::gene::GeneStrand;
use crate::gene::gene_map::GeneMap;
use crate::translate::complement::reverse_complement_in_place;
use crate::translate::translate_genes::{CdsTranslation, Translation};
use crate::utils::collections::extend_map_of_vecs;
use either::Either;
use eyre::Report;
use itertools::{Itertools, MinMaxResult};
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
#[derive(Clone, Debug, Default, Serialize, Deserialize, schemars::JsonSchema)]
#[serde(rename_all = "camelCase")]
pub struct AaChangeWithContext {
pub cds_name: String,
pub pos: AaRefPosition,
pub ref_aa: Aa,
pub qry_aa: Aa,
pub nuc_pos: NucRefGlobalPosition,
#[schemars(with = "String")]
#[serde(serialize_with = "serde_serialize_seq")]
#[serde(deserialize_with = "serde_deserialize_seq")]
pub ref_triplet: Vec<Nuc>,
#[schemars(with = "String")]
#[serde(serialize_with = "serde_serialize_seq")]
#[serde(deserialize_with = "serde_deserialize_seq")]
pub qry_triplet: Vec<Nuc>,
pub nuc_ranges: Vec<NucRefGlobalRange>,
}
impl AaChangeWithContext {
pub fn new(
cds: &Cds,
pos: AaRefPosition,
qry_seq: &[Nuc],
ref_seq: &[Nuc],
ref_tr: &CdsTranslation,
qry_tr: &CdsTranslation,
) -> Self {
let ref_aa = ref_tr.seq[pos.as_usize()];
let qry_aa = qry_tr.seq[pos.as_usize()];
let nuc_ranges = cds_codon_pos_to_ref_range(cds, pos);
let ref_triplet = nuc_ranges
.iter()
.flat_map(|(range, strand)| {
let mut nucs = ref_seq[range.to_std()].to_vec();
if strand == &GeneStrand::Reverse {
reverse_complement_in_place(&mut nucs);
}
nucs
})
.collect_vec();
let qry_triplet = nuc_ranges
.iter()
.flat_map(|(range, strand)| {
let mut nucs = qry_seq[range.clamp_range(0, qry_seq.len()).to_std()].to_vec();
if strand == &GeneStrand::Reverse {
reverse_complement_in_place(&mut nucs);
}
nucs
})
.collect_vec();
let nuc_ranges = nuc_ranges.into_iter().map(|(range, _)| range).collect_vec();
Self {
cds_name: cds.name.clone(),
pos,
ref_aa,
qry_aa,
nuc_pos: nuc_ranges[0].begin,
nuc_ranges,
ref_triplet,
qry_triplet,
}
}
#[inline]
pub fn is_mutated_or_deleted(&self) -> bool {
is_aa_mutated_or_deleted(self.ref_aa, self.qry_aa)
}
}
#[derive(Clone, Debug, Default, Serialize, Deserialize, schemars::JsonSchema)]
#[serde(rename_all = "camelCase")]
pub struct AaChangesGroup {
name: String,
range: AaRefRange,
changes: Vec<AaChangeWithContext>,
nuc_subs: Vec<NucSub>,
nuc_dels: Vec<NucDelRange>,
}
impl AaChangesGroup {
pub fn new(name: impl AsRef<str>) -> Self {
Self::with_changes(name, vec![])
}
pub fn with_changes(name: impl AsRef<str>, changes: Vec<AaChangeWithContext>) -> Self {
Self {
name: name.as_ref().to_owned(),
range: Self::find_codon_range(&changes),
changes,
nuc_subs: vec![],
nuc_dels: vec![],
}
}
pub fn push(&mut self, change: AaChangeWithContext) {
self.changes.push(change);
self.range = Self::find_codon_range(&self.changes);
}
pub fn last(&self) -> Option<&AaChangeWithContext> {
self.changes.last()
}
fn find_codon_range(changes: &[AaChangeWithContext]) -> AaRefRange {
match changes.iter().minmax_by_key(|change| change.pos) {
MinMaxResult::NoElements => AaRefRange::from_isize(0, 0),
MinMaxResult::OneElement(one) => AaRefRange::new(one.pos, one.pos + 1),
MinMaxResult::MinMax(first, last) => AaRefRange::new(first.pos, last.pos + 1),
}
}
}
#[derive(Clone, Debug, Default, Serialize, Deserialize, schemars::JsonSchema)]
#[serde(rename_all = "camelCase")]
pub struct FindAaChangesOutput {
pub aa_changes_groups: Vec<AaChangesGroup>,
pub aa_substitutions: Vec<AaSub>,
pub aa_deletions: Vec<AaDel>,
pub nuc_to_aa_muts: BTreeMap<String, Vec<AaSub>>,
}
/// Finds aminoacid substitutions and deletions in query peptides relative to reference peptides, in all genes
///
/// ## Precondition
/// Nucleotide sequences and peptides are required to be stripped of insertions
pub fn find_aa_changes(
ref_seq: &[Nuc],
qry_seq: &[Nuc],
ref_translation: &Translation,
qry_translation: &Translation,
gene_map: &GeneMap,
nuc_subs: &[NucSub],
nuc_dels: &[NucDelRange],
) -> Result<FindAaChangesOutput, Report> {
let mut changes = qry_translation
.iter_cdses()
.map(|(qry_name, qry_cds_tr)| {
let ref_cds_tr = ref_translation.get_cds(qry_name)?;
let cds = gene_map.get_cds(&qry_cds_tr.name)?;
Ok(find_aa_changes_for_cds(
cds, qry_seq, ref_seq, ref_cds_tr, qry_cds_tr, nuc_subs, nuc_dels,
))
})
.collect::<Result<Vec<FindAaChangesOutput>, Report>>()?
.into_iter()
// Merge changes from all CDSes into one struct
.fold(FindAaChangesOutput::default(), |mut output, changes| {
output.aa_changes_groups.extend(changes.aa_changes_groups);
output.aa_substitutions.extend(changes.aa_substitutions);
output.aa_deletions.extend(changes.aa_deletions);
extend_map_of_vecs(&mut output.nuc_to_aa_muts, changes.nuc_to_aa_muts);
output
});
changes.aa_substitutions.sort();
changes.aa_deletions.sort();
changes.nuc_to_aa_muts.iter_mut().for_each(|(_, vals)| {
vals.sort();
vals.dedup();
});
Ok(changes)
}
/// Finds aminoacid substitutions and deletions in query peptides relative to reference peptides, in one gene
///
/// ## Precondition
/// Nucleotide sequences and peptides are required to be stripped of insertions
///
///
/// ## Implementation details
/// We compare reference and query peptides (extracted by the preceding call to Nextalign),
/// one aminoacid at a time, and deduce changes. We then report the change and relevant nucleotide context surrounding
/// this change.
/// Previously we reported one-to-one mapping of aminoacid changes to corresponding nucleotide changes. However, it
/// was not always accurate, because if there are multiple nucleotide changes in a codon, the direct correspondence
/// might not always be established without knowing the order in which nucleotide changes have occurred. And in the
/// context of Nextclade we don't have this information.
fn find_aa_changes_for_cds(
cds: &Cds,
qry_seq: &[Nuc],
ref_seq: &[Nuc],
ref_tr: &CdsTranslation,
qry_tr: &CdsTranslation,
nuc_subs: &[NucSub],
nuc_dels: &[NucDelRange],
) -> FindAaChangesOutput {
assert_eq!(ref_tr.seq.len(), qry_tr.seq.len());
assert_eq!(qry_seq.len(), ref_seq.len());
let aa_alignment_ranges = &qry_tr.alignment_ranges;
let mut aa_changes_groups = vec![AaChangesGroup::new(&cds.name)];
let mut curr_group = aa_changes_groups.last_mut().unwrap();
for codon in AaRefRange::from_usize(0, qry_tr.seq.len()).iter() {
if !is_codon_sequenced(aa_alignment_ranges, codon) {
continue;
}
let ref_aa = ref_tr.seq[codon.as_usize()];
let qry_aa = qry_tr.seq[codon.as_usize()];
if is_aa_mutated_or_deleted(ref_aa, qry_aa) {
match curr_group.last() {
// If current group is empty, then we are about to insert the first codon into the first group.
None => {
if codon > 0 && is_codon_sequenced(aa_alignment_ranges, codon - 1) {
// Also prepend one codon to the left, for additional context, to start the group
curr_group.push(AaChangeWithContext::new(
cds,
codon - 1,
qry_seq,
ref_seq,
ref_tr,
qry_tr,
));
}
// The current codon itself
curr_group.push(AaChangeWithContext::new(cds, codon, qry_seq, ref_seq, ref_tr, qry_tr));
}
// Current group is not empty
Some(prev) => {
// If previous codon in the group is adjacent or almost adjacent (there is 1 item in between), | // then append to the group.
if codon <= prev.pos + 2 {
// If previous codon in the group is not exactly adjacent, there is 1 item in between,
// then cover the hole by inserting previous codon.
if codon == prev.pos + 2 && is_codon_sequenced(aa_alignment_ranges, codon - 1) {
curr_group.push(AaChangeWithContext::new(
cds,
codon - 1,
qry_seq,
ref_seq,
ref_tr,
qry_tr,
));
}
// And insert the current codon
curr_group.push(AaChangeWithContext::new(cds, codon, qry_seq, ref_seq, ref_tr, qry_tr));
}
// If previous codon in the group is not adjacent, then terminate the current group and start a new group.
else {
// Add one codon to the right, for additional context, to finalize the current group
if is_codon_sequenced(aa_alignment_ranges, prev.pos + 1) {
curr_group.push(AaChangeWithContext::new(
cds,
prev.pos + 1,
qry_seq,
ref_seq,
ref_tr,
qry_tr,
));
}
let mut new_group = AaChangesGroup::new(&cds.name);
// Start a new group and push the current codon into it.
if is_codon_sequenced(aa_alignment_ranges, codon - 1) {
// Also prepend one codon to the left, for additional context, to start the new group.
new_group.push(AaChangeWithContext::new(
cds,
codon - 1,
qry_seq,
ref_seq,
ref_tr,
qry_tr,
));
}
// Push the current codon to the new group
new_group.push(AaChangeWithContext::new(cds, codon, qry_seq, ref_seq, ref_tr, qry_tr));
aa_changes_groups.push(new_group);
curr_group = aa_changes_groups.last_mut().unwrap();
}
}
}
}
}
// Add one codon to the right, for additional context, to finalize the last group
if let Some(last) = curr_group.last() {
if is_codon_sequenced(aa_alignment_ranges, last.pos + 1) {
curr_group.push(AaChangeWithContext::new(
cds,
last.pos + 1,
qry_seq,
ref_seq,
ref_tr,
qry_tr,
));
}
}
// Keep only non-empty groups
aa_changes_groups.retain(|group| !group.range.is_empty() && !group.changes.is_empty());
aa_changes_groups.iter_mut().for_each(|group| {
let ranges = group
.range
.iter()
.flat_map(|codon| {
cds_codon_pos_to_ref_range(cds, codon)
.into_iter()
.map(|(range, _)| range)
})
.collect_vec();
group.nuc_subs = nuc_subs
.iter()
.filter(|nuc_sub| ranges.iter().any(|range| range.contains(nuc_sub.pos)))
.cloned()
.collect_vec();
group.nuc_dels = nuc_dels
.iter()
.filter(|nuc_del| ranges.iter().any(|range| have_intersection(range, nuc_del.range())))
.cloned()
.collect_vec();
});
let (aa_substitutions, aa_deletions): (Vec<AaSub>, Vec<AaDel>) = aa_changes_groups
.iter()
.flat_map(|aa_changes_group| &aa_changes_group.changes)
.filter(|change| is_aa_mutated_or_deleted(change.ref_aa, change.qry_aa))
.partition_map(|change| {
if change.qry_aa.is_gap() {
Either::Right(AaDel {
cds_name: cds.name.clone(),
ref_aa: change.ref_aa,
pos: change.pos,
})
} else {
Either::Left(AaSub {
cds_name: cds.name.clone(),
ref_aa: change.ref_aa,
pos: change.pos,
qry_aa: change.qry_aa,
})
}
});
// Associate nuc positions with aa mutations.
let nuc_to_aa_muts: BTreeMap<String, Vec<AaSub>> = aa_changes_groups
.iter()
.flat_map(|group| {
group
.changes
.iter()
.filter(|change| AaChangeWithContext::is_mutated_or_deleted(change))
.flat_map(|change| {
change.nuc_ranges.iter().flat_map(move |range| {
range.iter()
// TODO: We convert position to string here, because when communicating with WASM we will pass through
// JSON schema, and JSON object keys must be strings. Maybe there is a way to keep the keys as numbers?
.map(move |pos| (pos.to_string(), AaSub::from(change)))
})
})
})
.into_group_map()
.into_iter()
.map(|(pos, mut aa_muts)| {
aa_muts.sort();
aa_muts.dedup();
(pos, aa_muts)
})
.collect();
FindAaChangesOutput {
aa_changes_groups,
aa_substitutions,
aa_deletions,
nuc_to_aa_muts,
}
}
/// Check whether a given pair of reference and query aminoacids constitutes a mutation or deletion
#[inline]
fn is_aa_mutated_or_deleted(ref_aa: Aa, qry_aa: Aa) -> bool {
// NOTE: We chose to ignore mutations to `X`.
qry_aa != ref_aa && qry_aa != Aa::X
}
/// Check whether a given codon position corresponds to a sequenced aminoacid
fn is_codon_sequenced(aa_alignment_ranges: &[AaRefRange], codon: AaRefPosition) -> bool {
aa_alignment_ranges
.iter()
.any(|aa_alignment_range| aa_alignment_range.contains(codon))
} | random_line_split |
|
aa_changes.rs | use crate::alphabet::aa::Aa;
use crate::alphabet::letter::Letter;
use crate::alphabet::letter::{serde_deserialize_seq, serde_serialize_seq};
use crate::alphabet::nuc::Nuc;
use crate::analyze::aa_del::AaDel;
use crate::analyze::aa_sub::AaSub;
use crate::analyze::nuc_del::NucDelRange;
use crate::analyze::nuc_sub::NucSub;
use crate::coord::coord_map_cds_to_global::cds_codon_pos_to_ref_range;
use crate::coord::position::{AaRefPosition, NucRefGlobalPosition, PositionLike};
use crate::coord::range::{have_intersection, AaRefRange, NucRefGlobalRange};
use crate::gene::cds::Cds;
use crate::gene::gene::GeneStrand;
use crate::gene::gene_map::GeneMap;
use crate::translate::complement::reverse_complement_in_place;
use crate::translate::translate_genes::{CdsTranslation, Translation};
use crate::utils::collections::extend_map_of_vecs;
use either::Either;
use eyre::Report;
use itertools::{Itertools, MinMaxResult};
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
#[derive(Clone, Debug, Default, Serialize, Deserialize, schemars::JsonSchema)]
#[serde(rename_all = "camelCase")]
pub struct AaChangeWithContext {
pub cds_name: String,
pub pos: AaRefPosition,
pub ref_aa: Aa,
pub qry_aa: Aa,
pub nuc_pos: NucRefGlobalPosition,
#[schemars(with = "String")]
#[serde(serialize_with = "serde_serialize_seq")]
#[serde(deserialize_with = "serde_deserialize_seq")]
pub ref_triplet: Vec<Nuc>,
#[schemars(with = "String")]
#[serde(serialize_with = "serde_serialize_seq")]
#[serde(deserialize_with = "serde_deserialize_seq")]
pub qry_triplet: Vec<Nuc>,
pub nuc_ranges: Vec<NucRefGlobalRange>,
}
impl AaChangeWithContext {
pub fn new(
cds: &Cds,
pos: AaRefPosition,
qry_seq: &[Nuc],
ref_seq: &[Nuc],
ref_tr: &CdsTranslation,
qry_tr: &CdsTranslation,
) -> Self {
let ref_aa = ref_tr.seq[pos.as_usize()];
let qry_aa = qry_tr.seq[pos.as_usize()];
let nuc_ranges = cds_codon_pos_to_ref_range(cds, pos);
let ref_triplet = nuc_ranges
.iter()
.flat_map(|(range, strand)| {
let mut nucs = ref_seq[range.to_std()].to_vec();
if strand == &GeneStrand::Reverse {
reverse_complement_in_place(&mut nucs);
}
nucs
})
.collect_vec();
let qry_triplet = nuc_ranges
.iter()
.flat_map(|(range, strand)| {
let mut nucs = qry_seq[range.clamp_range(0, qry_seq.len()).to_std()].to_vec();
if strand == &GeneStrand::Reverse {
reverse_complement_in_place(&mut nucs);
}
nucs
})
.collect_vec();
let nuc_ranges = nuc_ranges.into_iter().map(|(range, _)| range).collect_vec();
Self {
cds_name: cds.name.clone(),
pos,
ref_aa,
qry_aa,
nuc_pos: nuc_ranges[0].begin,
nuc_ranges,
ref_triplet,
qry_triplet,
}
}
#[inline]
pub fn is_mutated_or_deleted(&self) -> bool {
is_aa_mutated_or_deleted(self.ref_aa, self.qry_aa)
}
}
#[derive(Clone, Debug, Default, Serialize, Deserialize, schemars::JsonSchema)]
#[serde(rename_all = "camelCase")]
pub struct AaChangesGroup {
name: String,
range: AaRefRange,
changes: Vec<AaChangeWithContext>,
nuc_subs: Vec<NucSub>,
nuc_dels: Vec<NucDelRange>,
}
impl AaChangesGroup {
pub fn new(name: impl AsRef<str>) -> Self {
Self::with_changes(name, vec![])
}
pub fn with_changes(name: impl AsRef<str>, changes: Vec<AaChangeWithContext>) -> Self {
Self {
name: name.as_ref().to_owned(),
range: Self::find_codon_range(&changes),
changes,
nuc_subs: vec![],
nuc_dels: vec![],
}
}
pub fn push(&mut self, change: AaChangeWithContext) {
self.changes.push(change);
self.range = Self::find_codon_range(&self.changes);
}
pub fn last(&self) -> Option<&AaChangeWithContext> {
self.changes.last()
}
fn find_codon_range(changes: &[AaChangeWithContext]) -> AaRefRange {
match changes.iter().minmax_by_key(|change| change.pos) {
MinMaxResult::NoElements => AaRefRange::from_isize(0, 0),
MinMaxResult::OneElement(one) => AaRefRange::new(one.pos, one.pos + 1),
MinMaxResult::MinMax(first, last) => AaRefRange::new(first.pos, last.pos + 1),
}
}
}
#[derive(Clone, Debug, Default, Serialize, Deserialize, schemars::JsonSchema)]
#[serde(rename_all = "camelCase")]
pub struct FindAaChangesOutput {
pub aa_changes_groups: Vec<AaChangesGroup>,
pub aa_substitutions: Vec<AaSub>,
pub aa_deletions: Vec<AaDel>,
pub nuc_to_aa_muts: BTreeMap<String, Vec<AaSub>>,
}
/// Finds aminoacid substitutions and deletions in query peptides relative to reference peptides, in all genes
///
/// ## Precondition
/// Nucleotide sequences and peptides are required to be stripped of insertions
pub fn find_aa_changes(
ref_seq: &[Nuc],
qry_seq: &[Nuc],
ref_translation: &Translation,
qry_translation: &Translation,
gene_map: &GeneMap,
nuc_subs: &[NucSub],
nuc_dels: &[NucDelRange],
) -> Result<FindAaChangesOutput, Report> {
let mut changes = qry_translation
.iter_cdses()
.map(|(qry_name, qry_cds_tr)| {
let ref_cds_tr = ref_translation.get_cds(qry_name)?;
let cds = gene_map.get_cds(&qry_cds_tr.name)?;
Ok(find_aa_changes_for_cds(
cds, qry_seq, ref_seq, ref_cds_tr, qry_cds_tr, nuc_subs, nuc_dels,
))
})
.collect::<Result<Vec<FindAaChangesOutput>, Report>>()?
.into_iter()
// Merge changes from all CDSes into one struct
.fold(FindAaChangesOutput::default(), |mut output, changes| {
output.aa_changes_groups.extend(changes.aa_changes_groups);
output.aa_substitutions.extend(changes.aa_substitutions);
output.aa_deletions.extend(changes.aa_deletions);
extend_map_of_vecs(&mut output.nuc_to_aa_muts, changes.nuc_to_aa_muts);
output
});
changes.aa_substitutions.sort();
changes.aa_deletions.sort();
changes.nuc_to_aa_muts.iter_mut().for_each(|(_, vals)| {
vals.sort();
vals.dedup();
});
Ok(changes)
}
/// Finds aminoacid substitutions and deletions in query peptides relative to reference peptides, in one gene
///
/// ## Precondition
/// Nucleotide sequences and peptides are required to be stripped of insertions
///
///
/// ## Implementation details
/// We compare reference and query peptides (extracted by the preceding call to Nextalign),
/// one aminoacid at a time, and deduce changes. We then report the change and relevant nucleotide context surrounding
/// this change.
/// Previously we reported one-to-one mapping of aminoacid changes to corresponding nucleotide changes. However, it
/// was not always accurate, because if there are multiple nucleotide changes in a codon, the direct correspondence
/// might not always be established without knowing the order in which nucleotide changes have occurred. And in the
/// context of Nextclade we don't have this information.
fn find_aa_changes_for_cds(
cds: &Cds,
qry_seq: &[Nuc],
ref_seq: &[Nuc],
ref_tr: &CdsTranslation,
qry_tr: &CdsTranslation,
nuc_subs: &[NucSub],
nuc_dels: &[NucDelRange],
) -> FindAaChangesOutput {
assert_eq!(ref_tr.seq.len(), qry_tr.seq.len());
assert_eq!(qry_seq.len(), ref_seq.len());
let aa_alignment_ranges = &qry_tr.alignment_ranges;
let mut aa_changes_groups = vec![AaChangesGroup::new(&cds.name)];
let mut curr_group = aa_changes_groups.last_mut().unwrap();
for codon in AaRefRange::from_usize(0, qry_tr.seq.len()).iter() {
if !is_codon_sequenced(aa_alignment_ranges, codon) {
continue;
}
let ref_aa = ref_tr.seq[codon.as_usize()];
let qry_aa = qry_tr.seq[codon.as_usize()];
if is_aa_mutated_or_deleted(ref_aa, qry_aa) {
match curr_group.last() {
// If current group is empty, then we are about to insert the first codon into the first group.
None => |
// Current group is not empty
Some(prev) => {
// If previous codon in the group is adjacent or almost adjacent (there is 1 item in between),
// then append to the group.
if codon <= prev.pos + 2 {
// If previous codon in the group is not exactly adjacent, there is 1 item in between,
// then cover the hole by inserting previous codon.
if codon == prev.pos + 2 && is_codon_sequenced(aa_alignment_ranges, codon - 1) {
curr_group.push(AaChangeWithContext::new(
cds,
codon - 1,
qry_seq,
ref_seq,
ref_tr,
qry_tr,
));
}
// And insert the current codon
curr_group.push(AaChangeWithContext::new(cds, codon, qry_seq, ref_seq, ref_tr, qry_tr));
}
// If previous codon in the group is not adjacent, then terminate the current group and start a new group.
else {
// Add one codon to the right, for additional context, to finalize the current group
if is_codon_sequenced(aa_alignment_ranges, prev.pos + 1) {
curr_group.push(AaChangeWithContext::new(
cds,
prev.pos + 1,
qry_seq,
ref_seq,
ref_tr,
qry_tr,
));
}
let mut new_group = AaChangesGroup::new(&cds.name);
// Start a new group and push the current codon into it.
if is_codon_sequenced(aa_alignment_ranges, codon - 1) {
// Also prepend one codon to the left, for additional context, to start the new group.
new_group.push(AaChangeWithContext::new(
cds,
codon - 1,
qry_seq,
ref_seq,
ref_tr,
qry_tr,
));
}
// Push the current codon to the new group
new_group.push(AaChangeWithContext::new(cds, codon, qry_seq, ref_seq, ref_tr, qry_tr));
aa_changes_groups.push(new_group);
curr_group = aa_changes_groups.last_mut().unwrap();
}
}
}
}
}
// Add one codon to the right, for additional context, to finalize the last group
if let Some(last) = curr_group.last() {
if is_codon_sequenced(aa_alignment_ranges, last.pos + 1) {
curr_group.push(AaChangeWithContext::new(
cds,
last.pos + 1,
qry_seq,
ref_seq,
ref_tr,
qry_tr,
));
}
}
// Keep only non-empty groups
aa_changes_groups.retain(|group| !group.range.is_empty() && !group.changes.is_empty());
aa_changes_groups.iter_mut().for_each(|group| {
let ranges = group
.range
.iter()
.flat_map(|codon| {
cds_codon_pos_to_ref_range(cds, codon)
.into_iter()
.map(|(range, _)| range)
})
.collect_vec();
group.nuc_subs = nuc_subs
.iter()
.filter(|nuc_sub| ranges.iter().any(|range| range.contains(nuc_sub.pos)))
.cloned()
.collect_vec();
group.nuc_dels = nuc_dels
.iter()
.filter(|nuc_del| ranges.iter().any(|range| have_intersection(range, nuc_del.range())))
.cloned()
.collect_vec();
});
let (aa_substitutions, aa_deletions): (Vec<AaSub>, Vec<AaDel>) = aa_changes_groups
.iter()
.flat_map(|aa_changes_group| &aa_changes_group.changes)
.filter(|change| is_aa_mutated_or_deleted(change.ref_aa, change.qry_aa))
.partition_map(|change| {
if change.qry_aa.is_gap() {
Either::Right(AaDel {
cds_name: cds.name.clone(),
ref_aa: change.ref_aa,
pos: change.pos,
})
} else {
Either::Left(AaSub {
cds_name: cds.name.clone(),
ref_aa: change.ref_aa,
pos: change.pos,
qry_aa: change.qry_aa,
})
}
});
// Associate nuc positions with aa mutations.
let nuc_to_aa_muts: BTreeMap<String, Vec<AaSub>> = aa_changes_groups
.iter()
.flat_map(|group| {
group
.changes
.iter()
.filter(|change| AaChangeWithContext::is_mutated_or_deleted(change))
.flat_map(|change| {
change.nuc_ranges.iter().flat_map(move |range| {
range.iter()
// TODO: We convert position to string here, because when communicating with WASM we will pass through
// JSON schema, and JSON object keys must be strings. Maybe there is a way to keep the keys as numbers?
.map(move |pos| (pos.to_string(), AaSub::from(change)))
})
})
})
.into_group_map()
.into_iter()
.map(|(pos, mut aa_muts)| {
aa_muts.sort();
aa_muts.dedup();
(pos, aa_muts)
})
.collect();
FindAaChangesOutput {
aa_changes_groups,
aa_substitutions,
aa_deletions,
nuc_to_aa_muts,
}
}
/// Check whether a given pair of reference and query aminoacids constitutes a mutation or deletion
#[inline]
fn is_aa_mutated_or_deleted(ref_aa: Aa, qry_aa: Aa) -> bool {
// NOTE: We chose to ignore mutations to `X`.
qry_aa != ref_aa && qry_aa != Aa::X
}
/// Check whether a given codon position corresponds to a sequenced aminoacid
fn is_codon_sequenced(aa_alignment_ranges: &[AaRefRange], codon: AaRefPosition) -> bool {
aa_alignment_ranges
.iter()
.any(|aa_alignment_range| aa_alignment_range.contains(codon))
}
| {
if codon > 0 && is_codon_sequenced(aa_alignment_ranges, codon - 1) {
// Also prepend one codon to the left, for additional context, to start the group
curr_group.push(AaChangeWithContext::new(
cds,
codon - 1,
qry_seq,
ref_seq,
ref_tr,
qry_tr,
));
}
// The current codon itself
curr_group.push(AaChangeWithContext::new(cds, codon, qry_seq, ref_seq, ref_tr, qry_tr));
} | conditional_block |
aa_changes.rs | use crate::alphabet::aa::Aa;
use crate::alphabet::letter::Letter;
use crate::alphabet::letter::{serde_deserialize_seq, serde_serialize_seq};
use crate::alphabet::nuc::Nuc;
use crate::analyze::aa_del::AaDel;
use crate::analyze::aa_sub::AaSub;
use crate::analyze::nuc_del::NucDelRange;
use crate::analyze::nuc_sub::NucSub;
use crate::coord::coord_map_cds_to_global::cds_codon_pos_to_ref_range;
use crate::coord::position::{AaRefPosition, NucRefGlobalPosition, PositionLike};
use crate::coord::range::{have_intersection, AaRefRange, NucRefGlobalRange};
use crate::gene::cds::Cds;
use crate::gene::gene::GeneStrand;
use crate::gene::gene_map::GeneMap;
use crate::translate::complement::reverse_complement_in_place;
use crate::translate::translate_genes::{CdsTranslation, Translation};
use crate::utils::collections::extend_map_of_vecs;
use either::Either;
use eyre::Report;
use itertools::{Itertools, MinMaxResult};
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
#[derive(Clone, Debug, Default, Serialize, Deserialize, schemars::JsonSchema)]
#[serde(rename_all = "camelCase")]
pub struct | {
pub cds_name: String,
pub pos: AaRefPosition,
pub ref_aa: Aa,
pub qry_aa: Aa,
pub nuc_pos: NucRefGlobalPosition,
#[schemars(with = "String")]
#[serde(serialize_with = "serde_serialize_seq")]
#[serde(deserialize_with = "serde_deserialize_seq")]
pub ref_triplet: Vec<Nuc>,
#[schemars(with = "String")]
#[serde(serialize_with = "serde_serialize_seq")]
#[serde(deserialize_with = "serde_deserialize_seq")]
pub qry_triplet: Vec<Nuc>,
pub nuc_ranges: Vec<NucRefGlobalRange>,
}
impl AaChangeWithContext {
pub fn new(
cds: &Cds,
pos: AaRefPosition,
qry_seq: &[Nuc],
ref_seq: &[Nuc],
ref_tr: &CdsTranslation,
qry_tr: &CdsTranslation,
) -> Self {
let ref_aa = ref_tr.seq[pos.as_usize()];
let qry_aa = qry_tr.seq[pos.as_usize()];
let nuc_ranges = cds_codon_pos_to_ref_range(cds, pos);
let ref_triplet = nuc_ranges
.iter()
.flat_map(|(range, strand)| {
let mut nucs = ref_seq[range.to_std()].to_vec();
if strand == &GeneStrand::Reverse {
reverse_complement_in_place(&mut nucs);
}
nucs
})
.collect_vec();
let qry_triplet = nuc_ranges
.iter()
.flat_map(|(range, strand)| {
let mut nucs = qry_seq[range.clamp_range(0, qry_seq.len()).to_std()].to_vec();
if strand == &GeneStrand::Reverse {
reverse_complement_in_place(&mut nucs);
}
nucs
})
.collect_vec();
let nuc_ranges = nuc_ranges.into_iter().map(|(range, _)| range).collect_vec();
Self {
cds_name: cds.name.clone(),
pos,
ref_aa,
qry_aa,
nuc_pos: nuc_ranges[0].begin,
nuc_ranges,
ref_triplet,
qry_triplet,
}
}
#[inline]
pub fn is_mutated_or_deleted(&self) -> bool {
is_aa_mutated_or_deleted(self.ref_aa, self.qry_aa)
}
}
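// Note: `AaChangeWithContext::new` indexes both translations at `pos` and takes
// `nuc_ranges[0].begin` as the representative nucleotide position, so it assumes
// the codon maps to at least one reference nucleotide range of this CDS.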
#[derive(Clone, Debug, Default, Serialize, Deserialize, schemars::JsonSchema)]
#[serde(rename_all = "camelCase")]
pub struct AaChangesGroup {
name: String,
range: AaRefRange,
changes: Vec<AaChangeWithContext>,
nuc_subs: Vec<NucSub>,
nuc_dels: Vec<NucDelRange>,
}
impl AaChangesGroup {
pub fn new(name: impl AsRef<str>) -> Self {
Self::with_changes(name, vec![])
}
pub fn with_changes(name: impl AsRef<str>, changes: Vec<AaChangeWithContext>) -> Self {
Self {
name: name.as_ref().to_owned(),
range: Self::find_codon_range(&changes),
changes,
nuc_subs: vec![],
nuc_dels: vec![],
}
}
pub fn push(&mut self, change: AaChangeWithContext) {
self.changes.push(change);
self.range = Self::find_codon_range(&self.changes);
}
pub fn last(&self) -> Option<&AaChangeWithContext> {
self.changes.last()
}
fn find_codon_range(changes: &[AaChangeWithContext]) -> AaRefRange {
match changes.iter().minmax_by_key(|change| change.pos) {
MinMaxResult::NoElements => AaRefRange::from_isize(0, 0),
MinMaxResult::OneElement(one) => AaRefRange::new(one.pos, one.pos + 1),
MinMaxResult::MinMax(first, last) => AaRefRange::new(first.pos, last.pos + 1),
}
}
}
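// The codon range above is half-open. A minimal sketch of the invariant
// (hypothetical `changes` holding codons 5, 6 and 7):
//
// let group = AaChangesGroup::with_changes("S", changes);
// assert_eq!(group.range, AaRefRange::from_isize(5, 8));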
#[derive(Clone, Debug, Default, Serialize, Deserialize, schemars::JsonSchema)]
#[serde(rename_all = "camelCase")]
pub struct FindAaChangesOutput {
pub aa_changes_groups: Vec<AaChangesGroup>,
pub aa_substitutions: Vec<AaSub>,
pub aa_deletions: Vec<AaDel>,
pub nuc_to_aa_muts: BTreeMap<String, Vec<AaSub>>,
}
/// Finds amino acid substitutions and deletions in query peptides relative to reference peptides, in all genes
///
/// ## Precondition
/// Nucleotide sequences and peptides are required to be stripped of insertions
pub fn find_aa_changes(
ref_seq: &[Nuc],
qry_seq: &[Nuc],
ref_translation: &Translation,
qry_translation: &Translation,
gene_map: &GeneMap,
nuc_subs: &[NucSub],
nuc_dels: &[NucDelRange],
) -> Result<FindAaChangesOutput, Report> {
let mut changes = qry_translation
.iter_cdses()
.map(|(qry_name, qry_cds_tr)| {
let ref_cds_tr = ref_translation.get_cds(qry_name)?;
let cds = gene_map.get_cds(&qry_cds_tr.name)?;
Ok(find_aa_changes_for_cds(
cds, qry_seq, ref_seq, ref_cds_tr, qry_cds_tr, nuc_subs, nuc_dels,
))
})
.collect::<Result<Vec<FindAaChangesOutput>, Report>>()?
.into_iter()
// Merge changes from all CDSes into one struct
.fold(FindAaChangesOutput::default(), |mut output, changes| {
output.aa_changes_groups.extend(changes.aa_changes_groups);
output.aa_substitutions.extend(changes.aa_substitutions);
output.aa_deletions.extend(changes.aa_deletions);
extend_map_of_vecs(&mut output.nuc_to_aa_muts, changes.nuc_to_aa_muts);
output
});
changes.aa_substitutions.sort();
changes.aa_deletions.sort();
changes.nuc_to_aa_muts.iter_mut().for_each(|(_, vals)| {
vals.sort();
vals.dedup();
});
Ok(changes)
}
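// A hedged usage sketch (the surrounding variable names are assumptions):
//
// let changes = find_aa_changes(
//   &ref_seq, &qry_seq, &ref_translation, &qry_translation,
//   &gene_map, &nuc_subs, &nuc_dels,
// )?;
// for group in &changes.aa_changes_groups { /* one entry per contiguous run of changes */ }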
/// Finds amino acid substitutions and deletions in query peptides relative to reference peptides, in one gene
///
/// ## Precondition
/// Nucleotide sequences and peptides are required to be stripped of insertions
///
///
/// ## Implementation details
/// We compare reference and query peptides (extracted by the preceding call to Nextalign),
/// one amino acid at a time, and deduce changes. We then report each change along with the relevant
/// nucleotide context surrounding it.
/// Previously we reported a one-to-one mapping of amino acid changes to the corresponding nucleotide changes.
/// However, this was not always accurate: when a codon contains multiple nucleotide changes, the direct
/// correspondence cannot be established without knowing the order in which those nucleotide changes occurred,
/// and in the context of Nextclade we don't have this information.
fn find_aa_changes_for_cds(
cds: &Cds,
qry_seq: &[Nuc],
ref_seq: &[Nuc],
ref_tr: &CdsTranslation,
qry_tr: &CdsTranslation,
nuc_subs: &[NucSub],
nuc_dels: &[NucDelRange],
) -> FindAaChangesOutput {
assert_eq!(ref_tr.seq.len(), qry_tr.seq.len());
assert_eq!(qry_seq.len(), ref_seq.len());
let aa_alignment_ranges = &qry_tr.alignment_ranges;
let mut aa_changes_groups = vec![AaChangesGroup::new(&cds.name)];
let mut curr_group = aa_changes_groups.last_mut().unwrap();
for codon in AaRefRange::from_usize(0, qry_tr.seq.len()).iter() {
if !is_codon_sequenced(aa_alignment_ranges, codon) {
continue;
}
let ref_aa = ref_tr.seq[codon.as_usize()];
let qry_aa = qry_tr.seq[codon.as_usize()];
if is_aa_mutated_or_deleted(ref_aa, qry_aa) {
match curr_group.last() {
// If the current group is empty, then we are about to insert the first codon into the first group.
None => {
if codon > 0 && is_codon_sequenced(aa_alignment_ranges, codon - 1) {
// Also prepend one codon to the left, for additional context, to start the group
curr_group.push(AaChangeWithContext::new(
cds,
codon - 1,
qry_seq,
ref_seq,
ref_tr,
qry_tr,
));
}
// The current codon itself
curr_group.push(AaChangeWithContext::new(cds, codon, qry_seq, ref_seq, ref_tr, qry_tr));
}
// Current group is not empty
Some(prev) => {
// If the previous codon in the group is adjacent or almost adjacent (there is at most 1 codon in between),
// then append to the current group.
if codon <= prev.pos + 2 {
// If the previous codon in the group is not exactly adjacent (there is exactly 1 codon in between),
// then fill the gap by also inserting the intermediate codon.
if codon == prev.pos + 2 && is_codon_sequenced(aa_alignment_ranges, codon - 1) {
curr_group.push(AaChangeWithContext::new(
cds,
codon - 1,
qry_seq,
ref_seq,
ref_tr,
qry_tr,
));
}
// And insert the current codon
curr_group.push(AaChangeWithContext::new(cds, codon, qry_seq, ref_seq, ref_tr, qry_tr));
}
// If the previous codon in the group is not adjacent, then terminate the current group and start a new one.
else {
// Add one codon to the right, for additional context, to finalize the current group
if is_codon_sequenced(aa_alignment_ranges, prev.pos + 1) {
curr_group.push(AaChangeWithContext::new(
cds,
prev.pos + 1,
qry_seq,
ref_seq,
ref_tr,
qry_tr,
));
}
let mut new_group = AaChangesGroup::new(&cds.name);
// Start a new group and push the current codon into it.
if is_codon_sequenced(aa_alignment_ranges, codon - 1) {
// Also prepend one codon to the left, for additional context, to start the new group.
new_group.push(AaChangeWithContext::new(
cds,
codon - 1,
qry_seq,
ref_seq,
ref_tr,
qry_tr,
));
}
// Push the current codon to the new group
new_group.push(AaChangeWithContext::new(cds, codon, qry_seq, ref_seq, ref_tr, qry_tr));
aa_changes_groups.push(new_group);
curr_group = aa_changes_groups.last_mut().unwrap();
}
}
}
}
}
// Add one codon to the right, for additional context, to finalize the last group
if let Some(last) = curr_group.last() {
if is_codon_sequenced(aa_alignment_ranges, last.pos + 1) {
curr_group.push(AaChangeWithContext::new(
cds,
last.pos + 1,
qry_seq,
ref_seq,
ref_tr,
qry_tr,
));
}
}
// Keep only non-empty groups
aa_changes_groups.retain(|group| !group.range.is_empty() && !group.changes.is_empty());
aa_changes_groups.iter_mut().for_each(|group| {
let ranges = group
.range
.iter()
.flat_map(|codon| {
cds_codon_pos_to_ref_range(cds, codon)
.into_iter()
.map(|(range, _)| range)
})
.collect_vec();
group.nuc_subs = nuc_subs
.iter()
.filter(|nuc_sub| ranges.iter().any(|range| range.contains(nuc_sub.pos)))
.cloned()
.collect_vec();
group.nuc_dels = nuc_dels
.iter()
.filter(|nuc_del| ranges.iter().any(|range| have_intersection(range, nuc_del.range())))
.cloned()
.collect_vec();
});
let (aa_substitutions, aa_deletions): (Vec<AaSub>, Vec<AaDel>) = aa_changes_groups
.iter()
.flat_map(|aa_changes_group| &aa_changes_group.changes)
.filter(|change| is_aa_mutated_or_deleted(change.ref_aa, change.qry_aa))
.partition_map(|change| {
if change.qry_aa.is_gap() {
Either::Right(AaDel {
cds_name: cds.name.clone(),
ref_aa: change.ref_aa,
pos: change.pos,
})
} else {
Either::Left(AaSub {
cds_name: cds.name.clone(),
ref_aa: change.ref_aa,
pos: change.pos,
qry_aa: change.qry_aa,
})
}
});
// Associate nuc positions with aa mutations.
let nuc_to_aa_muts: BTreeMap<String, Vec<AaSub>> = aa_changes_groups
.iter()
.flat_map(|group| {
group
.changes
.iter()
.filter(|change| AaChangeWithContext::is_mutated_or_deleted(change))
.flat_map(|change| {
change.nuc_ranges.iter().flat_map(move |range| {
range.iter()
// TODO: We convert position to string here, because when communicating with WASM we will pass through
// JSON schema, and JSON object keys must be strings. Maybe there is a way to keep the keys as numbers?
.map(move |pos| (pos.to_string(), AaSub::from(change)))
})
})
})
.into_group_map()
.into_iter()
.map(|(pos, mut aa_muts)| {
aa_muts.sort();
aa_muts.dedup();
(pos, aa_muts)
})
.collect();
FindAaChangesOutput {
aa_changes_groups,
aa_substitutions,
aa_deletions,
nuc_to_aa_muts,
}
}
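// Illustrative walk-through (hypothetical positions): with mutated codons at 10,
// 11 and 20, and every codon sequenced, the loop above yields two groups: one
// covering codons 9..=12 (context codons 9 and 12 included) and one covering
// codons 19..=21, each annotated with the nuc subs and dels overlapping its
// reference range.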
/// Check whether a given pair of reference and query amino acids constitutes a mutation or deletion
#[inline]
fn is_aa_mutated_or_deleted(ref_aa: Aa, qry_aa: Aa) -> bool {
// NOTE: We chose to ignore mutations to `X`.
qry_aa != ref_aa && qry_aa != Aa::X
}
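// Illustrative examples (hypothetical letters): ref D, qry G -> true (substitution);
// ref D, qry gap -> true (deletion); ref D, qry X -> false (ignored by design);
// ref D, qry D -> false (no change).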
/// Check whether a given codon position corresponds to a sequenced amino acid
fn is_codon_sequenced(aa_alignment_ranges: &[AaRefRange], codon: AaRefPosition) -> bool {
aa_alignment_ranges
.iter()
.any(|aa_alignment_range| aa_alignment_range.contains(codon))
}
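// E.g. (illustrative, assuming half-open ranges): with `aa_alignment_ranges`
// covering 3..10 and 15..20, codon 9 is sequenced while codon 12 is not.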
| AaChangeWithContext | identifier_name |
lib.rs | #![cfg_attr(feature = "cargo-clippy", allow(clone_on_ref_ptr))]
#![cfg_attr(feature = "cargo-clippy", allow(new_without_default_derive))]
#![deny(warnings)]
extern crate bytes;
extern crate conduit_proxy_controller_grpc;
extern crate env_logger;
extern crate deflate;
#[macro_use]
extern crate futures;
extern crate futures_mpsc_lossy;
extern crate futures_watch;
extern crate h2;
extern crate http;
extern crate httparse;
extern crate hyper;
#[cfg(target_os = "linux")]
extern crate inotify;
extern crate ipnet;
#[cfg(target_os = "linux")]
extern crate libc;
#[macro_use]
extern crate log;
#[cfg_attr(test, macro_use)]
extern crate indexmap;
#[cfg(target_os = "linux")]
extern crate procinfo;
extern crate prost;
extern crate prost_types;
#[cfg(test)]
#[macro_use]
extern crate quickcheck;
extern crate rand;
extern crate regex;
extern crate ring;
#[cfg(test)]
extern crate tempdir;
extern crate tokio;
extern crate tokio_connect;
extern crate tokio_timer;
extern crate tower_balance;
extern crate tower_buffer;
extern crate tower_discover;
extern crate tower_grpc;
extern crate tower_h2;
extern crate tower_h2_balance;
extern crate tower_reconnect;
extern crate tower_service;
extern crate conduit_proxy_router;
extern crate tower_util;
extern crate tower_in_flight_limit;
extern crate trust_dns_resolver;
extern crate try_lock;
use futures::*;
use std::error::Error;
use std::io;
use std::net::SocketAddr;
use std::sync::Arc;
use std::thread;
use std::time::Duration;
use indexmap::IndexSet;
use tokio::{
executor::{self, DefaultExecutor, Executor},
runtime::current_thread,
};
use tower_service::NewService;
use tower_fn::*;
use conduit_proxy_router::{Recognize, Router, Error as RouteError};
pub mod app;
mod bind;
pub mod config;
mod connection;
pub mod conditional;
pub mod control;
pub mod convert;
pub mod ctx;
mod dns;
mod drain;
pub mod fs_watch;
mod inbound;
mod logging;
mod map_err;
mod outbound;
pub mod stream;
pub mod task;
pub mod telemetry;
mod transparency;
mod transport;
pub mod timeout;
mod tower_fn; // TODO: move to tower-fn
mod watch_service; // TODO: move to tower
use bind::Bind;
use conditional::Conditional;
use connection::BoundPort;
use inbound::Inbound;
use map_err::MapErr;
use task::MainRuntime;
use transparency::{HttpBody, Server};
pub use transport::{AddrInfo, GetOriginalDst, SoOriginalDst, tls};
use outbound::Outbound;
pub use watch_service::WatchService;
/// Runs a sidecar proxy.
///
/// The proxy binds two listeners:
///
/// - a private socket (TCP or UNIX) for outbound requests to other instances;
/// - and a public socket (TCP and optionally TLS) for inbound requests from other
/// instances.
///
/// The public listener forwards requests to a local socket (TCP or UNIX).
///
/// The private listener routes requests to a service-discovery-aware load balancer.
///
pub struct Main<G> {
config: config::Config,
control_listener: BoundPort,
inbound_listener: BoundPort,
outbound_listener: BoundPort,
metrics_listener: BoundPort,
get_original_dst: G,
runtime: MainRuntime,
}
impl<G> Main<G>
where
G: GetOriginalDst + Clone + Send + 'static,
{
pub fn new<R>(
config: config::Config,
get_original_dst: G,
runtime: R
) -> Self
where
R: Into<MainRuntime>,
{
let control_listener = BoundPort::new(config.control_listener.addr)
.expect("controller listener bind");
let inbound_listener = BoundPort::new(config.public_listener.addr)
.expect("public listener bind");
let outbound_listener = BoundPort::new(config.private_listener.addr)
.expect("private listener bind");
let runtime = runtime.into();
let metrics_listener = BoundPort::new(config.metrics_listener.addr)
.expect("metrics listener bind");
Main {
config,
control_listener,
inbound_listener,
outbound_listener,
metrics_listener,
get_original_dst,
runtime,
}
}
pub fn control_addr(&self) -> SocketAddr {
self.control_listener.local_addr()
}
pub fn inbound_addr(&self) -> SocketAddr {
self.inbound_listener.local_addr()
}
pub fn outbound_addr(&self) -> SocketAddr {
self.outbound_listener.local_addr()
}
pub fn metrics_addr(&self) -> SocketAddr {
self.metrics_listener.local_addr()
}
pub fn run_until<F>(self, shutdown_signal: F)
where
F: Future<Item = (), Error = ()> + Send + 'static,
{
let process_ctx = ctx::Process::new(&self.config);
let Main {
config,
control_listener,
inbound_listener,
outbound_listener,
metrics_listener,
get_original_dst,
mut runtime,
} = self;
let control_host_and_port = config.control_host_and_port.clone();
info!("using controller at {:?}", control_host_and_port);
info!("routing on {:?}", outbound_listener.local_addr());
info!(
"proxying on {:?} to {:?}",
inbound_listener.local_addr(),
config.private_forward
);
info!(
"serving Prometheus metrics on {:?}",
metrics_listener.local_addr(),
);
info!(
"protocol detection disabled for inbound ports {:?}",
config.inbound_ports_disable_protocol_detection,
);
info!(
"protocol detection disabled for outbound ports {:?}",
config.outbound_ports_disable_protocol_detection,
);
let (taps, observe) = control::Observe::new(100);
let (sensors, telemetry) = telemetry::new(
&process_ctx,
config.event_buffer_capacity,
config.metrics_retain_idle,
&taps,
);
let (tls_client_config, tls_server_config, tls_cfg_bg) =
tls::watch_for_config_changes(
config.tls_settings.as_ref(),
sensors.tls_config(),
);
let controller_tls = config.tls_settings.as_ref().and_then(|settings| {
settings.controller_identity.as_ref().map(|controller_identity| {
tls::ConnectionConfig {
identity: controller_identity.clone(),
config: tls_client_config.clone(),
}
})
});
let (dns_resolver, dns_bg) = dns::Resolver::from_system_config_and_env(&config)
.unwrap_or_else(|e| {
// TODO: Make DNS configuration infallible.
panic!("invalid DNS configuration: {:?}", e);
});
let (resolver, resolver_bg) = control::destination::new(
dns_resolver.clone(),
config.namespaces.clone(),
control_host_and_port,
controller_tls,
);
let (drain_tx, drain_rx) = drain::channel();
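// The drain channel implements graceful shutdown: `drain_tx.drain()` (called on
// the shutdown signal below) notifies every `drain_rx` watcher, which cancels
// the accept loops in `serve` while letting in-flight connections finish.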
let bind = Bind::new(tls_client_config).with_sensors(sensors.clone());
// Set up the public listener. This will bind a publicly accessible
// address and listen for inbound connections that should be forwarded
// to the managed application (private destination).
let inbound = {
let ctx = ctx::Proxy::inbound(&process_ctx);
let bind = bind.clone().with_ctx(ctx.clone());
let default_addr = config.private_forward.map(|a| a.into());
let router = Router::new(
Inbound::new(default_addr, bind),
config.inbound_router_capacity,
config.inbound_router_max_idle_age,
);
let tls_settings = config.tls_settings.as_ref().map(|settings| {
tls::ConnectionConfig {
identity: settings.pod_identity.clone(),
config: tls_server_config
}
});
serve(
inbound_listener,
tls_settings,
router,
config.private_connect_timeout,
config.inbound_ports_disable_protocol_detection,
ctx,
sensors.clone(),
get_original_dst.clone(),
drain_rx.clone(),
)
};
// Set up the private listener. This will bind a locally accessible
// address and listen for outbound requests that should be routed
// to a remote service (public destination).
let outbound = {
let ctx = ctx::Proxy::outbound(&process_ctx);
let bind = bind.clone().with_ctx(ctx.clone());
let router = Router::new(
Outbound::new(bind, resolver, config.bind_timeout),
config.outbound_router_capacity,
config.outbound_router_max_idle_age,
);
serve(
outbound_listener,
Conditional::None(tls::ReasonForNoTls::InternalTraffic),
router,
config.public_connect_timeout,
config.outbound_ports_disable_protocol_detection,
ctx,
sensors,
get_original_dst,
drain_rx,
)
};
trace!("running");
let (_tx, admin_shutdown_signal) = futures::sync::oneshot::channel::<()>();
{
thread::Builder::new()
.name("admin".into())
.spawn(move || {
use conduit_proxy_controller_grpc::tap::server::TapServer;
let mut rt = current_thread::Runtime::new()
.expect("initialize admin thread runtime");
let tap = serve_tap(control_listener, TapServer::new(observe));
let metrics_server = telemetry.serve_metrics(metrics_listener);
let fut = ::logging::admin().bg("resolver").future(resolver_bg)
.join5(
::logging::admin().bg("telemetry").future(telemetry),
tap.map_err(|_| {}),
metrics_server.map_err(|_| {}),
::logging::admin().bg("dns-resolver").future(dns_bg),
)
// There's no `Future::join6` combinator...
.join(::logging::admin().bg("tls-config").future(tls_cfg_bg))
.map(|_| {});
rt.spawn(Box::new(fut));
let shutdown = admin_shutdown_signal.then(|_| Ok::<(), ()>(()));
rt.block_on(shutdown).expect("admin");
trace!("admin shutdown finished");
})
.expect("initialize controller api thread");
trace!("controller client thread spawned");
}
let fut = inbound
.join(outbound)
.map(|_| ())
.map_err(|err| error!("main error: {:?}", err));
runtime.spawn(Box::new(fut));
trace!("main task spawned");
let shutdown_signal = shutdown_signal.and_then(move |()| {
debug!("shutdown signaled");
drain_tx.drain()
});
runtime.run_until(shutdown_signal).expect("executor");
debug!("shutdown complete");
}
}
fn serve<R, B, E, F, G>(
bound_port: BoundPort,
tls_config: tls::ConditionalConnectionConfig<tls::ServerConfigWatch>,
router: Router<R>,
tcp_connect_timeout: Duration,
disable_protocol_detection_ports: IndexSet<u16>,
proxy_ctx: Arc<ctx::Proxy>,
sensors: telemetry::Sensors,
get_orig_dst: G,
drain_rx: drain::Watch,
) -> impl Future<Item = (), Error = io::Error> + Send + 'static
where
B: tower_h2::Body + Default + Send + 'static,
B::Data: Send,
<B::Data as ::bytes::IntoBuf>::Buf: Send,
E: Error + Send + 'static,
F: Error + Send + 'static,
R: Recognize<
Request = http::Request<HttpBody>,
Response = http::Response<B>,
Error = E,
RouteError = F,
>
+ Send + Sync + 'static,
R::Key: Send,
R::Service: Send,
<R::Service as tower_service::Service>::Future: Send,
Router<R>: Send,
G: GetOriginalDst + Send + 'static,
{
let stack = Arc::new(NewServiceFn::new(move || {
// Clone the router handle
let router = router.clone();
// Map errors to appropriate response error codes.
let map_err = MapErr::new(router, |e| {
match e {
RouteError::Route(r) => {
error!(" turning route error: {} into 500", r);
http::StatusCode::INTERNAL_SERVER_ERROR
}
RouteError::Inner(i) => |
RouteError::NotRecognized => {
error!("turning route not recognized error into 500");
http::StatusCode::INTERNAL_SERVER_ERROR
}
RouteError::NoCapacity(capacity) => {
// TODO For H2 streams, we should probably signal a protocol-level
// capacity change.
error!("router at capacity ({}); returning a 503", capacity);
http::StatusCode::SERVICE_UNAVAILABLE
}
}
});
// Install the request open timestamp module at the very top
// of the stack, in order to take the timestamp as close as
// possible to the beginning of the request's lifetime.
telemetry::sensor::http::TimestampRequestOpen::new(map_err)
}));
let listen_addr = bound_port.local_addr();
let server = Server::new(
listen_addr,
proxy_ctx.clone(),
sensors,
get_orig_dst,
stack,
tcp_connect_timeout,
disable_protocol_detection_ports,
drain_rx.clone(),
);
let log = server.log().clone();
let accept = {
let fut = bound_port.listen_and_fold(
tls_config,
(),
move |(), (connection, remote_addr)| {
let s = server.serve(connection, remote_addr);
// Logging context is configured by the server.
let r = DefaultExecutor::current()
.spawn(Box::new(s))
.map_err(task::Error::into_io);
future::result(r)
},
);
log.future(fut)
};
let accept_until = Cancelable {
future: accept,
canceled: false,
};
// As soon as we get a shutdown signal, the listener
// is canceled immediately.
drain_rx.watch(accept_until, |accept| {
accept.canceled = true;
})
}
/// Can cancel a future by setting a flag.
///
/// Used to 'watch' the accept futures, and close the listeners
/// as soon as the shutdown signal starts.
struct Cancelable<F> {
future: F,
canceled: bool,
}
impl<F> Future for Cancelable<F>
where
F: Future<Item=()>,
{
type Item = ();
type Error = F::Error;
fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
if self.canceled {
Ok(().into())
} else {
self.future.poll()
}
}
}
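// Illustrative semantics: once the drain watch callback sets `canceled`, the
// next `poll` returns `Ok(Async::Ready(()))` without polling the inner accept
// future again, so the listener stops accepting new connections.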
fn serve_tap<N, B>(
bound_port: BoundPort,
new_service: N,
) -> impl Future<Item = (), Error = io::Error> + 'static
where
B: tower_h2::Body + Send + 'static,
<B::Data as bytes::IntoBuf>::Buf: Send,
N: NewService<
Request = http::Request<tower_h2::RecvBody>,
Response = http::Response<B>
>
+ Send + 'static,
tower_h2::server::Connection<
connection::Connection,
N,
::logging::ServerExecutor,
B,
()
>: Future<Item = ()>,
{
let log = logging::admin().server("tap", bound_port.local_addr());
let h2_builder = h2::server::Builder::default();
let server = tower_h2::Server::new(
new_service,
h2_builder,
log.clone().executor(),
);
let fut = {
let log = log.clone();
// TODO: serve over TLS.
bound_port.listen_and_fold(
Conditional::None(tls::ReasonForNoIdentity::NotImplementedForTap.into()),
server,
move |server, (session, remote)| {
let log = log.clone().with_remote(remote);
let serve = server.serve(session).map_err(|_| ());
let r = executor::current_thread::TaskExecutor::current()
.spawn_local(Box::new(log.future(serve)))
.map(move |_| server)
.map_err(task::Error::into_io);
future::result(r)
},
)
};
log.future(fut)
}
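// Note: tap connections are spawned with `spawn_local` because this future is
// driven on the admin thread's single-threaded runtime (see `run_until`), not
// on the default multi-threaded executor.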
| {
error!("turning {} into 500", i);
http::StatusCode::INTERNAL_SERVER_ERROR
} | conditional_block |
lib.rs | #![cfg_attr(feature = "cargo-clippy", allow(clone_on_ref_ptr))]
#![cfg_attr(feature = "cargo-clippy", allow(new_without_default_derive))]
#![deny(warnings)]
extern crate bytes;
extern crate conduit_proxy_controller_grpc;
extern crate env_logger;
extern crate deflate;
#[macro_use]
extern crate futures;
extern crate futures_mpsc_lossy;
extern crate futures_watch;
extern crate h2;
extern crate http;
extern crate httparse;
extern crate hyper;
#[cfg(target_os = "linux")]
extern crate inotify;
extern crate ipnet;
#[cfg(target_os = "linux")]
extern crate libc;
#[macro_use]
extern crate log;
#[cfg_attr(test, macro_use)]
extern crate indexmap;
#[cfg(target_os = "linux")]
extern crate procinfo;
extern crate prost;
extern crate prost_types;
#[cfg(test)]
#[macro_use]
extern crate quickcheck;
extern crate rand;
extern crate regex;
extern crate ring;
#[cfg(test)]
extern crate tempdir;
extern crate tokio;
extern crate tokio_connect;
extern crate tokio_timer;
extern crate tower_balance;
extern crate tower_buffer;
extern crate tower_discover;
extern crate tower_grpc;
extern crate tower_h2;
extern crate tower_h2_balance;
extern crate tower_reconnect;
extern crate tower_service;
extern crate conduit_proxy_router;
extern crate tower_util;
extern crate tower_in_flight_limit;
extern crate trust_dns_resolver;
extern crate try_lock;
use futures::*;
use std::error::Error;
use std::io;
use std::net::SocketAddr;
use std::sync::Arc;
use std::thread;
use std::time::Duration;
use indexmap::IndexSet;
use tokio::{
executor::{self, DefaultExecutor, Executor},
runtime::current_thread,
};
use tower_service::NewService;
use tower_fn::*;
use conduit_proxy_router::{Recognize, Router, Error as RouteError};
pub mod app;
mod bind;
pub mod config;
mod connection;
pub mod conditional;
pub mod control;
pub mod convert;
pub mod ctx;
mod dns;
mod drain;
pub mod fs_watch;
mod inbound;
mod logging;
mod map_err;
mod outbound;
pub mod stream;
pub mod task;
pub mod telemetry;
mod transparency;
mod transport;
pub mod timeout;
mod tower_fn; // TODO: move to tower-fn
mod watch_service; // TODO: move to tower
use bind::Bind;
use conditional::Conditional;
use connection::BoundPort;
use inbound::Inbound;
use map_err::MapErr;
use task::MainRuntime;
use transparency::{HttpBody, Server};
pub use transport::{AddrInfo, GetOriginalDst, SoOriginalDst, tls};
use outbound::Outbound;
pub use watch_service::WatchService;
/// Runs a sidecar proxy.
///
/// The proxy binds two listeners:
///
/// - a private socket (TCP or UNIX) for outbound requests to other instances;
/// - and a public socket (TCP and optionally TLS) for inbound requests from other
/// instances.
///
/// The public listener forwards requests to a local socket (TCP or UNIX).
///
/// The private listener routes requests to a service-discovery-aware load balancer.
///
pub struct Main<G> {
config: config::Config,
control_listener: BoundPort,
inbound_listener: BoundPort,
outbound_listener: BoundPort,
metrics_listener: BoundPort,
get_original_dst: G,
runtime: MainRuntime,
}
impl<G> Main<G>
where
G: GetOriginalDst + Clone + Send + 'static,
{
pub fn new<R>(
config: config::Config,
get_original_dst: G,
runtime: R
) -> Self
where
R: Into<MainRuntime>,
{
let control_listener = BoundPort::new(config.control_listener.addr)
.expect("controller listener bind");
let inbound_listener = BoundPort::new(config.public_listener.addr)
.expect("public listener bind");
let outbound_listener = BoundPort::new(config.private_listener.addr)
.expect("private listener bind");
let runtime = runtime.into();
let metrics_listener = BoundPort::new(config.metrics_listener.addr)
.expect("metrics listener bind");
Main {
config,
control_listener,
inbound_listener,
outbound_listener,
metrics_listener,
get_original_dst,
runtime,
}
}
pub fn control_addr(&self) -> SocketAddr {
self.control_listener.local_addr()
}
pub fn inbound_addr(&self) -> SocketAddr {
self.inbound_listener.local_addr()
}
pub fn outbound_addr(&self) -> SocketAddr {
self.outbound_listener.local_addr()
}
pub fn metrics_addr(&self) -> SocketAddr {
self.metrics_listener.local_addr()
}
pub fn run_until<F>(self, shutdown_signal: F)
where
F: Future<Item = (), Error = ()> + Send + 'static,
{
let process_ctx = ctx::Process::new(&self.config);
let Main {
config,
control_listener,
inbound_listener,
outbound_listener,
metrics_listener,
get_original_dst,
mut runtime,
} = self;
let control_host_and_port = config.control_host_and_port.clone();
info!("using controller at {:?}", control_host_and_port);
info!("routing on {:?}", outbound_listener.local_addr());
info!(
"proxying on {:?} to {:?}",
inbound_listener.local_addr(),
config.private_forward
);
info!(
"serving Prometheus metrics on {:?}",
metrics_listener.local_addr(),
);
info!(
"protocol detection disabled for inbound ports {:?}",
config.inbound_ports_disable_protocol_detection,
);
info!(
"protocol detection disabled for outbound ports {:?}",
config.outbound_ports_disable_protocol_detection,
);
let (taps, observe) = control::Observe::new(100);
let (sensors, telemetry) = telemetry::new(
&process_ctx,
config.event_buffer_capacity,
config.metrics_retain_idle,
&taps,
);
let (tls_client_config, tls_server_config, tls_cfg_bg) =
tls::watch_for_config_changes(
config.tls_settings.as_ref(),
sensors.tls_config(),
);
let controller_tls = config.tls_settings.as_ref().and_then(|settings| {
settings.controller_identity.as_ref().map(|controller_identity| {
tls::ConnectionConfig {
identity: controller_identity.clone(),
config: tls_client_config.clone(),
}
})
});
let (dns_resolver, dns_bg) = dns::Resolver::from_system_config_and_env(&config)
.unwrap_or_else(|e| {
// TODO: Make DNS configuration infallible.
panic!("invalid DNS configuration: {:?}", e);
});
let (resolver, resolver_bg) = control::destination::new(
dns_resolver.clone(),
config.namespaces.clone(),
control_host_and_port,
controller_tls,
);
let (drain_tx, drain_rx) = drain::channel();
let bind = Bind::new(tls_client_config).with_sensors(sensors.clone());
// Set up the public listener. This will bind a publicly accessible
// address and listen for inbound connections that should be forwarded
// to the managed application (private destination).
let inbound = {
let ctx = ctx::Proxy::inbound(&process_ctx);
let bind = bind.clone().with_ctx(ctx.clone());
let default_addr = config.private_forward.map(|a| a.into());
let router = Router::new(
Inbound::new(default_addr, bind),
config.inbound_router_capacity,
config.inbound_router_max_idle_age,
);
let tls_settings = config.tls_settings.as_ref().map(|settings| {
tls::ConnectionConfig {
identity: settings.pod_identity.clone(),
config: tls_server_config
}
});
serve(
inbound_listener,
tls_settings,
router,
config.private_connect_timeout,
config.inbound_ports_disable_protocol_detection,
ctx,
sensors.clone(),
get_original_dst.clone(),
drain_rx.clone(),
)
};
// Set up the private listener. This will bind a locally accessible
// address and listen for outbound requests that should be routed
// to a remote service (public destination).
let outbound = {
let ctx = ctx::Proxy::outbound(&process_ctx);
let bind = bind.clone().with_ctx(ctx.clone());
let router = Router::new(
Outbound::new(bind, resolver, config.bind_timeout),
config.outbound_router_capacity,
config.outbound_router_max_idle_age,
);
serve(
outbound_listener,
Conditional::None(tls::ReasonForNoTls::InternalTraffic),
router,
config.public_connect_timeout,
config.outbound_ports_disable_protocol_detection,
ctx,
sensors,
get_original_dst,
drain_rx,
)
};
trace!("running");
let (_tx, admin_shutdown_signal) = futures::sync::oneshot::channel::<()>();
{
thread::Builder::new()
.name("admin".into())
.spawn(move || {
use conduit_proxy_controller_grpc::tap::server::TapServer;
let mut rt = current_thread::Runtime::new()
.expect("initialize admin thread runtime");
let tap = serve_tap(control_listener, TapServer::new(observe));
let metrics_server = telemetry.serve_metrics(metrics_listener);
let fut = ::logging::admin().bg("resolver").future(resolver_bg)
.join5(
::logging::admin().bg("telemetry").future(telemetry),
tap.map_err(|_| {}),
metrics_server.map_err(|_| {}),
::logging::admin().bg("dns-resolver").future(dns_bg),
)
// There's no `Future::join6` combinator...
.join(::logging::admin().bg("tls-config").future(tls_cfg_bg))
.map(|_| {});
rt.spawn(Box::new(fut));
let shutdown = admin_shutdown_signal.then(|_| Ok::<(), ()>(()));
rt.block_on(shutdown).expect("admin");
trace!("admin shutdown finished");
})
.expect("initialize controller api thread");
trace!("controller client thread spawned");
}
let fut = inbound
.join(outbound)
.map(|_| ())
.map_err(|err| error!("main error: {:?}", err));
runtime.spawn(Box::new(fut));
trace!("main task spawned");
let shutdown_signal = shutdown_signal.and_then(move |()| {
debug!("shutdown signaled");
drain_tx.drain()
});
runtime.run_until(shutdown_signal).expect("executor");
debug!("shutdown complete");
}
}
fn serve<R, B, E, F, G>(
bound_port: BoundPort,
tls_config: tls::ConditionalConnectionConfig<tls::ServerConfigWatch>,
router: Router<R>,
tcp_connect_timeout: Duration,
disable_protocol_detection_ports: IndexSet<u16>,
proxy_ctx: Arc<ctx::Proxy>,
sensors: telemetry::Sensors,
get_orig_dst: G,
drain_rx: drain::Watch,
) -> impl Future<Item = (), Error = io::Error> + Send + 'static
where
B: tower_h2::Body + Default + Send + 'static,
B::Data: Send,
<B::Data as ::bytes::IntoBuf>::Buf: Send,
E: Error + Send + 'static,
F: Error + Send + 'static,
R: Recognize<
Request = http::Request<HttpBody>,
Response = http::Response<B>,
Error = E,
RouteError = F,
>
+ Send + Sync + 'static,
R::Key: Send,
R::Service: Send,
<R::Service as tower_service::Service>::Future: Send,
Router<R>: Send,
G: GetOriginalDst + Send + 'static,
|
/// Can cancel a future by setting a flag.
///
/// Used to 'watch' the accept futures, and close the listeners
/// as soon as the shutdown signal starts.
struct Cancelable<F> {
future: F,
canceled: bool,
}
impl<F> Future for Cancelable<F>
where
F: Future<Item=()>,
{
type Item = ();
type Error = F::Error;
fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
if self.canceled {
Ok(().into())
} else {
self.future.poll()
}
}
}
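// Illustrative semantics: once the drain watch callback sets `canceled`, the
// next `poll` returns `Ok(Async::Ready(()))` without polling the inner accept
// future again, so the listener stops accepting new connections.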
fn serve_tap<N, B>(
bound_port: BoundPort,
new_service: N,
) -> impl Future<Item = (), Error = io::Error> + 'static
where
B: tower_h2::Body + Send + 'static,
<B::Data as bytes::IntoBuf>::Buf: Send,
N: NewService<
Request = http::Request<tower_h2::RecvBody>,
Response = http::Response<B>
>
+ Send + 'static,
tower_h2::server::Connection<
connection::Connection,
N,
::logging::ServerExecutor,
B,
()
>: Future<Item = ()>,
{
let log = logging::admin().server("tap", bound_port.local_addr());
let h2_builder = h2::server::Builder::default();
let server = tower_h2::Server::new(
new_service,
h2_builder,
log.clone().executor(),
);
let fut = {
let log = log.clone();
// TODO: serve over TLS.
bound_port.listen_and_fold(
Conditional::None(tls::ReasonForNoIdentity::NotImplementedForTap.into()),
server,
move |server, (session, remote)| {
let log = log.clone().with_remote(remote);
let serve = server.serve(session).map_err(|_| ());
let r = executor::current_thread::TaskExecutor::current()
.spawn_local(Box::new(log.future(serve)))
.map(move |_| server)
.map_err(task::Error::into_io);
future::result(r)
},
)
};
log.future(fut)
}
| {
let stack = Arc::new(NewServiceFn::new(move || {
// Clone the router handle
let router = router.clone();
// Map errors to appropriate response error codes.
let map_err = MapErr::new(router, |e| {
match e {
RouteError::Route(r) => {
error!(" turning route error: {} into 500", r);
http::StatusCode::INTERNAL_SERVER_ERROR
}
RouteError::Inner(i) => {
error!("turning {} into 500", i);
http::StatusCode::INTERNAL_SERVER_ERROR
}
RouteError::NotRecognized => {
error!("turning route not recognized error into 500");
http::StatusCode::INTERNAL_SERVER_ERROR
}
RouteError::NoCapacity(capacity) => {
// TODO For H2 streams, we should probably signal a protocol-level
// capacity change.
error!("router at capacity ({}); returning a 503", capacity);
http::StatusCode::SERVICE_UNAVAILABLE
}
}
});
// Install the request open timestamp module at the very top
// of the stack, in order to take the timestamp as close as
// possible to the beginning of the request's lifetime.
telemetry::sensor::http::TimestampRequestOpen::new(map_err)
}));
let listen_addr = bound_port.local_addr();
let server = Server::new(
listen_addr,
proxy_ctx.clone(),
sensors,
get_orig_dst,
stack,
tcp_connect_timeout,
disable_protocol_detection_ports,
drain_rx.clone(),
);
let log = server.log().clone();
let accept = {
let fut = bound_port.listen_and_fold(
tls_config,
(),
move |(), (connection, remote_addr)| {
let s = server.serve(connection, remote_addr);
// Logging context is configured by the server.
let r = DefaultExecutor::current()
.spawn(Box::new(s))
.map_err(task::Error::into_io);
future::result(r)
},
);
log.future(fut)
};
let accept_until = Cancelable {
future: accept,
canceled: false,
};
// As soon as we get a shutdown signal, the listener
// is canceled immediately.
drain_rx.watch(accept_until, |accept| {
accept.canceled = true;
})
} | identifier_body |
lib.rs | #![cfg_attr(feature = "cargo-clippy", allow(clone_on_ref_ptr))]
#![cfg_attr(feature = "cargo-clippy", allow(new_without_default_derive))]
#![deny(warnings)]
extern crate bytes;
extern crate conduit_proxy_controller_grpc;
extern crate env_logger;
extern crate deflate;
#[macro_use]
extern crate futures;
extern crate futures_mpsc_lossy;
extern crate futures_watch;
extern crate h2;
extern crate http;
extern crate httparse;
extern crate hyper;
#[cfg(target_os = "linux")]
extern crate inotify;
extern crate ipnet;
#[cfg(target_os = "linux")]
extern crate libc;
#[macro_use]
extern crate log;
#[cfg_attr(test, macro_use)]
extern crate indexmap;
#[cfg(target_os = "linux")]
extern crate procinfo;
extern crate prost;
extern crate prost_types;
#[cfg(test)]
#[macro_use]
extern crate quickcheck;
extern crate rand;
extern crate regex;
extern crate ring;
#[cfg(test)]
extern crate tempdir;
extern crate tokio;
extern crate tokio_connect;
extern crate tokio_timer;
extern crate tower_balance;
extern crate tower_buffer;
extern crate tower_discover;
extern crate tower_grpc;
extern crate tower_h2;
extern crate tower_h2_balance;
extern crate tower_reconnect;
extern crate tower_service;
extern crate conduit_proxy_router;
extern crate tower_util;
extern crate tower_in_flight_limit;
extern crate trust_dns_resolver;
extern crate try_lock;
use futures::*;
use std::error::Error;
use std::io;
use std::net::SocketAddr;
use std::sync::Arc;
use std::thread;
use std::time::Duration;
use indexmap::IndexSet;
use tokio::{
executor::{self, DefaultExecutor, Executor},
runtime::current_thread,
};
use tower_service::NewService;
use tower_fn::*;
use conduit_proxy_router::{Recognize, Router, Error as RouteError};
pub mod app;
mod bind;
pub mod config;
mod connection;
pub mod conditional; | pub mod ctx;
mod dns;
mod drain;
pub mod fs_watch;
mod inbound;
mod logging;
mod map_err;
mod outbound;
pub mod stream;
pub mod task;
pub mod telemetry;
mod transparency;
mod transport;
pub mod timeout;
mod tower_fn; // TODO: move to tower-fn
mod watch_service; // TODO: move to tower
use bind::Bind;
use conditional::Conditional;
use connection::BoundPort;
use inbound::Inbound;
use map_err::MapErr;
use task::MainRuntime;
use transparency::{HttpBody, Server};
pub use transport::{AddrInfo, GetOriginalDst, SoOriginalDst, tls};
use outbound::Outbound;
pub use watch_service::WatchService;
/// Runs a sidecar proxy.
///
/// The proxy binds two listeners:
///
/// - a private socket (TCP or UNIX) for outbound requests to other instances;
/// - and a public socket (TCP and optionally TLS) for inbound requests from other
/// instances.
///
/// The public listener forwards requests to a local socket (TCP or UNIX).
///
/// The private listener routes requests to a service-discovery-aware load balancer.
///
pub struct Main<G> {
config: config::Config,
control_listener: BoundPort,
inbound_listener: BoundPort,
outbound_listener: BoundPort,
metrics_listener: BoundPort,
get_original_dst: G,
runtime: MainRuntime,
}
impl<G> Main<G>
where
G: GetOriginalDst + Clone + Send + 'static,
{
pub fn new<R>(
config: config::Config,
get_original_dst: G,
runtime: R
) -> Self
where
R: Into<MainRuntime>,
{
let control_listener = BoundPort::new(config.control_listener.addr)
.expect("controller listener bind");
let inbound_listener = BoundPort::new(config.public_listener.addr)
.expect("public listener bind");
let outbound_listener = BoundPort::new(config.private_listener.addr)
.expect("private listener bind");
let runtime = runtime.into();
let metrics_listener = BoundPort::new(config.metrics_listener.addr)
.expect("metrics listener bind");
Main {
config,
control_listener,
inbound_listener,
outbound_listener,
metrics_listener,
get_original_dst,
runtime,
}
}
pub fn control_addr(&self) -> SocketAddr {
self.control_listener.local_addr()
}
pub fn inbound_addr(&self) -> SocketAddr {
self.inbound_listener.local_addr()
}
pub fn outbound_addr(&self) -> SocketAddr {
self.outbound_listener.local_addr()
}
pub fn metrics_addr(&self) -> SocketAddr {
self.metrics_listener.local_addr()
}
pub fn run_until<F>(self, shutdown_signal: F)
where
F: Future<Item = (), Error = ()> + Send + 'static,
{
let process_ctx = ctx::Process::new(&self.config);
let Main {
config,
control_listener,
inbound_listener,
outbound_listener,
metrics_listener,
get_original_dst,
mut runtime,
} = self;
let control_host_and_port = config.control_host_and_port.clone();
info!("using controller at {:?}", control_host_and_port);
info!("routing on {:?}", outbound_listener.local_addr());
info!(
"proxying on {:?} to {:?}",
inbound_listener.local_addr(),
config.private_forward
);
info!(
"serving Prometheus metrics on {:?}",
metrics_listener.local_addr(),
);
info!(
"protocol detection disabled for inbound ports {:?}",
config.inbound_ports_disable_protocol_detection,
);
info!(
"protocol detection disabled for outbound ports {:?}",
config.outbound_ports_disable_protocol_detection,
);
let (taps, observe) = control::Observe::new(100);
let (sensors, telemetry) = telemetry::new(
&process_ctx,
config.event_buffer_capacity,
config.metrics_retain_idle,
&taps,
);
let (tls_client_config, tls_server_config, tls_cfg_bg) =
tls::watch_for_config_changes(
config.tls_settings.as_ref(),
sensors.tls_config(),
);
let controller_tls = config.tls_settings.as_ref().and_then(|settings| {
settings.controller_identity.as_ref().map(|controller_identity| {
tls::ConnectionConfig {
identity: controller_identity.clone(),
config: tls_client_config.clone(),
}
})
});
let (dns_resolver, dns_bg) = dns::Resolver::from_system_config_and_env(&config)
.unwrap_or_else(|e| {
// TODO: Make DNS configuration infallible.
panic!("invalid DNS configuration: {:?}", e);
});
let (resolver, resolver_bg) = control::destination::new(
dns_resolver.clone(),
config.namespaces.clone(),
control_host_and_port,
controller_tls,
);
let (drain_tx, drain_rx) = drain::channel();
let bind = Bind::new(tls_client_config).with_sensors(sensors.clone());
// Set up the public listener. This will bind a publicly accessible
// address and listen for inbound connections that should be forwarded
// to the managed application (private destination).
let inbound = {
let ctx = ctx::Proxy::inbound(&process_ctx);
let bind = bind.clone().with_ctx(ctx.clone());
let default_addr = config.private_forward.map(|a| a.into());
let router = Router::new(
Inbound::new(default_addr, bind),
config.inbound_router_capacity,
config.inbound_router_max_idle_age,
);
let tls_settings = config.tls_settings.as_ref().map(|settings| {
tls::ConnectionConfig {
identity: settings.pod_identity.clone(),
config: tls_server_config
}
});
serve(
inbound_listener,
tls_settings,
router,
config.private_connect_timeout,
config.inbound_ports_disable_protocol_detection,
ctx,
sensors.clone(),
get_original_dst.clone(),
drain_rx.clone(),
)
};
// Set up the private listener. This will bind a locally accessible
// address and listen for outbound requests that should be routed
// to a remote service (public destination).
let outbound = {
let ctx = ctx::Proxy::outbound(&process_ctx);
let bind = bind.clone().with_ctx(ctx.clone());
let router = Router::new(
Outbound::new(bind, resolver, config.bind_timeout),
config.outbound_router_capacity,
config.outbound_router_max_idle_age,
);
serve(
outbound_listener,
Conditional::None(tls::ReasonForNoTls::InternalTraffic),
router,
config.public_connect_timeout,
config.outbound_ports_disable_protocol_detection,
ctx,
sensors,
get_original_dst,
drain_rx,
)
};
trace!("running");
let (_tx, admin_shutdown_signal) = futures::sync::oneshot::channel::<()>();
{
thread::Builder::new()
.name("admin".into())
.spawn(move || {
use conduit_proxy_controller_grpc::tap::server::TapServer;
let mut rt = current_thread::Runtime::new()
.expect("initialize admin thread runtime");
let tap = serve_tap(control_listener, TapServer::new(observe));
let metrics_server = telemetry.serve_metrics(metrics_listener);
let fut = ::logging::admin().bg("resolver").future(resolver_bg)
.join5(
::logging::admin().bg("telemetry").future(telemetry),
tap.map_err(|_| {}),
metrics_server.map_err(|_| {}),
::logging::admin().bg("dns-resolver").future(dns_bg),
)
// There's no `Future::join6` combinator...
.join(::logging::admin().bg("tls-config").future(tls_cfg_bg))
.map(|_| {});
rt.spawn(Box::new(fut));
let shutdown = admin_shutdown_signal.then(|_| Ok::<(), ()>(()));
rt.block_on(shutdown).expect("admin");
trace!("admin shutdown finished");
})
.expect("initialize controller api thread");
trace!("controller client thread spawned");
}
let fut = inbound
.join(outbound)
.map(|_| ())
.map_err(|err| error!("main error: {:?}", err));
runtime.spawn(Box::new(fut));
trace!("main task spawned");
let shutdown_signal = shutdown_signal.and_then(move |()| {
debug!("shutdown signaled");
drain_tx.drain()
});
runtime.run_until(shutdown_signal).expect("executor");
debug!("shutdown complete");
}
}
fn serve<R, B, E, F, G>(
bound_port: BoundPort,
tls_config: tls::ConditionalConnectionConfig<tls::ServerConfigWatch>,
router: Router<R>,
tcp_connect_timeout: Duration,
disable_protocol_detection_ports: IndexSet<u16>,
proxy_ctx: Arc<ctx::Proxy>,
sensors: telemetry::Sensors,
get_orig_dst: G,
drain_rx: drain::Watch,
) -> impl Future<Item = (), Error = io::Error> + Send + 'static
where
B: tower_h2::Body + Default + Send + 'static,
B::Data: Send,
<B::Data as ::bytes::IntoBuf>::Buf: Send,
E: Error + Send + 'static,
F: Error + Send + 'static,
R: Recognize<
Request = http::Request<HttpBody>,
Response = http::Response<B>,
Error = E,
RouteError = F,
>
+ Send + Sync + 'static,
R::Key: Send,
R::Service: Send,
<R::Service as tower_service::Service>::Future: Send,
Router<R>: Send,
G: GetOriginalDst + Send + 'static,
{
let stack = Arc::new(NewServiceFn::new(move || {
// Clone the router handle
let router = router.clone();
// Map errors to appropriate response error codes.
let map_err = MapErr::new(router, |e| {
match e {
RouteError::Route(r) => {
error!(" turning route error: {} into 500", r);
http::StatusCode::INTERNAL_SERVER_ERROR
}
RouteError::Inner(i) => {
error!("turning {} into 500", i);
http::StatusCode::INTERNAL_SERVER_ERROR
}
RouteError::NotRecognized => {
error!("turning route not recognized error into 500");
http::StatusCode::INTERNAL_SERVER_ERROR
}
RouteError::NoCapacity(capacity) => {
// TODO For H2 streams, we should probably signal a protocol-level
// capacity change.
error!("router at capacity ({}); returning a 503", capacity);
http::StatusCode::SERVICE_UNAVAILABLE
}
}
});
// Install the request open timestamp module at the very top
// of the stack, in order to take the timestamp as close as
// possible to the beginning of the request's lifetime.
telemetry::sensor::http::TimestampRequestOpen::new(map_err)
}));
let listen_addr = bound_port.local_addr();
let server = Server::new(
listen_addr,
proxy_ctx.clone(),
sensors,
get_orig_dst,
stack,
tcp_connect_timeout,
disable_protocol_detection_ports,
drain_rx.clone(),
);
let log = server.log().clone();
let accept = {
let fut = bound_port.listen_and_fold(
tls_config,
(),
move |(), (connection, remote_addr)| {
let s = server.serve(connection, remote_addr);
// Logging context is configured by the server.
let r = DefaultExecutor::current()
.spawn(Box::new(s))
.map_err(task::Error::into_io);
future::result(r)
},
);
log.future(fut)
};
let accept_until = Cancelable {
future: accept,
canceled: false,
};
// As soon as we get a shutdown signal, the listener
// is canceled immediately.
drain_rx.watch(accept_until, |accept| {
accept.canceled = true;
})
}
/// Can cancel a future by setting a flag.
///
/// Used to 'watch' the accept futures, and close the listeners
/// as soon as the shutdown signal starts.
struct Cancelable<F> {
future: F,
canceled: bool,
}
impl<F> Future for Cancelable<F>
where
F: Future<Item=()>,
{
type Item = ();
type Error = F::Error;
fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
if self.canceled {
Ok(().into())
} else {
self.future.poll()
}
}
}
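// Illustrative semantics: once the drain watch callback sets `canceled`, the
// next `poll` returns `Ok(Async::Ready(()))` without polling the inner accept
// future again, so the listener stops accepting new connections.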
fn serve_tap<N, B>(
bound_port: BoundPort,
new_service: N,
) -> impl Future<Item = (), Error = io::Error> + 'static
where
B: tower_h2::Body + Send + 'static,
<B::Data as bytes::IntoBuf>::Buf: Send,
N: NewService<
Request = http::Request<tower_h2::RecvBody>,
Response = http::Response<B>
>
+ Send + 'static,
tower_h2::server::Connection<
connection::Connection,
N,
::logging::ServerExecutor,
B,
()
>: Future<Item = ()>,
{
let log = logging::admin().server("tap", bound_port.local_addr());
let h2_builder = h2::server::Builder::default();
let server = tower_h2::Server::new(
new_service,
h2_builder,
log.clone().executor(),
);
let fut = {
let log = log.clone();
// TODO: serve over TLS.
bound_port.listen_and_fold(
Conditional::None(tls::ReasonForNoIdentity::NotImplementedForTap.into()),
server,
move |server, (session, remote)| {
let log = log.clone().with_remote(remote);
let serve = server.serve(session).map_err(|_| ());
let r = executor::current_thread::TaskExecutor::current()
.spawn_local(Box::new(log.future(serve)))
.map(move |_| server)
.map_err(task::Error::into_io);
future::result(r)
},
)
};
log.future(fut)
} | pub mod control;
pub mod convert; | random_line_split |
lib.rs | #![cfg_attr(feature = "cargo-clippy", allow(clone_on_ref_ptr))]
#![cfg_attr(feature = "cargo-clippy", allow(new_without_default_derive))]
#![deny(warnings)]
extern crate bytes;
extern crate conduit_proxy_controller_grpc;
extern crate env_logger;
extern crate deflate;
#[macro_use]
extern crate futures;
extern crate futures_mpsc_lossy;
extern crate futures_watch;
extern crate h2;
extern crate http;
extern crate httparse;
extern crate hyper;
#[cfg(target_os = "linux")]
extern crate inotify;
extern crate ipnet;
#[cfg(target_os = "linux")]
extern crate libc;
#[macro_use]
extern crate log;
#[cfg_attr(test, macro_use)]
extern crate indexmap;
#[cfg(target_os = "linux")]
extern crate procinfo;
extern crate prost;
extern crate prost_types;
#[cfg(test)]
#[macro_use]
extern crate quickcheck;
extern crate rand;
extern crate regex;
extern crate ring;
#[cfg(test)]
extern crate tempdir;
extern crate tokio;
extern crate tokio_connect;
extern crate tokio_timer;
extern crate tower_balance;
extern crate tower_buffer;
extern crate tower_discover;
extern crate tower_grpc;
extern crate tower_h2;
extern crate tower_h2_balance;
extern crate tower_reconnect;
extern crate tower_service;
extern crate conduit_proxy_router;
extern crate tower_util;
extern crate tower_in_flight_limit;
extern crate trust_dns_resolver;
extern crate try_lock;
use futures::*;
use std::error::Error;
use std::io;
use std::net::SocketAddr;
use std::sync::Arc;
use std::thread;
use std::time::Duration;
use indexmap::IndexSet;
use tokio::{
executor::{self, DefaultExecutor, Executor},
runtime::current_thread,
};
use tower_service::NewService;
use tower_fn::*;
use conduit_proxy_router::{Recognize, Router, Error as RouteError};
pub mod app;
mod bind;
pub mod config;
mod connection;
pub mod conditional;
pub mod control;
pub mod convert;
pub mod ctx;
mod dns;
mod drain;
pub mod fs_watch;
mod inbound;
mod logging;
mod map_err;
mod outbound;
pub mod stream;
pub mod task;
pub mod telemetry;
mod transparency;
mod transport;
pub mod timeout;
mod tower_fn; // TODO: move to tower-fn
mod watch_service; // TODO: move to tower
use bind::Bind;
use conditional::Conditional;
use connection::BoundPort;
use inbound::Inbound;
use map_err::MapErr;
use task::MainRuntime;
use transparency::{HttpBody, Server};
pub use transport::{AddrInfo, GetOriginalDst, SoOriginalDst, tls};
use outbound::Outbound;
pub use watch_service::WatchService;
/// Runs a sidecar proxy.
///
/// The proxy binds two listeners:
///
/// - a private socket (TCP or UNIX) for outbound requests to other instances;
/// - and a public socket (TCP and optionally TLS) for inbound requests from other
/// instances.
///
/// The public listener forwards requests to a local socket (TCP or UNIX).
///
/// The private listener routes requests to a service-discovery-aware load balancer.
///
pub struct Main<G> {
config: config::Config,
control_listener: BoundPort,
inbound_listener: BoundPort,
outbound_listener: BoundPort,
metrics_listener: BoundPort,
get_original_dst: G,
runtime: MainRuntime,
}
impl<G> Main<G>
where
G: GetOriginalDst + Clone + Send + 'static,
{
pub fn new<R>(
config: config::Config,
get_original_dst: G,
runtime: R
) -> Self
where
R: Into<MainRuntime>,
{
let control_listener = BoundPort::new(config.control_listener.addr)
.expect("controller listener bind");
let inbound_listener = BoundPort::new(config.public_listener.addr)
.expect("public listener bind");
let outbound_listener = BoundPort::new(config.private_listener.addr)
.expect("private listener bind");
let runtime = runtime.into();
let metrics_listener = BoundPort::new(config.metrics_listener.addr)
.expect("metrics listener bind");
Main {
config,
control_listener,
inbound_listener,
outbound_listener,
metrics_listener,
get_original_dst,
runtime,
}
}
pub fn control_addr(&self) -> SocketAddr {
self.control_listener.local_addr()
}
pub fn | (&self) -> SocketAddr {
self.inbound_listener.local_addr()
}
pub fn outbound_addr(&self) -> SocketAddr {
self.outbound_listener.local_addr()
}
pub fn metrics_addr(&self) -> SocketAddr {
self.metrics_listener.local_addr()
}
pub fn run_until<F>(self, shutdown_signal: F)
where
F: Future<Item = (), Error = ()> + Send + 'static,
{
let process_ctx = ctx::Process::new(&self.config);
let Main {
config,
control_listener,
inbound_listener,
outbound_listener,
metrics_listener,
get_original_dst,
mut runtime,
} = self;
let control_host_and_port = config.control_host_and_port.clone();
info!("using controller at {:?}", control_host_and_port);
info!("routing on {:?}", outbound_listener.local_addr());
info!(
"proxying on {:?} to {:?}",
inbound_listener.local_addr(),
config.private_forward
);
info!(
"serving Prometheus metrics on {:?}",
metrics_listener.local_addr(),
);
info!(
"protocol detection disabled for inbound ports {:?}",
config.inbound_ports_disable_protocol_detection,
);
info!(
"protocol detection disabled for outbound ports {:?}",
config.outbound_ports_disable_protocol_detection,
);
let (taps, observe) = control::Observe::new(100);
let (sensors, telemetry) = telemetry::new(
&process_ctx,
config.event_buffer_capacity,
config.metrics_retain_idle,
&taps,
);
let (tls_client_config, tls_server_config, tls_cfg_bg) =
tls::watch_for_config_changes(
config.tls_settings.as_ref(),
sensors.tls_config(),
);
let controller_tls = config.tls_settings.as_ref().and_then(|settings| {
settings.controller_identity.as_ref().map(|controller_identity| {
tls::ConnectionConfig {
identity: controller_identity.clone(),
config: tls_client_config.clone(),
}
})
});
let (dns_resolver, dns_bg) = dns::Resolver::from_system_config_and_env(&config)
.unwrap_or_else(|e| {
// TODO: Make DNS configuration infallible.
panic!("invalid DNS configuration: {:?}", e);
});
let (resolver, resolver_bg) = control::destination::new(
dns_resolver.clone(),
config.namespaces.clone(),
control_host_and_port,
controller_tls,
);
let (drain_tx, drain_rx) = drain::channel();
let bind = Bind::new(tls_client_config).with_sensors(sensors.clone());
// Setup the public listener. This will listen on a publicly accessible
// address and listen for inbound connections that should be forwarded
// to the managed application (private destination).
let inbound = {
let ctx = ctx::Proxy::inbound(&process_ctx);
let bind = bind.clone().with_ctx(ctx.clone());
let default_addr = config.private_forward.map(|a| a.into());
let router = Router::new(
Inbound::new(default_addr, bind),
config.inbound_router_capacity,
config.inbound_router_max_idle_age,
);
let tls_settings = config.tls_settings.as_ref().map(|settings| {
tls::ConnectionConfig {
identity: settings.pod_identity.clone(),
config: tls_server_config
}
});
serve(
inbound_listener,
tls_settings,
router,
config.private_connect_timeout,
config.inbound_ports_disable_protocol_detection,
ctx,
sensors.clone(),
get_original_dst.clone(),
drain_rx.clone(),
)
};
// Setup the private listener. This will listen on a locally accessible
// address and listen for outbound requests that should be routed
// to a remote service (public destination).
let outbound = {
let ctx = ctx::Proxy::outbound(&process_ctx);
let bind = bind.clone().with_ctx(ctx.clone());
let router = Router::new(
Outbound::new(bind, resolver, config.bind_timeout),
config.outbound_router_capacity,
config.outbound_router_max_idle_age,
);
serve(
outbound_listener,
Conditional::None(tls::ReasonForNoTls::InternalTraffic),
router,
config.public_connect_timeout,
config.outbound_ports_disable_protocol_detection,
ctx,
sensors,
get_original_dst,
drain_rx,
)
};
trace!("running");
let (_tx, admin_shutdown_signal) = futures::sync::oneshot::channel::<()>();
{
thread::Builder::new()
.name("admin".into())
.spawn(move || {
use conduit_proxy_controller_grpc::tap::server::TapServer;
let mut rt = current_thread::Runtime::new()
.expect("initialize admin thread runtime");
let tap = serve_tap(control_listener, TapServer::new(observe));
let metrics_server = telemetry.serve_metrics(metrics_listener);
let fut = ::logging::admin().bg("resolver").future(resolver_bg)
.join5(
::logging::admin().bg("telemetry").future(telemetry),
tap.map_err(|_| {}),
metrics_server.map_err(|_| {}),
::logging::admin().bg("dns-resolver").future(dns_bg),
)
// There's no `Future::join6` combinator...
.join(::logging::admin().bg("tls-config").future(tls_cfg_bg))
.map(|_| {});
rt.spawn(Box::new(fut));
let shutdown = admin_shutdown_signal.then(|_| Ok::<(), ()>(()));
rt.block_on(shutdown).expect("admin");
trace!("admin shutdown finished");
})
.expect("initialize controller api thread");
trace!("controller client thread spawned");
}
let fut = inbound
.join(outbound)
.map(|_| ())
.map_err(|err| error!("main error: {:?}", err));
runtime.spawn(Box::new(fut));
trace!("main task spawned");
let shutdown_signal = shutdown_signal.and_then(move |()| {
debug!("shutdown signaled");
drain_tx.drain()
});
runtime.run_until(shutdown_signal).expect("executor");
debug!("shutdown complete");
}
}
fn serve<R, B, E, F, G>(
bound_port: BoundPort,
tls_config: tls::ConditionalConnectionConfig<tls::ServerConfigWatch>,
router: Router<R>,
tcp_connect_timeout: Duration,
disable_protocol_detection_ports: IndexSet<u16>,
proxy_ctx: Arc<ctx::Proxy>,
sensors: telemetry::Sensors,
get_orig_dst: G,
drain_rx: drain::Watch,
) -> impl Future<Item = (), Error = io::Error> + Send + 'static
where
B: tower_h2::Body + Default + Send + 'static,
B::Data: Send,
<B::Data as ::bytes::IntoBuf>::Buf: Send,
E: Error + Send + 'static,
F: Error + Send + 'static,
R: Recognize<
Request = http::Request<HttpBody>,
Response = http::Response<B>,
Error = E,
RouteError = F,
>
+ Send + Sync + 'static,
R::Key: Send,
R::Service: Send,
<R::Service as tower_service::Service>::Future: Send,
Router<R>: Send,
G: GetOriginalDst + Send + 'static,
{
let stack = Arc::new(NewServiceFn::new(move || {
// Clone the router handle
let router = router.clone();
// Map errors to appropriate response error codes.
let map_err = MapErr::new(router, |e| {
match e {
RouteError::Route(r) => {
error!(" turning route error: {} into 500", r);
http::StatusCode::INTERNAL_SERVER_ERROR
}
RouteError::Inner(i) => {
error!("turning {} into 500", i);
http::StatusCode::INTERNAL_SERVER_ERROR
}
RouteError::NotRecognized => {
error!("turning route not recognized error into 500");
http::StatusCode::INTERNAL_SERVER_ERROR
}
RouteError::NoCapacity(capacity) => {
// TODO For H2 streams, we should probably signal a protocol-level
// capacity change.
error!("router at capacity ({}); returning a 503", capacity);
http::StatusCode::SERVICE_UNAVAILABLE
}
}
});
// Install the request open timestamp module at the very top
// of the stack, in order to take the timestamp as close as
// possible to the beginning of the request's lifetime.
telemetry::sensor::http::TimestampRequestOpen::new(map_err)
}));
let listen_addr = bound_port.local_addr();
let server = Server::new(
listen_addr,
proxy_ctx.clone(),
sensors,
get_orig_dst,
stack,
tcp_connect_timeout,
disable_protocol_detection_ports,
drain_rx.clone(),
);
let log = server.log().clone();
let accept = {
let fut = bound_port.listen_and_fold(
tls_config,
(),
move |(), (connection, remote_addr)| {
let s = server.serve(connection, remote_addr);
// Logging context is configured by the server.
let r = DefaultExecutor::current()
.spawn(Box::new(s))
.map_err(task::Error::into_io);
future::result(r)
},
);
log.future(fut)
};
let accept_until = Cancelable {
future: accept,
canceled: false,
};
// As soon as we get a shutdown signal, the listener
// is canceled immediately.
drain_rx.watch(accept_until, |accept| {
accept.canceled = true;
})
}
/// Can cancel a future by setting a flag.
///
/// Used to 'watch' the accept futures, and close the listeners
/// as soon as the shutdown signal starts.
struct Cancelable<F> {
future: F,
canceled: bool,
}
impl<F> Future for Cancelable<F>
where
F: Future<Item=()>,
{
type Item = ();
type Error = F::Error;
fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
if self.canceled {
Ok(().into())
} else {
self.future.poll()
}
}
}
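// A minimal sketch (not part of the original source) of how `Cancelable`
// composes with the drain watch above: the watch callback flips `canceled`,
// so the wrapped accept loop resolves on its next poll instead of accepting
// further connections. `accept` stands in for the real listen-and-fold
// future built in `serve`.
//
// let accept_until = Cancelable { future: accept, canceled: false };
// drain_rx.watch(accept_until, |accept| accept.canceled = true);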
fn serve_tap<N, B>(
bound_port: BoundPort,
new_service: N,
) -> impl Future<Item = (), Error = io::Error> + 'static
where
B: tower_h2::Body + Send + 'static,
<B::Data as bytes::IntoBuf>::Buf: Send,
N: NewService<
Request = http::Request<tower_h2::RecvBody>,
Response = http::Response<B>
>
+ Send + 'static,
tower_h2::server::Connection<
connection::Connection,
N,
::logging::ServerExecutor,
B,
()
>: Future<Item = ()>,
{
let log = logging::admin().server("tap", bound_port.local_addr());
let h2_builder = h2::server::Builder::default();
let server = tower_h2::Server::new(
new_service,
h2_builder,
log.clone().executor(),
);
let fut = {
let log = log.clone();
// TODO: serve over TLS.
bound_port.listen_and_fold(
Conditional::None(tls::ReasonForNoIdentity::NotImplementedForTap.into()),
server,
move |server, (session, remote)| {
let log = log.clone().with_remote(remote);
let serve = server.serve(session).map_err(|_| ());
let r = executor::current_thread::TaskExecutor::current()
.spawn_local(Box::new(log.future(serve)))
.map(move |_| server)
.map_err(task::Error::into_io);
future::result(r)
},
)
};
log.future(fut)
}
| inbound_addr | identifier_name |
auto_aliasing.go | // Copyright 2016-2023, Pulumi Corporation.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package tfbridge
import (
"github.com/Masterminds/semver"
shim "github.com/pulumi/pulumi-terraform-bridge/v3/pkg/tfshim"
md "github.com/pulumi/pulumi-terraform-bridge/v3/unstable/metadata"
"github.com/pulumi/pulumi/sdk/v3/go/common/tokens"
"github.com/pulumi/pulumi/sdk/v3/go/common/util/contract"
)
type tokenHistory[T ~string] struct {
Current T `json:"current"` // the current Pulumi token for the resource
Past []alias[T] `json:"past,omitempty"` // Previous tokens
MajorVersion int `json:"majorVersion,omitempty"`
Fields map[string]*fieldHistory `json:"fields,omitempty"`
}
type alias[T ~string] struct {
Name T `json:"name"` // The previous token.
InCodegen bool `json:"inCodegen"` // If the alias is a fully generated resource, or just a schema alias.
MajorVersion int `json:"majorVersion"` // The provider's major version when Name was introduced.
}
type aliasHistory struct {
Resources map[string]*tokenHistory[tokens.Type] `json:"resources,omitempty"`
DataSources map[string]*tokenHistory[tokens.ModuleMember] `json:"datasources,omitempty"`
}
type fieldHistory struct {
MaxItemsOne *bool `json:"maxItemsOne,omitempty"`
Fields map[string]*fieldHistory `json:"fields,omitempty"`
Elem *fieldHistory `json:"elem,omitempty"`
}
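// Illustrative only (not part of the original source): with the JSON tags
// above, a persisted "auto-aliasing" entry in bridge-metadata.json looks
// roughly like this; the resource token and field names are hypothetical.
//
//	{
//	  "resources": {
//	    "example_resource": {
//	      "current": "example:index/thing:Thing",
//	      "past": [
//	        {"name": "example:index/thing:OldThing", "inCodegen": true, "majorVersion": 1}
//	      ],
//	      "majorVersion": 2,
//	      "fields": {"tags": {"maxItemsOne": false}}
//	    }
//	  }
//	}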
// Automatically applies backwards compatibility best practices.
//
// Specifically, [ApplyAutoAliases] may perform the following actions:
//
// - Call [ProviderInfo.RenameResourceWithAlias] or [ProviderInfo.RenameDataSource]
// - Edit [ResourceInfo.Aliases]
// - Edit [SchemaInfo.MaxItemsOne]
//
// The goal is to always maximize backwards compatibility and reduce breaking changes for
// the users of the Pulumi providers.
//
// Resource aliases help mask TF provider resource renames or changes in mapped tokens so
// older programs continue to work. See [ResourceInfo.RenameResourceWithAlias] and
// [ResourceInfo.Aliases] for more details.
//
// [SchemaInfo.MaxItemsOne] changes are also important because they involve flattening and
// pluralizing names. Collections (lists or sets) marked with MaxItems=1 are projected as
// scalar types in Pulumi SDKs. Therefore changes to the MaxItems property may be breaking
// the compilation of programs as the type changes from `T` to `List[T]` or vice versa. To
// avoid these breaking changes, this method undoes any upstream changes to MaxItems using
// [SchemaInfo.MaxItemsOne] overrides. This happens until a major version change is
// detected, and then overrides are cleared. Effectively this makes sure that upstream
// MaxItems changes are deferred until the next major version.
//
// Implementation note: to operate correctly this method needs to keep a persistent track
// of a database of past decision history. This is currently done by doing reads and
// writes to `providerInfo.GetMetadata()`, which is assumed to be persisted across
// provider releases. The bridge framework keeps this information written out to an opaque
// `bridge-metadata.json` blob which is expected to be stored in source control to persist
// across releases.
//
// Panics if [ProviderInfo.ApplyAutoAliases] would return an error.
func (info *ProviderInfo) MustApplyAutoAliases() {
err := info.ApplyAutoAliases()
contract.AssertNoErrorf(err, "Failed to apply aliases")
}
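// For intuition, a sketch of the effect (illustrative; the resource token and
// field name below are hypothetical): if upstream drops MaxItems: 1 from a
// list field, the generated SDKs would change the projection from a scalar T
// to List[T]. ApplyAutoAliases pins the old projection by recording the
// equivalent of this hand-written override until the next major version:
//
//	info.Resources["example_resource"].Fields["endpoint"] = &SchemaInfo{
//	    MaxItemsOne: BoolRef(true), // keep the scalar projection
//	}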
// Automatically applies backwards compatibility best practices.
//
// Specifically, [ApplyAutoAliases] may perform the following actions:
//
// - Call [ProviderInfo.RenameResourceWithAlias] or [ProviderInfo.RenameDataSource]
// - Edit [ResourceInfo.Aliases]
// - Edit [SchemaInfo.MaxItemsOne]
//
// The goal is to always maximize backwards compatibility and reduce breaking changes for
// the users of the Pulumi providers.
//
// Resource aliases help mask TF provider resource renames or changes in mapped tokens so
// older programs continue to work. See [ResourceInfo.RenameResourceWithAlias] and
// [ResourceInfo.Aliases] for more details.
//
// [SchemaInfo.MaxItemsOne] changes are also important because they involve flattening and
// pluralizing names. Collections (lists or sets) marked with MaxItems=1 are projected as
// scalar types in Pulumi SDKs. Therefore changes to the MaxItems property may be breaking
// the compilation of programs as the type changes from `T` to `List[T]` or vice versa. To
// avoid these breaking changes, this method undoes any upstream changes to MaxItems using
// [SchemaInfo.MaxItemsOne] overrides. This happens until a major version change is
// detected, and then overrides are cleared. Effectively this makes sure that upstream
// MaxItems changes are deferred until the next major version.
//
// Implementation note: to operate correctly this method needs to keep a persistent track
// of a database of past decision history. This is currently done by doing reads and
// writes to `providerInfo.GetMetadata()`, which is assumed to be persisted across
// provider releases. The bridge framework keeps this information written out to an opaque
// `bridge-metadata.json` blob which is expected to be stored in source control to persist
// across releases.
func (info *ProviderInfo) ApplyAutoAliases() error {
artifact := info.GetMetadata()
hist, err := getHistory(artifact)
if err != nil {
return err
}
var currentVersion int
// If version is missing, we assume the current version is the most recent major
// version mentioned in the history.
if info.Version != "" {
v, err := semver.NewVersion(info.Version)
if err != nil {
return err
}
currentVersion = int(v.Major())
} else {
for _, r := range hist.Resources {
for _, p := range r.Past {
if p.MajorVersion > currentVersion {
currentVersion = p.MajorVersion
}
}
}
for _, d := range hist.DataSources {
for _, p := range d.Past {
if p.MajorVersion > currentVersion {
currentVersion = p.MajorVersion
}
}
}
}
rMap := info.P.ResourcesMap()
dMap := info.P.DataSourcesMap()
// Applying resource aliases adds new resources to providerInfo.Resources. To keep
// this process deterministic, we don't apply resource aliases until all resources
// have been examined.
//
// The same logic applies to datasources.
applyAliases := []func(){}
for tfToken, computed := range info.Resources {
r, _ := rMap.GetOk(tfToken)
aliasResource(info, r, &applyAliases, hist.Resources,
computed, tfToken, currentVersion)
}
for tfToken, computed := range info.DataSources {
ds, _ := dMap.GetOk(tfToken)
aliasDataSource(info, ds, &applyAliases, hist.DataSources,
computed, tfToken, currentVersion)
}
for _, f := range applyAliases {
f()
}
if err := md.Set(artifact, aliasMetadataKey, hist); err != nil {
// Set fails only when `hist` is not serializable. Because `hist` is
// composed of marshallable, non-cyclic types, this is impossible.
contract.AssertNoErrorf(err, "History failed to serialize")
}
return nil
}
const aliasMetadataKey = "auto-aliasing"
func getHistory(artifact ProviderMetadata) (aliasHistory, error) |
func aliasResource(
p *ProviderInfo, res shim.Resource,
applyResourceAliases *[]func(),
hist map[string]*tokenHistory[tokens.Type], computed *ResourceInfo,
tfToken string, version int,
) {
prev, hasPrev := hist[tfToken]
if !hasPrev {
// It's not in the history, so it must be new. Stick it in the history for
// next time.
hist[tfToken] = &tokenHistory[tokens.Type]{
Current: computed.Tok,
}
} else {
// We don't do this eagerly because aliasResource is called while
// iterating over p.Resources which aliasOrRenameResource mutates.
*applyResourceAliases = append(*applyResourceAliases,
func() { aliasOrRenameResource(p, computed, tfToken, prev, version) })
}
// Apply aliasing to MaxItemsOne by traversing the field tree and applying the
// stored value.
//
// Note: If the user explicitly sets a MaxItemsOne value, that value is respected
// and overwrites the current history.
if res == nil {
return
}
// If we are behind the major version, reset the fields and the major version.
if hist[tfToken].MajorVersion < version {
hist[tfToken].MajorVersion = version
hist[tfToken].Fields = nil
}
applyResourceMaxItemsOneAliasing(res, &hist[tfToken].Fields, &computed.Fields)
}
// applyResourceMaxItemsOneAliasing traverses a shim.Resource, applying walk to each field in the resource.
func applyResourceMaxItemsOneAliasing(
r shim.Resource, hist *map[string]*fieldHistory, info *map[string]*SchemaInfo,
) (bool, bool) {
if r == nil {
return hist != nil, info != nil
}
m := r.Schema()
if m == nil {
return hist != nil, info != nil
}
var rHasH, rHasI bool
m.Range(func(k string, v shim.Schema) bool {
h, hasH := getNonNil(hist, k)
i, hasI := getNonNil(info, k)
fieldHasHist, fieldHasInfo := applyMaxItemsOneAliasing(v, h, i)
hasH = hasH || fieldHasHist
hasI = hasI || fieldHasInfo
if !hasH {
delete(*hist, k)
}
if !hasI {
delete(*info, k)
}
rHasH = rHasH || hasH
rHasI = rHasI || hasI
return true
})
return rHasH, rHasI
}
// When walking the schema tree for a resource, we create mirroring trees in
// *fieldHistory and *SchemaInfo. To avoid polluting either tree (and
// interfering with other actions such as SetAutonaming), we clean up the paths
// that we created but did not store any information into.
//
// For example, consider the schema for a field of type `Object{ Key1:
// List[String] }`. The schema tree for this field looks like this:
//
// Object:
// Fields:
// Key1:
// List:
// Elem:
// String
//
// When we walk the tree, we create an almost identical history tree:
//
// Object:
// Fields:
// Key1:
// List:
// MaxItemsOne: false
// Elem:
// String
//
// We stored the additional piece of information `MaxItemsOne: false`. We need to
// keep enough of the tree to maintain that information, but no more. We can
// discard the unnecessary `Elem: String`.
//
// This keeps the tree as clean as possible for other processes which expect a
// `nil` element when making changes. Since other processes (like SetAutonaming)
// act on edge nodes (like our String), this allows us to inter-operate with them
// without interference.
//
// applyMaxItemsOneAliasing traverses a generic shim.Schema recursively, applying fieldHistory to
// SchemaInfo and vice versa as necessary to avoid breaking changes in the
// resulting sdk.
func applyMaxItemsOneAliasing(schema shim.Schema, h *fieldHistory, info *SchemaInfo) (hasH bool, hasI bool) {
//revive:disable-next-line:empty-block
if schema == nil || (schema.Type() != shim.TypeList && schema.Type() != shim.TypeSet) {
// MaxItemsOne does not apply, so do nothing
} else if info.MaxItemsOne != nil {
// The user has overwritten the value, so we will just record that.
h.MaxItemsOne = info.MaxItemsOne
hasH = true
} else if h.MaxItemsOne != nil {
// If we have a previous value in the history, we keep it as is.
info.MaxItemsOne = h.MaxItemsOne
hasI = true
} else {
// There is no history for this value, so we bake it into the
// alias history.
h.MaxItemsOne = BoolRef(IsMaxItemsOne(schema, info))
hasH = true
}
// Ensure that the h.Elem and info.Elem fields are non-nil so they can be
// safely recursed on.
//
// If the .Elem existed before this function, we mark it as unsafe to cleanup.
var hasElemH, hasElemI bool
populateElem := func() {
if h.Elem == nil {
h.Elem = &fieldHistory{}
} else {
hasElemH = true
}
if info.Elem == nil {
info.Elem = &SchemaInfo{}
} else {
hasElemI = true
}
}
// Cleanup after we have walked a .Elem value.
//
// If the .Elem field was created in populateElem and the field was not
// changed, we then delete the field.
cleanupElem := func(elemHist, elemInfo bool) {
hasElemH = hasElemH || elemHist
hasElemI = hasElemI || elemInfo
if !hasElemH {
h.Elem = nil
}
if !hasElemI {
info.Elem = nil
}
}
e := schema.Elem()
switch e := e.(type) {
case shim.Resource:
populateElem()
eHasH, eHasI := applyResourceMaxItemsOneAliasing(e, &h.Elem.Fields, &info.Elem.Fields)
cleanupElem(eHasH, eHasI)
case shim.Schema:
populateElem()
eHasH, eHasI := applyMaxItemsOneAliasing(e, h.Elem, info.Elem)
cleanupElem(eHasH, eHasI)
}
return hasH || hasElemH, hasI || hasElemI
}
func getNonNil[K comparable, V any](m *map[K]*V, key K) (_ *V, alreadyThere bool) {
contract.Assertf(m != nil, "Cannot restore map if ptr is nil")
if *m == nil {
*m = map[K]*V{}
}
v := (*m)[key]
if v == nil {
var new V
v = &new
(*m)[key] = v
} else {
alreadyThere = true
}
return v, alreadyThere
}
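// Usage sketch (illustrative, not part of the original source): getNonNil
// lazily materializes both the map and the entry and reports whether the
// entry pre-existed; callers above use that flag to decide whether a subtree
// they created may be cleaned up again.
//
//	var fields map[string]*fieldHistory
//	tags, existed := getNonNil(&fields, "tags") // existed == false, tags != nil
//	_, existed = getNonNil(&fields, "tags")     // existed == true on the second call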
func aliasOrRenameResource(
p *ProviderInfo,
res *ResourceInfo, tfToken string,
hist *tokenHistory[tokens.Type], currentVersion int,
) {
var alreadyPresent bool
for _, a := range hist.Past {
if a.Name == hist.Current {
alreadyPresent = true
break
}
}
if !alreadyPresent && res.Tok != hist.Current {
// The resource is in history, but the name has changed. Update the new current name
// and add the old name to the history.
hist.Past = append(hist.Past, alias[tokens.Type]{
Name: hist.Current,
InCodegen: true,
MajorVersion: currentVersion,
})
hist.Current = res.Tok
}
for _, a := range hist.Past {
legacy := a.Name
// Only respect hard aliases introduced in the same major version
if a.InCodegen && a.MajorVersion == currentVersion {
p.RenameResourceWithAlias(tfToken, legacy,
res.Tok, legacy.Module().Name().String(),
res.Tok.Module().Name().String(), res)
} else {
res.Aliases = append(res.Aliases,
AliasInfo{Type: (*string)(&legacy)})
}
}
}
func aliasDataSource(
p *ProviderInfo,
ds shim.Resource,
queue *[]func(),
hist map[string]*tokenHistory[tokens.ModuleMember],
computed *DataSourceInfo,
tfToken string,
version int,
) {
prev, hasPrev := hist[tfToken]
if !hasPrev {
// It's not in the history, so it must be new. Stick it in the history for
// next time.
hist[tfToken] = &tokenHistory[tokens.ModuleMember]{
Current: computed.Tok,
MajorVersion: version,
}
} else {
*queue = append(*queue,
func() { aliasOrRenameDataSource(p, computed, tfToken, prev, version) })
}
if ds == nil {
return
}
// If we are behind the major version, reset the fields and the major version.
if hist[tfToken].MajorVersion < version {
hist[tfToken].MajorVersion = version
hist[tfToken].Fields = nil
}
applyResourceMaxItemsOneAliasing(ds, &hist[tfToken].Fields, &computed.Fields)
}
func aliasOrRenameDataSource(
p *ProviderInfo,
ds *DataSourceInfo, tfToken string,
prev *tokenHistory[tokens.ModuleMember],
currentVersion int,
) {
// re-fetch the resource, to make sure we have the right pointer.
computed, ok := p.DataSources[tfToken]
if !ok {
// The DataSource to alias has been removed. There
// is nothing to alias anymore.
return
}
var alreadyPresent bool
for _, a := range prev.Past {
if a.Name == prev.Current {
alreadyPresent = true
break
}
}
if !alreadyPresent && ds.Tok != prev.Current {
prev.Past = append(prev.Past, alias[tokens.ModuleMember]{
Name: prev.Current,
MajorVersion: currentVersion,
})
}
for _, a := range prev.Past {
if a.MajorVersion != currentVersion {
continue
}
legacy := a.Name
p.RenameDataSource(tfToken, legacy,
computed.Tok, legacy.Module().Name().String(),
computed.Tok.Module().Name().String(), computed)
}
}
| {
hist, ok, err := md.Get[aliasHistory](artifact, aliasMetadataKey)
if err != nil {
return aliasHistory{}, err
}
if !ok {
hist = aliasHistory{
Resources: map[string]*tokenHistory[tokens.Type]{},
DataSources: map[string]*tokenHistory[tokens.ModuleMember]{},
}
}
return hist, nil
} | identifier_body |
auto_aliasing.go | // Copyright 2016-2023, Pulumi Corporation.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package tfbridge
import (
"github.com/Masterminds/semver"
shim "github.com/pulumi/pulumi-terraform-bridge/v3/pkg/tfshim"
md "github.com/pulumi/pulumi-terraform-bridge/v3/unstable/metadata"
"github.com/pulumi/pulumi/sdk/v3/go/common/tokens"
"github.com/pulumi/pulumi/sdk/v3/go/common/util/contract"
)
type tokenHistory[T ~string] struct {
Current T `json:"current"` // the current Pulumi token for the resource
Past []alias[T] `json:"past,omitempty"` // Previous tokens
MajorVersion int `json:"majorVersion,omitempty"`
Fields map[string]*fieldHistory `json:"fields,omitempty"`
}
type alias[T ~string] struct {
Name T `json:"name"` // The previous token.
InCodegen bool `json:"inCodegen"` // If the alias is a fully generated resource, or just a schema alias.
MajorVersion int `json:"majorVersion"` // The provider's major version when Name was introduced.
}
type aliasHistory struct {
Resources map[string]*tokenHistory[tokens.Type] `json:"resources,omitempty"`
DataSources map[string]*tokenHistory[tokens.ModuleMember] `json:"datasources,omitempty"`
}
type fieldHistory struct {
MaxItemsOne *bool `json:"maxItemsOne,omitempty"`
Fields map[string]*fieldHistory `json:"fields,omitempty"`
Elem *fieldHistory `json:"elem,omitempty"`
}
// Automatically applies backwards compatibility best practices.
//
// Specifically, [ApplyAutoAliases] may perform the following actions:
//
// - Call [ProviderInfo.RenameResourceWithAlias] or [ProviderInfo.RenameDataSource]
// - Edit [ResourceInfo.Aliases]
// - Edit [SchemaInfo.MaxItemsOne]
//
// The goal is to always maximize backwards compatibility and reduce breaking changes for
// the users of the Pulumi providers.
//
// Resource aliases help mask TF provider resource renames or changes in mapped tokens so
// older programs continue to work. See [ResourceInfo.RenameResourceWithAlias] and
// [ResourceInfo.Aliases] for more details.
//
// [SchemaInfo.MaxItemsOne] changes are also important because they involve flattening and
// pluralizing names. Collections (lists or sets) marked with MaxItems=1 are projected as
// scalar types in Pulumi SDKs. Therefore changes to the MaxItems property may be breaking
// the compilation of programs as the type changes from `T` to `List[T]` or vice versa. To
// avoid these breaking changes, this method undoes any upstream changes to MaxItems using
// [SchemaInfo.MaxItemsOne] overrides. This happens until a major version change is
// detected, and then overrides are cleared. Effectively this makes sure that upstream
// MaxItems changes are deferred until the next major version.
//
// Implementation note: to operate correctly this method needs to keep a persistent track
// of a database of past decision history. This is currently done by doing reads and
// writes to `providerInfo.GetMetadata()`, which is assumed to be persisted across
// provider releases. The bridge framework keeps this information written out to an opaque
// `bridge-metadata.json` blob which is expected to be stored in source control to persist
// across releases.
//
// Panics if [ProviderInfo.ApplyAutoAliases] would return an error.
func (info *ProviderInfo) MustApplyAutoAliases() {
err := info.ApplyAutoAliases()
contract.AssertNoErrorf(err, "Failed to apply aliases")
}
// Automatically applies backwards compatibility best practices.
//
// Specifically, [ApplyAutoAliases] may perform the following actions:
//
// - Call [ProviderInfo.RenameResourceWithAlias] or [ProviderInfo.RenameDataSource]
// - Edit [ResourceInfo.Aliases]
// - Edit [SchemaInfo.MaxItemsOne]
//
// The goal is to always maximize backwards compatibility and reduce breaking changes for
// the users of the Pulumi providers.
//
// Resource aliases help mask TF provider resource renames or changes in mapped tokens so
// older programs continue to work. See [ResourceInfo.RenameResourceWithAlias] and
// [ResourceInfo.Aliases] for more details.
//
// [SchemaInfo.MaxItemsOne] changes are also important because they involve flattening and
// pluralizing names. Collections (lists or sets) marked with MaxItems=1 are projected as
// scalar types in Pulumi SDKs. Therefore changes to the MaxItems property may be breaking
// the compilation of programs as the type changes from `T` to `List[T]` or vice versa. To
// avoid these breaking changes, this method undoes any upstream changes to MaxItems using
// [SchemaInfo.MaxItemsOne] overrides. This happens until a major version change is
// detected, and then overrides are cleared. Effectively this makes sure that upstream
// MaxItems changes are deferred until the next major version.
//
// Implementation note: to operate correctly this method needs to keep a persistent track
// of a database of past decision history. This is currently done by doing reads and
// writes to `providerInfo.GetMetadata()`, which is assumed to be persisted across
// provider releases. The bridge framework keeps this information written out to an opaque
// `bridge-metadata.json` blob which is expected to be stored in source control to persist
// across releases.
func (info *ProviderInfo) ApplyAutoAliases() error {
artifact := info.GetMetadata()
hist, err := getHistory(artifact)
if err != nil {
return err
}
var currentVersion int
// If version is missing, we assume the current version is the most recent major
// version mentioned in the history.
if info.Version != "" {
v, err := semver.NewVersion(info.Version)
if err != nil {
return err
}
currentVersion = int(v.Major())
} else {
for _, r := range hist.Resources {
for _, p := range r.Past {
if p.MajorVersion > currentVersion {
currentVersion = p.MajorVersion
}
}
}
for _, d := range hist.DataSources {
for _, p := range d.Past {
if p.MajorVersion > currentVersion {
currentVersion = p.MajorVersion
}
}
}
}
rMap := info.P.ResourcesMap()
dMap := info.P.DataSourcesMap()
// Applying resource aliases adds new resources to providerInfo.Resources. To keep
// this process deterministic, we don't apply resource aliases until all resources
// have been examined.
//
// The same logic applies to datasources.
applyAliases := []func(){}
for tfToken, computed := range info.Resources {
r, _ := rMap.GetOk(tfToken)
aliasResource(info, r, &applyAliases, hist.Resources,
computed, tfToken, currentVersion)
}
for tfToken, computed := range info.DataSources {
ds, _ := dMap.GetOk(tfToken)
aliasDataSource(info, ds, &applyAliases, hist.DataSources,
computed, tfToken, currentVersion)
}
for _, f := range applyAliases {
f()
}
if err := md.Set(artifact, aliasMetadataKey, hist); err != nil {
// Set fails only when `hist` is not serializable. Because `hist` is
// composed of marshallable, non-cyclic types, this is impossible.
contract.AssertNoErrorf(err, "History failed to serialize")
}
return nil
}
const aliasMetadataKey = "auto-aliasing"
func getHistory(artifact ProviderMetadata) (aliasHistory, error) {
hist, ok, err := md.Get[aliasHistory](artifact, aliasMetadataKey)
if err != nil {
return aliasHistory{}, err
}
if !ok {
hist = aliasHistory{
Resources: map[string]*tokenHistory[tokens.Type]{},
DataSources: map[string]*tokenHistory[tokens.ModuleMember]{},
}
}
return hist, nil
}
func aliasResource(
p *ProviderInfo, res shim.Resource,
applyResourceAliases *[]func(),
hist map[string]*tokenHistory[tokens.Type], computed *ResourceInfo,
tfToken string, version int,
) {
prev, hasPrev := hist[tfToken]
if !hasPrev {
// It's not in the history, so it must be new. Stick it in the history for
// next time.
hist[tfToken] = &tokenHistory[tokens.Type]{
Current: computed.Tok,
}
} else {
// We don't do this eagerly because aliasResource is called while
// iterating over p.Resources which aliasOrRenameResource mutates.
*applyResourceAliases = append(*applyResourceAliases,
func() { aliasOrRenameResource(p, computed, tfToken, prev, version) })
}
// Apply aliasing to MaxItemsOne by traversing the field tree and applying the
// stored value.
//
// Note: If the user explicitly sets a MaxItemsOne value, that value is respected
// and overwrites the current history.
if res == nil {
return
}
// If we are behind the major version, reset the fields and the major version.
if hist[tfToken].MajorVersion < version {
hist[tfToken].MajorVersion = version
hist[tfToken].Fields = nil
}
applyResourceMaxItemsOneAliasing(res, &hist[tfToken].Fields, &computed.Fields)
}
// applyResourceMaxItemsOneAliasing traverses a shim.Resource, applying walk to each field in the resource.
func applyResourceMaxItemsOneAliasing(
r shim.Resource, hist *map[string]*fieldHistory, info *map[string]*SchemaInfo,
) (bool, bool) {
if r == nil {
return hist != nil, info != nil
}
m := r.Schema()
if m == nil {
return hist != nil, info != nil
}
var rHasH, rHasI bool
m.Range(func(k string, v shim.Schema) bool {
h, hasH := getNonNil(hist, k)
i, hasI := getNonNil(info, k)
fieldHasHist, fieldHasInfo := applyMaxItemsOneAliasing(v, h, i)
hasH = hasH || fieldHasHist
hasI = hasI || fieldHasInfo
if !hasH {
delete(*hist, k)
}
if !hasI {
delete(*info, k)
}
rHasH = rHasH || hasH
rHasI = rHasI || hasI
return true
})
return rHasH, rHasI
}
// When walking the schema tree for a resource, we create mirroring trees in
// *fieldHistory and *SchemaInfo. To avoid polluting either tree (and
// interfering with other actions such as SetAutonaming), we clean up the paths
// that we created but did not store any information into.
//
// For example, consider the schema for a field of type `Object{ Key1:
// List[String] }`. The schema tree for this field looks like this:
//
// Object:
// Fields:
// Key1:
// List:
// Elem:
// String
//
// When we walk the tree, we create an almost identical history tree:
//
// Object:
// Fields:
// Key1:
// List:
// MaxItemsOne: false
// Elem:
// String
//
// We stored the additional piece of information `MaxItemsOne: false`. We need to
// keep enough of the tree to maintain that information, but no more. We can
// discard the unnecessary `Elem: String`.
//
// This keeps the tree as clean as possible for other processes which expect a
// `nil` element when making changes. Since other processes (like SetAutonaming)
// act on edge nodes (like our String), this allows us to inter-operate with them
// without interference.
//
// applyMaxItemsOneAliasing traverses a generic shim.Schema recursively, applying fieldHistory to
// SchemaInfo and vice versa as necessary to avoid breaking changes in the
// resulting sdk.
func | (schema shim.Schema, h *fieldHistory, info *SchemaInfo) (hasH bool, hasI bool) {
//revive:disable-next-line:empty-block
if schema == nil || (schema.Type() != shim.TypeList && schema.Type() != shim.TypeSet) {
// MaxItemsOne does not apply, so do nothing
} else if info.MaxItemsOne != nil {
// The user has overwritten the value, so we will just record that.
h.MaxItemsOne = info.MaxItemsOne
hasH = true
} else if h.MaxItemsOne != nil {
// If we have a previous value in the history, we keep it as is.
info.MaxItemsOne = h.MaxItemsOne
hasI = true
} else {
// There is no history for this value, so we bake it into the
// alias history.
h.MaxItemsOne = BoolRef(IsMaxItemsOne(schema, info))
hasH = true
}
// Ensure that the h.Elem and info.Elem fields are non-nil so they can be
// safely recursed on.
//
// If the .Elem existed before this function, we mark it as unsafe to cleanup.
var hasElemH, hasElemI bool
populateElem := func() {
if h.Elem == nil {
h.Elem = &fieldHistory{}
} else {
hasElemH = true
}
if info.Elem == nil {
info.Elem = &SchemaInfo{}
} else {
hasElemI = true
}
}
// Cleanup after we have walked a .Elem value.
//
// If the .Elem field was created in populateElem and the field was not
// changed, we then delete the field.
cleanupElem := func(elemHist, elemInfo bool) {
hasElemH = hasElemH || elemHist
hasElemI = hasElemI || elemInfo
if !hasElemH {
h.Elem = nil
}
if !hasElemI {
info.Elem = nil
}
}
e := schema.Elem()
switch e := e.(type) {
case shim.Resource:
populateElem()
eHasH, eHasI := applyResourceMaxItemsOneAliasing(e, &h.Elem.Fields, &info.Elem.Fields)
cleanupElem(eHasH, eHasI)
case shim.Schema:
populateElem()
eHasH, eHasI := applyMaxItemsOneAliasing(e, h.Elem, info.Elem)
cleanupElem(eHasH, eHasI)
}
return hasH || hasElemH, hasI || hasElemI
}
func getNonNil[K comparable, V any](m *map[K]*V, key K) (_ *V, alreadyThere bool) {
contract.Assertf(m != nil, "Cannot restore map if ptr is nil")
if *m == nil {
*m = map[K]*V{}
}
v := (*m)[key]
if v == nil {
var new V
v = &new
(*m)[key] = v
} else {
alreadyThere = true
}
return v, alreadyThere
}
func aliasOrRenameResource(
p *ProviderInfo,
res *ResourceInfo, tfToken string,
hist *tokenHistory[tokens.Type], currentVersion int,
) {
var alreadyPresent bool
for _, a := range hist.Past {
if a.Name == hist.Current {
alreadyPresent = true
break
}
}
if !alreadyPresent && res.Tok != hist.Current {
// The resource is in history, but the name has changed. Update the new current name
// and add the old name to the history.
hist.Past = append(hist.Past, alias[tokens.Type]{
Name: hist.Current,
InCodegen: true,
MajorVersion: currentVersion,
})
hist.Current = res.Tok
}
for _, a := range hist.Past {
legacy := a.Name
// Only respect hard aliases introduced in the same major version
if a.InCodegen && a.MajorVersion == currentVersion {
p.RenameResourceWithAlias(tfToken, legacy,
res.Tok, legacy.Module().Name().String(),
res.Tok.Module().Name().String(), res)
} else {
res.Aliases = append(res.Aliases,
AliasInfo{Type: (*string)(&legacy)})
}
}
}
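// Worked example (hypothetical tokens): if hist.Current is
// "pkg:index/widget:Widget" and the computed token is now
// "pkg:index/widget:FancyWidget" in major version 2, the code above appends
// {Name: "pkg:index/widget:Widget", InCodegen: true, MajorVersion: 2} to
// hist.Past and updates hist.Current. The loop then registers legacy names
// from this major version as fully generated aliases via
// RenameResourceWithAlias, while names from other major versions become
// lightweight AliasInfo entries only.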
func aliasDataSource(
p *ProviderInfo,
ds shim.Resource,
queue *[]func(),
hist map[string]*tokenHistory[tokens.ModuleMember],
computed *DataSourceInfo,
tfToken string,
version int,
) {
prev, hasPrev := hist[tfToken]
if !hasPrev {
// It's not in the history, so it must be new. Stick it in the history for
// next time.
hist[tfToken] = &tokenHistory[tokens.ModuleMember]{
Current: computed.Tok,
MajorVersion: version,
}
} else {
*queue = append(*queue,
func() { aliasOrRenameDataSource(p, computed, tfToken, prev, version) })
}
if ds == nil {
return
}
// If we are behind the major version, reset the fields and the major version.
if hist[tfToken].MajorVersion < version {
hist[tfToken].MajorVersion = version
hist[tfToken].Fields = nil
}
applyResourceMaxItemsOneAliasing(ds, &hist[tfToken].Fields, &computed.Fields)
}
func aliasOrRenameDataSource(
p *ProviderInfo,
ds *DataSourceInfo, tfToken string,
prev *tokenHistory[tokens.ModuleMember],
currentVersion int,
) {
// re-fetch the resource, to make sure we have the right pointer.
computed, ok := p.DataSources[tfToken]
if !ok {
// The DataSource to alias has been removed. There
// is nothing to alias anymore.
return
}
var alreadyPresent bool
for _, a := range prev.Past {
if a.Name == prev.Current {
alreadyPresent = true
break
}
}
if !alreadyPresent && ds.Tok != prev.Current {
prev.Past = append(prev.Past, alias[tokens.ModuleMember]{
Name: prev.Current,
MajorVersion: currentVersion,
})
}
for _, a := range prev.Past {
if a.MajorVersion != currentVersion {
continue
}
legacy := a.Name
p.RenameDataSource(tfToken, legacy,
computed.Tok, legacy.Module().Name().String(),
computed.Tok.Module().Name().String(), computed)
}
}
| applyMaxItemsOneAliasing | identifier_name |
auto_aliasing.go | // Copyright 2016-2023, Pulumi Corporation.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package tfbridge
import (
"github.com/Masterminds/semver"
shim "github.com/pulumi/pulumi-terraform-bridge/v3/pkg/tfshim"
md "github.com/pulumi/pulumi-terraform-bridge/v3/unstable/metadata"
"github.com/pulumi/pulumi/sdk/v3/go/common/tokens"
"github.com/pulumi/pulumi/sdk/v3/go/common/util/contract"
)
type tokenHistory[T ~string] struct {
Current T `json:"current"` // the current Pulumi token for the resource
Past []alias[T] `json:"past,omitempty"` // Previous tokens
MajorVersion int `json:"majorVersion,omitempty"`
Fields map[string]*fieldHistory `json:"fields,omitempty"`
}
type alias[T ~string] struct {
Name T `json:"name"` // The previous token.
InCodegen bool `json:"inCodegen"` // If the alias is a fully generated resource, or just a schema alias.
MajorVersion int `json:"majorVersion"` // The provider's major version when Name was introduced.
}
type aliasHistory struct {
Resources map[string]*tokenHistory[tokens.Type] `json:"resources,omitempty"`
DataSources map[string]*tokenHistory[tokens.ModuleMember] `json:"datasources,omitempty"`
}
type fieldHistory struct {
MaxItemsOne *bool `json:"maxItemsOne,omitempty"`
Fields map[string]*fieldHistory `json:"fields,omitempty"`
Elem *fieldHistory `json:"elem,omitempty"`
}
// Automatically applies backwards compatibility best practices.
//
// Specifically, [ApplyAutoAliases] may perform the following actions:
//
// - Call [ProviderInfo.RenameResourceWithAlias] or [ProviderInfo.RenameDataSource]
// - Edit [ResourceInfo.Aliases]
// - Edit [SchemaInfo.MaxItemsOne]
//
// The goal is to always maximize backwards compatibility and reduce breaking changes for
// the users of the Pulumi providers.
//
// Resource aliases help mask TF provider resource renames or changes in mapped tokens so
// older programs continue to work. See [ResourceInfo.RenameResourceWithAlias] and
// [ResourceInfo.Aliases] for more details.
//
// [SchemaInfo.MaxItemsOne] changes are also important because they involve flattening and
// pluralizing names. Collections (lists or sets) marked with MaxItems=1 are projected as
// scalar types in Pulumi SDKs. Therefore changes to the MaxItems property may be breaking
// the compilation of programs as the type changes from `T` to `List[T]` or vice versa. To
// avoid these breaking changes, this method undoes any upstream changes to MaxItems using
// [SchemaInfo.MaxItemsOne] overrides. This happens until a major version change is
// detected, and then overrides are cleared. Effectively this makes sure that upstream
// MaxItems changes are deferred until the next major version.
//
// Implementation note: to operate correctly this method needs to keep a persistent track
// of a database of past decision history. This is currently done by doing reads and
// writes to `providerInfo.GetMetadata()`, which is assumed to be persisted across
// provider releases. The bridge framework keeps this information written out to an opaque
// `bridge-metadata.json` blob which is expected to be stored in source control to persist
// across releases.
//
// Panics if [ProviderInfo.ApplyAutoAliases] would return an error.
func (info *ProviderInfo) MustApplyAutoAliases() {
err := info.ApplyAutoAliases()
contract.AssertNoErrorf(err, "Failed to apply aliases")
}
// Automatically applies backwards compatibility best practices.
//
// Specifically, [ApplyAutoAliases] may perform the following actions:
//
// - Call [ProviderInfo.RenameResourceWithAlias] or [ProviderInfo.RenameDataSource]
// - Edit [ResourceInfo.Aliases]
// - Edit [SchemaInfo.MaxItemsOne]
//
// The goal is to always maximize backwards compatibility and reduce breaking changes for
// the users of the Pulumi providers.
//
// Resource aliases help mask TF provider resource renames or changes in mapped tokens so
// older programs continue to work. See [ResourceInfo.RenameResourceWithAlias] and
// [ResourceInfo.Aliases] for more details.
//
// [SchemaInfo.MaxItemsOne] changes are also important because they involve flattening and
// pluralizing names. Collections (lists or sets) marked with MaxItems=1 are projected as
// scalar types in Pulumi SDKs. Therefore changes to the MaxItems property may be breaking
// the compilation of programs as the type changes from `T` to `List[T]` or vice versa. To
// avoid these breaking changes, this method undoes any upstream changes to MaxItems using
// [SchemaInfo.MaxItemsOne] overrides. This happens until a major version change is
// detected, and then overrides are cleared. Effectively this makes sure that upstream
// MaxItems changes are deferred until the next major version.
//
// Implementation note: to operate correctly this method needs to keep a persistent track
// of a database of past decision history. This is currently done by doing reads and
// writes to `providerInfo.GetMetadata()`, which is assumed to be persisted across
// provider releases. The bridge framework keeps this information written out to an opaque
// `bridge-metadata.json` blob which is expected to be stored in source control to persist
// across releases.
func (info *ProviderInfo) ApplyAutoAliases() error {
artifact := info.GetMetadata()
hist, err := getHistory(artifact)
if err != nil {
return err
}
var currentVersion int
// If version is missing, we assume the current version is the most recent major
// version mentioned in the history.
if info.Version != "" {
v, err := semver.NewVersion(info.Version)
if err != nil {
return err
}
currentVersion = int(v.Major())
} else {
for _, r := range hist.Resources {
for _, p := range r.Past {
if p.MajorVersion > currentVersion {
currentVersion = p.MajorVersion
}
}
}
for _, d := range hist.DataSources {
for _, p := range d.Past {
if p.MajorVersion > currentVersion {
currentVersion = p.MajorVersion
}
}
}
}
rMap := info.P.ResourcesMap()
dMap := info.P.DataSourcesMap()
// Applying resource aliases adds new resources to providerInfo.Resources. To keep
// this process deterministic, we don't apply resource aliases until all resources
// have been examined.
//
// The same logic applies to datasources.
applyAliases := []func(){}
for tfToken, computed := range info.Resources {
r, _ := rMap.GetOk(tfToken)
aliasResource(info, r, &applyAliases, hist.Resources,
computed, tfToken, currentVersion)
}
for tfToken, computed := range info.DataSources {
ds, _ := dMap.GetOk(tfToken)
aliasDataSource(info, ds, &applyAliases, hist.DataSources,
computed, tfToken, currentVersion)
}
for _, f := range applyAliases {
f()
}
if err := md.Set(artifact, aliasMetadataKey, hist); err != nil {
// Set fails only when `hist` is not serializable. Because `hist` is
// composed of marshallable, non-cyclic types, this is impossible.
contract.AssertNoErrorf(err, "History failed to serialize")
}
return nil
}
const aliasMetadataKey = "auto-aliasing"
func getHistory(artifact ProviderMetadata) (aliasHistory, error) {
hist, ok, err := md.Get[aliasHistory](artifact, aliasMetadataKey)
if err != nil {
return aliasHistory{}, err
}
if !ok |
return hist, nil
}
func aliasResource(
p *ProviderInfo, res shim.Resource,
applyResourceAliases *[]func(),
hist map[string]*tokenHistory[tokens.Type], computed *ResourceInfo,
tfToken string, version int,
) {
prev, hasPrev := hist[tfToken]
if !hasPrev {
// It's not in the history, so it must be new. Stick it in the history for
// next time.
hist[tfToken] = &tokenHistory[tokens.Type]{
Current: computed.Tok,
}
} else {
// We don't do this eagerly because aliasResource is called while
// iterating over p.Resources which aliasOrRenameResource mutates.
*applyResourceAliases = append(*applyResourceAliases,
func() { aliasOrRenameResource(p, computed, tfToken, prev, version) })
}
// Apply aliasing to MaxItemsOne by traversing the field tree and applying the
// stored value.
//
// Note: If the user explicitly sets a MaxItemsOne value, that value is respected
// and overwrites the current history.
if res == nil {
return
}
// If we are behind the major version, reset the fields and the major version.
if hist[tfToken].MajorVersion < version {
hist[tfToken].MajorVersion = version
hist[tfToken].Fields = nil
}
applyResourceMaxItemsOneAliasing(res, &hist[tfToken].Fields, &computed.Fields)
}
// applyResourceMaxItemsOneAliasing traverses a shim.Resource, applying walk to each field in the resource.
func applyResourceMaxItemsOneAliasing(
r shim.Resource, hist *map[string]*fieldHistory, info *map[string]*SchemaInfo,
) (bool, bool) {
if r == nil {
return hist != nil, info != nil
}
m := r.Schema()
if m == nil {
return hist != nil, info != nil
}
var rHasH, rHasI bool
m.Range(func(k string, v shim.Schema) bool {
h, hasH := getNonNil(hist, k)
i, hasI := getNonNil(info, k)
fieldHasHist, fieldHasInfo := applyMaxItemsOneAliasing(v, h, i)
hasH = hasH || fieldHasHist
hasI = hasI || fieldHasInfo
if !hasH {
delete(*hist, k)
}
if !hasI {
delete(*info, k)
}
rHasH = rHasH || hasH
rHasI = rHasI || hasI
return true
})
return rHasH, rHasI
}
// When walking the schema tree for a resource, we create mirroring trees in
// *fieldHistory and *SchemaInfo. To avoid polluting either tree (and
// interfering with other actions such as SetAutonaming), we clean up the paths
// that we created but did not store any information into.
//
// For example, consider the schema for a field of type `Object{ Key1:
// List[String] }`. The schema tree for this field looks like this:
//
// Object:
// Fields:
// Key1:
// List:
// Elem:
// String
//
// When we walk the tree, we create an almost identical history tree:
//
// Object:
// Fields:
// Key1:
// List:
// MaxItemsOne: false
// Elem:
// String
//
// We stored the additional piece of information `MaxItemsOne: false`. We need to
// keep enough of the tree to maintain that information, but no more. We can
// discard the unnecessary `Elem: String`.
//
// This keeps the tree as clean as possible for other processes which expect a
// `nil` element when making changes. Since other processes (like SetAutonaming)
// act on edge nodes (like our String), this allows us to inter-operate with them
// without interference.
//
// applyMaxItemsOneAliasing traverses a generic shim.Schema recursively, applying fieldHistory to
// SchemaInfo and vise versa as necessary to avoid breaking changes in the
// resulting sdk.
func applyMaxItemsOneAliasing(schema shim.Schema, h *fieldHistory, info *SchemaInfo) (hasH bool, hasI bool) {
//revive:disable-next-line:empty-block
if schema == nil || (schema.Type() != shim.TypeList && schema.Type() != shim.TypeSet) {
// MaxItemsOne does not apply, so do nothing
} else if info.MaxItemsOne != nil {
// The user has overwritten the value, so we will just record that.
h.MaxItemsOne = info.MaxItemsOne
hasH = true
} else if h.MaxItemsOne != nil {
// If we have a previous value in the history, we keep it as is.
info.MaxItemsOne = h.MaxItemsOne
hasI = true
} else {
// There is no history for this value, so we bake it into the
// alias history.
h.MaxItemsOne = BoolRef(IsMaxItemsOne(schema, info))
hasH = true
}
// Ensure that the h.Elem and info.Elem fields are non-nil so they can be
// safely recursed on.
//
// If the .Elem existed before this function, we mark it as unsafe to cleanup.
var hasElemH, hasElemI bool
populateElem := func() {
if h.Elem == nil {
h.Elem = &fieldHistory{}
} else {
hasElemH = true
}
if info.Elem == nil {
info.Elem = &SchemaInfo{}
} else {
hasElemI = true
}
}
// Cleanup after we have walked a .Elem value.
//
// If the .Elem field was created in populateElem and the field was not
// changed, we then delete the field.
cleanupElem := func(elemHist, elemInfo bool) {
hasElemH = hasElemH || elemHist
hasElemI = hasElemI || elemInfo
if !hasElemH {
h.Elem = nil
}
if !hasElemI {
info.Elem = nil
}
}
e := schema.Elem()
switch e := e.(type) {
case shim.Resource:
populateElem()
eHasH, eHasI := applyResourceMaxItemsOneAliasing(e, &h.Elem.Fields, &info.Elem.Fields)
cleanupElem(eHasH, eHasI)
case shim.Schema:
populateElem()
eHasH, eHasI := applyMaxItemsOneAliasing(e, h.Elem, info.Elem)
cleanupElem(eHasH, eHasI)
}
return hasH || hasElemH, hasI || hasElemI
}
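// Decision summary for the MaxItemsOne branch above (a restatement of the
// logic, adding no new behavior):
//
//	info.MaxItemsOne set by the user -> history records the user's value
//	history has a stored value       -> info inherits the historical value
//	otherwise                        -> history bakes in IsMaxItemsOne(schema, info)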
func getNonNil[K comparable, V any](m *map[K]*V, key K) (_ *V, alreadyThere bool) {
contract.Assertf(m != nil, "Cannot restore map if ptr is nil")
if *m == nil {
*m = map[K]*V{}
}
v := (*m)[key]
if v == nil {
var new V
v = &new
(*m)[key] = v
} else {
alreadyThere = true
}
return v, alreadyThere
}
func aliasOrRenameResource(
p *ProviderInfo,
res *ResourceInfo, tfToken string,
hist *tokenHistory[tokens.Type], currentVersion int,
) {
var alreadyPresent bool
for _, a := range hist.Past {
if a.Name == hist.Current {
alreadyPresent = true
break
}
}
if !alreadyPresent && res.Tok != hist.Current {
// The resource is in history, but the name has changed. Update the new current name
// and add the old name to the history.
hist.Past = append(hist.Past, alias[tokens.Type]{
Name: hist.Current,
InCodegen: true,
MajorVersion: currentVersion,
})
hist.Current = res.Tok
}
for _, a := range hist.Past {
legacy := a.Name
// Only respect hard aliases introduced in the same major version
if a.InCodegen && a.MajorVersion == currentVersion {
p.RenameResourceWithAlias(tfToken, legacy,
res.Tok, legacy.Module().Name().String(),
res.Tok.Module().Name().String(), res)
} else {
res.Aliases = append(res.Aliases,
AliasInfo{Type: (*string)(&legacy)})
}
}
}
func aliasDataSource(
p *ProviderInfo,
ds shim.Resource,
queue *[]func(),
hist map[string]*tokenHistory[tokens.ModuleMember],
computed *DataSourceInfo,
tfToken string,
version int,
) {
prev, hasPrev := hist[tfToken]
if !hasPrev {
// It's not in the history, so it must be new. Stick it in the history for
// next time.
hist[tfToken] = &tokenHistory[tokens.ModuleMember]{
Current: computed.Tok,
MajorVersion: version,
}
} else {
*queue = append(*queue,
func() { aliasOrRenameDataSource(p, computed, tfToken, prev, version) })
}
if ds == nil {
return
}
// If we are behind the major version, reset the fields and the major version.
if hist[tfToken].MajorVersion < version {
hist[tfToken].MajorVersion = version
hist[tfToken].Fields = nil
}
applyResourceMaxItemsOneAliasing(ds, &hist[tfToken].Fields, &computed.Fields)
}
func aliasOrRenameDataSource(
p *ProviderInfo,
ds *DataSourceInfo, tfToken string,
prev *tokenHistory[tokens.ModuleMember],
currentVersion int,
) {
// re-fetch the resource, to make sure we have the right pointer.
computed, ok := p.DataSources[tfToken]
if !ok {
// The DataSource to alias has been removed. There
// is nothing to alias anymore.
return
}
var alreadyPresent bool
for _, a := range prev.Past {
if a.Name == prev.Current {
alreadyPresent = true
break
}
}
if !alreadyPresent && ds.Tok != prev.Current {
prev.Past = append(prev.Past, alias[tokens.ModuleMember]{
Name: prev.Current,
MajorVersion: currentVersion,
})
}
for _, a := range prev.Past {
if a.MajorVersion != currentVersion {
continue
}
legacy := a.Name
p.RenameDataSource(tfToken, legacy,
computed.Tok, legacy.Module().Name().String(),
computed.Tok.Module().Name().String(), computed)
}
}
| {
hist = aliasHistory{
Resources: map[string]*tokenHistory[tokens.Type]{},
DataSources: map[string]*tokenHistory[tokens.ModuleMember]{},
}
} | conditional_block |
auto_aliasing.go | // Copyright 2016-2023, Pulumi Corporation.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package tfbridge
import (
"github.com/Masterminds/semver"
shim "github.com/pulumi/pulumi-terraform-bridge/v3/pkg/tfshim"
md "github.com/pulumi/pulumi-terraform-bridge/v3/unstable/metadata"
"github.com/pulumi/pulumi/sdk/v3/go/common/tokens"
"github.com/pulumi/pulumi/sdk/v3/go/common/util/contract"
)
type tokenHistory[T ~string] struct {
Current T `json:"current"` // the current Pulumi token for the resource
Past []alias[T] `json:"past,omitempty"` // Previous tokens
MajorVersion int `json:"majorVersion,omitempty"`
Fields map[string]*fieldHistory `json:"fields,omitempty"`
}
type alias[T ~string] struct {
Name T `json:"name"` // The previous token.
InCodegen bool `json:"inCodegen"` // If the alias is a fully generated resource, or just a schema alias.
MajorVersion int `json:"majorVersion"` // The provider's major version when Name was introduced.
}
type aliasHistory struct {
Resources map[string]*tokenHistory[tokens.Type] `json:"resources,omitempty"`
DataSources map[string]*tokenHistory[tokens.ModuleMember] `json:"datasources,omitempty"`
}
type fieldHistory struct {
MaxItemsOne *bool `json:"maxItemsOne,omitempty"`
Fields map[string]*fieldHistory `json:"fields,omitempty"`
Elem *fieldHistory `json:"elem,omitempty"`
}
// Automatically applies backwards compatibility best practices.
//
// Specifically, [ApplyAutoAliases] may perform the following actions:
//
// - Call [ProviderInfo.RenameResourceWithAlias] or [ProviderInfo.RenameDataSource]
// - Edit [ResourceInfo.Aliases]
// - Edit [SchemaInfo.MaxItemsOne]
//
// The goal is to always maximize backwards compatibility and reduce breaking changes for
// the users of the Pulumi providers.
//
// Resource aliases help mask TF provider resource renames or changes in mapped tokens so
// older programs continue to work. See [ResourceInfo.RenameResourceWithAlias] and
// [ResourceInfo.Aliases] for more details.
//
// [SchemaInfo.MaxItemsOne] changes are also important because they involve flattening and
// pluralizing names. Collections (lists or sets) marked with MaxItems=1 are projected as
// scalar types in Pulumi SDKs. Therefore changes to the MaxItems property may be breaking
// the compilation of programs as the type changes from `T to List[T]` or vice versa. To
// avoid these breaking changes, this method undoes any upstream changes to MaxItems using
// [SchemaInfo.MaxItemsOne] overrides. This happens until a major version change is
// detected, and then overrides are cleared. Effectively this makes sure that upstream
// MaxItems changes are deferred until the next major version.
//
// Implementation note: to operate correctly this method needs to keep a persistent track
// of a database of past decision history. This is currently done by doing reads and
// writes to `providerInfo.GetMetadata()`, which is assumed to be persisted across
// provider releases. The bridge framework keeps this information written out to an opaque
// `bridge-metadata.json` blob which is expected to be stored in source control to persist
// across releases.
//
// Panics if [ProviderInfo.ApplyAutoAliases] would return an error.
func (info *ProviderInfo) MustApplyAutoAliases() {
err := info.ApplyAutoAliases()
contract.AssertNoErrorf(err, "Failed to apply aliases")
}
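// A minimal sketch (not part of the bridge API) of where MustApplyAutoAliases
// fits in a provider definition: the ProviderInfo must be fully populated,
// including the metadata backing GetMetadata(), before the call, and the call
// must happen before schema generation so renames and overrides land in the
// generated SDKs.
func exampleApplyAutoAliases(prov *ProviderInfo) *ProviderInfo {
	// Assumes prov.MetadataInfo already points at the checked-in
	// bridge-metadata.json blob; without it there is no history to read
	// from or write to.
	prov.MustApplyAutoAliases()
	return prov
}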
// Automatically applies backwards compatibility best practices.
//
// Specifically, [ApplyAutoAliases] may perform the following actions:
//
// - Call [ProviderInfo.RenameResourceWithAlias] or [ProviderInfo.RenameDataSource]
// - Edit [ResourceInfo.Aliases]
// - Edit [SchemaInfo.MaxItemsOne]
//
// The goal is to always maximize backwards compatibility and reduce breaking changes for
// the users of the Pulumi providers.
//
// Resource aliases help mask TF provider resource renames or changes in mapped tokens so
// older programs continue to work. See [ResourceInfo.RenameResourceWithAlias] and
// [ResourceInfo.Aliases] for more details.
//
// [SchemaInfo.MaxItemsOne] changes are also important because they involve flattening and
// pluralizing names. Collections (lists or sets) marked with MaxItems=1 are projected as
// scalar types in Pulumi SDKs. Therefore changes to the MaxItems property may be breaking
// the compilation of programs as the type changes from `T to List[T]` or vice versa. To
// avoid these breaking changes, this method undoes any upstream changes to MaxItems using
// [SchemaInfo.MaxItemsOne] overrides. This happens until a major version change is
// detected, and then overrides are cleared. Effectively this makes sure that upstream
// MaxItems changes are deferred until the next major version.
//
// Implementation note: to operate correctly this method needs to keep a persistent track
// of a database of past decision history. This is currently done by doing reads and
// writes to `providerInfo.GetMetadata()`, which is assumed to be persisted across
// provider releases. The bridge framework keeps this information written out to an opaque
// `bridge-metadata.json` blob which is expected to be stored in source control to persist
// across releases.
func (info *ProviderInfo) ApplyAutoAliases() error {
artifact := info.GetMetadata()
hist, err := getHistory(artifact)
if err != nil {
return err
}
var currentVersion int
// If version is missing, we assume the current version is the most recent
// major version mentioned in the history.
if info.Version != "" {
v, err := semver.NewVersion(info.Version)
if err != nil {
return err
}
currentVersion = int(v.Major())
} else {
for _, r := range hist.Resources {
for _, p := range r.Past {
if p.MajorVersion > currentVersion {
currentVersion = p.MajorVersion
}
}
}
for _, d := range hist.DataSources {
for _, p := range d.Past {
if p.MajorVersion > currentVersion {
currentVersion = p.MajorVersion
}
}
}
}
rMap := info.P.ResourcesMap()
dMap := info.P.DataSourcesMap()
// Applying resource aliases adds new resources to providerInfo.Resources. To keep
// this process deterministic, we don't apply resource aliases until all resources
// have been examined.
//
// The same logic applies to datasources.
applyAliases := []func(){}
for tfToken, computed := range info.Resources {
r, _ := rMap.GetOk(tfToken)
aliasResource(info, r, &applyAliases, hist.Resources,
computed, tfToken, currentVersion)
}
for tfToken, computed := range info.DataSources {
ds, _ := dMap.GetOk(tfToken)
aliasDataSource(info, ds, &applyAliases, hist.DataSources,
computed, tfToken, currentVersion)
}
for _, f := range applyAliases {
f()
}
if err := md.Set(artifact, aliasMetadataKey, hist); err != nil {
// Set fails only when `hist` is not serializable. Because `hist` is
// composed of marshallable, non-cyclic types, this is impossible.
contract.AssertNoErrorf(err, "History failed to serialize")
}
return nil
}
const aliasMetadataKey = "auto-aliasing"
func getHistory(artifact ProviderMetadata) (aliasHistory, error) {
hist, ok, err := md.Get[aliasHistory](artifact, aliasMetadataKey)
if err != nil {
return aliasHistory{}, err
}
if !ok {
hist = aliasHistory{
Resources: map[string]*tokenHistory[tokens.Type]{},
DataSources: map[string]*tokenHistory[tokens.ModuleMember]{},
}
}
return hist, nil
}
func aliasResource(
p *ProviderInfo, res shim.Resource,
applyResourceAliases *[]func(),
hist map[string]*tokenHistory[tokens.Type], computed *ResourceInfo,
tfToken string, version int,
) {
prev, hasPrev := hist[tfToken]
if !hasPrev {
// It's not in the history, so it must be new. Stick it in the history for
// next time.
hist[tfToken] = &tokenHistory[tokens.Type]{
Current: computed.Tok,
}
} else {
// We don't do this eagerly because aliasResource is called while
// iterating over p.Resources which aliasOrRenameResource mutates.
*applyResourceAliases = append(*applyResourceAliases,
func() { aliasOrRenameResource(p, computed, tfToken, prev, version) })
}
// Apply aliasing to MaxItemsOne by traversing the field tree and applying the
// stored value.
//
// Note: If the user explicitly sets a MaxItemsOne value, that value is respected
// and overwrites the current history.
if res == nil {
return
}
// If we are behind the major version, reset the fields and the major version.
if hist[tfToken].MajorVersion < version {
hist[tfToken].MajorVersion = version
hist[tfToken].Fields = nil
}
applyResourceMaxItemsOneAliasing(res, &hist[tfToken].Fields, &computed.Fields)
}
// applyResourceMaxItemsOneAliasing traverses a shim.Resource, applying walk to each field in the resource.
func applyResourceMaxItemsOneAliasing(
r shim.Resource, hist *map[string]*fieldHistory, info *map[string]*SchemaInfo,
) (bool, bool) {
if r == nil {
return hist != nil, info != nil
}
m := r.Schema()
if m == nil {
return hist != nil, info != nil
}
var rHasH, rHasI bool
m.Range(func(k string, v shim.Schema) bool {
h, hasH := getNonNil(hist, k)
i, hasI := getNonNil(info, k)
fieldHasHist, fieldHasInfo := applyMaxItemsOneAliasing(v, h, i)
hasH = hasH || fieldHasHist
hasI = hasI || fieldHasInfo
if !hasH {
delete(*hist, k)
}
if !hasI {
delete(*info, k)
}
rHasH = rHasH || hasH
rHasI = rHasI || hasI
return true
})
return rHasH, rHasI
}
// When walking the schema tree for a resource, we create mirroring trees in
// *fieldHistory and *SchemaInfo. To avoid polluting either tree (and
// interfering with other actions such as SetAutonaming), we clean up the paths
// that we created but did not store any information into.
//
// For example, consider the schema for a field of type `Object{ Key1:
// List[String] }`. The schema tree for this field looks like this:
//
// Object:
// Fields:
// Key1:
// List:
// Elem:
// String
//
// When we walk the tree, we create an almost identical history tree:
//
// Object:
// Fields:
// Key1:
// List:
// MaxItemsOne: false
// Elem:
// String
//
// We stored the additional piece of information `MaxItemsOne: false`. We need to
// keep enough of the tree to maintain that information, but no more. We can
// discard the unnecessary `Elem: String`.
//
// This keeps the tree as clean as possible for other processes which expect a
// `nil` element when making changes. Since other processes (like SetAutonaming)
// act on edge nodes (like our String), this allows us to inter-operate with them
// without interference.
//
// applyMaxItemsOneAliasing traverses a generic shim.Schema recursively, applying fieldHistory to
// SchemaInfo and vice versa as necessary to avoid breaking changes in the
// resulting SDK.
func applyMaxItemsOneAliasing(schema shim.Schema, h *fieldHistory, info *SchemaInfo) (hasH bool, hasI bool) {
//revive:disable-next-line:empty-block
if schema == nil || (schema.Type() != shim.TypeList && schema.Type() != shim.TypeSet) {
// MaxItemsOne does not apply, so do nothing
} else if info.MaxItemsOne != nil {
// The user has overwritten the value, so we will just record that.
h.MaxItemsOne = info.MaxItemsOne
hasH = true
} else if h.MaxItemsOne != nil {
// If we have a previous value in the history, we keep it as is.
info.MaxItemsOne = h.MaxItemsOne
hasI = true
} else {
// There is no history for this value, so we bake it into the
// alias history.
h.MaxItemsOne = BoolRef(IsMaxItemsOne(schema, info))
hasH = true
}
// Ensure that the h.Elem and info.Elem fields are non-nil so they can be
// safely recursed on.
//
// If the .Elem existed before this function, we mark it as unsafe to cleanup.
var hasElemH, hasElemI bool
populateElem := func() {
if h.Elem == nil {
h.Elem = &fieldHistory{}
} else {
hasElemH = true
}
if info.Elem == nil {
info.Elem = &SchemaInfo{}
} else {
hasElemI = true
}
}
// Cleanup after we have walked a .Elem value.
//
// If the .Elem field was created in populateElem and the field was not
// changed, we then delete the field.
cleanupElem := func(elemHist, elemInfo bool) {
hasElemH = hasElemH || elemHist
hasElemI = hasElemI || elemInfo
if !hasElemH {
h.Elem = nil
}
if !hasElemI {
info.Elem = nil
}
}
e := schema.Elem()
switch e := e.(type) {
case shim.Resource:
populateElem()
eHasH, eHasI := applyResourceMaxItemsOneAliasing(e, &h.Elem.Fields, &info.Elem.Fields)
cleanupElem(eHasH, eHasI)
case shim.Schema:
populateElem()
eHasH, eHasI := applyMaxItemsOneAliasing(e, h.Elem, info.Elem)
cleanupElem(eHasH, eHasI)
}
return hasH || hasElemH, hasI || hasElemI
}
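// Concretely, for applyMaxItemsOneAliasing above: suppose the history recorded
// maxItemsOne=true for a list field and a new upstream release then drops its
// MaxItems=1 constraint. On the next run info.MaxItemsOne is unset while
// h.MaxItemsOne is non-nil, so the second branch copies true back into
// info.MaxItemsOne and the field stays flattened in the SDKs; the override
// only disappears when a major version bump resets the stored fields.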
func getNonNil[K comparable, V any](m *map[K]*V, key K) (_ *V, alreadyThere bool) {
contract.Assertf(m != nil, "Cannot restore map if ptr is nil")
if *m == nil {
*m = map[K]*V{}
}
v := (*m)[key]
if v == nil {
var new V
v = &new
(*m)[key] = v
} else {
alreadyThere = true
}
return v, alreadyThere
}
func aliasOrRenameResource(
p *ProviderInfo,
res *ResourceInfo, tfToken string,
hist *tokenHistory[tokens.Type], currentVersion int,
) {
var alreadyPresent bool
for _, a := range hist.Past {
if a.Name == hist.Current {
alreadyPresent = true
break
}
}
if !alreadyPresent && res.Tok != hist.Current {
// The resource is in history, but the name has changed. Update the new current name
// and add the old name to the history.
hist.Past = append(hist.Past, alias[tokens.Type]{
Name: hist.Current,
InCodegen: true,
MajorVersion: currentVersion,
})
hist.Current = res.Tok
}
for _, a := range hist.Past {
legacy := a.Name
// Only respect hard aliases introduced in the same major version
if a.InCodegen && a.MajorVersion == currentVersion {
p.RenameResourceWithAlias(tfToken, legacy,
res.Tok, legacy.Module().Name().String(),
res.Tok.Module().Name().String(), res)
} else {
res.Aliases = append(res.Aliases,
AliasInfo{Type: (*string)(&legacy)})
}
}
}
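// To make the branch in aliasOrRenameResource concrete: a hard alias
// (InCodegen=true, recorded in the current major version) is replayed through
// RenameResourceWithAlias, which keeps a fully generated legacy copy of the
// resource available under the old token; any other recorded name only gets an
// AliasInfo entry, which lets the engine migrate state written under the old
// type token without regenerating code for it.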
func aliasDataSource(
p *ProviderInfo,
ds shim.Resource,
queue *[]func(),
hist map[string]*tokenHistory[tokens.ModuleMember],
computed *DataSourceInfo,
tfToken string,
version int,
) {
prev, hasPrev := hist[tfToken]
if !hasPrev {
// It's not in the history, so it must be new. Stick it in the history for
// next time.
hist[tfToken] = &tokenHistory[tokens.ModuleMember]{
Current: computed.Tok,
MajorVersion: version,
}
} else {
*queue = append(*queue,
func() { aliasOrRenameDataSource(p, computed, tfToken, prev, version) })
}
if ds == nil {
return
}
// If we are behind the major version, reset the fields and the major version.
if hist[tfToken].MajorVersion < version {
hist[tfToken].MajorVersion = version
hist[tfToken].Fields = nil
}
applyResourceMaxItemsOneAliasing(ds, &hist[tfToken].Fields, &computed.Fields)
}
func aliasOrRenameDataSource(
p *ProviderInfo,
ds *DataSourceInfo, tfToken string,
prev *tokenHistory[tokens.ModuleMember],
currentVersion int,
) {
// re-fetch the resource, to make sure we have the right pointer.
computed, ok := p.DataSources[tfToken]
if !ok {
// The DataSource to alias has been removed. There
// is nothing to alias anymore.
return
}
var alreadyPresent bool
for _, a := range prev.Past {
if a.Name == prev.Current {
alreadyPresent = true
break
}
}
if !alreadyPresent && ds.Tok != prev.Current {
prev.Past = append(prev.Past, alias[tokens.ModuleMember]{
Name: prev.Current,
MajorVersion: currentVersion,
})
}
for _, a := range prev.Past {
if a.MajorVersion != currentVersion {
continue
}
legacy := a.Name
p.RenameDataSource(tfToken, legacy,
computed.Tok, legacy.Module().Name().String(),
computed.Tok.Module().Name().String(), computed)
}
}
// cht.rs
// This file is part of Substrate.
// Copyright (C) 2017-2020 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//! Canonical hash trie definitions and helper functions.
//!
//! Each CHT is a trie mapping block numbers to canonical hashes.
//! One is generated for every `SIZE` blocks, allowing us to discard those blocks in
//! favor of the trie root. When the "ancient" blocks need to be accessed, we simply
//! request an inclusion proof of a specific block number against the trie with the
//! root hash. A correct proof implies that the claimed block is identical to the one
//! we discarded.
use codec::Encode;
use hash_db;
use sp_trie;
use sp_core::{convert_hash, H256};
use sp_runtime::traits::{AtLeast32Bit, Header as HeaderT, One, Zero};
use sp_state_machine::{
prove_read_on_trie_backend, read_proof_check, read_proof_check_on_proving_backend,
Backend as StateBackend, InMemoryBackend, MemoryDB, StorageProof, TrieBackend,
};
use sp_blockchain::{Error as ClientError, Result as ClientResult};
/// The size of each CHT. This value is passed to every CHT-related function from
/// production code. Other values are passed from tests.
const SIZE: u32 = 2048;
/// Gets default CHT size.
pub fn size<N: From<u32>>() -> N {
SIZE.into()
}
/// Returns Some(cht_number) if a CHT needs to be built when the block with the given
/// number is canonized.
pub fn is_build_required<N>(cht_size: N, block_num: N) -> Option<N>
where
N: Clone + AtLeast32Bit,
{
let block_cht_num = block_to_cht_number(cht_size.clone(), block_num.clone())?;
let two = N::one() + N::one();
if block_cht_num < two {
return None
}
let cht_start = start_number(cht_size, block_cht_num.clone());
if cht_start != block_num {
return None
}
Some(block_cht_num - two)
}
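// Worked example for is_build_required, with the default SIZE of 2048:
// canonizing block 4097, the first block of CHT 2, yields Some(0), meaning
// CHT 0 (blocks 1..=2048) can now be built and those headers pruned. Blocks
// that do not begin a CHT, and the starts of the first two CHTs, yield None.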
/// Returns Some(max_cht_number) if a CHT has ever been built, given the maximal canonical block number.
pub fn max_cht_number<N>(cht_size: N, max_canonical_block: N) -> Option<N>
where
N: Clone + AtLeast32Bit,
{
let max_cht_number = block_to_cht_number(cht_size, max_canonical_block)?;
let two = N::one() + N::one();
if max_cht_number < two {
return None
}
Some(max_cht_number - two)
}
/// Compute a CHT root from an iterator of block hashes. Fails if shorter than
/// SIZE items. The items are assumed to proceed sequentially from `start_number(cht_num)`.
/// Discards the trie's nodes.
pub fn compute_root<Header, Hasher, I>(
cht_size: Header::Number,
cht_num: Header::Number,
hashes: I,
) -> ClientResult<Hasher::Out>
where
Header: HeaderT,
Hasher: hash_db::Hasher,
Hasher::Out: Ord,
I: IntoIterator<Item = ClientResult<Option<Header::Hash>>>,
{
use sp_trie::TrieConfiguration;
Ok(sp_trie::trie_types::Layout::<Hasher>::trie_root(build_pairs::<Header, I>(
cht_size, cht_num, hashes,
)?))
}
/// Build CHT-based header proof.
pub fn build_proof<Header, Hasher, BlocksI, HashesI>(
cht_size: Header::Number,
cht_num: Header::Number,
blocks: BlocksI,
hashes: HashesI,
) -> ClientResult<StorageProof>
where
Header: HeaderT,
Hasher: hash_db::Hasher,
Hasher::Out: Ord + codec::Codec,
BlocksI: IntoIterator<Item = Header::Number>,
HashesI: IntoIterator<Item = ClientResult<Option<Header::Hash>>>,
{
let transaction = build_pairs::<Header, _>(cht_size, cht_num, hashes)?
.into_iter()
.map(|(k, v)| (k, Some(v)))
.collect::<Vec<_>>();
let mut storage = InMemoryBackend::<Hasher>::default().update(vec![(None, transaction)]);
let trie_storage =
storage.as_trie_backend().expect("InMemoryState::as_trie_backend always returns Some; qed");
prove_read_on_trie_backend(
trie_storage,
blocks.into_iter().map(|number| encode_cht_key(number)),
)
.map_err(ClientError::Execution)
}
/// Check CHT-based header proof.
pub fn check_proof<Header, Hasher>(
local_root: Header::Hash,
local_number: Header::Number,
remote_hash: Header::Hash,
remote_proof: StorageProof,
) -> ClientResult<()>
where
Header: HeaderT,
Hasher: hash_db::Hasher,
Hasher::Out: Ord + codec::Codec,
{
do_check_proof::<Header, Hasher, _>(
local_root,
local_number,
remote_hash,
move |local_root, local_cht_key| {
read_proof_check::<Hasher, _>(
local_root,
remote_proof,
::std::iter::once(local_cht_key),
)
.map(|mut map| map.remove(local_cht_key).expect("checked proof of local_cht_key; qed"))
.map_err(|e| ClientError::from(e))
},
)
}
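// A sketch of the full round trip this module enables, written as a test for
// concreteness and under the same assumptions as the unit tests below (a
// u64/BlakeTwo256 header and SIZE identical hashes standing in for CHT 0's
// blocks): the root a full node computes must validate the proof it builds
// for any block inside that CHT.
#[cfg(test)]
mod round_trip_sketch {
	use super::*;
	use sp_runtime::{generic, traits::BlakeTwo256};
	type Header = generic::Header<u64, BlakeTwo256>;
	#[test]
	fn proof_round_trip() {
		let hashes = || {
			::std::iter::repeat_with(|| Ok(Some(H256::from_low_u64_be(1))))
				.take(SIZE as usize)
		};
		// Full node side: summarize CHT 0 into a root, then prove block 1.
		let root =
			compute_root::<Header, BlakeTwo256, _>(SIZE as u64, 0, hashes()).unwrap();
		let proof =
			build_proof::<Header, BlakeTwo256, _, _>(SIZE as u64, 0, vec![1u64], hashes())
				.unwrap();
		// Light client side: only `root` is trusted; the proof pins block 1's
		// hash to it.
		assert!(
			check_proof::<Header, BlakeTwo256>(root, 1, H256::from_low_u64_be(1), proof)
				.is_ok()
		);
	}
}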
/// Check CHT-based header proof on pre-created proving backend.
pub fn check_proof_on_proving_backend<Header, Hasher>(
local_root: Header::Hash,
local_number: Header::Number,
remote_hash: Header::Hash,
proving_backend: &TrieBackend<MemoryDB<Hasher>, Hasher>,
) -> ClientResult<()>
where
Header: HeaderT,
Hasher: hash_db::Hasher,
Hasher::Out: Ord + codec::Codec,
{
do_check_proof::<Header, Hasher, _>(
local_root,
local_number,
remote_hash,
|_, local_cht_key| {
read_proof_check_on_proving_backend::<Hasher>(proving_backend, local_cht_key)
.map_err(|e| ClientError::from(e))
},
)
}
/// Check CHT-based header proof using passed checker function.
fn do_check_proof<Header, Hasher, F>(
local_root: Header::Hash,
local_number: Header::Number,
remote_hash: Header::Hash,
checker: F,
) -> ClientResult<()>
where
Header: HeaderT,
Hasher: hash_db::Hasher,
Hasher::Out: Ord,
F: FnOnce(Hasher::Out, &[u8]) -> ClientResult<Option<Vec<u8>>>,
{
let root: Hasher::Out = convert_hash(&local_root);
let local_cht_key = encode_cht_key(local_number);
let local_cht_value = checker(root, &local_cht_key)?;
let local_cht_value = local_cht_value.ok_or_else(|| ClientError::InvalidCHTProof)?;
let local_hash =
decode_cht_value(&local_cht_value).ok_or_else(|| ClientError::InvalidCHTProof)?;
match &local_hash[..] == remote_hash.as_ref() {
true => Ok(()),
false => Err(ClientError::InvalidCHTProof.into()),
}
}
/// Group ordered blocks by CHT number and call functor with blocks of each group.
pub fn for_each_cht_group<Header, I, F, P>(
cht_size: Header::Number,
blocks: I,
mut functor: F,
mut functor_param: P,
) -> ClientResult<()>
where
Header: HeaderT,
I: IntoIterator<Item = Header::Number>,
F: FnMut(P, Header::Number, Vec<Header::Number>) -> ClientResult<P>,
{
let mut current_cht_num = None;
let mut current_cht_blocks = Vec::new();
for block in blocks {
let new_cht_num = match block_to_cht_number(cht_size, block) {
Some(new_cht_num) => new_cht_num,
None =>
return Err(ClientError::Backend(format!(
"Cannot compute CHT root for the block #{}",
block
))
.into()),
};
let advance_to_next_cht = current_cht_num.is_some() && current_cht_num != Some(new_cht_num);
if advance_to_next_cht {
let current_cht_num = current_cht_num.expect(
"advance_to_next_cht is true;
it is true only when current_cht_num is Some; qed",
);
assert!(
new_cht_num > current_cht_num,
"for_each_cht_group only supports ordered iterators"
);
functor_param =
functor(functor_param, current_cht_num, std::mem::take(&mut current_cht_blocks))?;
}
current_cht_blocks.push(block);
current_cht_num = Some(new_cht_num);
}
if let Some(current_cht_num) = current_cht_num {
functor(functor_param, current_cht_num, std::mem::take(&mut current_cht_blocks))?;
}
Ok(())
}
/// Build pairs for computing CHT.
fn build_pairs<Header, I>(
cht_size: Header::Number,
cht_num: Header::Number,
hashes: I,
) -> ClientResult<Vec<(Vec<u8>, Vec<u8>)>>
where
Header: HeaderT,
I: IntoIterator<Item = ClientResult<Option<Header::Hash>>>,
{
let start_num = start_number(cht_size, cht_num);
let mut pairs = Vec::new();
let mut hash_index = Header::Number::zero();
for hash in hashes.into_iter() {
let hash =
hash?.ok_or_else(|| ClientError::from(ClientError::MissingHashRequiredForCHT))?;
pairs.push((encode_cht_key(start_num + hash_index).to_vec(), encode_cht_value(hash)));
hash_index += Header::Number::one();
if hash_index == cht_size {
break
}
}
if hash_index == cht_size {
Ok(pairs)
} else {
Err(ClientError::MissingHashRequiredForCHT)
}
}
/// Get the starting block of a given CHT.
/// CHT 0 includes block 1...SIZE,
/// CHT 1 includes block SIZE + 1 ... 2*SIZE
/// More generally: CHT N includes block (1 + N*SIZE)...((N+1)*SIZE).
/// This is because the genesis hash is assumed to be known
/// and including it would be redundant.
pub fn start_number<N: AtLeast32Bit>(cht_size: N, cht_num: N) -> N {
(cht_num * cht_size) + N::one()
}
/// Get the ending block of a given CHT.
pub fn end_number<N: AtLeast32Bit>(cht_size: N, cht_num: N) -> N {
(cht_num + N::one()) * cht_size
}
/// Convert a block number to a CHT number.
/// Returns `None` for `block_num` == 0, `Some` otherwise.
pub fn block_to_cht_number<N: AtLeast32Bit>(cht_size: N, block_num: N) -> Option<N> {
if block_num == N::zero() {
None
} else {
Some((block_num - N::one()) / cht_size)
}
}
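// A numeric sanity check of the numbering scheme above, kept to the default
// SIZE: CHT 0 covers blocks 1..=SIZE, the genesis block belongs to no CHT,
// and each later CHT starts one block past the previous one's end.
#[cfg(test)]
mod numbering_sketch {
	use super::*;
	#[test]
	fn cht_numbering_lines_up() {
		assert_eq!(block_to_cht_number(SIZE, 0u32), None);
		assert_eq!(block_to_cht_number(SIZE, 1u32), Some(0));
		assert_eq!(block_to_cht_number(SIZE, SIZE), Some(0));
		assert_eq!(block_to_cht_number(SIZE, SIZE + 1), Some(1));
		assert_eq!(start_number(SIZE, 2u32), end_number(SIZE, 1u32) + 1);
	}
}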
/// Convert header number into CHT key.
pub fn encode_cht_key<N: Encode>(number: N) -> Vec<u8> {
number.encode()
}
/// Convert header hash into CHT value.
fn encode_cht_value<Hash: AsRef<[u8]>>(hash: Hash) -> Vec<u8> {
hash.as_ref().to_vec()
}
/// Convert CHT value into block header hash.
pub fn decode_cht_value(value: &[u8]) -> Option<H256> {
match value.len() {
32 => Some(H256::from_slice(&value[0..32])),
_ => None,
}
}
#[cfg(test)]
mod tests {
use super::*;
use sp_runtime::{generic, traits::BlakeTwo256};
type Header = generic::Header<u64, BlakeTwo256>;
#[test]
fn is_build_required_works() {
assert_eq!(is_build_required(SIZE, 0u32.into()), None);
assert_eq!(is_build_required(SIZE, 1u32.into()), None);
assert_eq!(is_build_required(SIZE, SIZE), None);
assert_eq!(is_build_required(SIZE, SIZE + 1), None);
assert_eq!(is_build_required(SIZE, 2 * SIZE), None);
assert_eq!(is_build_required(SIZE, 2 * SIZE + 1), Some(0));
assert_eq!(is_build_required(SIZE, 2 * SIZE + 2), None);
assert_eq!(is_build_required(SIZE, 3 * SIZE), None);
assert_eq!(is_build_required(SIZE, 3 * SIZE + 1), Some(1));
assert_eq!(is_build_required(SIZE, 3 * SIZE + 2), None);
}
#[test]
fn max_cht_number_works() {
assert_eq!(max_cht_number(SIZE, 0u32.into()), None);
assert_eq!(max_cht_number(SIZE, 1u32.into()), None);
assert_eq!(max_cht_number(SIZE, SIZE), None);
assert_eq!(max_cht_number(SIZE, SIZE + 1), None);
assert_eq!(max_cht_number(SIZE, 2 * SIZE), None);
assert_eq!(max_cht_number(SIZE, 2 * SIZE + 1), Some(0));
assert_eq!(max_cht_number(SIZE, 2 * SIZE + 2), Some(0));
assert_eq!(max_cht_number(SIZE, 3 * SIZE), Some(0));
assert_eq!(max_cht_number(SIZE, 3 * SIZE + 1), Some(1));
assert_eq!(max_cht_number(SIZE, 3 * SIZE + 2), Some(1));
}
#[test]
fn start_number_works() {
assert_eq!(start_number(SIZE, 0u32), 1u32);
assert_eq!(start_number(SIZE, 1u32), SIZE + 1);
assert_eq!(start_number(SIZE, 2u32), SIZE + SIZE + 1);
}
#[test]
fn end_number_works() {
assert_eq!(end_number(SIZE, 0u32), SIZE);
assert_eq!(end_number(SIZE, 1u32), SIZE + SIZE);
assert_eq!(end_number(SIZE, 2u32), SIZE + SIZE + SIZE);
}
#[test]
fn build_pairs_fails_when_no_enough_blocks() {
assert!(build_pairs::<Header, _>(
SIZE as _,
0,
::std::iter::repeat_with(|| Ok(Some(H256::from_low_u64_be(1)))).take(SIZE as usize / 2)
)
.is_err());
}
#[test]
fn build_pairs_fails_when_missing_block() {
assert!(build_pairs::<Header, _>(
SIZE as _,
0,
::std::iter::repeat_with(|| Ok(Some(H256::from_low_u64_be(1))))
.take(SIZE as usize / 2)
.chain(::std::iter::once(Ok(None)))
.chain(
::std::iter::repeat_with(|| Ok(Some(H256::from_low_u64_be(2))))
.take(SIZE as usize / 2 - 1)
)
)
.is_err());
}
#[test]
fn compute_root_works() {
assert!(compute_root::<Header, BlakeTwo256, _>(
SIZE as _,
42,
::std::iter::repeat_with(|| Ok(Some(H256::from_low_u64_be(1)))).take(SIZE as usize)
)
.is_ok());
}
#[test]
#[should_panic]
fn build_proof_panics_when_querying_wrong_block() {
assert!(build_proof::<Header, BlakeTwo256, _, _>(
SIZE as _,
0,
vec![(SIZE * 1000) as u64],
::std::iter::repeat_with(|| Ok(Some(H256::from_low_u64_be(1)))).take(SIZE as usize)
)
.is_err());
}
#[test]
fn build_proof_works() {
assert!(build_proof::<Header, BlakeTwo256, _, _>(
SIZE as _,
0,
vec![(SIZE / 2) as u64],
::std::iter::repeat_with(|| Ok(Some(H256::from_low_u64_be(1)))).take(SIZE as usize)
)
.is_ok());
}
#[test]
#[should_panic]
fn for_each_cht_group_panics() {
let cht_size = SIZE as u64;
let _ = for_each_cht_group::<Header, _, _, _>(
cht_size,
vec![cht_size * 5, cht_size * 2],
|_, _, _| Ok(()),
(),
);
}
#[test]
fn for_each_cht_group_works() {
let cht_size = SIZE as u64;
let _ = for_each_cht_group::<Header, _, _, _>(
cht_size,
vec![
cht_size * 2 + 1,
cht_size * 2 + 2,
cht_size * 2 + 5,
cht_size * 4 + 1,
cht_size * 4 + 7,
cht_size * 6 + 1,
],
|_, cht_num, blocks| {
match cht_num {
2 => assert_eq!(
blocks,
vec![cht_size * 2 + 1, cht_size * 2 + 2, cht_size * 2 + 5]
),
4 => assert_eq!(blocks, vec![cht_size * 4 + 1, cht_size * 4 + 7]),
6 => assert_eq!(blocks, vec![cht_size * 6 + 1]),
_ => unreachable!(),
}
Ok(())
},
(),
);
}
}
// cht.rs
// This file is part of Substrate.
// Copyright (C) 2017-2020 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//! Canonical hash trie definitions and helper functions.
//!
//! Each CHT is a trie mapping block numbers to canonical hashes.
//! One is generated for every `SIZE` blocks, allowing us to discard those blocks in
//! favor of the trie root. When the "ancient" blocks need to be accessed, we simply
//! request an inclusion proof of a specific block number against the trie with the
//! root hash. A correct proof implies that the claimed block is identical to the one
//! we discarded.
use codec::Encode;
use hash_db;
use sp_trie;
use sp_core::{convert_hash, H256};
use sp_runtime::traits::{AtLeast32Bit, Header as HeaderT, One, Zero};
use sp_state_machine::{
prove_read_on_trie_backend, read_proof_check, read_proof_check_on_proving_backend,
Backend as StateBackend, InMemoryBackend, MemoryDB, StorageProof, TrieBackend,
};
use sp_blockchain::{Error as ClientError, Result as ClientResult};
/// The size of each CHT. This value is passed to every CHT-related function from
/// production code. Other values are passed from tests.
const SIZE: u32 = 2048;
/// Gets default CHT size.
pub fn size<N: From<u32>>() -> N {
SIZE.into()
}
/// Returns Some(cht_number) if a CHT needs to be built when the block with the given
/// number is canonized.
pub fn is_build_required<N>(cht_size: N, block_num: N) -> Option<N>
where
N: Clone + AtLeast32Bit,
{
let block_cht_num = block_to_cht_number(cht_size.clone(), block_num.clone())?;
let two = N::one() + N::one();
if block_cht_num < two {
return None
}
let cht_start = start_number(cht_size, block_cht_num.clone());
if cht_start != block_num {
return None
}
Some(block_cht_num - two)
}
/// Returns Some(max_cht_number) if CHT has ever been built given maximal canonical block number.
pub fn max_cht_number<N>(cht_size: N, max_canonical_block: N) -> Option<N>
where
N: Clone + AtLeast32Bit,
{
let max_cht_number = block_to_cht_number(cht_size, max_canonical_block)?;
let two = N::one() + N::one();
if max_cht_number < two {
return None
}
Some(max_cht_number - two)
}
/// Compute a CHT root from an iterator of block hashes. Fails if shorter than
/// Discards the trie's nodes.
pub fn compute_root<Header, Hasher, I>(
cht_size: Header::Number,
cht_num: Header::Number,
hashes: I,
) -> ClientResult<Hasher::Out>
where
Header: HeaderT,
Hasher: hash_db::Hasher,
Hasher::Out: Ord,
I: IntoIterator<Item = ClientResult<Option<Header::Hash>>>,
{
use sp_trie::TrieConfiguration;
Ok(sp_trie::trie_types::Layout::<Hasher>::trie_root(build_pairs::<Header, I>(
cht_size, cht_num, hashes,
)?))
}
/// Build CHT-based header proof.
pub fn build_proof<Header, Hasher, BlocksI, HashesI>(
cht_size: Header::Number,
cht_num: Header::Number,
blocks: BlocksI,
hashes: HashesI,
) -> ClientResult<StorageProof>
where
Header: HeaderT,
Hasher: hash_db::Hasher,
Hasher::Out: Ord + codec::Codec,
BlocksI: IntoIterator<Item = Header::Number>,
HashesI: IntoIterator<Item = ClientResult<Option<Header::Hash>>>,
{
let transaction = build_pairs::<Header, _>(cht_size, cht_num, hashes)?
.into_iter()
.map(|(k, v)| (k, Some(v)))
.collect::<Vec<_>>();
let mut storage = InMemoryBackend::<Hasher>::default().update(vec![(None, transaction)]);
let trie_storage =
storage.as_trie_backend().expect("InMemoryState::as_trie_backend always returns Some; qed");
prove_read_on_trie_backend(
trie_storage,
blocks.into_iter().map(|number| encode_cht_key(number)),
)
.map_err(ClientError::Execution)
}
/// Check CHT-based header proof.
pub fn check_proof<Header, Hasher>(
local_root: Header::Hash,
local_number: Header::Number,
remote_hash: Header::Hash,
remote_proof: StorageProof,
) -> ClientResult<()>
where
Header: HeaderT,
Hasher: hash_db::Hasher,
Hasher::Out: Ord + codec::Codec,
{
do_check_proof::<Header, Hasher, _>(
local_root,
local_number,
remote_hash,
move |local_root, local_cht_key| {
read_proof_check::<Hasher, _>(
local_root,
remote_proof,
::std::iter::once(local_cht_key),
)
.map(|mut map| map.remove(local_cht_key).expect("checked proof of local_cht_key; qed"))
.map_err(|e| ClientError::from(e))
},
)
}
/// Check CHT-based header proof on pre-created proving backend.
pub fn check_proof_on_proving_backend<Header, Hasher>(
local_root: Header::Hash,
local_number: Header::Number,
remote_hash: Header::Hash,
proving_backend: &TrieBackend<MemoryDB<Hasher>, Hasher>,
) -> ClientResult<()>
where
Header: HeaderT,
Hasher: hash_db::Hasher,
Hasher::Out: Ord + codec::Codec,
{
do_check_proof::<Header, Hasher, _>(
local_root,
local_number,
remote_hash,
|_, local_cht_key| {
read_proof_check_on_proving_backend::<Hasher>(proving_backend, local_cht_key)
.map_err(|e| ClientError::from(e))
},
)
}
/// Check CHT-based header proof using the given checker function.
fn do_check_proof<Header, Hasher, F>(
local_root: Header::Hash,
local_number: Header::Number,
remote_hash: Header::Hash,
checker: F,
) -> ClientResult<()>
where
Header: HeaderT,
Hasher: hash_db::Hasher,
Hasher::Out: Ord,
F: FnOnce(Hasher::Out, &[u8]) -> ClientResult<Option<Vec<u8>>>,
{
let root: Hasher::Out = convert_hash(&local_root);
let local_cht_key = encode_cht_key(local_number);
let local_cht_value = checker(root, &local_cht_key)?;
let local_cht_value = local_cht_value.ok_or_else(|| ClientError::InvalidCHTProof)?;
let local_hash =
decode_cht_value(&local_cht_value).ok_or_else(|| ClientError::InvalidCHTProof)?;
match &local_hash[..] == remote_hash.as_ref() {
true => Ok(()),
false => Err(ClientError::InvalidCHTProof.into()),
}
}
/// Group ordered blocks by CHT number and call functor with blocks of each group.
pub fn for_each_cht_group<Header, I, F, P>(
cht_size: Header::Number,
blocks: I,
mut functor: F,
mut functor_param: P,
) -> ClientResult<()>
where
Header: HeaderT,
I: IntoIterator<Item = Header::Number>,
F: FnMut(P, Header::Number, Vec<Header::Number>) -> ClientResult<P>,
{
let mut current_cht_num = None;
let mut current_cht_blocks = Vec::new();
for block in blocks {
let new_cht_num = match block_to_cht_number(cht_size, block) {
Some(new_cht_num) => new_cht_num,
None =>
return Err(ClientError::Backend(format!(
"Cannot compute CHT root for the block #{}",
block
))
.into()),
};
let advance_to_next_cht = current_cht_num.is_some() && current_cht_num != Some(new_cht_num);
if advance_to_next_cht {
let current_cht_num = current_cht_num.expect(
"advance_to_next_cht is true;
it is true only when current_cht_num is Some; qed",
);
assert!(
new_cht_num > current_cht_num,
"for_each_cht_group only supports ordered iterators"
);
functor_param =
functor(functor_param, current_cht_num, std::mem::take(&mut current_cht_blocks))?;
}
current_cht_blocks.push(block);
current_cht_num = Some(new_cht_num);
}
if let Some(current_cht_num) = current_cht_num {
functor(functor_param, current_cht_num, std::mem::take(&mut current_cht_blocks))?;
}
Ok(())
}
/// Build pairs for computing CHT.
fn build_pairs<Header, I>(
cht_size: Header::Number,
cht_num: Header::Number,
hashes: I,
) -> ClientResult<Vec<(Vec<u8>, Vec<u8>)>>
where
Header: HeaderT,
I: IntoIterator<Item = ClientResult<Option<Header::Hash>>>,
{
let start_num = start_number(cht_size, cht_num);
let mut pairs = Vec::new();
let mut hash_index = Header::Number::zero();
for hash in hashes.into_iter() {
let hash =
hash?.ok_or_else(|| ClientError::from(ClientError::MissingHashRequiredForCHT))?;
pairs.push((encode_cht_key(start_num + hash_index).to_vec(), encode_cht_value(hash)));
hash_index += Header::Number::one();
if hash_index == cht_size {
break
}
}
if hash_index == cht_size {
Ok(pairs)
} else {
Err(ClientError::MissingHashRequiredForCHT)
}
}
/// Get the starting block of a given CHT.
/// CHT 0 includes blocks 1...SIZE,
/// CHT 1 includes blocks SIZE + 1 ... 2*SIZE.
/// More generally: CHT N includes blocks (1 + N*SIZE)...((N+1)*SIZE).
/// Block 0 (the genesis block) is excluded because its hash is assumed to be
/// known, so including it in a CHT would be redundant.
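/// For instance, with the production `SIZE` of 2048: `start_number(2048, 0) == 1`
/// and `start_number(2048, 1) == 2049` (see `start_number_works` below).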
pub fn start_number<N: AtLeast32Bit>(cht_size: N, cht_num: N) -> N {
(cht_num * cht_size) + N::one()
}
/// Get the ending block of a given CHT.
pub fn end_number<N: AtLeast32Bit>(cht_size: N, cht_num: N) -> N {
(cht_num + N::one()) * cht_size
}
/// Convert a block number to a CHT number.
/// Returns `None` for `block_num` == 0, `Some` otherwise.
pub fn block_to_cht_number<N: AtLeast32Bit>(cht_size: N, block_num: N) -> Option<N> {
if block_num == N::zero() {
None
} else {
Some((block_num - N::one()) / cht_size)
}
}
/// Convert header number into CHT key.
pub fn encode_cht_key<N: Encode>(number: N) -> Vec<u8> {
number.encode()
}
/// Convert header hash into CHT value.
fn encode_cht_value<Hash: AsRef<[u8]>>(hash: Hash) -> Vec<u8> {
hash.as_ref().to_vec()
}
/// Convert CHT value into block header hash.
pub fn decode_cht_value(value: &[u8]) -> Option<H256> {
match value.len() {
32 => Some(H256::from_slice(&value[0..32])),
_ => None,
}
}
#[cfg(test)]
mod tests {
use super::*;
use sp_runtime::{generic, traits::BlakeTwo256};
type Header = generic::Header<u64, BlakeTwo256>;
#[test]
fn is_build_required_works() {
assert_eq!(is_build_required(SIZE, 0u32.into()), None);
assert_eq!(is_build_required(SIZE, 1u32.into()), None);
assert_eq!(is_build_required(SIZE, SIZE), None);
assert_eq!(is_build_required(SIZE, SIZE + 1), None);
assert_eq!(is_build_required(SIZE, 2 * SIZE), None);
assert_eq!(is_build_required(SIZE, 2 * SIZE + 1), Some(0));
assert_eq!(is_build_required(SIZE, 2 * SIZE + 2), None);
assert_eq!(is_build_required(SIZE, 3 * SIZE), None);
assert_eq!(is_build_required(SIZE, 3 * SIZE + 1), Some(1));
assert_eq!(is_build_required(SIZE, 3 * SIZE + 2), None);
}
#[test]
fn max_cht_number_works() {
assert_eq!(max_cht_number(SIZE, 0u32.into()), None);
assert_eq!(max_cht_number(SIZE, 1u32.into()), None);
assert_eq!(max_cht_number(SIZE, SIZE), None);
assert_eq!(max_cht_number(SIZE, SIZE + 1), None);
assert_eq!(max_cht_number(SIZE, 2 * SIZE), None);
assert_eq!(max_cht_number(SIZE, 2 * SIZE + 1), Some(0));
assert_eq!(max_cht_number(SIZE, 2 * SIZE + 2), Some(0));
assert_eq!(max_cht_number(SIZE, 3 * SIZE), Some(0));
assert_eq!(max_cht_number(SIZE, 3 * SIZE + 1), Some(1));
assert_eq!(max_cht_number(SIZE, 3 * SIZE + 2), Some(1));
}
#[test]
fn start_number_works() {
assert_eq!(start_number(SIZE, 0u32), 1u32);
assert_eq!(start_number(SIZE, 1u32), SIZE + 1);
assert_eq!(start_number(SIZE, 2u32), SIZE + SIZE + 1);
}
#[test]
fn end_number_works() {
assert_eq!(end_number(SIZE, 0u32), SIZE);
assert_eq!(end_number(SIZE, 1u32), SIZE + SIZE);
assert_eq!(end_number(SIZE, 2u32), SIZE + SIZE + SIZE);
}
#[test]
fn build_pairs_fails_when_not_enough_blocks() {
assert!(build_pairs::<Header, _>(
SIZE as _,
0,
::std::iter::repeat_with(|| Ok(Some(H256::from_low_u64_be(1)))).take(SIZE as usize / 2)
)
.is_err());
}
#[test]
fn build_pairs_fails_when_missing_block() {
assert!(build_pairs::<Header, _>(
SIZE as _,
0,
::std::iter::repeat_with(|| Ok(Some(H256::from_low_u64_be(1))))
.take(SIZE as usize / 2)
.chain(::std::iter::once(Ok(None)))
.chain(
::std::iter::repeat_with(|| Ok(Some(H256::from_low_u64_be(2))))
.take(SIZE as usize / 2 - 1)
)
)
.is_err());
}
#[test]
fn compute_root_works() {
assert!(compute_root::<Header, BlakeTwo256, _>(
SIZE as _,
42,
::std::iter::repeat_with(|| Ok(Some(H256::from_low_u64_be(1)))).take(SIZE as usize)
)
.is_ok());
}
#[test]
#[should_panic]
fn build_proof_panics_when_querying_wrong_block() {
assert!(build_proof::<Header, BlakeTwo256, _, _>(
SIZE as _,
0,
vec![(SIZE * 1000) as u64],
::std::iter::repeat_with(|| Ok(Some(H256::from_low_u64_be(1)))).take(SIZE as usize)
)
.is_err());
}
#[test]
fn build_proof_works() {
assert!(build_proof::<Header, BlakeTwo256, _, _>(
SIZE as _,
0,
vec![(SIZE / 2) as u64],
::std::iter::repeat_with(|| Ok(Some(H256::from_low_u64_be(1)))).take(SIZE as usize)
)
.is_ok());
}
#[test]
#[should_panic]
fn for_each_cht_group_panics() {
let cht_size = SIZE as u64;
let _ = for_each_cht_group::<Header, _, _, _>(
cht_size,
vec![cht_size * 5, cht_size * 2],
|_, _, _| Ok(()),
(),
);
}
#[test]
fn for_each_cht_group_works() {
let cht_size = SIZE as u64;
let _ = for_each_cht_group::<Header, _, _, _>(
cht_size,
vec![
cht_size * 2 + 1,
cht_size * 2 + 2,
cht_size * 2 + 5,
cht_size * 4 + 1,
cht_size * 4 + 7,
cht_size * 6 + 1,
],
|_, cht_num, blocks| {
match cht_num {
2 => assert_eq!(
blocks,
vec![cht_size * 2 + 1, cht_size * 2 + 2, cht_size * 2 + 5]
),
4 => assert_eq!(blocks, vec![cht_size * 4 + 1, cht_size * 4 + 7]),
6 => assert_eq!(blocks, vec![cht_size * 6 + 1]),
_ => unreachable!(),
}
Ok(())
},
(),
);
}
} | /// Compute a CHT root from an iterator of block hashes. Fails if shorter than | random_line_split |
utils.go | package mysql
import (
"database/sql"
"fmt"
"math"
"math/rand"
"strconv"
"strings"
"time"
"unsafe"
"github.com/pingcap/errors"
"github.com/amyangfei/data-dam/pkg/models"
)
const (
queryMaxRetry = 3
letterBytes = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
letterIdxBits = 6 // 6 bits to represent a letter index
letterIdxMask = 1<<letterIdxBits - 1 // All 1-bits, as many as letterIdxBits
letterIdxMax = 63 / letterIdxBits // # of letter indices fitting in 63 bits
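// exclusive upper bounds for randomly generated signed TINYINT and SMALLINT values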
tinyIntMax = 1 << 7
smallIntMax = 1 << 15
)
// TableName returns table name with schema
func TableName(schema, name string) string {
return fmt.Sprintf("`%s`.`%s`", escapeName(schema), escapeName(name))
}
func escapeName(name string) string {
return strings.Replace(name, "`", "``", -1)
}
func querySQL(db *sql.DB, query string, maxRetry int) (*sql.Rows, error) {
// TODO: add retry mechanism (the maxRetry parameter is currently unused)
rows, err := db.Query(query)
if err != nil {
return nil, errors.Trace(err)
}
return rows, nil
}
func getTableFromDB(db *sql.DB, schema string, name string) (*models.Table, error) {
table := &models.Table{}
table.Schema = schema
table.Name = name
table.IndexColumns = make(map[string][]*models.Column)
err := getTableColumns(db, table, queryMaxRetry)
if err != nil {
return nil, errors.Trace(err)
}
err = getTableIndex(db, table, queryMaxRetry)
if err != nil {
return nil, errors.Trace(err)
}
if len(table.Columns) == 0 {
return nil, errors.Errorf("invalid table %s.%s", schema, name)
}
return table, nil
}
func getTableColumns(db *sql.DB, table *models.Table, maxRetry int) error {
if table.Schema == "" || table.Name == "" {
return errors.New("schema/table is empty")
}
query := fmt.Sprintf("SHOW COLUMNS FROM `%s`.`%s`", table.Schema, table.Name)
rows, err := querySQL(db, query, maxRetry)
if err != nil {
return errors.Trace(err)
}
defer rows.Close()
rowColumns, err := rows.Columns()
if err != nil {
return errors.Trace(err)
}
// Show an example.
/*
mysql> show columns from test.t;
+-------+---------+------+-----+---------+-------------------+
| Field | Type | Null | Key | Default | Extra |
+-------+---------+------+-----+---------+-------------------+
| a | int(11) | NO | PRI | NULL | |
| b | int(11) | NO | PRI | NULL | |
| c | int(11) | YES | MUL | NULL | |
| d | int(11) | YES | | NULL | |
| d | json | YES | | NULL | VIRTUAL GENERATED |
+-------+---------+------+-----+---------+-------------------+
*/
idx := 0
for rows.Next() {
data := make([]sql.RawBytes, len(rowColumns))
values := make([]interface{}, len(rowColumns))
for i := range values {
values[i] = &data[i]
}
err = rows.Scan(values...)
if err != nil {
return errors.Trace(err)
}
column := &models.Column{}
column.Idx = idx
column.Name = string(data[0])
column.Tp = string(data[1])
column.Key = string(data[3])
column.Extra = string(data[5])
bracketIdx := strings.Index(column.Tp, "(")
if bracketIdx > 0 {
column.SubTp = column.Tp[bracketIdx+1 : len(column.Tp)-1]
column.Tp = column.Tp[:bracketIdx]
}
if strings.ToLower(string(data[2])) == "no" {
column.NotNull = true
}
// Check whether column has unsigned flag.
if strings.Contains(strings.ToLower(string(data[1])), "unsigned") {
column.Unsigned = true
}
table.Columns = append(table.Columns, column)
idx++
}
if rows.Err() != nil {
return errors.Trace(rows.Err())
}
return nil
}
func getTableIndex(db *sql.DB, table *models.Table, maxRetry int) error {
if table.Schema == "" || table.Name == "" {
return errors.New("schema/table is empty")
}
query := fmt.Sprintf("SHOW INDEX FROM `%s`.`%s`", table.Schema, table.Name)
rows, err := querySQL(db, query, maxRetry)
if err != nil |
defer rows.Close()
rowColumns, err := rows.Columns()
if err != nil {
return errors.Trace(err)
}
// Show an example.
/*
mysql> show index from test.t;
+-------+------------+----------+--------------+-------------+-----------+-------------+----------+--------+------+------------+---------+---------------+
| Table | Non_unique | Key_name | Seq_in_index | Column_name | Collation | Cardinality | Sub_part | Packed | Null | Index_type | Comment | Index_comment |
+-------+------------+----------+--------------+-------------+-----------+-------------+----------+--------+------+------------+---------+---------------+
| t | 0 | PRIMARY | 1 | a | A | 0 | NULL | NULL | | BTREE | | |
| t | 0 | PRIMARY | 2 | b | A | 0 | NULL | NULL | | BTREE | | |
| t | 0 | ucd | 1 | c | A | 0 | NULL | NULL | YES | BTREE | | |
| t | 0 | ucd | 2 | d | A | 0 | NULL | NULL | YES | BTREE | | |
+-------+------------+----------+--------------+-------------+-----------+-------------+----------+--------+------+------------+---------+---------------+
*/
var columns = make(map[string][]string)
for rows.Next() {
data := make([]sql.RawBytes, len(rowColumns))
values := make([]interface{}, len(rowColumns))
for i := range values {
values[i] = &data[i]
}
err = rows.Scan(values...)
if err != nil {
return errors.Trace(err)
}
nonUnique := string(data[1])
if nonUnique == "0" {
keyName := strings.ToLower(string(data[2]))
columns[keyName] = append(columns[keyName], string(data[4]))
}
}
if rows.Err() != nil {
return errors.Trace(rows.Err())
}
table.IndexColumns = findColumns(table.Columns, columns)
return nil
}
func findColumn(columns []*models.Column, indexColumn string) *models.Column {
for _, column := range columns {
if column.Name == indexColumn {
return column
}
}
return nil
}
func findColumns(columns []*models.Column, indexColumns map[string][]string) map[string][]*models.Column {
result := make(map[string][]*models.Column)
for keyName, indexCols := range indexColumns {
cols := make([]*models.Column, 0, len(indexCols))
for _, name := range indexCols {
column := findColumn(columns, name)
if column != nil {
cols = append(cols, column)
}
}
result[keyName] = cols
}
return result
}
func findTables(db *sql.DB, schema string) ([]string, error) {
query := fmt.Sprintf("SHOW TABLES FROM `%s`", schema)
rows, err := querySQL(db, query, queryMaxRetry)
if err != nil {
return nil, errors.Trace(err)
}
defer rows.Close()
tables := make([]string, 0)
for rows.Next() {
var table string
err = rows.Scan(&table)
if err != nil {
return nil, errors.Trace(err)
}
tables = append(tables, table)
}
if rows.Err() != nil {
return nil, errors.Trace(rows.Err())
}
return tables, nil
}
func getMaxID(db *sql.DB, schema, table string) (int64, error) {
stmt := fmt.Sprintf("SELECT IFNULL(max(id), 0) FROM `%s`.`%s`", schema, table)
rows, err := db.Query(stmt)
if err != nil {
return 0, errors.Trace(err)
}
defer rows.Close()
var id int64
for rows.Next() {
if err := rows.Scan(&id); err != nil {
return 0, errors.Trace(err)
}
}
return id, nil
}
func getRandID(db *sql.DB, schema, table string) (int, error) {
stmt := fmt.Sprintf("SELECT IFNULL(id, 0) FROM `%s`.`%s` ORDER BY RAND() LIMIT 1", schema, table)
rows, err := db.Query(stmt)
if err != nil {
return 0, errors.Trace(err)
}
defer rows.Close()
var id int
for rows.Next() {
if err := rows.Scan(&id); err != nil {
return 0, errors.Trace(err)
}
}
return id, nil
}
func genRandomValue(column *models.Column) (interface{}, error) {
booleans := []string{"TRUE", "FALSE"}
upper := strings.ToUpper(column.Tp)
var value interface{}
switch upper {
case "TINYINT":
value = rand.Intn(tinyIntMax)
case "SMALLINT":
value = rand.Intn(smallIntMax)
case "INT":
value = rand.Int31()
case "INTUNSIGNED":
value = rand.Int31()
case "BOOLEAN":
value = booleans[rand.Intn(len(booleans))]
case "BIGINT":
value = rand.Int63()
case "BIGINTUNSIGNED":
value = rand.Int63()
case "FLOAT":
value = rand.Float32() * math.MaxFloat32
case "DOUBLE":
value = rand.ExpFloat64()
case "DOUBLEUNSIGNED":
value = rand.ExpFloat64()
case "DECIMAL":
value = strconv.FormatFloat(rand.ExpFloat64(), 'f', 5, 64)
case "DATETIME", "TIMESTAMP", "TIMESTAMPONUPDATE":
t := genRandomTime()
value = fmt.Sprintf("%.4d-%.2d-%.2d %.2d:%.2d:%.2d", t.Year(), t.Month(), t.Day(), t.Hour(), t.Minute(), t.Second())
case "TIME":
t := genRandomTime()
value = fmt.Sprintf("%.2d:%.2d:%.2d", t.Hour(), t.Minute(), t.Second())
case "YEAR":
t := genRandomTime()
value = fmt.Sprintf("%.4d", t.Year())
case "CHAR":
n, err := strconv.Atoi(column.SubTp)
if err != nil {
return nil, errors.Trace(err)
}
value = genRandStringBytesMaskImprSrcUnsafe(n)
case "VARCHAR":
n, err := strconv.Atoi(column.SubTp)
if err != nil {
return nil, errors.Trace(err)
}
value = genRandStringBytesMaskImprSrcUnsafe(rand.Intn(n) + 1)
case "BLOB":
value = genRandomByteString(20)
case "TEXT":
value = genRandomUnicodeString(20)
case "ENUM":
candidates := strings.Split(column.SubTp, ",")
val := candidates[rand.Intn(len(candidates))]
val = val[1 : len(val)-1]
value = val
case "SET":
candidates := strings.Split(column.SubTp, ",")
s := make([]string, 0, len(candidates))
for _, candidate := range candidates {
if rand.Intn(2) == 0 {
s = append(s, candidate[1:len(candidate)-1])
}
}
value = strings.Join(s, ",")
}
return value, nil
}
func genRandomTime() time.Time {
min := time.Date(1970, 1, 1, 0, 0, 0, 0, time.UTC).Unix()
max := time.Date(2037, 12, 31, 0, 0, 0, 0, time.UTC).Unix()
delta := max - min
sec := rand.Int63n(delta) + min
return time.Unix(sec, 0)
}
// https://stackoverflow.com/a/31832326/1115857
func genRandStringBytesMaskImprSrcUnsafe(n int) string {
b := make([]byte, n)
// A src.Int63() generates 63 random bits, enough for letterIdxMax characters!
for i, cache, remain := n-1, rand.Int63(), letterIdxMax; i >= 0; {
if remain == 0 {
cache, remain = rand.Int63(), letterIdxMax
}
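// letterIdxMask keeps only the low 6 bits (values 0-63); indices >= len(letterBytes)
// (i.e. 52-63) are skipped so that every letter stays equally likely, and the loop
// then shifts on to the next 6 bits of the cached random value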
if idx := int(cache & letterIdxMask); idx < len(letterBytes) {
b[i] = letterBytes[idx]
i--
}
cache >>= letterIdxBits
remain--
}
return *(*string)(unsafe.Pointer(&b))
}
func genRandomUnicodeString(n int) string {
var builder strings.Builder
builder.Grow(2 + 3*n)
builder.WriteByte('\'')
for i := 0; i < n; i++ {
// per character: 50% chance of an ASCII rune, 50% chance of any rune below the surrogate range (U+D800)
var r rune
switch rand.Intn(2) {
case 0:
r = rune(rand.Intn(0x80))
case 1:
r = rune(rand.Intn(0xd800))
}
switch r {
case '\'':
builder.WriteString("''")
case '\\':
builder.WriteString(`\\`)
default:
builder.WriteRune(r)
}
}
builder.WriteByte('\'')
return builder.String()
}
func genRandomByteString(n int) string {
var builder strings.Builder
builder.Grow(3 + 2*n)
builder.WriteString("x'")
for i := 0; i < n; i++ {
fmt.Fprintf(&builder, "%02X", rand.Intn(256))
}
builder.WriteString("'")
return builder.String()
}
| {
return errors.Trace(err)
} | conditional_block |
utils.go | package mysql
import (
"database/sql"
"fmt"
"math"
"math/rand"
"strconv"
"strings"
"time"
"unsafe"
"github.com/pingcap/errors"
"github.com/amyangfei/data-dam/pkg/models"
)
const (
queryMaxRetry = 3
letterBytes = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
letterIdxBits = 6 // 6 bits to represent a letter index
letterIdxMask = 1<<letterIdxBits - 1 // All 1-bits, as many as letterIdxBits
letterIdxMax = 63 / letterIdxBits // # of letter indices fitting in 63 bits
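// exclusive upper bounds for randomly generated signed TINYINT and SMALLINT values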
tinyIntMax = 1 << 7
smallIntMax = 1 << 15
)
// TableName returns table name with schema
func TableName(schema, name string) string {
return fmt.Sprintf("`%s`.`%s`", escapeName(schema), escapeName(name))
}
func escapeName(name string) string |
func querySQL(db *sql.DB, query string, maxRetry int) (*sql.Rows, error) {
// TODO: add retry mechanism (the maxRetry parameter is currently unused)
rows, err := db.Query(query)
if err != nil {
return nil, errors.Trace(err)
}
return rows, nil
}
func getTableFromDB(db *sql.DB, schema string, name string) (*models.Table, error) {
table := &models.Table{}
table.Schema = schema
table.Name = name
table.IndexColumns = make(map[string][]*models.Column)
err := getTableColumns(db, table, queryMaxRetry)
if err != nil {
return nil, errors.Trace(err)
}
err = getTableIndex(db, table, queryMaxRetry)
if err != nil {
return nil, errors.Trace(err)
}
if len(table.Columns) == 0 {
return nil, errors.Errorf("invalid table %s.%s", schema, name)
}
return table, nil
}
func getTableColumns(db *sql.DB, table *models.Table, maxRetry int) error {
if table.Schema == "" || table.Name == "" {
return errors.New("schema/table is empty")
}
query := fmt.Sprintf("SHOW COLUMNS FROM `%s`.`%s`", table.Schema, table.Name)
rows, err := querySQL(db, query, maxRetry)
if err != nil {
return errors.Trace(err)
}
defer rows.Close()
rowColumns, err := rows.Columns()
if err != nil {
return errors.Trace(err)
}
// Show an example.
/*
mysql> show columns from test.t;
+-------+---------+------+-----+---------+-------------------+
| Field | Type | Null | Key | Default | Extra |
+-------+---------+------+-----+---------+-------------------+
| a | int(11) | NO | PRI | NULL | |
| b | int(11) | NO | PRI | NULL | |
| c | int(11) | YES | MUL | NULL | |
| d | int(11) | YES | | NULL | |
| d | json | YES | | NULL | VIRTUAL GENERATED |
+-------+---------+------+-----+---------+-------------------+
*/
idx := 0
for rows.Next() {
data := make([]sql.RawBytes, len(rowColumns))
values := make([]interface{}, len(rowColumns))
for i := range values {
values[i] = &data[i]
}
err = rows.Scan(values...)
if err != nil {
return errors.Trace(err)
}
column := &models.Column{}
column.Idx = idx
column.Name = string(data[0])
column.Tp = string(data[1])
column.Key = string(data[3])
column.Extra = string(data[5])
bracketIdx := strings.Index(column.Tp, "(")
if bracketIdx > 0 {
column.SubTp = column.Tp[bracketIdx+1 : len(column.Tp)-1]
column.Tp = column.Tp[:bracketIdx]
}
if strings.ToLower(string(data[2])) == "no" {
column.NotNull = true
}
// Check whether column has unsigned flag.
if strings.Contains(strings.ToLower(string(data[1])), "unsigned") {
column.Unsigned = true
}
table.Columns = append(table.Columns, column)
idx++
}
if rows.Err() != nil {
return errors.Trace(rows.Err())
}
return nil
}
func getTableIndex(db *sql.DB, table *models.Table, maxRetry int) error {
if table.Schema == "" || table.Name == "" {
return errors.New("schema/table is empty")
}
query := fmt.Sprintf("SHOW INDEX FROM `%s`.`%s`", table.Schema, table.Name)
rows, err := querySQL(db, query, maxRetry)
if err != nil {
return errors.Trace(err)
}
defer rows.Close()
rowColumns, err := rows.Columns()
if err != nil {
return errors.Trace(err)
}
// Show an example.
/*
mysql> show index from test.t;
+-------+------------+----------+--------------+-------------+-----------+-------------+----------+--------+------+------------+---------+---------------+
| Table | Non_unique | Key_name | Seq_in_index | Column_name | Collation | Cardinality | Sub_part | Packed | Null | Index_type | Comment | Index_comment |
+-------+------------+----------+--------------+-------------+-----------+-------------+----------+--------+------+------------+---------+---------------+
| t | 0 | PRIMARY | 1 | a | A | 0 | NULL | NULL | | BTREE | | |
| t | 0 | PRIMARY | 2 | b | A | 0 | NULL | NULL | | BTREE | | |
| t | 0 | ucd | 1 | c | A | 0 | NULL | NULL | YES | BTREE | | |
| t | 0 | ucd | 2 | d | A | 0 | NULL | NULL | YES | BTREE | | |
+-------+------------+----------+--------------+-------------+-----------+-------------+----------+--------+------+------------+---------+---------------+
*/
var columns = make(map[string][]string)
for rows.Next() {
data := make([]sql.RawBytes, len(rowColumns))
values := make([]interface{}, len(rowColumns))
for i := range values {
values[i] = &data[i]
}
err = rows.Scan(values...)
if err != nil {
return errors.Trace(err)
}
nonUnique := string(data[1])
if nonUnique == "0" {
keyName := strings.ToLower(string(data[2]))
columns[keyName] = append(columns[keyName], string(data[4]))
}
}
if rows.Err() != nil {
return errors.Trace(rows.Err())
}
table.IndexColumns = findColumns(table.Columns, columns)
return nil
}
func findColumn(columns []*models.Column, indexColumn string) *models.Column {
for _, column := range columns {
if column.Name == indexColumn {
return column
}
}
return nil
}
func findColumns(columns []*models.Column, indexColumns map[string][]string) map[string][]*models.Column {
result := make(map[string][]*models.Column)
for keyName, indexCols := range indexColumns {
cols := make([]*models.Column, 0, len(indexCols))
for _, name := range indexCols {
column := findColumn(columns, name)
if column != nil {
cols = append(cols, column)
}
}
result[keyName] = cols
}
return result
}
func findTables(db *sql.DB, schema string) ([]string, error) {
query := fmt.Sprintf("SHOW TABLES FROM `%s`", schema)
rows, err := querySQL(db, query, queryMaxRetry)
if err != nil {
return nil, errors.Trace(err)
}
defer rows.Close()
tables := make([]string, 0)
for rows.Next() {
var table string
err = rows.Scan(&table)
if err != nil {
return nil, errors.Trace(err)
}
tables = append(tables, table)
}
if rows.Err() != nil {
return nil, errors.Trace(rows.Err())
}
return tables, nil
}
func getMaxID(db *sql.DB, schema, table string) (int64, error) {
stmt := fmt.Sprintf("SELECT IFNULL(max(id), 0) FROM `%s`.`%s`", schema, table)
rows, err := db.Query(stmt)
if err != nil {
return 0, errors.Trace(err)
}
defer rows.Close()
var id int64
for rows.Next() {
if err := rows.Scan(&id); err != nil {
return 0, errors.Trace(err)
}
}
return id, nil
}
func getRandID(db *sql.DB, schema, table string) (int, error) {
stmt := fmt.Sprintf("SELECT IFNULL(id, 0) FROM `%s`.`%s` ORDER BY RAND() LIMIT 1", schema, table)
rows, err := db.Query(stmt)
if err != nil {
return 0, errors.Trace(err)
}
defer rows.Close()
var id int
for rows.Next() {
if err := rows.Scan(&id); err != nil {
return 0, errors.Trace(err)
}
}
return id, nil
}
func genRandomValue(column *models.Column) (interface{}, error) {
booleans := []string{"TRUE", "FALSE"}
upper := strings.ToUpper(column.Tp)
var value interface{}
switch upper {
case "TINYINT":
value = rand.Intn(tinyIntMax)
case "SMALLINT":
value = rand.Intn(smallIntMax)
case "INT":
value = rand.Int31()
case "INTUNSIGNED":
value = rand.Int31()
case "BOOLEAN":
value = booleans[rand.Intn(len(booleans))]
case "BIGINT":
value = rand.Int63()
case "BIGINTUNSIGNED":
value = rand.Int63()
case "FLOAT":
value = rand.Float32() * math.MaxFloat32
case "DOUBLE":
value = rand.ExpFloat64()
case "DOUBLEUNSIGNED":
value = rand.ExpFloat64()
case "DECIMAL":
value = strconv.FormatFloat(rand.ExpFloat64(), 'f', 5, 64)
case "DATETIME", "TIMESTAMP", "TIMESTAMPONUPDATE":
t := genRandomTime()
value = fmt.Sprintf("%.4d-%.2d-%.2d %.2d:%.2d:%.2d", t.Year(), t.Month(), t.Day(), t.Hour(), t.Minute(), t.Second())
case "TIME":
t := genRandomTime()
value = fmt.Sprintf("%.2d:%.2d:%.2d", t.Hour(), t.Minute(), t.Second())
case "YEAR":
t := genRandomTime()
value = fmt.Sprintf("%.4d", t.Year())
case "CHAR":
n, err := strconv.Atoi(column.SubTp)
if err != nil {
return nil, errors.Trace(err)
}
value = genRandStringBytesMaskImprSrcUnsafe(n)
case "VARCHAR":
n, err := strconv.Atoi(column.SubTp)
if err != nil {
return nil, errors.Trace(err)
}
value = genRandStringBytesMaskImprSrcUnsafe(rand.Intn(n) + 1)
case "BLOB":
value = genRandomByteString(20)
case "TEXT":
value = genRandomUnicodeString(20)
case "ENUM":
candidates := strings.Split(column.SubTp, ",")
val := candidates[rand.Intn(len(candidates))]
val = val[1 : len(val)-1]
value = val
case "SET":
candidates := strings.Split(column.SubTp, ",")
s := make([]string, 0, len(candidates))
for _, candidate := range candidates {
if rand.Intn(2) == 0 {
s = append(s, candidate[1:len(candidate)-1])
}
}
value = strings.Join(s, ",")
}
return value, nil
}
func genRandomTime() time.Time {
min := time.Date(1970, 1, 1, 0, 0, 0, 0, time.UTC).Unix()
max := time.Date(2037, 12, 31, 0, 0, 0, 0, time.UTC).Unix()
delta := max - min
sec := rand.Int63n(delta) + min
return time.Unix(sec, 0)
}
// https://stackoverflow.com/a/31832326/1115857
func genRandStringBytesMaskImprSrcUnsafe(n int) string {
b := make([]byte, n)
// A src.Int63() generates 63 random bits, enough for letterIdxMax characters!
for i, cache, remain := n-1, rand.Int63(), letterIdxMax; i >= 0; {
if remain == 0 {
cache, remain = rand.Int63(), letterIdxMax
}
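// letterIdxMask keeps only the low 6 bits (values 0-63); indices >= len(letterBytes)
// (i.e. 52-63) are skipped so that every letter stays equally likely, and the loop
// then shifts on to the next 6 bits of the cached random value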
if idx := int(cache & letterIdxMask); idx < len(letterBytes) {
b[i] = letterBytes[idx]
i--
}
cache >>= letterIdxBits
remain--
}
return *(*string)(unsafe.Pointer(&b))
}
func genRandomUnicodeString(n int) string {
var builder strings.Builder
builder.Grow(2 + 3*n)
builder.WriteByte('\'')
for i := 0; i < n; i++ {
// per character: 50% chance of an ASCII rune, 50% chance of any rune below the surrogate range (U+D800)
var r rune
switch rand.Intn(2) {
case 0:
r = rune(rand.Intn(0x80))
case 1:
r = rune(rand.Intn(0xd800))
}
switch r {
case '\'':
builder.WriteString("''")
case '\\':
builder.WriteString(`\\`)
default:
builder.WriteRune(r)
}
}
builder.WriteByte('\'')
return builder.String()
}
func genRandomByteString(n int) string {
var builder strings.Builder
builder.Grow(3 + 2*n)
builder.WriteString("x'")
for i := 0; i < n; i++ {
fmt.Fprintf(&builder, "%02X", rand.Intn(256))
}
builder.WriteString("'")
return builder.String()
}
| {
return strings.Replace(name, "`", "``", -1)
} | identifier_body |
utils.go | package mysql
import (
"database/sql"
"fmt"
"math"
"math/rand"
"strconv"
"strings"
"time"
"unsafe"
"github.com/pingcap/errors"
"github.com/amyangfei/data-dam/pkg/models"
)
const (
queryMaxRetry = 3
letterBytes = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
letterIdxBits = 6 // 6 bits to represent a letter index
letterIdxMask = 1<<letterIdxBits - 1 // All 1-bits, as many as letterIdxBits
letterIdxMax = 63 / letterIdxBits // # of letter indices fitting in 63 bits
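// exclusive upper bounds for randomly generated signed TINYINT and SMALLINT values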
tinyIntMax = 1 << 7
smallIntMax = 1 << 15
)
// TableName returns table name with schema
func TableName(schema, name string) string {
return fmt.Sprintf("`%s`.`%s`", escapeName(schema), escapeName(name))
}
func | (name string) string {
return strings.Replace(name, "`", "``", -1)
}
func querySQL(db *sql.DB, query string, maxRetry int) (*sql.Rows, error) {
// TODO: add retry mechanism (the maxRetry parameter is currently unused)
rows, err := db.Query(query)
if err != nil {
return nil, errors.Trace(err)
}
return rows, nil
}
func getTableFromDB(db *sql.DB, schema string, name string) (*models.Table, error) {
table := &models.Table{}
table.Schema = schema
table.Name = name
table.IndexColumns = make(map[string][]*models.Column)
err := getTableColumns(db, table, queryMaxRetry)
if err != nil {
return nil, errors.Trace(err)
}
err = getTableIndex(db, table, queryMaxRetry)
if err != nil {
return nil, errors.Trace(err)
}
if len(table.Columns) == 0 {
return nil, errors.Errorf("invalid table %s.%s", schema, name)
}
return table, nil
}
func getTableColumns(db *sql.DB, table *models.Table, maxRetry int) error {
if table.Schema == "" || table.Name == "" {
return errors.New("schema/table is empty")
}
query := fmt.Sprintf("SHOW COLUMNS FROM `%s`.`%s`", table.Schema, table.Name)
rows, err := querySQL(db, query, maxRetry)
if err != nil {
return errors.Trace(err)
}
defer rows.Close()
rowColumns, err := rows.Columns()
if err != nil {
return errors.Trace(err)
}
// Show an example.
/*
mysql> show columns from test.t;
+-------+---------+------+-----+---------+-------------------+
| Field | Type | Null | Key | Default | Extra |
+-------+---------+------+-----+---------+-------------------+
| a | int(11) | NO | PRI | NULL | |
| b | int(11) | NO | PRI | NULL | |
| c | int(11) | YES | MUL | NULL | |
| d | int(11) | YES | | NULL | |
| d | json | YES | | NULL | VIRTUAL GENERATED |
+-------+---------+------+-----+---------+-------------------+
*/
idx := 0
for rows.Next() {
data := make([]sql.RawBytes, len(rowColumns))
values := make([]interface{}, len(rowColumns))
for i := range values {
values[i] = &data[i]
}
err = rows.Scan(values...)
if err != nil {
return errors.Trace(err)
}
column := &models.Column{}
column.Idx = idx
column.Name = string(data[0])
column.Tp = string(data[1])
column.Key = string(data[3])
column.Extra = string(data[5])
bracketIdx := strings.Index(column.Tp, "(")
if bracketIdx > 0 {
column.SubTp = column.Tp[bracketIdx+1 : len(column.Tp)-1]
column.Tp = column.Tp[:bracketIdx]
}
if strings.ToLower(string(data[2])) == "no" {
column.NotNull = true
}
// Check whether column has unsigned flag.
if strings.Contains(strings.ToLower(string(data[1])), "unsigned") {
column.Unsigned = true
}
table.Columns = append(table.Columns, column)
idx++
}
if rows.Err() != nil {
return errors.Trace(rows.Err())
}
return nil
}
func getTableIndex(db *sql.DB, table *models.Table, maxRetry int) error {
if table.Schema == "" || table.Name == "" {
return errors.New("schema/table is empty")
}
query := fmt.Sprintf("SHOW INDEX FROM `%s`.`%s`", table.Schema, table.Name)
rows, err := querySQL(db, query, maxRetry)
if err != nil {
return errors.Trace(err)
}
defer rows.Close()
rowColumns, err := rows.Columns()
if err != nil {
return errors.Trace(err)
}
// Show an example.
/*
mysql> show index from test.t;
+-------+------------+----------+--------------+-------------+-----------+-------------+----------+--------+------+------------+---------+---------------+
| Table | Non_unique | Key_name | Seq_in_index | Column_name | Collation | Cardinality | Sub_part | Packed | Null | Index_type | Comment | Index_comment |
+-------+------------+----------+--------------+-------------+-----------+-------------+----------+--------+------+------------+---------+---------------+
| t | 0 | PRIMARY | 1 | a | A | 0 | NULL | NULL | | BTREE | | |
| t | 0 | PRIMARY | 2 | b | A | 0 | NULL | NULL | | BTREE | | |
| t | 0 | ucd | 1 | c | A | 0 | NULL | NULL | YES | BTREE | | |
| t | 0 | ucd | 2 | d | A | 0 | NULL | NULL | YES | BTREE | | |
+-------+------------+----------+--------------+-------------+-----------+-------------+----------+--------+------+------------+---------+---------------+
*/
var columns = make(map[string][]string)
for rows.Next() {
data := make([]sql.RawBytes, len(rowColumns))
values := make([]interface{}, len(rowColumns))
for i := range values {
values[i] = &data[i]
}
err = rows.Scan(values...)
if err != nil {
return errors.Trace(err)
}
nonUnique := string(data[1])
if nonUnique == "0" {
keyName := strings.ToLower(string(data[2]))
columns[keyName] = append(columns[keyName], string(data[4]))
}
}
if rows.Err() != nil {
return errors.Trace(rows.Err())
}
table.IndexColumns = findColumns(table.Columns, columns)
return nil
}
func findColumn(columns []*models.Column, indexColumn string) *models.Column {
for _, column := range columns {
if column.Name == indexColumn {
return column
}
}
return nil
}
func findColumns(columns []*models.Column, indexColumns map[string][]string) map[string][]*models.Column {
result := make(map[string][]*models.Column)
for keyName, indexCols := range indexColumns {
cols := make([]*models.Column, 0, len(indexCols))
for _, name := range indexCols {
column := findColumn(columns, name)
if column != nil {
cols = append(cols, column)
}
}
result[keyName] = cols
}
return result
}
func findTables(db *sql.DB, schema string) ([]string, error) {
query := fmt.Sprintf("SHOW TABLES FROM `%s`", schema)
rows, err := querySQL(db, query, queryMaxRetry)
if err != nil {
return nil, errors.Trace(err)
}
defer rows.Close()
tables := make([]string, 0)
for rows.Next() {
var table string
err = rows.Scan(&table)
if err != nil {
return nil, errors.Trace(err)
}
tables = append(tables, table)
}
if rows.Err() != nil {
return nil, errors.Trace(rows.Err())
}
return tables, nil
}
func getMaxID(db *sql.DB, schema, table string) (int64, error) {
stmt := fmt.Sprintf("SELECT IFNULL(max(id), 0) FROM `%s`.`%s`", schema, table)
rows, err := db.Query(stmt)
if err != nil {
return 0, errors.Trace(err)
}
defer rows.Close()
var id int64
for rows.Next() {
if err := rows.Scan(&id); err != nil {
return 0, errors.Trace(err)
}
}
return id, nil
}
func getRandID(db *sql.DB, schema, table string) (int, error) {
stmt := fmt.Sprintf("SELECT IFNULL(id, 0) FROM `%s`.`%s` ORDER BY RAND() LIMIT 1", schema, table)
rows, err := db.Query(stmt)
if err != nil {
return 0, errors.Trace(err)
}
defer rows.Close()
var id int
for rows.Next() {
if err := rows.Scan(&id); err != nil {
return 0, errors.Trace(err)
}
}
return id, nil
}
func genRandomValue(column *models.Column) (interface{}, error) {
booleans := []string{"TRUE", "FALSE"}
upper := strings.ToUpper(column.Tp)
var value interface{}
switch upper {
case "TINYINT":
value = rand.Intn(tinyIntMax)
case "SMALLINT":
value = rand.Intn(smallIntMax)
case "INT":
value = rand.Int31()
case "INTUNSIGNED":
value = rand.Int31()
case "BOOLEAN":
value = booleans[rand.Intn(len(booleans))]
case "BIGINT":
value = rand.Int63()
case "BIGINTUNSIGNED":
value = rand.Int63()
case "FLOAT":
value = rand.Float32() * math.MaxFloat32
case "DOUBLE":
value = rand.ExpFloat64()
case "DOUBLEUNSIGNED":
value = rand.ExpFloat64()
case "DECIMAL":
value = strconv.FormatFloat(rand.ExpFloat64(), 'f', 5, 64)
case "DATETIME", "TIMESTAMP", "TIMESTAMPONUPDATE":
t := genRandomTime()
value = fmt.Sprintf("%.4d-%.2d-%.2d %.2d:%.2d:%.2d", t.Year(), t.Month(), t.Day(), t.Hour(), t.Minute(), t.Second())
case "TIME":
t := genRandomTime()
value = fmt.Sprintf("%.2d:%.2d:%.2d", t.Hour(), t.Minute(), t.Second())
case "YEAR":
t := genRandomTime()
value = fmt.Sprintf("%.4d", t.Year())
case "CHAR":
n, err := strconv.Atoi(column.SubTp)
if err != nil {
return nil, errors.Trace(err)
}
value = genRandStringBytesMaskImprSrcUnsafe(n)
case "VARCHAR":
n, err := strconv.Atoi(column.SubTp)
if err != nil {
return nil, errors.Trace(err)
}
value = genRandStringBytesMaskImprSrcUnsafe(rand.Intn(n) + 1)
case "BLOB":
value = genRandomByteString(20)
case "TEXT":
value = genRandomUnicodeString(20)
case "ENUM":
candidates := strings.Split(column.SubTp, ",")
val := candidates[rand.Intn(len(candidates))]
val = val[1 : len(val)-1]
value = val
case "SET":
candidates := strings.Split(column.SubTp, ",")
s := make([]string, 0, len(candidates))
for _, candidate := range candidates {
if rand.Intn(2) == 0 {
s = append(s, candidate[1:len(candidate)-1])
}
}
value = strings.Join(s, ",")
}
return value, nil
}
func genRandomTime() time.Time {
min := time.Date(1970, 1, 1, 0, 0, 0, 0, time.UTC).Unix()
max := time.Date(2037, 12, 31, 0, 0, 0, 0, time.UTC).Unix()
delta := max - min
sec := rand.Int63n(delta) + min
return time.Unix(sec, 0)
}
// https://stackoverflow.com/a/31832326/1115857
func genRandStringBytesMaskImprSrcUnsafe(n int) string {
b := make([]byte, n)
// A src.Int63() generates 63 random bits, enough for letterIdxMax characters!
for i, cache, remain := n-1, rand.Int63(), letterIdxMax; i >= 0; {
if remain == 0 {
cache, remain = rand.Int63(), letterIdxMax
}
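// letterIdxMask keeps only the low 6 bits (values 0-63); indices >= len(letterBytes)
// (i.e. 52-63) are skipped so that every letter stays equally likely, and the loop
// then shifts on to the next 6 bits of the cached random value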
if idx := int(cache & letterIdxMask); idx < len(letterBytes) {
b[i] = letterBytes[idx]
i--
}
cache >>= letterIdxBits
remain--
}
return *(*string)(unsafe.Pointer(&b))
}
func genRandomUnicodeString(n int) string {
var builder strings.Builder
builder.Grow(2 + 3*n)
builder.WriteByte('\'')
for i := 0; i < n; i++ {
// per character: 50% chance of an ASCII rune, 50% chance of any rune below the surrogate range (U+D800)
var r rune
switch rand.Intn(2) {
case 0:
r = rune(rand.Intn(0x80))
case 1:
r = rune(rand.Intn(0xd800))
}
switch r {
case '\'':
builder.WriteString("''")
case '\\':
builder.WriteString(`\\`)
default:
builder.WriteRune(r)
}
}
builder.WriteByte('\'')
return builder.String()
}
func genRandomByteString(n int) string {
var builder strings.Builder
builder.Grow(3 + 2*n)
builder.WriteString("x'")
for i := 0; i < n; i++ {
fmt.Fprintf(&builder, "%02X", rand.Intn(256))
}
builder.WriteString("'")
return builder.String()
}
| escapeName | identifier_name |
utils.go | package mysql
import (
"database/sql"
"fmt"
"math"
"math/rand"
"strconv"
"strings"
"time"
"unsafe"
"github.com/pingcap/errors"
"github.com/amyangfei/data-dam/pkg/models"
)
const (
queryMaxRetry = 3
letterBytes = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
letterIdxBits = 6 // 6 bits to represent a letter index
letterIdxMask = 1<<letterIdxBits - 1 // All 1-bits, as many as letterIdxBits
letterIdxMax = 63 / letterIdxBits // # of letter indices fitting in 63 bits
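// exclusive upper bounds for randomly generated signed TINYINT and SMALLINT values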
tinyIntMax = 1 << 7
smallIntMax = 1 << 15
)
// TableName returns table name with schema
func TableName(schema, name string) string {
return fmt.Sprintf("`%s`.`%s`", escapeName(schema), escapeName(name))
}
func escapeName(name string) string {
return strings.Replace(name, "`", "``", -1)
}
func querySQL(db *sql.DB, query string, maxRetry int) (*sql.Rows, error) {
// TODO: add retry mechanism (the maxRetry parameter is currently unused)
rows, err := db.Query(query)
if err != nil {
return nil, errors.Trace(err)
}
return rows, nil
}
func getTableFromDB(db *sql.DB, schema string, name string) (*models.Table, error) {
table := &models.Table{}
table.Schema = schema
table.Name = name
table.IndexColumns = make(map[string][]*models.Column)
err := getTableColumns(db, table, queryMaxRetry)
if err != nil {
return nil, errors.Trace(err)
}
err = getTableIndex(db, table, queryMaxRetry)
if err != nil {
return nil, errors.Trace(err)
}
if len(table.Columns) == 0 {
return nil, errors.Errorf("invalid table %s.%s", schema, name)
}
return table, nil
}
func getTableColumns(db *sql.DB, table *models.Table, maxRetry int) error {
if table.Schema == "" || table.Name == "" {
return errors.New("schema/table is empty")
}
query := fmt.Sprintf("SHOW COLUMNS FROM `%s`.`%s`", table.Schema, table.Name)
rows, err := querySQL(db, query, maxRetry)
if err != nil {
return errors.Trace(err)
} | if err != nil {
return errors.Trace(err)
}
// Show an example.
/*
mysql> show columns from test.t;
+-------+---------+------+-----+---------+-------------------+
| Field | Type | Null | Key | Default | Extra |
+-------+---------+------+-----+---------+-------------------+
| a | int(11) | NO | PRI | NULL | |
| b | int(11) | NO | PRI | NULL | |
| c | int(11) | YES | MUL | NULL | |
| d | int(11) | YES | | NULL | |
| d | json | YES | | NULL | VIRTUAL GENERATED |
+-------+---------+------+-----+---------+-------------------+
*/
idx := 0
for rows.Next() {
data := make([]sql.RawBytes, len(rowColumns))
values := make([]interface{}, len(rowColumns))
for i := range values {
values[i] = &data[i]
}
err = rows.Scan(values...)
if err != nil {
return errors.Trace(err)
}
column := &models.Column{}
column.Idx = idx
column.Name = string(data[0])
column.Tp = string(data[1])
column.Key = string(data[3])
column.Extra = string(data[5])
bracketIdx := strings.Index(column.Tp, "(")
if bracketIdx > 0 {
column.SubTp = column.Tp[bracketIdx+1 : len(column.Tp)-1]
column.Tp = column.Tp[:bracketIdx]
}
if strings.ToLower(string(data[2])) == "no" {
column.NotNull = true
}
// Check whether column has unsigned flag.
if strings.Contains(strings.ToLower(string(data[1])), "unsigned") {
column.Unsigned = true
}
table.Columns = append(table.Columns, column)
idx++
}
if rows.Err() != nil {
return errors.Trace(rows.Err())
}
return nil
}
func getTableIndex(db *sql.DB, table *models.Table, maxRetry int) error {
if table.Schema == "" || table.Name == "" {
return errors.New("schema/table is empty")
}
query := fmt.Sprintf("SHOW INDEX FROM `%s`.`%s`", table.Schema, table.Name)
rows, err := querySQL(db, query, maxRetry)
if err != nil {
return errors.Trace(err)
}
defer rows.Close()
rowColumns, err := rows.Columns()
if err != nil {
return errors.Trace(err)
}
// Show an example.
/*
mysql> show index from test.t;
+-------+------------+----------+--------------+-------------+-----------+-------------+----------+--------+------+------------+---------+---------------+
| Table | Non_unique | Key_name | Seq_in_index | Column_name | Collation | Cardinality | Sub_part | Packed | Null | Index_type | Comment | Index_comment |
+-------+------------+----------+--------------+-------------+-----------+-------------+----------+--------+------+------------+---------+---------------+
| t | 0 | PRIMARY | 1 | a | A | 0 | NULL | NULL | | BTREE | | |
| t | 0 | PRIMARY | 2 | b | A | 0 | NULL | NULL | | BTREE | | |
| t | 0 | ucd | 1 | c | A | 0 | NULL | NULL | YES | BTREE | | |
| t | 0 | ucd | 2 | d | A | 0 | NULL | NULL | YES | BTREE | | |
+-------+------------+----------+--------------+-------------+-----------+-------------+----------+--------+------+------------+---------+---------------+
*/
var columns = make(map[string][]string)
for rows.Next() {
data := make([]sql.RawBytes, len(rowColumns))
values := make([]interface{}, len(rowColumns))
for i := range values {
values[i] = &data[i]
}
err = rows.Scan(values...)
if err != nil {
return errors.Trace(err)
}
nonUnique := string(data[1])
if nonUnique == "0" {
keyName := strings.ToLower(string(data[2]))
columns[keyName] = append(columns[keyName], string(data[4]))
}
}
if rows.Err() != nil {
return errors.Trace(rows.Err())
}
table.IndexColumns = findColumns(table.Columns, columns)
return nil
}
func findColumn(columns []*models.Column, indexColumn string) *models.Column {
for _, column := range columns {
if column.Name == indexColumn {
return column
}
}
return nil
}
func findColumns(columns []*models.Column, indexColumns map[string][]string) map[string][]*models.Column {
result := make(map[string][]*models.Column)
for keyName, indexCols := range indexColumns {
cols := make([]*models.Column, 0, len(indexCols))
for _, name := range indexCols {
column := findColumn(columns, name)
if column != nil {
cols = append(cols, column)
}
}
result[keyName] = cols
}
return result
}
func findTables(db *sql.DB, schema string) ([]string, error) {
query := fmt.Sprintf("SHOW TABLES FROM `%s`", schema)
rows, err := querySQL(db, query, queryMaxRetry)
if err != nil {
return nil, errors.Trace(err)
}
defer rows.Close()
tables := make([]string, 0)
for rows.Next() {
var table string
err = rows.Scan(&table)
if err != nil {
return nil, errors.Trace(err)
}
tables = append(tables, table)
}
if rows.Err() != nil {
return nil, errors.Trace(rows.Err())
}
return tables, nil
}
func getMaxID(db *sql.DB, schema, table string) (int64, error) {
stmt := fmt.Sprintf("SELECT IFNULL(max(id), 0) FROM `%s`.`%s`", schema, table)
rows, err := db.Query(stmt)
if err != nil {
return 0, errors.Trace(err)
}
defer rows.Close()
var id int64
for rows.Next() {
if err := rows.Scan(&id); err != nil {
return 0, errors.Trace(err)
}
}
return id, nil
}
func getRandID(db *sql.DB, schema, table string) (int, error) {
stmt := fmt.Sprintf("SELECT IFNULL(id, 0) FROM `%s`.`%s` ORDER BY RAND() LIMIT 1", schema, table)
rows, err := db.Query(stmt)
if err != nil {
return 0, errors.Trace(err)
}
defer rows.Close()
var id int
for rows.Next() {
if err := rows.Scan(&id); err != nil {
return 0, errors.Trace(err)
}
}
return id, nil
}
func genRandomValue(column *models.Column) (interface{}, error) {
booleans := []string{"TRUE", "FALSE"}
upper := strings.ToUpper(column.Tp)
var value interface{}
switch upper {
case "TINYINT":
value = rand.Intn(tinyIntMax)
case "SMALLINT":
value = rand.Intn(smallIntMax)
case "INT":
value = rand.Int31()
case "INTUNSIGNED":
value = rand.Int31()
case "BOOLEAN":
value = booleans[rand.Intn(len(booleans))]
case "BIGINT":
value = rand.Int63()
case "BIGINTUNSIGNED":
value = rand.Int63()
case "FLOAT":
value = rand.Float32() * math.MaxFloat32
case "DOUBLE":
value = rand.ExpFloat64()
case "DOUBLEUNSIGNED":
value = rand.ExpFloat64()
case "DECIMAL":
value = strconv.FormatFloat(rand.ExpFloat64(), 'f', 5, 64)
case "DATETIME", "TIMESTAMP", "TIMESTAMPONUPDATE":
t := genRandomTime()
value = fmt.Sprintf("%.4d-%.2d-%.2d %.2d:%.2d:%.2d", t.Year(), t.Month(), t.Day(), t.Hour(), t.Minute(), t.Second())
case "TIME":
t := genRandomTime()
value = fmt.Sprintf("%.2d:%.2d:%.2d", t.Hour(), t.Minute(), t.Second())
case "YEAR":
t := genRandomTime()
value = fmt.Sprintf("%.4d", t.Year())
case "CHAR":
n, err := strconv.Atoi(column.SubTp)
if err != nil {
return nil, errors.Trace(err)
}
value = genRandStringBytesMaskImprSrcUnsafe(n)
case "VARCHAR":
n, err := strconv.Atoi(column.SubTp)
if err != nil {
return nil, errors.Trace(err)
}
value = genRandStringBytesMaskImprSrcUnsafe(rand.Intn(n) + 1)
case "BLOB":
value = genRandomByteString(20)
case "TEXT":
value = genRandomUnicodeString(20)
case "ENUM":
candidates := strings.Split(column.SubTp, ",")
val := candidates[rand.Intn(len(candidates))]
val = val[1 : len(val)-1]
value = val
case "SET":
candidates := strings.Split(column.SubTp, ",")
s := make([]string, 0, len(candidates))
for _, candidate := range candidates {
if rand.Intn(2) == 0 {
s = append(s, candidate[1:len(candidate)-1])
}
}
value = strings.Join(s, ",")
}
return value, nil
}
func genRandomTime() time.Time {
min := time.Date(1970, 1, 1, 0, 0, 0, 0, time.UTC).Unix()
max := time.Date(2037, 12, 31, 0, 0, 0, 0, time.UTC).Unix()
delta := max - min
sec := rand.Int63n(delta) + min
return time.Unix(sec, 0)
}
// https://stackoverflow.com/a/31832326/1115857
func genRandStringBytesMaskImprSrcUnsafe(n int) string {
b := make([]byte, n)
// A src.Int63() generates 63 random bits, enough for letterIdxMax characters!
for i, cache, remain := n-1, rand.Int63(), letterIdxMax; i >= 0; {
if remain == 0 {
cache, remain = rand.Int63(), letterIdxMax
}
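// letterIdxMask keeps only the low 6 bits (values 0-63); indices >= len(letterBytes)
// (i.e. 52-63) are skipped so that every letter stays equally likely, and the loop
// then shifts on to the next 6 bits of the cached random value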
if idx := int(cache & letterIdxMask); idx < len(letterBytes) {
b[i] = letterBytes[idx]
i--
}
cache >>= letterIdxBits
remain--
}
return *(*string)(unsafe.Pointer(&b))
}
func genRandomUnicodeString(n int) string {
var builder strings.Builder
builder.Grow(2 + 3*n)
builder.WriteByte('\'')
for i := 0; i < n; i++ {
// per character: 50% chance of an ASCII rune, 50% chance of any rune below the surrogate range (U+D800)
var r rune
switch rand.Intn(2) {
case 0:
r = rune(rand.Intn(0x80))
case 1:
r = rune(rand.Intn(0xd800))
}
switch r {
case '\'':
builder.WriteString("''")
case '\\':
builder.WriteString(`\\`)
default:
builder.WriteRune(r)
}
}
builder.WriteByte('\'')
return builder.String()
}
func genRandomByteString(n int) string {
var builder strings.Builder
builder.Grow(3 + 2*n)
builder.WriteString("x'")
for i := 0; i < n; i++ {
fmt.Fprintf(&builder, "%02X", rand.Intn(256))
}
builder.WriteString("'")
return builder.String()
} | defer rows.Close()
rowColumns, err := rows.Columns() | random_line_split |
stimuli_LSL.py | from __future__ import division
import cv2
import numpy as np
# import freenect # only use for debugging
from psychopy import *
import sys, os
from socket import *
from eeg_cnn import *
'''Take the image segmented by the model, pick the valid contours (not too small or too big) and calculate
their distance from the camera; these contours are the targets that are flickered at different frequencies.'''
'''call shell script to take image and segment it'''
os.system('./demo_test.sh')
'''contours and depth'''
screenX = 3840 # resolution of monitor being used, use to rescale and display image for psychopy
screenY = 2160
# file names
rgb_and_segm_img = 'rgb_and_segm.png' # file with rgb image and object segmentation image
depth_img = 'depth.jpg' # file with depth image
# edge detections variables
lower_threshold = 112
upper_threshold = 170
sobel_size = 3
area_min = 2000
area_max = 40000
midcont = [] # contours with areas between are_min and area_max
obj_masks = [] # masks for grating shape
center_coords = []
# object segmentation (filter out floor and walls)
pred_color = cv2.imread(rgb_and_segm_img, 1)
im_rgb = pred_color[:, :640] # crop to get rgb image alone
im_seg = pred_color[:, 640:] # crop to get object segmentation image alone
hsv = cv2.cvtColor(im_seg, cv2.COLOR_BGR2HSV) # turn image in hsv color space
hsv_mask = cv2.inRange(hsv, np.array([0,100,150]), np.array([255,255,255])) # threshold on hsv image (use as mask on hsv image) - less saturated objs are background
res = cv2.bitwise_and(im_seg,im_seg,mask=hsv_mask) # result of mask on im_seg
# # get depth image to calculate depth value (use for debugging when camera is connected to client and not to robot)
# number_of_frames = 10 # number of frames to average on
# depth = np.zeros((480, 640))
# for i in range(number_of_frames):
# frame,_ = freenect.sync_get_depth() # 11 bits
# depth += frame
# depth = depth/number_of_frames
# np.clip(depth, 0, 2**10 - 1, depth) # get rid of background noise
# depth = cv2.imread(depth_img, 0) # use for debugging, gives wrong depth (since it reads 8 bit image - need 10 bit)
# load depth image sent by robot (server) and saved by client
depth = np.load('depth.npy') # read numpy array (10 bit grayscale image)
# edge detection
contour_list = []
canny = cv2.Canny(res, lower_threshold, upper_threshold, apertureSize=sobel_size) # finds edges, but not always closed contours (needed for cv2.findContours)
dilated = cv2.dilate(canny, np.ones((3, 3))) # makes sure canny edges are closed for findContours
_, contours, hierarchy = cv2.findContours(dilated,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE) # finds contours around inner and outer canny edges, use hierarchy to select only inner ones
# cv2.drawContours(res, contours, -1, (255,255,0), 1) # draw all contours - debugging
# in hierarchy, if the first element is -1 the contour has no other contour on the same hierarchy level (it is an inner one); choose only inner contours (inside of the canny edge line)
ind = 0 # initialise index for hierarchy
for c in contours:
if hierarchy[0][ind][0] == -1:
contour_list.append(c)
ind += 1
# select valid contours from inner most one
for cont in contour_list:
area = int(cv2.contourArea(cont))
if (area > area_min) and (area < area_max): # only pick contours between specified values (to avoid contours around objects too big or too small)
# compute center of contour
M = cv2.moments(cont)
cX = int(M["m10"] / M["m00"])
cY = int(M["m01"] / M["m00"])
# calculate min depth of all valid contours for if statement
center_neighbourhood = depth[cY-10:cY+10,cX-10:cX+10]
center_depth = center_neighbourhood.mean()
if center_depth < 1020: # don't calculate center of contours in background (most probably noise)
center_coords.append([cX,cY]) # add to list of coordinates of centers of contours
# calculate avg depth value of valid contours excluding white pixels
each_obj = np.zeros((480,640)) # blank image
cv2.drawContours(each_obj, [cont], -1, 255, -1) # fill valid contours on blank image
obj_masks.append(each_obj) # use for mask in flickering image
pts = np.where(each_obj == 255) # find coordinates of pixels in filled contours
obj_depths = depth[pts] # get grayscale value of pixels in filled contours
center_depth = obj_depths[np.where(obj_depths<1023)].mean() # take mean of non-white pixels (grayscale value < 1023)
cv2.circle(im_rgb, (cX, cY), 3, (255, 255, 255), -1)
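# the inverse-linear fit 1/(raw*-0.00307 + 3.33) is a common approximation of the
# Kinect's 10-bit raw depth value in metres; multiplying by 100 converts it to centimetres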
depth_cm = int(100/((-0.00307*(center_depth))+3.33)) # convert pixel value to depth in cm, measurement not that accurate so only keep int value
cv2.putText(im_rgb, str(depth_cm)+' '+'cm', (cX - 20, cY - 80), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 0), 2)
print 'pixel value of depth: ', center_depth, 'depth in cm: ', depth_cm, 'at coordinates ', 'X: ', cX,'Y: ', cY
midcont.append(cont) # 3d list of contours
cv2.drawContours(im_rgb, midcont, -1, (255,0,0), 3) # draw valid contours
im_rgb_scaled = cv2.resize(im_rgb,None,fx=screenX/640,fy=screenY/480,interpolation = cv2.INTER_CUBIC) # 640,480 are original dimensions of image
cv2.imwrite('segmented.jpg', im_rgb_scaled)
# cv2.imshow('depth', res) # openCV conflicts with psychopy, if imshow() is called, the stimuli cannot be shown
# k = cv2.waitKey(0) & 0xFF
if len(center_coords) != 0: # show flickering stimuli if we have a target
'''stimuli'''
#initialise variables
# method 1
exp_frames = 90 # show image for twice this number of frames (see flip())
screen_refresh_rate = 60 # refresh rate of monitor being used
# method2
waitdur = 2 # wait time before showing stimuli, in seconds
trialdur = 3 # duration of stimuli, in seconds
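# each list below encodes one second of frames at 60 fps (1 = grating drawn, -1 = blank):
# a 6-frame on/off cycle gives 10 Hz, a 5-frame cycle 12 Hz and a 4-frame cycle 15 Hz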
tenHz = [1, 1, 1, -1, -1, -1, 1, 1, 1, -1, -1, -1, 1, 1, 1, -1, -1, -1, 1, 1, 1, -1, -1, -1, 1, 1, 1, -1, -1, -1, 1, 1, 1, -1, -1, -1, 1, 1, 1, -1, -1, -1,1, 1, 1, -1, -1, -1, 1, 1, 1, -1, -1, -1, 1, 1, 1, -1, -1, -1]
twelveHz = [1, 1, 1, -1, -1, 1, 1, 1, -1, -1, 1, 1, 1, -1, -1, 1, 1, 1, -1, -1, 1, 1, 1, -1, -1, 1, 1, 1, -1, -1, 1, 1, 1, -1, -1, 1, 1, 1, -1, -1, 1, 1, 1, -1, -1, 1, 1, 1, -1, -1, 1, 1, 1, -1, -1, 1, 1, 1, -1, -1]
fifteenHz = [1, 1, -1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1, 1, -1, -1]
freqs = [tenHz, twelveHz, fifteenHz]
# format contours for mask attribute in grating (to give grating the shape of the target)
for mask in obj_masks:
mask[mask==0] = -1
mask[mask==255] = 1 # 1 is black
obj_masks = obj_masks[:3] # have a maximum of 3 stimuli in image (frequency decoder trained on 3 frequencies)
center_coords = center_coords[:3]
# display image and flicker contours
# create a window
mywin = visual.Window((screenX,screenY),monitor="testMonitor",fullscr=True,units="deg")
grating_list = []
phases = [10,12,15] # frequencies of stimuli in fps
i = 0 # initialise index to assign frequencies to stimuli
# create some stimuli
img = visual.ImageStim(mywin, 'segmented.jpg') # show gratigs on top of this image
for mask in obj_masks:
grating = visual.GratingStim(win=mywin, mask=mask, units='pix', size=(screenX, -screenY), pos=[0,0], sf=3, tex='sqr') # mask is image of dim(640, 480) centered in the middle of the frame
# grating_list.append([grating,phases[i]]) # list of gratings and associated frequencies # method 1
grating_list.append([grating, freqs[i]]) # method 2
i += 1
fixation = visual.GratingStim(mywin, tex=None, mask='gauss', sf=0, size=0,name='fixation', autoLog=False) # empty frame to alternate with grating for flickering stimulus
'''receive data from eeg kit to use for CNN'''
s = socket(AF_INET,SOCK_STREAM)
host = '10.245.233.148'
port = 12396
s.connect((host, port))
rec = s.recv(12)
# show targets for 2 second without flickering, to allow user to choose
if rec == 'startstimuli':
# # method 1
# for frameN in range(60):
# img.draw()
# event.clearEvents()
# mywin.flip()
# # draw the stimuli and update the window
# stream = s.recv(10)
# print stream
# for frameN in range(exp_frames):
# img.draw() # background image
# for obj in grating_list: # draw valid contours to flicker
# frame_rate = obj[1]
# if int(frameN/(screen_refresh_rate/(frame_rate*4)))%2 != 0:
# fixation.draw()
# if int(frameN/(screen_refresh_rate/(frame_rate*4)))%2 == 0:
# obj[0].draw()
# if len(event.getKeys())>0: # press any key to exit
# break
# event.clearEvents()
# mywin.flip() # syncronizes for loop with refresh rate of screen
# method 2
mywin.flip() # syncronises loop with screen refresh rate
for seconds in range(int(waitdur*60)): # draw just image with targets
img.draw()
mywin.flip()
stream = s.recv(10)
print stream
for seconds in range(int(trialdur)): # flicker targets
for frameN in range(len(freqs[0])): # 60 frames (1 sec)
img.draw()
for obj in grating_list:
frame_f0 = obj[1] # 10, 12 or 15 hz
if frame_f0[frameN] == 1 :
obj[0].draw()
if frame_f0[frameN] == -1 :
fixation.draw()
event.clearEvents()
mywin.flip()
mywin.flip(clearBuffer=True)
# cleanup
mywin.close()
# receive data from EEG kit
sample_tot = []
while(True):
|
sample_tot = np.asarray(sample_tot[:1500]).reshape(1500,len(sample_tot[0]))
sample_tot = np.array([sample_tot]) # format needed for CNN (classification)
chosen_target = classification(sample_tot[0,:,:],0) # runs CNN on sample_tot, returns 0, 1 or 2, second parameter is not used
target_coords = center_coords[chosen_target]
else:
target_coords = [1000,1000] # if segmentation doesn't find target to flicker tell client to repeat process
np.save('target.npy', np.array(target_coords))
| sample = s.recv(104)
if len(sample) == 104:
sample = np.fromstring(sample) # recieve array of 13 elements (last 5 are metadata)
sample = sample[:-5]
sample_tot.append(sample)
if len(sample) == 0:
break | conditional_block |
stimuli_LSL.py | from __future__ import division
import cv2
import numpy as np
# import freenect # only use for debugging
from psychopy import *
import sys, os
from socket import *
from eeg_cnn import *
'''take image segmented by model, pick the right contours (not too small or too big) and calculate theur distance from the camera,
these contours are the targets that are flickered at different frequencies'''
'''call shell script to take image and segment it'''
os.system('./demo_test.sh')
'''contours and depth'''
screenX = 3840 # resolution of monitor being used, use to rescale and display image for psychopy
screenY = 2160
# file names
rgb_and_segm_img = 'rgb_and_segm.png' # file with rgb image and object segmentation image
depth_img = 'depth.jpg' # file with depth image
# edge detections variables
lower_threshold = 112
upper_threshold = 170
sobel_size = 3
area_min = 2000
area_max = 40000
midcont = [] # contours with areas between are_min and area_max
obj_masks = [] # masks for grating shape
center_coords = []
# object segmentation (filter out floor and walls)
pred_color = cv2.imread(rgb_and_segm_img, 1)
im_rgb = pred_color[:, :640] # crop to get rgb image alone
im_seg = pred_color[:, 640:] # crop to get object segmentation image alone
hsv = cv2.cvtColor(im_seg, cv2.COLOR_BGR2HSV) # turn image in hsv color space
hsv_mask = cv2.inRange(hsv, np.array([0,100,150]), np.array([255,255,255])) # threshold on hsv image (use as mask on hsv image) - less saturated objs are background
res = cv2.bitwise_and(im_seg,im_seg,mask=hsv_mask) # result of mask on im_seg
# # get depth image to calculate depth value (use for debugging when camera is connected to client and not to robot)
# number_of_frames = 10 # number of frames to average on
# depth = np.zeros((480, 640))
# for i in range(number_of_frames):
# frame,_ = freenect.sync_get_depth() # 11 bits
# depth += frame
# depth = depth/number_of_frames
# np.clip(depth, 0, 2**10 - 1, depth) # get rid of background noise
# depth = cv2.imread(depth_img, 0) # use for debugging, gives wrong depth (since it reads 8 bit image - need 10 bit)
# load depth image sent by robot (server) and saved by client
depth = np.load('depth.npy') # read numpy array (10 bit grayscale image)
# edge detection
contour_list = []
canny = cv2.Canny(res, lower_threshold, upper_threshold, apertureSize=sobel_size) # finds edges, but not always closed contours (needed for cv2.findContours)
dilated = cv2.dilate(canny, np.ones((3, 3))) # makes sure canny edges are closed for findContours
_, contours, hierarchy = cv2.findContours(dilated,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE) # finds contours around inner and outer canny edges, use hierarchy to select only inner ones
# cv2.drawContours(res, contours, -1, (255,255,0), 1) # draw all contours - debugging
# in hierarchy, if first element is -1 then contour has no other contour on same hierarchy level (is inner one), choose only inner contours (insde of canny edge line)
ind = 0 # initialise index for hierarchy
for c in contours:
if hierarchy[0][ind][0] == -1:
contour_list.append(c)
ind += 1
# select valid contours from inner most one
for cont in contour_list:
area = int(cv2.contourArea(cont))
if (area > area_min) and (area < area_max): # only pick contours between specified values (to avoid contours aruond objects too big or too small)
# compute center of contour
M = cv2.moments(cont)
cX = int(M["m10"] / M["m00"])
cY = int(M["m01"] / M["m00"])
# calculate min depth of all valid contours for if statement
center_neighbourhood = depth[cY-10:cY+10,cX-10:cX+10]
center_depth = center_neighbourhood.mean()
if center_depth < 1020: # don't calculate center of contours in background (most probably noise)
center_coords.append([cX,cY]) # add to list of coordinates of centers of contours
# calculate avg depth value of valid contours excluding white pixels
each_obj = np.zeros((480,640)) # blank image
cv2.drawContours(each_obj, [cont], -1, 255, -1) # fill valid contours on blank image
obj_masks.append(each_obj) # use for mask in flickering image
pts = np.where(each_obj == 255) # find coordinates of pixels in filled contours
obj_depths = depth[pts] # get grayscale value of pixels in filled contours
center_depth = obj_depths[np.where(obj_depths<1023)].mean() # take mean of non-white pixels (grayscale value < 1023)
cv2.circle(im_rgb, (cX, cY), 3, (255, 255, 255), -1)
depth_cm = int(100/((-0.00307*(center_depth))+3.33)) # convert pixel value to depth in cm, measurement not that accurate so only keep int value
cv2.putText(im_rgb, str(depth_cm)+' '+'cm', (cX - 20, cY - 80), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 0), 2)
print 'pixel value of depth: ', center_depth, 'depth in cm: ', depth_cm, 'at coordinates ', 'X: ', cX,'Y: ', cY
midcont.append(cont) # 3d list of contours
cv2.drawContours(im_rgb, midcont, -1, (255,0,0), 3) # draw valid contours
im_rgb_scaled = cv2.resize(im_rgb,None,fx=screenX/640,fy=screenY/480,interpolation = cv2.INTER_CUBIC) # 640,480 are original dimensions of image
cv2.imwrite('segmented.jpg', im_rgb_scaled)
# cv2.imshow('depth', res) # openCV conflicts with psychopy, if imshow() is called, the stimuli cannot be shown
# k = cv2.waitKey(0) & 0xFF
if len(center_coords) != 0: # show flickering stimuli if we have a target
'''stimuli'''
#initialise variables
# method 1
exp_frames = 90 # show image for twice this number of frames ( see flip() )
screen_refresh_rate = 60 # refresh rate of monitor being used
# method2
waitdur = 2 # wait time before showing stimuli, in seconds
trialdur = 3 # duration of stimuli, in seconds
tenHz = [1, 1, 1, -1, -1, -1, 1, 1, 1, -1, -1, -1, 1, 1, 1, -1, -1, -1, 1, 1, 1, -1, -1, -1, 1, 1, 1, -1, -1, -1, 1, 1, 1, -1, -1, -1, 1, 1, 1, -1, -1, -1,1, 1, 1, -1, -1, -1, 1, 1, 1, -1, -1, -1, 1, 1, 1, -1, -1, -1]
twelveHz = [1, 1, 1, -1, -1, 1, 1, 1, -1, -1, 1, 1, 1, -1, -1, 1, 1, 1, -1, -1, 1, 1, 1, -1, -1, 1, 1, 1, -1, -1, 1, 1, 1, -1, -1, 1, 1, 1, -1, -1, 1, 1, 1, -1, -1, 1, 1, 1, -1, -1, 1, 1, 1, -1, -1, 1, 1, 1, -1, -1]
fifteenHz = [1, 1, -1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1, 1, -1, -1]
freqs = [tenHz, twelveHz, fifteenHz]
# format contours for mask attribute in grating (to give grating the shape of the target)
for mask in obj_masks:
mask[mask==0] = -1
mask[mask==255] = 1 # 1 is black
obj_masks = obj_masks[:3] # have a maximum of 3 stimuli in image (frequency decoder trained on 3 frequencies)
center_coords = center_coords[:3]
# display image and flicker contours
# create a window
mywin = visual.Window((screenX,screenY),monitor="testMonitor",fullscr=True,units="deg")
grating_list = []
phases = [10,12,15] # frequencies of stimuli in fps
i = 0 # initialise index to assign frequencies to stimuli
# create some stimuli
img = visual.ImageStim(mywin, 'segmented.jpg') # show gratigs on top of this image
for mask in obj_masks:
grating = visual.GratingStim(win=mywin, mask=mask, units='pix', size=(screenX, -screenY), pos=[0,0], sf=3, tex='sqr') # mask is image of dim(640, 480) centered in the middle of the frame
# grating_list.append([grating,phases[i]]) # list of gratings and associated frequencies # method 1
grating_list.append([grating, freqs[i]]) # method 2
i += 1
fixation = visual.GratingStim(mywin, tex=None, mask='gauss', sf=0, size=0,name='fixation', autoLog=False) # empty frame to alternate with grating for flickering stimulus
'''receive data from eeg kit to use for CNN'''
s = socket(AF_INET,SOCK_STREAM)
host = '10.245.233.148'
port = 12396
s.connect((host, port))
| # show targets for 2 second without flickering, to allow user to choose
if rec == 'startstimuli':
# # method 1
# for frameN in range(60):
# img.draw()
# event.clearEvents()
# mywin.flip()
# # draw the stimuli and update the window
# stream = s.recv(10)
# print stream
# for frameN in range(exp_frames):
# img.draw() # background image
# for obj in grating_list: # draw valid contours to flicker
# frame_rate = obj[1]
# if int(frameN/(screen_refresh_rate/(frame_rate*4)))%2 != 0:
# fixation.draw()
# if int(frameN/(screen_refresh_rate/(frame_rate*4)))%2 == 0:
# obj[0].draw()
# if len(event.getKeys())>0: # press any key to exit
# break
# event.clearEvents()
# mywin.flip() # syncronizes for loop with refresh rate of screen
# method 2
mywin.flip() # syncronises loop with screen refresh rate
for seconds in range(int(waitdur*60)): # draw just image with targets
img.draw()
mywin.flip()
stream = s.recv(10)
print stream
for seconds in range(int(trialdur)): # flicker targets
for frameN in range(len(freqs[0])): # 60 frames (1 sec)
img.draw()
for obj in grating_list:
frame_f0 = obj[1] # 10, 12 or 15 hz
if frame_f0[frameN] == 1 :
obj[0].draw()
if frame_f0[frameN] == -1 :
fixation.draw()
event.clearEvents()
mywin.flip()
mywin.flip(clearBuffer=True)
# cleanup
mywin.close()
# receive data from EEG kit
sample_tot = []
while(True):
sample = s.recv(104)
if len(sample) == 104:
sample = np.fromstring(sample) # recieve array of 13 elements (last 5 are metadata)
sample = sample[:-5]
sample_tot.append(sample)
if len(sample) == 0:
break
sample_tot = np.asarray(sample_tot[:1500]).reshape(1500,len(sample_tot[0]))
sample_tot = np.array([sample_tot]) # format needed for CNN (classification)
chosen_target = classification(sample_tot[0,:,:],0) # runs CNN on sample_tot, returns 0, 1 or 2, second parameter is not used
target_coords = center_coords[chosen_target]
else:
target_coords = [1000,1000] # if segmentation doesn't find target to flicker tell client to repeat process
np.save('target.npy', np.array(target_coords)) | rec = s.recv(12)
| random_line_split |
descriptor.pb.go | // Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: github.com/containerd/containerd/api/types/descriptor.proto
/*
Package types is a generated protocol buffer package.
It is generated from these files:
github.com/containerd/containerd/api/types/descriptor.proto
github.com/containerd/containerd/api/types/metrics.proto
github.com/containerd/containerd/api/types/mount.proto
github.com/containerd/containerd/api/types/platform.proto
It has these top-level messages:
Descriptor
Metric
Mount
Platform
*/
package types
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
// skipping weak import gogoproto "github.com/gogo/protobuf/gogoproto"
import github_com_opencontainers_go_digest "github.com/opencontainers/go-digest"
import strings "strings"
import reflect "reflect"
import io "io"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion2 // please upgrade the proto package
// Descriptor describes a blob in a content store.
//
// This descriptor can be used to reference content from an
// oci descriptor found in a manifest.
// See https://godoc.org/github.com/opencontainers/image-spec/specs-go/v1#Descriptor
type Descriptor struct {
MediaType string `protobuf:"bytes,1,opt,name=media_type,json=mediaType,proto3" json:"media_type,omitempty"`
Digest github_com_opencontainers_go_digest.Digest `protobuf:"bytes,2,opt,name=digest,proto3,customtype=github.com/opencontainers/go-digest.Digest" json:"digest"`
Size_ int64 `protobuf:"varint,3,opt,name=size,proto3" json:"size,omitempty"`
}
func (m *Descriptor) Reset() { *m = Descriptor{} }
func (*Descriptor) ProtoMessage() {}
func (*Descriptor) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{0} }
func init() {
proto.RegisterType((*Descriptor)(nil), "containerd.types.Descriptor")
}
func (m *Descriptor) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *Descriptor) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
if len(m.MediaType) > 0 {
dAtA[i] = 0xa
i++
i = encodeVarintDescriptor(dAtA, i, uint64(len(m.MediaType)))
i += copy(dAtA[i:], m.MediaType)
}
if len(m.Digest) > 0 {
dAtA[i] = 0x12
i++
i = encodeVarintDescriptor(dAtA, i, uint64(len(m.Digest)))
i += copy(dAtA[i:], m.Digest)
}
if m.Size_ != 0 {
dAtA[i] = 0x18
i++
i = encodeVarintDescriptor(dAtA, i, uint64(m.Size_))
}
return i, nil
}
func encodeVarintDescriptor(dAtA []byte, offset int, v uint64) int {
for v >= 1<<7 {
dAtA[offset] = uint8(v&0x7f | 0x80)
v >>= 7
offset++
}
dAtA[offset] = uint8(v)
return offset + 1
}
func (m *Descriptor) Size() (n int) |
func sovDescriptor(x uint64) (n int) {
for {
n++
x >>= 7
if x == 0 {
break
}
}
return n
}
func sozDescriptor(x uint64) (n int) {
return sovDescriptor(uint64((x << 1) ^ uint64((int64(x) >> 63))))
}
func (this *Descriptor) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&Descriptor{`,
`MediaType:` + fmt.Sprintf("%v", this.MediaType) + `,`,
`Digest:` + fmt.Sprintf("%v", this.Digest) + `,`,
`Size_:` + fmt.Sprintf("%v", this.Size_) + `,`,
`}`,
}, "")
return s
}
func valueToStringDescriptor(v interface{}) string {
rv := reflect.ValueOf(v)
if rv.IsNil() {
return "nil"
}
pv := reflect.Indirect(rv).Interface()
return fmt.Sprintf("*%v", pv)
}
func (m *Descriptor) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowDescriptor
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: Descriptor: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: Descriptor: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field MediaType", wireType)
}
var stringLen uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowDescriptor
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLen := int(stringLen)
if intStringLen < 0 {
return ErrInvalidLengthDescriptor
}
postIndex := iNdEx + intStringLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.MediaType = string(dAtA[iNdEx:postIndex])
iNdEx = postIndex
case 2:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Digest", wireType)
}
var stringLen uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowDescriptor
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLen := int(stringLen)
if intStringLen < 0 {
return ErrInvalidLengthDescriptor
}
postIndex := iNdEx + intStringLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.Digest = github_com_opencontainers_go_digest.Digest(dAtA[iNdEx:postIndex])
iNdEx = postIndex
case 3:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field Size_", wireType)
}
m.Size_ = 0
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowDescriptor
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
m.Size_ |= (int64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
default:
iNdEx = preIndex
skippy, err := skipDescriptor(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthDescriptor
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func skipDescriptor(dAtA []byte) (n int, err error) {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowDescriptor
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
wireType := int(wire & 0x7)
switch wireType {
case 0:
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowDescriptor
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
iNdEx++
if dAtA[iNdEx-1] < 0x80 {
break
}
}
return iNdEx, nil
case 1:
iNdEx += 8
return iNdEx, nil
case 2:
var length int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowDescriptor
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
length |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
iNdEx += length
if length < 0 {
return 0, ErrInvalidLengthDescriptor
}
return iNdEx, nil
case 3:
for {
var innerWire uint64
var start int = iNdEx
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowDescriptor
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
innerWire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
innerWireType := int(innerWire & 0x7)
if innerWireType == 4 {
break
}
next, err := skipDescriptor(dAtA[start:])
if err != nil {
return 0, err
}
iNdEx = start + next
}
return iNdEx, nil
case 4:
return iNdEx, nil
case 5:
iNdEx += 4
return iNdEx, nil
default:
return 0, fmt.Errorf("proto: illegal wireType %d", wireType)
}
}
panic("unreachable")
}
var (
ErrInvalidLengthDescriptor = fmt.Errorf("proto: negative length found during unmarshaling")
ErrIntOverflowDescriptor = fmt.Errorf("proto: integer overflow")
)
func init() {
proto.RegisterFile("github.com/containerd/containerd/api/types/descriptor.proto", fileDescriptorDescriptor)
}
var fileDescriptorDescriptor = []byte{
// 234 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xb2, 0x4e, 0xcf, 0x2c, 0xc9,
0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0x4f, 0xce, 0xcf, 0x2b, 0x49, 0xcc, 0xcc, 0x4b, 0x2d,
0x4a, 0x41, 0x66, 0x26, 0x16, 0x64, 0xea, 0x97, 0x54, 0x16, 0xa4, 0x16, 0xeb, 0xa7, 0xa4, 0x16,
0x27, 0x17, 0x65, 0x16, 0x94, 0xe4, 0x17, 0xe9, 0x15, 0x14, 0xe5, 0x97, 0xe4, 0x0b, 0x09, 0x20,
0x94, 0xe9, 0x81, 0x95, 0x48, 0x89, 0xa4, 0xe7, 0xa7, 0xe7, 0x83, 0x25, 0xf5, 0x41, 0x2c, 0x88,
0x3a, 0xa5, 0x6e, 0x46, 0x2e, 0x2e, 0x17, 0xb8, 0x66, 0x21, 0x59, 0x2e, 0xae, 0xdc, 0xd4, 0x94,
0xcc, 0xc4, 0x78, 0x90, 0x1e, 0x09, 0x46, 0x05, 0x46, 0x0d, 0xce, 0x20, 0x4e, 0xb0, 0x48, 0x48,
0x65, 0x41, 0xaa, 0x90, 0x17, 0x17, 0x5b, 0x4a, 0x66, 0x7a, 0x6a, 0x71, 0x89, 0x04, 0x13, 0x48,
0xca, 0xc9, 0xe8, 0xc4, 0x3d, 0x79, 0x86, 0x5b, 0xf7, 0xe4, 0xb5, 0x90, 0x9c, 0x9a, 0x5f, 0x90,
0x9a, 0x07, 0xb7, 0xbc, 0x58, 0x3f, 0x3d, 0x5f, 0x17, 0xa2, 0x45, 0xcf, 0x05, 0x4c, 0x05, 0x41,
0x4d, 0x10, 0x12, 0xe2, 0x62, 0x29, 0xce, 0xac, 0x4a, 0x95, 0x60, 0x56, 0x60, 0xd4, 0x60, 0x0e,
0x02, 0xb3, 0x9d, 0xbc, 0x4e, 0x3c, 0x94, 0x63, 0xb8, 0xf1, 0x50, 0x8e, 0xa1, 0xe1, 0x91, 0x1c,
0xe3, 0x89, 0x47, 0x72, 0x8c, 0x17, 0x1e, 0xc9, 0x31, 0x3e, 0x78, 0x24, 0xc7, 0x18, 0x65, 0x40,
0x7c, 0x60, 0x58, 0x83, 0xc9, 0x08, 0x86, 0x24, 0x36, 0xb0, 0x17, 0x8d, 0x01, 0x01, 0x00, 0x00,
0xff, 0xff, 0xea, 0xac, 0x78, 0x9a, 0x49, 0x01, 0x00, 0x00,
}
| {
var l int
_ = l
l = len(m.MediaType)
if l > 0 {
n += 1 + l + sovDescriptor(uint64(l))
}
l = len(m.Digest)
if l > 0 {
n += 1 + l + sovDescriptor(uint64(l))
}
if m.Size_ != 0 {
n += 1 + sovDescriptor(uint64(m.Size_))
}
return n
} | identifier_body |
descriptor.pb.go | // Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: github.com/containerd/containerd/api/types/descriptor.proto
/*
Package types is a generated protocol buffer package.
It is generated from these files:
github.com/containerd/containerd/api/types/descriptor.proto
github.com/containerd/containerd/api/types/metrics.proto
github.com/containerd/containerd/api/types/mount.proto
github.com/containerd/containerd/api/types/platform.proto
It has these top-level messages:
Descriptor
Metric
Mount
Platform
*/
package types
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
// skipping weak import gogoproto "github.com/gogo/protobuf/gogoproto"
import github_com_opencontainers_go_digest "github.com/opencontainers/go-digest"
import strings "strings"
import reflect "reflect"
import io "io"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion2 // please upgrade the proto package
// Descriptor describes a blob in a content store.
//
// This descriptor can be used to reference content from an
// oci descriptor found in a manifest.
// See https://godoc.org/github.com/opencontainers/image-spec/specs-go/v1#Descriptor
type Descriptor struct {
MediaType string `protobuf:"bytes,1,opt,name=media_type,json=mediaType,proto3" json:"media_type,omitempty"`
Digest github_com_opencontainers_go_digest.Digest `protobuf:"bytes,2,opt,name=digest,proto3,customtype=github.com/opencontainers/go-digest.Digest" json:"digest"`
Size_ int64 `protobuf:"varint,3,opt,name=size,proto3" json:"size,omitempty"`
}
func (m *Descriptor) Reset() { *m = Descriptor{} }
func (*Descriptor) ProtoMessage() {}
func (*Descriptor) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{0} }
func init() {
proto.RegisterType((*Descriptor)(nil), "containerd.types.Descriptor")
}
func (m *Descriptor) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil |
return dAtA[:n], nil
}
func (m *Descriptor) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
if len(m.MediaType) > 0 {
dAtA[i] = 0xa
i++
i = encodeVarintDescriptor(dAtA, i, uint64(len(m.MediaType)))
i += copy(dAtA[i:], m.MediaType)
}
if len(m.Digest) > 0 {
dAtA[i] = 0x12
i++
i = encodeVarintDescriptor(dAtA, i, uint64(len(m.Digest)))
i += copy(dAtA[i:], m.Digest)
}
if m.Size_ != 0 {
dAtA[i] = 0x18
i++
i = encodeVarintDescriptor(dAtA, i, uint64(m.Size_))
}
return i, nil
}
func encodeVarintDescriptor(dAtA []byte, offset int, v uint64) int {
for v >= 1<<7 {
dAtA[offset] = uint8(v&0x7f | 0x80)
v >>= 7
offset++
}
dAtA[offset] = uint8(v)
return offset + 1
}
func (m *Descriptor) Size() (n int) {
var l int
_ = l
l = len(m.MediaType)
if l > 0 {
n += 1 + l + sovDescriptor(uint64(l))
}
l = len(m.Digest)
if l > 0 {
n += 1 + l + sovDescriptor(uint64(l))
}
if m.Size_ != 0 {
n += 1 + sovDescriptor(uint64(m.Size_))
}
return n
}
func sovDescriptor(x uint64) (n int) {
for {
n++
x >>= 7
if x == 0 {
break
}
}
return n
}
func sozDescriptor(x uint64) (n int) {
return sovDescriptor(uint64((x << 1) ^ uint64((int64(x) >> 63))))
}
func (this *Descriptor) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&Descriptor{`,
`MediaType:` + fmt.Sprintf("%v", this.MediaType) + `,`,
`Digest:` + fmt.Sprintf("%v", this.Digest) + `,`,
`Size_:` + fmt.Sprintf("%v", this.Size_) + `,`,
`}`,
}, "")
return s
}
func valueToStringDescriptor(v interface{}) string {
rv := reflect.ValueOf(v)
if rv.IsNil() {
return "nil"
}
pv := reflect.Indirect(rv).Interface()
return fmt.Sprintf("*%v", pv)
}
func (m *Descriptor) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowDescriptor
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: Descriptor: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: Descriptor: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field MediaType", wireType)
}
var stringLen uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowDescriptor
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLen := int(stringLen)
if intStringLen < 0 {
return ErrInvalidLengthDescriptor
}
postIndex := iNdEx + intStringLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.MediaType = string(dAtA[iNdEx:postIndex])
iNdEx = postIndex
case 2:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Digest", wireType)
}
var stringLen uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowDescriptor
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLen := int(stringLen)
if intStringLen < 0 {
return ErrInvalidLengthDescriptor
}
postIndex := iNdEx + intStringLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.Digest = github_com_opencontainers_go_digest.Digest(dAtA[iNdEx:postIndex])
iNdEx = postIndex
case 3:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field Size_", wireType)
}
m.Size_ = 0
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowDescriptor
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
m.Size_ |= (int64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
default:
iNdEx = preIndex
skippy, err := skipDescriptor(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthDescriptor
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func skipDescriptor(dAtA []byte) (n int, err error) {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowDescriptor
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
wireType := int(wire & 0x7)
switch wireType {
case 0:
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowDescriptor
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
iNdEx++
if dAtA[iNdEx-1] < 0x80 {
break
}
}
return iNdEx, nil
case 1:
iNdEx += 8
return iNdEx, nil
case 2:
var length int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowDescriptor
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
length |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
iNdEx += length
if length < 0 {
return 0, ErrInvalidLengthDescriptor
}
return iNdEx, nil
case 3:
for {
var innerWire uint64
var start int = iNdEx
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowDescriptor
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
innerWire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
innerWireType := int(innerWire & 0x7)
if innerWireType == 4 {
break
}
next, err := skipDescriptor(dAtA[start:])
if err != nil {
return 0, err
}
iNdEx = start + next
}
return iNdEx, nil
case 4:
return iNdEx, nil
case 5:
iNdEx += 4
return iNdEx, nil
default:
return 0, fmt.Errorf("proto: illegal wireType %d", wireType)
}
}
panic("unreachable")
}
var (
ErrInvalidLengthDescriptor = fmt.Errorf("proto: negative length found during unmarshaling")
ErrIntOverflowDescriptor = fmt.Errorf("proto: integer overflow")
)
func init() {
proto.RegisterFile("github.com/containerd/containerd/api/types/descriptor.proto", fileDescriptorDescriptor)
}
var fileDescriptorDescriptor = []byte{
// 234 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xb2, 0x4e, 0xcf, 0x2c, 0xc9,
0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0x4f, 0xce, 0xcf, 0x2b, 0x49, 0xcc, 0xcc, 0x4b, 0x2d,
0x4a, 0x41, 0x66, 0x26, 0x16, 0x64, 0xea, 0x97, 0x54, 0x16, 0xa4, 0x16, 0xeb, 0xa7, 0xa4, 0x16,
0x27, 0x17, 0x65, 0x16, 0x94, 0xe4, 0x17, 0xe9, 0x15, 0x14, 0xe5, 0x97, 0xe4, 0x0b, 0x09, 0x20,
0x94, 0xe9, 0x81, 0x95, 0x48, 0x89, 0xa4, 0xe7, 0xa7, 0xe7, 0x83, 0x25, 0xf5, 0x41, 0x2c, 0x88,
0x3a, 0xa5, 0x6e, 0x46, 0x2e, 0x2e, 0x17, 0xb8, 0x66, 0x21, 0x59, 0x2e, 0xae, 0xdc, 0xd4, 0x94,
0xcc, 0xc4, 0x78, 0x90, 0x1e, 0x09, 0x46, 0x05, 0x46, 0x0d, 0xce, 0x20, 0x4e, 0xb0, 0x48, 0x48,
0x65, 0x41, 0xaa, 0x90, 0x17, 0x17, 0x5b, 0x4a, 0x66, 0x7a, 0x6a, 0x71, 0x89, 0x04, 0x13, 0x48,
0xca, 0xc9, 0xe8, 0xc4, 0x3d, 0x79, 0x86, 0x5b, 0xf7, 0xe4, 0xb5, 0x90, 0x9c, 0x9a, 0x5f, 0x90,
0x9a, 0x07, 0xb7, 0xbc, 0x58, 0x3f, 0x3d, 0x5f, 0x17, 0xa2, 0x45, 0xcf, 0x05, 0x4c, 0x05, 0x41,
0x4d, 0x10, 0x12, 0xe2, 0x62, 0x29, 0xce, 0xac, 0x4a, 0x95, 0x60, 0x56, 0x60, 0xd4, 0x60, 0x0e,
0x02, 0xb3, 0x9d, 0xbc, 0x4e, 0x3c, 0x94, 0x63, 0xb8, 0xf1, 0x50, 0x8e, 0xa1, 0xe1, 0x91, 0x1c,
0xe3, 0x89, 0x47, 0x72, 0x8c, 0x17, 0x1e, 0xc9, 0x31, 0x3e, 0x78, 0x24, 0xc7, 0x18, 0x65, 0x40,
0x7c, 0x60, 0x58, 0x83, 0xc9, 0x08, 0x86, 0x24, 0x36, 0xb0, 0x17, 0x8d, 0x01, 0x01, 0x00, 0x00,
0xff, 0xff, 0xea, 0xac, 0x78, 0x9a, 0x49, 0x01, 0x00, 0x00,
}
| {
return nil, err
} | conditional_block |
descriptor.pb.go | // Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: github.com/containerd/containerd/api/types/descriptor.proto
/*
Package types is a generated protocol buffer package.
It is generated from these files:
github.com/containerd/containerd/api/types/descriptor.proto
github.com/containerd/containerd/api/types/metrics.proto
github.com/containerd/containerd/api/types/mount.proto
github.com/containerd/containerd/api/types/platform.proto
It has these top-level messages:
Descriptor
Metric
Mount
Platform
*/
package types
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
// skipping weak import gogoproto "github.com/gogo/protobuf/gogoproto"
import github_com_opencontainers_go_digest "github.com/opencontainers/go-digest"
import strings "strings"
import reflect "reflect"
import io "io"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion2 // please upgrade the proto package
// Descriptor describes a blob in a content store.
//
// This descriptor can be used to reference content from an
// oci descriptor found in a manifest.
// See https://godoc.org/github.com/opencontainers/image-spec/specs-go/v1#Descriptor
type Descriptor struct {
MediaType string `protobuf:"bytes,1,opt,name=media_type,json=mediaType,proto3" json:"media_type,omitempty"`
Digest github_com_opencontainers_go_digest.Digest `protobuf:"bytes,2,opt,name=digest,proto3,customtype=github.com/opencontainers/go-digest.Digest" json:"digest"`
Size_ int64 `protobuf:"varint,3,opt,name=size,proto3" json:"size,omitempty"`
}
func (m *Descriptor) Reset() { *m = Descriptor{} }
func (*Descriptor) ProtoMessage() {}
func (*Descriptor) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{0} }
func init() {
proto.RegisterType((*Descriptor)(nil), "containerd.types.Descriptor")
}
func (m *Descriptor) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *Descriptor) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
if len(m.MediaType) > 0 {
dAtA[i] = 0xa
i++
i = encodeVarintDescriptor(dAtA, i, uint64(len(m.MediaType)))
i += copy(dAtA[i:], m.MediaType)
}
if len(m.Digest) > 0 {
dAtA[i] = 0x12
i++
i = encodeVarintDescriptor(dAtA, i, uint64(len(m.Digest)))
i += copy(dAtA[i:], m.Digest)
}
if m.Size_ != 0 {
dAtA[i] = 0x18
i++
i = encodeVarintDescriptor(dAtA, i, uint64(m.Size_))
}
return i, nil
}
func encodeVarintDescriptor(dAtA []byte, offset int, v uint64) int {
for v >= 1<<7 {
dAtA[offset] = uint8(v&0x7f | 0x80)
v >>= 7
offset++
}
dAtA[offset] = uint8(v)
return offset + 1
}
func (m *Descriptor) Size() (n int) {
var l int
_ = l
l = len(m.MediaType)
if l > 0 {
n += 1 + l + sovDescriptor(uint64(l))
}
l = len(m.Digest)
if l > 0 {
n += 1 + l + sovDescriptor(uint64(l))
}
if m.Size_ != 0 {
n += 1 + sovDescriptor(uint64(m.Size_))
}
return n
}
func sovDescriptor(x uint64) (n int) {
for {
n++
x >>= 7
if x == 0 {
break
}
}
return n
}
func sozDescriptor(x uint64) (n int) {
return sovDescriptor(uint64((x << 1) ^ uint64((int64(x) >> 63))))
}
func (this *Descriptor) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&Descriptor{`,
`MediaType:` + fmt.Sprintf("%v", this.MediaType) + `,`,
`Digest:` + fmt.Sprintf("%v", this.Digest) + `,`,
`Size_:` + fmt.Sprintf("%v", this.Size_) + `,`,
`}`,
}, "")
return s
}
func valueToStringDescriptor(v interface{}) string {
rv := reflect.ValueOf(v)
if rv.IsNil() {
return "nil"
}
pv := reflect.Indirect(rv).Interface()
return fmt.Sprintf("*%v", pv)
}
func (m *Descriptor) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowDescriptor
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: Descriptor: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: Descriptor: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field MediaType", wireType)
}
var stringLen uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowDescriptor
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLen := int(stringLen)
if intStringLen < 0 {
return ErrInvalidLengthDescriptor
}
postIndex := iNdEx + intStringLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.MediaType = string(dAtA[iNdEx:postIndex])
iNdEx = postIndex
case 2:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Digest", wireType)
}
var stringLen uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowDescriptor
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLen := int(stringLen)
if intStringLen < 0 {
return ErrInvalidLengthDescriptor
}
postIndex := iNdEx + intStringLen
if postIndex > l {
return io.ErrUnexpectedEOF
} | if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field Size_", wireType)
}
m.Size_ = 0
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowDescriptor
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
m.Size_ |= (int64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
default:
iNdEx = preIndex
skippy, err := skipDescriptor(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthDescriptor
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func skipDescriptor(dAtA []byte) (n int, err error) {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowDescriptor
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
wireType := int(wire & 0x7)
switch wireType {
case 0:
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowDescriptor
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
iNdEx++
if dAtA[iNdEx-1] < 0x80 {
break
}
}
return iNdEx, nil
case 1:
iNdEx += 8
return iNdEx, nil
case 2:
var length int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowDescriptor
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
length |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
iNdEx += length
if length < 0 {
return 0, ErrInvalidLengthDescriptor
}
return iNdEx, nil
case 3:
for {
var innerWire uint64
var start int = iNdEx
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowDescriptor
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
innerWire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
innerWireType := int(innerWire & 0x7)
if innerWireType == 4 {
break
}
next, err := skipDescriptor(dAtA[start:])
if err != nil {
return 0, err
}
iNdEx = start + next
}
return iNdEx, nil
case 4:
return iNdEx, nil
case 5:
iNdEx += 4
return iNdEx, nil
default:
return 0, fmt.Errorf("proto: illegal wireType %d", wireType)
}
}
panic("unreachable")
}
var (
ErrInvalidLengthDescriptor = fmt.Errorf("proto: negative length found during unmarshaling")
ErrIntOverflowDescriptor = fmt.Errorf("proto: integer overflow")
)
func init() {
proto.RegisterFile("github.com/containerd/containerd/api/types/descriptor.proto", fileDescriptorDescriptor)
}
var fileDescriptorDescriptor = []byte{
// 234 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xb2, 0x4e, 0xcf, 0x2c, 0xc9,
0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0x4f, 0xce, 0xcf, 0x2b, 0x49, 0xcc, 0xcc, 0x4b, 0x2d,
0x4a, 0x41, 0x66, 0x26, 0x16, 0x64, 0xea, 0x97, 0x54, 0x16, 0xa4, 0x16, 0xeb, 0xa7, 0xa4, 0x16,
0x27, 0x17, 0x65, 0x16, 0x94, 0xe4, 0x17, 0xe9, 0x15, 0x14, 0xe5, 0x97, 0xe4, 0x0b, 0x09, 0x20,
0x94, 0xe9, 0x81, 0x95, 0x48, 0x89, 0xa4, 0xe7, 0xa7, 0xe7, 0x83, 0x25, 0xf5, 0x41, 0x2c, 0x88,
0x3a, 0xa5, 0x6e, 0x46, 0x2e, 0x2e, 0x17, 0xb8, 0x66, 0x21, 0x59, 0x2e, 0xae, 0xdc, 0xd4, 0x94,
0xcc, 0xc4, 0x78, 0x90, 0x1e, 0x09, 0x46, 0x05, 0x46, 0x0d, 0xce, 0x20, 0x4e, 0xb0, 0x48, 0x48,
0x65, 0x41, 0xaa, 0x90, 0x17, 0x17, 0x5b, 0x4a, 0x66, 0x7a, 0x6a, 0x71, 0x89, 0x04, 0x13, 0x48,
0xca, 0xc9, 0xe8, 0xc4, 0x3d, 0x79, 0x86, 0x5b, 0xf7, 0xe4, 0xb5, 0x90, 0x9c, 0x9a, 0x5f, 0x90,
0x9a, 0x07, 0xb7, 0xbc, 0x58, 0x3f, 0x3d, 0x5f, 0x17, 0xa2, 0x45, 0xcf, 0x05, 0x4c, 0x05, 0x41,
0x4d, 0x10, 0x12, 0xe2, 0x62, 0x29, 0xce, 0xac, 0x4a, 0x95, 0x60, 0x56, 0x60, 0xd4, 0x60, 0x0e,
0x02, 0xb3, 0x9d, 0xbc, 0x4e, 0x3c, 0x94, 0x63, 0xb8, 0xf1, 0x50, 0x8e, 0xa1, 0xe1, 0x91, 0x1c,
0xe3, 0x89, 0x47, 0x72, 0x8c, 0x17, 0x1e, 0xc9, 0x31, 0x3e, 0x78, 0x24, 0xc7, 0x18, 0x65, 0x40,
0x7c, 0x60, 0x58, 0x83, 0xc9, 0x08, 0x86, 0x24, 0x36, 0xb0, 0x17, 0x8d, 0x01, 0x01, 0x00, 0x00,
0xff, 0xff, 0xea, 0xac, 0x78, 0x9a, 0x49, 0x01, 0x00, 0x00,
} | m.Digest = github_com_opencontainers_go_digest.Digest(dAtA[iNdEx:postIndex])
iNdEx = postIndex
case 3: | random_line_split |
descriptor.pb.go | // Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: github.com/containerd/containerd/api/types/descriptor.proto
/*
Package types is a generated protocol buffer package.
It is generated from these files:
github.com/containerd/containerd/api/types/descriptor.proto
github.com/containerd/containerd/api/types/metrics.proto
github.com/containerd/containerd/api/types/mount.proto
github.com/containerd/containerd/api/types/platform.proto
It has these top-level messages:
Descriptor
Metric
Mount
Platform
*/
package types
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
// skipping weak import gogoproto "github.com/gogo/protobuf/gogoproto"
import github_com_opencontainers_go_digest "github.com/opencontainers/go-digest"
import strings "strings"
import reflect "reflect"
import io "io"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion2 // please upgrade the proto package
// Descriptor describes a blob in a content store.
//
// This descriptor can be used to reference content from an
// oci descriptor found in a manifest.
// See https://godoc.org/github.com/opencontainers/image-spec/specs-go/v1#Descriptor
type Descriptor struct {
MediaType string `protobuf:"bytes,1,opt,name=media_type,json=mediaType,proto3" json:"media_type,omitempty"`
Digest github_com_opencontainers_go_digest.Digest `protobuf:"bytes,2,opt,name=digest,proto3,customtype=github.com/opencontainers/go-digest.Digest" json:"digest"`
Size_ int64 `protobuf:"varint,3,opt,name=size,proto3" json:"size,omitempty"`
}
func (m *Descriptor) Reset() { *m = Descriptor{} }
func (*Descriptor) ProtoMessage() {}
func (*Descriptor) Descriptor() ([]byte, []int) { return fileDescriptorDescriptor, []int{0} }
func init() {
proto.RegisterType((*Descriptor)(nil), "containerd.types.Descriptor")
}
func (m *Descriptor) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalTo(dAtA)
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *Descriptor) MarshalTo(dAtA []byte) (int, error) {
var i int
_ = i
var l int
_ = l
if len(m.MediaType) > 0 {
dAtA[i] = 0xa
i++
i = encodeVarintDescriptor(dAtA, i, uint64(len(m.MediaType)))
i += copy(dAtA[i:], m.MediaType)
}
if len(m.Digest) > 0 {
dAtA[i] = 0x12
i++
i = encodeVarintDescriptor(dAtA, i, uint64(len(m.Digest)))
i += copy(dAtA[i:], m.Digest)
}
if m.Size_ != 0 {
dAtA[i] = 0x18
i++
i = encodeVarintDescriptor(dAtA, i, uint64(m.Size_))
}
return i, nil
}
func encodeVarintDescriptor(dAtA []byte, offset int, v uint64) int {
for v >= 1<<7 {
dAtA[offset] = uint8(v&0x7f | 0x80)
v >>= 7
offset++
}
dAtA[offset] = uint8(v)
return offset + 1
}
func (m *Descriptor) Size() (n int) {
var l int
_ = l
l = len(m.MediaType)
if l > 0 {
n += 1 + l + sovDescriptor(uint64(l))
}
l = len(m.Digest)
if l > 0 {
n += 1 + l + sovDescriptor(uint64(l))
}
if m.Size_ != 0 {
n += 1 + sovDescriptor(uint64(m.Size_))
}
return n
}
func sovDescriptor(x uint64) (n int) {
for {
n++
x >>= 7
if x == 0 {
break
}
}
return n
}
func sozDescriptor(x uint64) (n int) {
return sovDescriptor(uint64((x << 1) ^ uint64((int64(x) >> 63))))
}
func (this *Descriptor) String() string {
if this == nil {
return "nil"
}
s := strings.Join([]string{`&Descriptor{`,
`MediaType:` + fmt.Sprintf("%v", this.MediaType) + `,`,
`Digest:` + fmt.Sprintf("%v", this.Digest) + `,`,
`Size_:` + fmt.Sprintf("%v", this.Size_) + `,`,
`}`,
}, "")
return s
}
func valueToStringDescriptor(v interface{}) string {
rv := reflect.ValueOf(v)
if rv.IsNil() {
return "nil"
}
pv := reflect.Indirect(rv).Interface()
return fmt.Sprintf("*%v", pv)
}
func (m *Descriptor) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowDescriptor
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: Descriptor: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: Descriptor: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field MediaType", wireType)
}
var stringLen uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowDescriptor
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLen := int(stringLen)
if intStringLen < 0 {
return ErrInvalidLengthDescriptor
}
postIndex := iNdEx + intStringLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.MediaType = string(dAtA[iNdEx:postIndex])
iNdEx = postIndex
case 2:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field Digest", wireType)
}
var stringLen uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowDescriptor
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
intStringLen := int(stringLen)
if intStringLen < 0 {
return ErrInvalidLengthDescriptor
}
postIndex := iNdEx + intStringLen
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.Digest = github_com_opencontainers_go_digest.Digest(dAtA[iNdEx:postIndex])
iNdEx = postIndex
case 3:
if wireType != 0 {
return fmt.Errorf("proto: wrong wireType = %d for field Size_", wireType)
}
m.Size_ = 0
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowDescriptor
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
m.Size_ |= (int64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
default:
iNdEx = preIndex
skippy, err := skipDescriptor(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthDescriptor
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
func | (dAtA []byte) (n int, err error) {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowDescriptor
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
wireType := int(wire & 0x7)
switch wireType {
case 0:
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowDescriptor
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
iNdEx++
if dAtA[iNdEx-1] < 0x80 {
break
}
}
return iNdEx, nil
case 1:
iNdEx += 8
return iNdEx, nil
case 2:
var length int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowDescriptor
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
length |= (int(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
iNdEx += length
if length < 0 {
return 0, ErrInvalidLengthDescriptor
}
return iNdEx, nil
case 3:
for {
var innerWire uint64
var start int = iNdEx
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return 0, ErrIntOverflowDescriptor
}
if iNdEx >= l {
return 0, io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
innerWire |= (uint64(b) & 0x7F) << shift
if b < 0x80 {
break
}
}
innerWireType := int(innerWire & 0x7)
if innerWireType == 4 {
break
}
next, err := skipDescriptor(dAtA[start:])
if err != nil {
return 0, err
}
iNdEx = start + next
}
return iNdEx, nil
case 4:
return iNdEx, nil
case 5:
iNdEx += 4
return iNdEx, nil
default:
return 0, fmt.Errorf("proto: illegal wireType %d", wireType)
}
}
panic("unreachable")
}
var (
ErrInvalidLengthDescriptor = fmt.Errorf("proto: negative length found during unmarshaling")
ErrIntOverflowDescriptor = fmt.Errorf("proto: integer overflow")
)
func init() {
proto.RegisterFile("github.com/containerd/containerd/api/types/descriptor.proto", fileDescriptorDescriptor)
}
var fileDescriptorDescriptor = []byte{
// 234 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xb2, 0x4e, 0xcf, 0x2c, 0xc9,
0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0x4f, 0xce, 0xcf, 0x2b, 0x49, 0xcc, 0xcc, 0x4b, 0x2d,
0x4a, 0x41, 0x66, 0x26, 0x16, 0x64, 0xea, 0x97, 0x54, 0x16, 0xa4, 0x16, 0xeb, 0xa7, 0xa4, 0x16,
0x27, 0x17, 0x65, 0x16, 0x94, 0xe4, 0x17, 0xe9, 0x15, 0x14, 0xe5, 0x97, 0xe4, 0x0b, 0x09, 0x20,
0x94, 0xe9, 0x81, 0x95, 0x48, 0x89, 0xa4, 0xe7, 0xa7, 0xe7, 0x83, 0x25, 0xf5, 0x41, 0x2c, 0x88,
0x3a, 0xa5, 0x6e, 0x46, 0x2e, 0x2e, 0x17, 0xb8, 0x66, 0x21, 0x59, 0x2e, 0xae, 0xdc, 0xd4, 0x94,
0xcc, 0xc4, 0x78, 0x90, 0x1e, 0x09, 0x46, 0x05, 0x46, 0x0d, 0xce, 0x20, 0x4e, 0xb0, 0x48, 0x48,
0x65, 0x41, 0xaa, 0x90, 0x17, 0x17, 0x5b, 0x4a, 0x66, 0x7a, 0x6a, 0x71, 0x89, 0x04, 0x13, 0x48,
0xca, 0xc9, 0xe8, 0xc4, 0x3d, 0x79, 0x86, 0x5b, 0xf7, 0xe4, 0xb5, 0x90, 0x9c, 0x9a, 0x5f, 0x90,
0x9a, 0x07, 0xb7, 0xbc, 0x58, 0x3f, 0x3d, 0x5f, 0x17, 0xa2, 0x45, 0xcf, 0x05, 0x4c, 0x05, 0x41,
0x4d, 0x10, 0x12, 0xe2, 0x62, 0x29, 0xce, 0xac, 0x4a, 0x95, 0x60, 0x56, 0x60, 0xd4, 0x60, 0x0e,
0x02, 0xb3, 0x9d, 0xbc, 0x4e, 0x3c, 0x94, 0x63, 0xb8, 0xf1, 0x50, 0x8e, 0xa1, 0xe1, 0x91, 0x1c,
0xe3, 0x89, 0x47, 0x72, 0x8c, 0x17, 0x1e, 0xc9, 0x31, 0x3e, 0x78, 0x24, 0xc7, 0x18, 0x65, 0x40,
0x7c, 0x60, 0x58, 0x83, 0xc9, 0x08, 0x86, 0x24, 0x36, 0xb0, 0x17, 0x8d, 0x01, 0x01, 0x00, 0x00,
0xff, 0xff, 0xea, 0xac, 0x78, 0x9a, 0x49, 0x01, 0x00, 0x00,
}
| skipDescriptor | identifier_name |
main.rs | #![deny(warnings)]
#![deny(missing_docs)]
//! Command line tool for modifying the hosts file on Linux/UNIX to change static hostname-IP mappings.
//!
//! Intended to be run with the suid bit set, so unprivileged users may update the hosts file. This
//! allows easy integration for jobs like updating entries after launching a Docker container or
//! locally testing virtual hosts for web projects without any requirement for privilege escalation.
//!
//! ## Engineered for Safety
//!
//! The tool has been engineered for safety and features a configurable list of hostnames for which
//! entries may be modified. As long as that configuration itself is only editable with elevated
//! privileges, no other modifications are possible.
//!
//! Also, key entries that might affect the correct functioning of other software, such as
//! `localhost`, are checked before the new configuration is written.
//!
//! The new configuration is written to the file system under a different name next to the original
//! file and only moved into place as the last step. This makes the change atomic (according to
//! POSIX semantics), and any error occurring earlier leaves the existing configuration intact. If
//! the temporary file is still present after an unsuccessful run, it must be removed manually
//! before the tool can run again.
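//!
//! ## Example configuration
//!
//! For illustration, a minimal `/etc/hostsmod.yaml` could look roughly like the sketch below;
//! the field names `whitelist` and `enable_dangerous_operations` come from this crate's `config`
//! module, but the authoritative starting point is the sample configuration the binary itself
//! can print.
//!
//! ```yaml
//! enable_dangerous_operations: false
//! whitelist:
//!   - somerandomhost.with.tld
//! ```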
#[macro_use]
extern crate structopt;
// #[macro_use]
extern crate nom;
mod config;
mod opts;
mod parse;
use crate::config::RESERVED_HOSTNAME;
use crate::config::{HostsmodConfig, DONT_TOUCH};
use crate::opts::Action;
use crate::parse::{try_parse_hosts, HostsPart, HostsPartFamily};
use std::borrow::Cow;
use std::cmp::min;
use std::fs::{rename, File, OpenOptions};
use std::io::{stdout, BufReader, Read, Write};
use std::net::IpAddr;
use structopt::StructOpt;
const PATH_HOSTSFILE: &str = "/etc/hosts";
const PATH_HOSTSFILE_NEW: &str = "/etc/hosts.new";
const PATH_CONFIG: &str = "/etc/hostsmod.yaml";
fn main() {
let hostname_os_string = hostname::get().expect("unable to determine system hostname");
let hostname = hostname_os_string
.to_str()
.expect("system hostname is not a valid UTF-8 string");
let mut opts: opts::HostsArgs = {
let app: structopt::clap::App = opts::HostsArgs::clap();
let str_about = format!(
            r##"Tool for modifying the system-wide hosts file to simulate arbitrary DNS A and AAAA records.
Expects a hosts file at {:?} and a configuration in YAML format at {:?}. This
program is intended to be run by non-privileged users with the help of setuid. It therefore has
some safety features.
Modifications are not persisted until the end of program execution. In the event of any
error, the original hosts file will not be modified.
The configuration defines a whitelist of hostnames that can be modified. This program will refuse
to modify any hostname not present in that list. It will also ensure that certain hostnames are
never modified:
- {:?}
- {:?}
- {:?}
- {:?}
- {:?}
- {:?} <- current hostname
The only exception is if the config variable `enable_dangerous_operations` is set to true. Then even
these reserved hostnames can be modified."##,
PATH_HOSTSFILE,
PATH_CONFIG,
config::RESERVED_LOCALHOST,
config::RESERVED_IP6_LOCALHOST,
config::RESERVED_IP6_LOOPBACK,
config::RESERVED_IP6_ALLNODES,
config::RESERVED_IP6_ALLROUTERS,
hostname
);
let app = app
// .before_help("PRE!!!")
// .after_help("POST!!!")
.about(str_about.as_ref());
opts::HostsArgs::from_clap(&app.get_matches())
};
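    // Emit a starter configuration on stdout and exit. With structopt's default kebab-case
    // naming this corresponds to a `--generate-sample-config` flag (assumed; the exact flag
    // is declared in opts.rs).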
if opts.generate_sample_config {
let mut out = stdout();
let mut sample = HostsmodConfig::default();
sample.whitelist.insert("somerandomhost.with.tld".into());
serde_yaml::to_writer(&mut out, &sample).expect("unable to write default config to stdout");
return;
}
let euid = users::get_effective_uid();
    // dbg!(euid);
if euid != 0 {
        eprintln!("not running as effective root, forcing dry-run mode");
opts.dry_run = true;
}
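    // When the binary is installed setuid root, the effective UID is 0 while the real UID stays
    // that of the invoking user, so the effective UID is the right thing to check for write
    // access; without it, the tool degrades to a harmless dry run.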
// dbg!(opts);
// open file
let mut file_hosts_orig = OpenOptions::new()
.read(true)
// .write(!opts.dry_run)
.write(false)
.truncate(false)
.create(false)
.open(PATH_HOSTSFILE)
.expect("unable to open hosts");
// let opt_file_hosts_new = if opts.dry_run {
// None
// } else {
// Some(
// OpenOptions::new()
// .write(true)
// .create_new(true)
// .open(PATH_HOSTSFILE_NEW)
// .expect("unable to open new hosts file for writing! Stale file from previous run?"),
// )
// };
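    // Note: the replacement file is now opened only much later, after the new contents have been
    // generated and validated, so a dry run never creates or modifies anything on disk.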
let mut str_content = String::with_capacity(1024 * 8);
let len_content = file_hosts_orig
.read_to_string(&mut str_content)
.expect("unable to read hosts file as UTF-8 string");
let mut hosts_parts =
try_parse_hosts(&str_content).expect("unable to parse contents of hosts file");
trim_hosts_parts(&mut hosts_parts);
let hosts_parts_orig = hosts_parts.clone();
// eprintln!("PRE-actions: {:#?}", &hosts_parts);
let cfg: HostsmodConfig = {
// TODO: check config file ownership & access rights
let file_cfg = BufReader::new(File::open(PATH_CONFIG).expect("unable to open config file"));
serde_yaml::from_reader(file_cfg).expect("unable to parse configuration")
};
if opts.dry_run || opts.verbose {
if opts.verbose {
eprintln!("config: {:#?}", cfg);
}
println!("original contents:\n>>>\n{}<<<", str_content);
}
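// Snapshot which DONT_TOUCH entries are present before any modification, so
// the post-modification state can be compared against it later.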
let mut found_pre = vec![false; DONT_TOUCH.len()];
if !cfg.enable_dangerous_operations {
for (dt, found) in DONT_TOUCH.iter().zip(found_pre.iter_mut()) {
let dt_host = if dt.hostname == RESERVED_HOSTNAME {
Cow::Borrowed(hostname)
} else {
Cow::Borrowed(dt.hostname.as_ref())
};
for part in &hosts_parts {
if part.matches_hostname(&dt_host) && part.matches_ip(&dt.ip) {
*found = true;
}
}
}
}
let found_pre = found_pre;
// execute actions
perform_actions(&mut opts, &mut hosts_parts, &cfg).expect("unable to modify hosts file");
if !opts.dry_run && hosts_parts == hosts_parts_orig {
if opts.verbose {
println!("no changes, not modifying hosts file");
}
return;
}
// remove redundant Empty elements
trim_hosts_parts(&mut hosts_parts);
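// Collapse every run of consecutive empty parts down to a single one.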
{
let mut remove = false;
hosts_parts.retain(|item| match (item.is_empty(), remove) {
(true, true) => false,
(true, false) => {
remove = true;
true
}
(false, _) => {
remove = false;
true
}
});
}
// eprintln!("POST-actions: {:#?}", &hosts_parts);
// compare against DONT_TOUCH
let buf_generate = generate_hosts_file(len_content, &hosts_parts);
// eprintln!(">\n{}<", &buf_generate);
// safety checks
if !cfg.enable_dangerous_operations {
let mut found_post = vec![false; DONT_TOUCH.len()];
for (dt, found) in DONT_TOUCH.iter().zip(found_post.iter_mut()) {
let dt_host = if dt.hostname == RESERVED_HOSTNAME {
Cow::Borrowed(hostname)
} else {
Cow::Borrowed(dt.hostname.as_ref())
};
for part in &hosts_parts {
match (part.matches_hostname(&dt_host), part.matches_ip(&dt.ip)) {
(true, true) => {
*found = true;
}
(true, false) => {
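// Hostname matches but the IP does not; tolerate it only when the
// entry still satisfies some other DONT_TOUCH pair.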
if DONT_TOUCH
.iter()
.find(|dt_lookup| {
// eprint!("conflict: {:?} == {:?} ", part, dt_lookup);
let res = part.matches_hostname(&dt_lookup.hostname)
&& part.matches_ip(&dt_lookup.ip);
// eprintln!("{}", res);
res
})
.is_none()
{
panic!(
"untouchable entry {:?} {:?} was changed! {:?}",
dt.ip, dt_host, part
);
}
// *found = true;
}
(false, _) => {}
}
}
}
if found_post != found_pre {
dbg!(&found_pre);
dbg!(&found_post);
for (i, (pre, post)) in found_pre.iter().zip(found_post.iter()).enumerate() {
if pre != post {
eprintln!("Difference: {:?}", DONT_TOUCH[i])
}
}
panic!("found_post != found_pre");
}
}
if opts.dry_run || opts.verbose {
println!("generated:\n>>>\n{}<<<", &buf_generate);
}
if opts.dry_run {
println!("DRY-RUN DRY-RUN DRY-RUN DRY-RUN DRY-RUN DRY-RUN DRY-RUN DRY-RUN DRY-RUN DRY-RUN DRY-RUN DRY-RUN");
println!("hosts file not modified");
return;
}
let mut file_hosts_new = OpenOptions::new()
.write(true)
.create_new(true)
.open(PATH_HOSTSFILE_NEW)
.expect("unable to open new hosts file for writing! Stale file from previous run?");
file_hosts_new
.write_all(buf_generate.as_bytes())
.expect("unable to write generated hosts file");
file_hosts_new
.set_len(buf_generate.as_bytes().len() as u64)
.expect("unable to truncate hosts file to right len");
file_hosts_new.flush().expect("unable to flush hosts file");
// close file handles
drop(file_hosts_new);
drop(file_hosts_orig);
rename(PATH_HOSTSFILE_NEW, PATH_HOSTSFILE).expect("unable to move new hosts file into place!");
}
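/// Drops trailing empty parts so repeated runs do not accumulate blank
/// lines at the end of the file.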
fn trim_hosts_parts(hosts_parts: &mut Vec<HostsPart>) {
let trim = hosts_parts
.iter()
.rev()
.take_while(|part| part.is_empty())
.count();
hosts_parts.truncate(hosts_parts.len() - trim);
}
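/// Applies the requested actions to the parsed hosts entries, enforcing the
/// configured whitelist. Returns an error message for the first rejected action.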
fn perform_actions(
opts: &mut opts::HostsArgs,
hosts: &mut Vec<HostsPart>,
config: &HostsmodConfig,
) -> Result<(), String> {
'loop_actions: for action in &opts.actions {
match action {
Action::Define(ip, host) => {
if !config.whitelist.contains(host) {
return Err(format!("HOST {:?} not whitelisted!", host));
}
// eprintln!("defining additionally...: {:?} += {:?}", ip, host);
let mut opt_insert = Some(hosts.len());
let mut host_found_v4 = false;
let mut host_found_v6 = false;
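// Walk every entry that shares this IP or hostname: bail out if the exact
// pair already exists, reject duplicates within the same address family,
// and remember the position just after the last related entry for insertion.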
for (i, part) in hosts
.iter_mut()
.enumerate()
.filter(|(_i, p)| p.matches_ip(ip) || p.matches_hostname(host))
{
// eprintln!("matching entry: {:?}", part);
let matches_hostname = part.matches_hostname(host);
if part.matches_ip(ip) && matches_hostname {
// eprintln!("already defined, NOP");
//opt_insert = None;
continue 'loop_actions;
}
if matches_hostname {
match part.get_family() {
Some(HostsPartFamily::IPv4) => {
if host_found_v4 || ip.is_ipv4() {
return Err(format!(
"duplicate entry for host {:?} {:?}",
host,
HostsPartFamily::IPv4
));
}
host_found_v4 = true;
}
Some(HostsPartFamily::IPv6) => {
if host_found_v6 || ip.is_ipv6() {
return Err(format!(
"duplicate entry for host {:?} {:?}",
host,
HostsPartFamily::IPv6
));
}
host_found_v6 = true;
}
None => {}
};
}
if opt_insert.is_some() {
opt_insert = Some(i + 1);
}
}
if let Some(insert) = opt_insert {
let insert = min(insert, hosts.len());
hosts.insert(
insert,
HostsPart::Entry(ip.clone(), vec![Cow::Owned(host.clone())], None),
);
}
}
Action::DefineExclusive(ip, host) => {
if !config.whitelist.contains(host) {
return Err(format!("HOST {:?} not whitelisted!", host));
}
// eprintln!("defining exclusively...: {:?} += {:?}", ip, host);
let mut vec_remove = vec![];
for (i, _part) in hosts
.iter()
.enumerate()
.filter(|(_i, p)| p.matches_hostname(host))
{
// eprintln!("matching entry: {:?}", part);
// if part.matches_ip(ip) && part.matches_hostname(host) {
// eprintln!("already defined, NOP");
// return;
// }
// insert = i + 1;
vec_remove.push(i);
}
for remove in vec_remove.iter().rev() {
hosts.remove(*remove);
}
let insert = vec_remove.into_iter().min().unwrap_or(hosts.len());
hosts.insert(
insert,
HostsPart::Entry(ip.clone(), vec![Cow::Owned(host.clone())], None),
);
}
Action::Remove(host) => {
if !config.whitelist.contains(host) {
return Err(format!("HOST {:?} not whitelisted!", host));
}
let mut vec_remove = vec![];
let mut vec_insert = vec![];
let mut offset_remove = 0;
for (i, part) in hosts
.iter()
.enumerate()
.filter(|(_i, p)| p.matches_hostname(host))
{
match part {
HostsPart::Entry(ip, hosts, opt_comment) => {
// eprintln!("matching entry: {:?}", (&ip, &hosts, &opt_comment));
if hosts.len() > 1 {
let mut hosts_filtered = hosts.clone();
hosts_filtered.retain(|ent| ent != host);
vec_insert.push((
i,
HostsPart::Entry(
ip.clone(),
hosts_filtered,
opt_comment.clone(),
),
));
offset_remove += 1;
}
vec_remove.push(offset_remove + i);
// for h in hosts {
// if h == host {
// }
// }
}
_ => {}
}
}
// dbg!(&vec_insert);
for (idx, part) in vec_insert {
hosts.insert(idx, part);
}
// dbg!(&vec_remove);
// unimplemented!();
for remove in vec_remove.iter().rev() {
hosts.remove(*remove);
}
}
}
}
Ok(())
}
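/// Renders the parsed parts back into hosts-file text; `len_content` is used
/// to pre-size the output buffer.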
fn generate_hosts_file(len_content: usize, parsed: &Vec<HostsPart>) -> String {
let mut buf_generate = String::with_capacity(len_content);
// eprintln!("rendering: {:?}", parsed);
fn render_entry<'a>(
buf_generate: &mut String,
ip: &IpAddr,
hosts: &Vec<Cow<'a, str>>,
opt_comment: &Option<Cow<'a, str>>,
) {
use std::fmt::Write;
write!(buf_generate, "{:20}\t", ip).expect("unable to format entry IP address");
let max = hosts.len() - 1;
for (i, host) in hosts.iter().enumerate() {
write!(buf_generate, "{}{}", host, if i < max { " " } else { "" })
.expect("unable to format entry hostname");
}
if let Some(comment) = opt_comment {
buf_generate.push_str(" #");
buf_generate.push_str(comment);
}
}
for part in parsed {
// eprintln!("rendering: {:?}", part);
match part {
HostsPart::Empty(empty) => {
buf_generate.push_str(empty);
}
HostsPart::Comment(comment) => {
buf_generate.push_str("#");
buf_generate.push_str(comment);
}
HostsPart::CommentedEntry(ip, hosts, opt_comment) => {
buf_generate.push_str("# ");
render_entry(&mut buf_generate, ip, hosts, opt_comment)
}
HostsPart::Entry(ip, hosts, opt_comment) => {
render_entry(&mut buf_generate, ip, hosts, opt_comment)
}
}
buf_generate.push_str("\n");
}
// buf_generate.pop();
buf_generate
}
game.rs

use world::World;
use piston_window::*;
use camera::Camera;
use cgmath::{Vector2, vec3};
use cgmath::prelude::*;
use color::*;
use car::*;
use piston_window::Ellipse;
use bot::BoxRules;
use std::cell::RefCell;
use std::ops::DerefMut;
use std::time::Instant;
// `Game` contains everything needed to run the game
pub struct Game {
config: GameConfig,
world: World, // All objects in the game
window: PistonWindow,
bot_rules: BoxRules, // Rules to create a new bot
camera: Camera, // Camera for rendering
state: State, // Current state of game
// Wrap these caches in `RefCell` to allow interior mutability
glyphs: RefCell<Glyphs>, // Font cache
ellipse: RefCell<Ellipse>, // Model to draw a circle
}
struct State {
pub turn: Turn, // The player's current steering input
pub sprint: bool, // Player is speeding-up or not
pub spawn: f64, // Count down time to spawn a new bot
pub ended: bool, // Game is over or not
pub game_speed: f64, // Game speed in addition to player's speed
pub jump_timeout: f64, // Count down to allow the next jump
pub rotate_cam: bool, // Allow rotation of camera or not
pub bullets: i64, // The number of bullets left
pub recharge: f64, // Bullets recharge time
pub fps: f64, // Real fps of game
pub last_frame: Instant, // Moment of the last draw
}
pub enum Turn { Left, Right, None, }
// Configurable game constants.
// A tuple represents a range of values.
#[derive(Serialize, Deserialize, Clone)]
pub struct GameConfig {
pub title: String,
pub screen_size: ::Pixel,
pub ups: u64, // Updates per second
pub max_fps: u64,
pub tunel_size: [f64; 3],
pub player_size: [f64; 3],
pub player_speed: (f64, f64), // min and max player speed
pub player_turn_speed: f64,
pub bot_size: [(f64, f64); 3], // Range of bot's size
pub bot_speed: (f64, f64),
pub bot_turn_speed: (f64, f64),
pub divider_size: [f64; 2],
pub camera_height: f64, // Height of camera (from player)
pub camera_distance: f64, // Distance from camera to player
pub decor_distance: f64, // Distance between each decoration
pub sprint_factor: f64,
pub spawn_time: (f64, f64),
pub game_sprint: f64, // The increase of game_speed
pub game_max_speed: f64,
pub player_jump_v: f64,
pub player_jump_a: f64,
pub jump_turn_decrease: f64,
pub jump_timeout: f64,
pub mouse_speed: f64,
pub trueshot_distance: f64,
pub bullet_stock: i64, // Number of bullets
pub recharge_time: f64,
pub bullet_len: f64,
pub bullet_speed: f64,
pub zoom_in: bool, // If true, zoom in while in stare mode
}
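// A minimal sketch of loading `GameConfig` with serde (the path and JSON
// format are assumptions; any serde-compatible format would work):
//
//     let file = std::fs::File::open("resources/config.json").unwrap();
//     let config: GameConfig = serde_json::from_reader(file).unwrap();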
impl Game {
pub fn new(config: GameConfig) -> Game {
let mut window: PistonWindow = WindowSettings::new(
config.title.clone(), [config.screen_size.w, config.screen_size.h])
.exit_on_esc(true).build()
.expect("Cannot create window.");
window.set_ups(config.ups);
window.set_max_fps(config.max_fps);
window.set_capture_cursor(true);
let glyphs = Glyphs::new("resources/Ubuntu-R.ttf", window.factory.clone())
.expect("Unable to load font.");
let bot_rules = BoxRules {
size: config.bot_size,
position: [(0., config.tunel_size[0]), (0., 0.), (config.tunel_size[2], config.tunel_size[2])],
speed: config.bot_speed,
turn_speed: config.bot_turn_speed,
color: vec![RED, ORANGE, VIOLET, GREEN, PALE],
jump_turn_decrease: config.jump_turn_decrease,
};
let world = World::new(&config);
let camera = Game::new_camera(&config, &world.player);
let state = State {
turn: Turn::None,
sprint: false,
spawn: 0.,
ended: false,
game_speed: 0.,
jump_timeout: 0.,
rotate_cam: false,
bullets: config.bullet_stock,
recharge: 0.,
fps: 0.,
last_frame: Instant::now(),
};
let ellipse = Ellipse {
color: BLACK.alpha(0.).into(),
border: Some(ellipse::Border {
color: RED.alpha(0.5).into(),
radius: 1.,
}),
resolution: 16,
};
Game {
config: config,
world: world,
window: window,
bot_rules: bot_rules,
camera: camera,
state: state,
glyphs: RefCell::new(glyphs),
ellipse: RefCell::new(ellipse),
}
}
fn new_camera<T: Car>(config: &GameConfig, player: &T) -> Camera {
Camera::new(
config.screen_size.clone(),
vec3(0., config.camera_height, -config.camera_distance) + player.pos()
)
}
// Re-calculate fps
fn update_fps(&mut self) {
let d = self.state.last_frame.elapsed();
self.state.last_frame = Instant::now();
self.state.fps = 1. / (d.as_secs() as f64 + 1e-9*d.subsec_nanos() as f64);
}
pub fn run(&mut self) {
while let Some(e) = self.window.next() {
match e {
Input::Press(key) => self.press(key),
Input::Release(key) => self.release(key),
Input::Render(_) => {
self.update_fps();
self.draw(&e);
},
Input::Update(args) => self.update(args.dt),
Input::Move(Motion::MouseRelative(a, b)) => self.mouse_move(a as f64, b as f64),
_ => {}
}
if self.state.ended {
break;
}
}
}
fn mouse_move(&mut self, x: f64, y: f64) {
if self.state.rotate_cam {
self.camera.rotate(x*self.config.mouse_speed, y*self.config.mouse_speed, self.world.player.position);
}
}
fn press(&mut self, key: Button) {
match key {
Button::Keyboard(Key::A) => self.state.turn = Turn::Left,
Button::Keyboard(Key::D) => self.state.turn = Turn::Right,
Button::Keyboard(Key::W) => self.state.sprint = true,
Button::Keyboard(Key::Space) => if self.state.jump_timeout <= 0. {
self.state.jump_timeout = self.config.jump_timeout;
self.world.player.jump();
},
Button::Mouse(MouseButton::Right) => {
if self.config.zoom_in {
self.camera.zoom_in();
}
self.state.rotate_cam = true;
},
Button::Mouse(MouseButton::Left) => if self.state.rotate_cam && self.state.bullets > 0 {
let mut pos = self.world.player.position;
pos.y += self.world.player.size.y;
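// Walk along the camera axis `c` until its z-component spans the trueshot
// distance (plus the eye-to-player offset); relative to the eye this is the
// aim point, and the bullet direction is the muzzle-to-aim-point vector
// normalized and scaled to bullet speed.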
let mut d = vec3(0., 0., self.config.trueshot_distance + self.config.camera_distance);
d = self.camera.c * d.magnitude2() / d.dot(self.camera.c);
d = self.camera.eye + d - pos;
d = d * self.config.bullet_speed / d.magnitude();
self.world.add_bullet(pos, d, self.config.bullet_len);
self.state.bullets -= 1;
if self.state.bullets <= 0 {
self.state.recharge = self.config.recharge_time;
}
},
_ => (),
}
}
fn release(&mut self, key: Button) {
match key {
Button::Keyboard(Key::A) => if let Turn::Left = self.state.turn {
self.state.turn = Turn::None;
},
Button::Keyboard(Key::D) => if let Turn::Right = self.state.turn {
self.state.turn = Turn::None;
},
Button::Keyboard(Key::W) => self.state.sprint = false,
Button::Mouse(MouseButton::Right) => {
self.state.rotate_cam = false;
self.camera = Game::new_camera(&self.config, &self.world.player);
},
_ => (),
}
}
fn draw(&mut self, e: &Input) {
// Return a horizontal bar
macro_rules! bar {
($curr: expr, $full: expr) => {
[0.,
15.0,
self.config.screen_size.w as f64/2.*$curr/$full,
20.0,]
};
}
let jump_bar = bar!(self.state.jump_timeout, self.config.jump_timeout);
let recharge_bar = bar!(self.state.recharge, self.config.recharge_time);
let bullets_bar = bar!(self.state.bullets as f64, self.config.bullet_stock as f64);
// Closure in `draw_2d` requires unique access to `self`,
// so we use RefCell to hack it.
let mut glyphs = self.glyphs.borrow_mut();
let fps = format!("{:.3}", self.state.fps);
let lines = self.world.render(&self.camera);
self.window.draw_2d(e, |c, g| {
clear(BLACK.into(), g);
for (l, color) in lines {
line(color.into(), 1., convert(l), c.transform, g);
}
rectangle(BLUE.alpha(0.4).into(), jump_bar, c.transform, g);
rectangle(RED.alpha(0.4).into(), recharge_bar, c.transform, g);
rectangle(GREEN.alpha(0.4).into(), bullets_bar, c.transform, g);
text(WHITE.into(), 10, &fps, glyphs.deref_mut(), c.transform.trans(0., 10.), g);
});
if self.state.rotate_cam {
let w = 20.;
let x = self.config.screen_size.w as f64 /2. - w/2.;
let y = self.config.screen_size.h as f64 /2. - w/2.;
let ellipse = self.ellipse.borrow();
self.window.draw_2d(e, |c, g| {
ellipse.draw([x, y, w, w], &c.draw_state, c.transform, g);
rectangle(RED.into(), [x+w/2.-1., y+w/2.-1., 2., 2.], c.transform, g);
});
}
}
// `dt` stands for delta, duration since the last update
fn update(&mut self, dt: f64) {
// Re-calculate delta according to fps
let dt = if self.state.fps != 0. { 1. / self.state.fps } else { dt };
let old = self.world.player.position;
if self.state.bullets <= 0 {
self.state.recharge -= dt;
if self.state.recharge < 0. {
self.state.bullets = self.config.bullet_stock;
}
}
self.state.jump_timeout -= dt;
if self.state.game_speed < self.config.game_max_speed {
self.state.game_speed += dt*self.config.game_sprint;
}
if self.state.sprint {
if self.world.player.speed < self.config.player_speed.1 {
self.world.player.speed += dt*self.config.sprint_factor;
}
} else if self.world.player.speed > self.config.player_speed.0 {
self.world.player.speed -= dt*self.config.sprint_factor;
}
self.state.spawn -= dt;
if self.state.spawn < 0. {
self.world.add_bot(&self.bot_rules);
self.state.spawn += ::rnd(self.config.spawn_time);
}
match self.state.turn {
Turn::Left => self.world.player.turn_left(dt),
Turn::Right => self.world.player.turn_right(dt),
Turn::None => (),
}
// Update objects in the world
self.world.update(dt, self.state.game_speed);
// Validate things like object's boundary, bullets and boxes
// collisions.
self.world.validate();
// Update camera's location
self.camera.eye += self.world.player.position - old;
// Check for player's collision with bot
if self.world.bots.iter().any(|x| self.world.player.crashed(&x.car)) {
self.state.ended = true;
}
}
}
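// Flattens two projected endpoints into the [x1, y1, x2, y2] array that
// Piston's `line` drawing function expects.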
fn convert(x: [Vector2<f64>; 2]) -> [f64; 4] {
[x[0].x, x[0].y, x[1].x, x[1].y]
}
game.rs | use world::World;
use piston_window::*;
use camera::Camera;
use cgmath::{Vector2, vec3};
use cgmath::prelude::*;
use color::*;
use car::*;
use piston_window::Ellipse;
use bot::BoxRules;
use std::cell::RefCell;
use std::ops::DerefMut;
use std::time::Instant;
// `Game` contains every things to run the game
pub struct Game {
config: GameConfig,
world: World, // All objects in the game
window: PistonWindow,
bot_rules: BoxRules, // Rules to create a new bot
camera: Camera, // Camera for rendering
state: State, // Current state of game
// Wrap these caches in `RefCell` to allow interior mutability
glyphs: RefCell<Glyphs>, // Font cache
ellipse: RefCell<Ellipse>, // Model to draw a circle
}
struct State {
pub turn: Turn, // Presents movement of player
pub sprint: bool, // Player is speeding-up or not
pub spawn: f64, // Count down time to spawn a new bot
pub ended: bool, // Game is over or not
pub game_speed: f64, // Game speed in addition to player's speed
pub jump_timeout: f64, // Count down to allow the next jump
pub rotate_cam: bool, // Allow rotation of camera or not
pub bullets: i64, // The number of bullets left
pub recharge: f64, // Bullets recharge time
pub fps: f64, // Real fps of game
pub last_frame: Instant, // Moment of the last draw
}
pub enum Turn { Left, Right, None, }
// Configurable game constants.
// A tuple represents a (min, max) range.
#[derive(Serialize, Deserialize, Clone)]
pub struct GameConfig {
pub title: String,
pub screen_size: ::Pixel,
pub ups: u64, // Updates per second
pub max_fps: u64,
pub tunel_size: [f64; 3],
pub player_size: [f64; 3],
pub player_speed: (f64, f64), // min and max player speed
pub player_turn_speed: f64,
pub bot_size: [(f64, f64); 3], // Range of bot's size
pub bot_speed: (f64, f64),
pub bot_turn_speed: (f64, f64),
pub divider_size: [f64; 2],
pub camera_height: f64, // Height of camera (from player)
pub camera_distance: f64, // Distance from camera to player
pub decor_distance: f64, // Distance between each decoration
pub sprint_factor: f64,
pub spawn_time: (f64, f64),
pub game_sprint: f64, // The increase of game_speed
pub game_max_speed: f64,
pub player_jump_v: f64,
pub player_jump_a: f64,
pub jump_turn_decrease: f64,
pub jump_timeout: f64,
pub mouse_speed: f64,
pub trueshot_distance: f64,
pub bullet_stock: i64, // Number of bullets
pub recharge_time: f64,
pub bullet_len: f64,
pub bullet_speed: f64,
pub zoom_in: bool, // If true, zoom in while in stare mode
}
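// Since `GameConfig` derives `Serialize`/`Deserialize`, it can be loaded from
// disk. A minimal sketch, assuming the `serde_json` crate and a hypothetical
// "config.json" path; the game's real loading code is not shown in this file.
fn load_config(path: &str) -> GameConfig {
let text = std::fs::read_to_string(path).expect("Cannot read config file.");
serde_json::from_str(&text).expect("Invalid config file.")
}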
impl Game {
pub fn new(config: GameConfig) -> Game {
let mut window: PistonWindow = WindowSettings::new(
config.title.clone(), [config.screen_size.w, config.screen_size.h])
.exit_on_esc(true).build()
.expect("Cannot create window.");
window.set_ups(config.ups);
window.set_max_fps(config.max_fps);
window.set_capture_cursor(true);
let glyphs = Glyphs::new("resources/Ubuntu-R.ttf", window.factory.clone())
.expect("Unable to load font.");
let bot_rules = BoxRules {
size: config.bot_size,
position: [(0., config.tunel_size[0]), (0., 0.), (config.tunel_size[2], config.tunel_size[2])],
speed: config.bot_speed,
turn_speed: config.bot_turn_speed,
color: vec![RED, ORANGE, VIOLET, GREEN, PALE],
jump_turn_decrease: config.jump_turn_decrease,
};
let world = World::new(&config);
let camera = Game::new_camera(&config, &world.player);
let state = State {
turn: Turn::None,
sprint: false,
spawn: 0.,
ended: false,
game_speed: 0.,
jump_timeout: 0.,
rotate_cam: false,
bullets: config.bullet_stock,
recharge: 0.,
fps: 0.,
last_frame: Instant::now(),
};
let ellipse = Ellipse {
color: BLACK.alpha(0.).into(),
border: Some(ellipse::Border {
color: RED.alpha(0.5).into(),
radius: 1.,
}),
resolution: 16,
};
Game {
config: config,
world: world,
window: window,
bot_rules: bot_rules,
camera: camera,
state: state,
glyphs: RefCell::new(glyphs),
ellipse: RefCell::new(ellipse),
}
}
fn new_camera<T: Car>(config: &GameConfig, player: &T) -> Camera {
Camera::new(
config.screen_size.clone(),
vec3(0., config.camera_height, -config.camera_distance) + player.pos()
)
}
// Re-calculate fps
fn update_fps(&mut self) {
let d = self.state.last_frame.elapsed();
self.state.last_frame = Instant::now();
self.state.fps = 1. / (d.as_secs() as f64 + 1e-9*d.subsec_nanos() as f64);
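// Equivalent on newer Rust (1.38+): `self.state.fps = 1. / d.as_secs_f64();`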
}
pub fn run(&mut self) {
while let Some(e) = self.window.next() {
match e {
Input::Press(key) => self.press(key),
Input::Release(key) => self.release(key),
Input::Render(_) => {
self.update_fps();
self.draw(&e);
},
Input::Update(args) => self.update(args.dt),
Input::Move(Motion::MouseRelative(a, b)) => self.mouse_move(a as f64, b as f64),
_ => {}
}
if self.state.ended {
break;
}
}
}
fn mouse_move(&mut self, x: f64, y: f64) {
if self.state.rotate_cam {
self.camera.rotate(x*self.config.mouse_speed, y*self.config.mouse_speed, self.world.player.position);
}
}
fn press(&mut self, key: Button) {
match key {
Button::Keyboard(Key::A) => self.state.turn = Turn::Left,
Button::Keyboard(Key::D) => self.state.turn = Turn::Right,
Button::Keyboard(Key::W) => self.state.sprint = true,
Button::Keyboard(Key::Space) => if self.state.jump_timeout <= 0. {
self.state.jump_timeout = self.config.jump_timeout;
self.world.player.jump();
},
Button::Mouse(MouseButton::Right) => {
if self.config.zoom_in {
self.camera.zoom_in();
}
self.state.rotate_cam = true;
},
Button::Mouse(MouseButton::Left) => if self.state.rotate_cam && self.state.bullets > 0 {
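// Aiming sketch (explanatory comments, not in the original): fire from just
// above the player, find the point `trueshot_distance` ahead of the camera
// along its view direction `c`, then scale the muzzle-to-aim-point vector to
// `bullet_speed` to get the bullet's velocity.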
let mut pos = self.world.player.position;
pos.y += self.world.player.size.y;
let mut d = vec3(0., 0., self.config.trueshot_distance + self.config.camera_distance);
d = self.camera.c * d.magnitude2() / d.dot(self.camera.c);
d = self.camera.eye + d - pos;
d = d * self.config.bullet_speed / d.magnitude();
self.world.add_bullet(pos, d, self.config.bullet_len);
self.state.bullets -= 1;
if self.state.bullets <= 0 {
self.state.recharge = self.config.recharge_time;
}
},
_ => (),
}
}
fn release(&mut self, key: Button) {
match key {
Button::Keyboard(Key::A) => if let Turn::Left = self.state.turn {
self.state.turn = Turn::None;
},
Button::Keyboard(Key::D) => if let Turn::Right = self.state.turn {
self.state.turn = Turn::None;
},
Button::Keyboard(Key::W) => self.state.sprint = false,
Button::Mouse(MouseButton::Right) => {
self.state.rotate_cam = false;
self.camera = Game::new_camera(&self.config, &self.world.player);
},
_ => (),
}
}
fn draw(&mut self, e: &Input) {
// Return a horizontal bar
macro_rules! bar {
($curr: expr, $full: expr) => {
[0.,
15.0,
self.config.screen_size.w as f64/2.*$curr/$full,
20.0,]
};
}
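// For example, `bar!(self.state.recharge, self.config.recharge_time)` expands
// to an `[x, y, width, height]` rectangle whose width is half the screen
// width, scaled by how full the recharge timer currently is.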
let jump_bar = bar!(self.state.jump_timeout, self.config.jump_timeout);
let recharge_bar = bar!(self.state.recharge, self.config.recharge_time);
let bullets_bar = bar!(self.state.bullets as f64, self.config.bullet_stock as f64);
// The closure passed to `draw_2d` would need unique access to `self`,
// so we use `RefCell` interior mutability to work around the borrow.
let mut glyphs = self.glyphs.borrow_mut();
let fps = format!("{:.3}", self.state.fps);
let lines = self.world.render(&self.camera);
self.window.draw_2d(e, |c, g| {
clear(BLACK.into(), g);
for (l, color) in lines {
line(color.into(), 1., convert(l), c.transform, g);
}
rectangle(BLUE.alpha(0.4).into(), jump_bar, c.transform, g);
rectangle(RED.alpha(0.4).into(), recharge_bar, c.transform, g);
rectangle(GREEN.alpha(0.4).into(), bullets_bar, c.transform, g);
text(WHITE.into(), 10, &fps, glyphs.deref_mut(), c.transform.trans(0., 10.), g);
});
if self.state.rotate_cam {
let w = 20.;
let x = self.config.screen_size.w as f64 /2. - w/2.;
let y = self.config.screen_size.h as f64 /2. - w/2.;
let ellipse = self.ellipse.borrow();
self.window.draw_2d(e, |c, g| {
ellipse.draw([x, y, w, w], &c.draw_state, c.transform, g);
rectangle(RED.into(), [x+w/2.-1., y+w/2.-1., 2., 2.], c.transform, g);
});
}
}
// `dt` is the time delta: the duration since the last update
fn update(&mut self, dt: f64) {
// Re-calculate delta according to fps
let dt = if self.state.fps != 0. { 1./self.state.fps}
else { dt };
let old = self.world.player.position;
if self.state.bullets <= 0 {
self.state.recharge -= dt;
if self.state.recharge < 0. {
self.state.bullets = self.config.bullet_stock;
}
}
self.state.jump_timeout -= dt;
if self.state.game_speed < self.config.game_max_speed {
self.state.game_speed += dt*self.config.game_sprint;
}
if self.state.sprint {
if self.world.player.speed < self.config.player_speed.1 {
self.world.player.speed += dt*self.config.sprint_factor;
}
} else if self.world.player.speed > self.config.player_speed.0 {
self.world.player.speed -= dt*self.config.sprint_factor;
}
self.state.spawn -= dt;
if self.state.spawn < 0. {
self.world.add_bot(&self.bot_rules);
self.state.spawn += ::rnd(self.config.spawn_time);
}
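// `::rnd` is defined at the crate root (not shown in this file); presumably a
// helper that samples uniformly from a `(min, max)` tuple, something like:
// `fn rnd((lo, hi): (f64, f64)) -> f64 { lo + rand::random::<f64>() * (hi - lo) }`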
match self.state.turn {
Turn::Left => self.world.player.turn_left(dt),
Turn::Right => self.world.player.turn_right(dt),
Turn::None => (),
}
// Update objects in the world
self.world.update(dt, self.state.game_speed);
// Validate object boundaries and handle bullet/box collisions.
self.world.validate();
// Update camera's location
self.camera.eye += self.world.player.position - old;
// Check for player's collision with bot
if self.world.bots.iter().any(|x| self.world.player.crashed(&x.car)) {
self.state.ended = true;
}
}
}
fn convert(x: [Vector2<f64>; 2]) -> [f64; 4] {
[x[0].x, x[0].y, x[1].x, x[1].y]
}
game.rs
use world::World;
use piston_window::*;
use camera::Camera;
use cgmath::{Vector2, vec3};
use cgmath::prelude::*;
use color::*;
use car::*;
use piston_window::Ellipse;
use bot::BoxRules;
use std::cell::RefCell;
use std::ops::DerefMut;
use std::time::Instant;
// `Game` contains everything needed to run the game
pub struct Game {
config: GameConfig,
world: World, // All objects in the game
window: PistonWindow,
bot_rules: BoxRules, // Rules to create a new bot
camera: Camera, // Camera for rendering
state: State, // Current state of game
// Wrap these caches in `RefCell` to allow interior mutability
glyphs: RefCell<Glyphs>, // Font cache
ellipse: RefCell<Ellipse>, // Model to draw a circle
}
struct State {
pub turn: Turn, // Current turning state of the player
pub sprint: bool, // Player is speeding-up or not
pub spawn: f64, // Count down time to spawn a new bot
pub ended: bool, // Game is over or not
pub game_speed: f64, // Game speed in addition to player's speed
pub jump_timeout: f64, // Count down to allow the next jump
pub rotate_cam: bool, // Allow rotation of camera or not
pub bullets: i64, // The number of bullets left
pub recharge: f64, // Bullets recharge time
pub fps: f64, // Real fps of game
pub last_frame: Instant, // Moment of the last draw
}
pub enum Turn { Left, Right, None, }
// Configurable game constants.
// A tuple represents a (min, max) range.
#[derive(Serialize, Deserialize, Clone)]
pub struct GameConfig {
pub title: String,
pub screen_size: ::Pixel,
pub ups: u64, // Updates per second
pub max_fps: u64,
pub tunel_size: [f64; 3],
pub player_size: [f64; 3],
pub player_speed: (f64, f64), // min and max player speed
pub player_turn_speed: f64,
pub bot_size: [(f64, f64); 3], // Range of bot's size
pub bot_speed: (f64, f64),
pub bot_turn_speed: (f64, f64),
pub divider_size: [f64; 2],
pub camera_height: f64, // Height of camera (from player)
pub camera_distance: f64, // Distance from camera to player
pub decor_distance: f64, // Distance between each decoration
pub sprint_factor: f64,
pub spawn_time: (f64, f64),
pub game_sprint: f64, // The increase of game_speed
pub game_max_speed: f64,
pub player_jump_v: f64,
pub player_jump_a: f64,
pub jump_turn_decrease: f64,
pub jump_timeout: f64,
pub mouse_speed: f64,
pub trueshot_distance: f64,
pub bullet_stock: i64, // Number of bullets
pub recharge_time: f64,
pub bullet_len: f64,
pub bullet_speed: f64,
pub zoom_in: bool, // If true, zoom in while in stare mode
}
impl Game {
pub fn new(config: GameConfig) -> Game {
let mut window: PistonWindow = WindowSettings::new(
config.title.clone(), [config.screen_size.w, config.screen_size.h])
.exit_on_esc(true).build()
.expect("Cannot create window.");
window.set_ups(config.ups);
window.set_max_fps(config.max_fps);
window.set_capture_cursor(true);
let glyphs = Glyphs::new("resources/Ubuntu-R.ttf", window.factory.clone())
.expect("Unable to load font.");
let bot_rules = BoxRules {
size: config.bot_size,
position: [(0., config.tunel_size[0]), (0., 0.), (config.tunel_size[2], config.tunel_size[2])],
speed: config.bot_speed,
turn_speed: config.bot_turn_speed,
color: vec![RED, ORANGE, VIOLET, GREEN, PALE],
jump_turn_decrease: config.jump_turn_decrease,
};
let world = World::new(&config);
let camera = Game::new_camera(&config, &world.player);
let state = State {
turn: Turn::None,
sprint: false,
spawn: 0.,
ended: false,
game_speed: 0.,
jump_timeout: 0.,
rotate_cam: false,
bullets: config.bullet_stock,
recharge: 0.,
fps: 0.,
last_frame: Instant::now(),
};
let ellipse = Ellipse {
color: BLACK.alpha(0.).into(),
border: Some(ellipse::Border {
color: RED.alpha(0.5).into(),
radius: 1.,
}),
resolution: 16,
};
Game {
config: config,
world: world,
window: window,
bot_rules: bot_rules,
camera: camera,
state: state,
glyphs: RefCell::new(glyphs),
ellipse: RefCell::new(ellipse),
}
}
fn new_camera<T: Car>(config: &GameConfig, player: &T) -> Camera {
Camera::new(
config.screen_size.clone(),
vec3(0., config.camera_height, -config.camera_distance) + player.pos()
)
}
// Re-calculate fps
fn update_fps(&mut self) {
let d = self.state.last_frame.elapsed();
self.state.last_frame = Instant::now();
self.state.fps = 1. / (d.as_secs() as f64 + 1e-9*d.subsec_nanos() as f64);
}
pub fn run(&mut self) {
while let Some(e) = self.window.next() {
match e {
Input::Press(key) => self.press(key),
Input::Release(key) => self.release(key),
Input::Render(_) => {
self.update_fps();
self.draw(&e);
},
Input::Update(args) => self.update(args.dt),
Input::Move(Motion::MouseRelative(a, b)) => self.mouse_move(a as f64, b as f64),
_ => {}
}
if self.state.ended {
break;
}
}
}
fn mouse_move(&mut self, x: f64, y: f64) {
if self.state.rotate_cam {
self.camera.rotate(x*self.config.mouse_speed, y*self.config.mouse_speed, self.world.player.position);
}
}
fn press(&mut self, key: Button) {
match key {
Button::Keyboard(Key::A) => self.state.turn = Turn::Left,
Button::Keyboard(Key::D) => self.state.turn = Turn::Right,
Button::Keyboard(Key::W) => self.state.sprint = true,
Button::Keyboard(Key::Space) => if self.state.jump_timeout <= 0. {
self.state.jump_timeout = self.config.jump_timeout;
self.world.player.jump();
},
Button::Mouse(MouseButton::Right) => {
if self.config.zoom_in {
self.camera.zoom_in();
}
self.state.rotate_cam = true;
},
Button::Mouse(MouseButton::Left) => if self.state.rotate_cam && self.state.bullets > 0 {
let mut pos = self.world.player.position;
pos.y += self.world.player.size.y;
let mut d = vec3(0., 0., self.config.trueshot_distance + self.config.camera_distance);
d = self.camera.c * d.magnitude2() / d.dot(self.camera.c);
d = self.camera.eye + d - pos;
d = d * self.config.bullet_speed / d.magnitude();
self.world.add_bullet(pos, d, self.config.bullet_len);
self.state.bullets -= 1;
if self.state.bullets <= 0 {
self.state.recharge = self.config.recharge_time;
}
},
_ => (),
}
}
fn release(&mut self, key: Button) {
match key {
Button::Keyboard(Key::A) => if let Turn::Left = self.state.turn {
self.state.turn = Turn::None;
},
Button::Keyboard(Key::D) => if let Turn::Right = self.state.turn {
self.state.turn = Turn::None;
},
Button::Keyboard(Key::W) => self.state.sprint = false,
Button::Mouse(MouseButton::Right) => {
self.state.rotate_cam = false;
self.camera = Game::new_camera(&self.config, &self.world.player);
},
_ => (),
}
}
fn draw(&mut self, e: &Input) {
// Return a horizontal bar
macro_rules! bar {
($curr: expr, $full: expr) => {
[0.,
15.0,
self.config.screen_size.w as f64/2.*$curr/$full,
20.0,]
};
}
let jump_bar = bar!(self.state.jump_timeout, self.config.jump_timeout);
let recharge_bar = bar!(self.state.recharge, self.config.recharge_time);
let bullets_bar = bar!(self.state.bullets as f64, self.config.bullet_stock as f64);
// The closure passed to `draw_2d` would need unique access to `self`,
// so we use `RefCell` interior mutability to work around the borrow.
let mut glyphs = self.glyphs.borrow_mut();
let fps = format!("{:.3}", self.state.fps);
let lines = self.world.render(&self.camera);
self.window.draw_2d(e, |c, g| {
clear(BLACK.into(), g);
for (l, color) in lines {
line(color.into(), 1., convert(l), c.transform, g);
}
rectangle(BLUE.alpha(0.4).into(), jump_bar, c.transform, g);
rectangle(RED.alpha(0.4).into(), recharge_bar, c.transform, g);
rectangle(GREEN.alpha(0.4).into(), bullets_bar, c.transform, g);
text(WHITE.into(), 10, &fps, glyphs.deref_mut(), c.transform.trans(0., 10.), g);
});
if self.state.rotate_cam {
let w = 20.;
let x = self.config.screen_size.w as f64 /2. - w/2.;
let y = self.config.screen_size.h as f64 /2. - w/2.;
let ellipse = self.ellipse.borrow();
self.window.draw_2d(e, |c, g| {
ellipse.draw([x, y, w, w], &c.draw_state, c.transform, g);
rectangle(RED.into(), [x+w/2.-1., y+w/2.-1., 2., 2.], c.transform, g);
});
}
}
// `dt` is the time delta: the duration since the last update
fn update(&mut self, dt: f64) {
// Re-calculate delta according to fps
let dt = if self.state.fps != 0. { 1./self.state.fps}
else { dt };
let old = self.world.player.position;
if self.state.bullets <= 0 {
self.state.recharge -= dt;
if self.state.recharge < 0. {
self.state.bullets = self.config.bullet_stock;
}
}
self.state.jump_timeout -= dt;
if self.state.game_speed < self.config.game_max_speed {
self.state.game_speed += dt*self.config.game_sprint;
}
if self.state.sprint {
if self.world.player.speed < self.config.player_speed.1 {
self.world.player.speed += dt*self.config.sprint_factor;
}
} else if self.world.player.speed > self.config.player_speed.0 {
self.world.player.speed -= dt*self.config.sprint_factor;
}
self.state.spawn -= dt;
if self.state.spawn < 0. {
self.world.add_bot(&self.bot_rules);
self.state.spawn += ::rnd(self.config.spawn_time);
}
match self.state.turn {
Turn::Left => self.world.player.turn_left(dt),
Turn::Right => self.world.player.turn_right(dt),
Turn::None => (),
}
// Update objects in the world
self.world.update(dt, self.state.game_speed);
// Validate object boundaries and handle bullet/box collisions.
self.world.validate();
// Update camera's location
self.camera.eye += self.world.player.position - old;
// Check for player's collision with bot
if self.world.bots.iter().any(|x| self.world.player.crashed(&x.car)) {
self.state.ended = true;
}
}
}
fn convert(x: [Vector2<f64>; 2]) -> [f64; 4] {
[x[0].x, x[0].y, x[1].x, x[1].y]
}
graph.rs
use crate::{CommandEncoder, CommandEncoderOutput};
use generational_arena::Arena;
use moonwave_resources::{BindGroup, Buffer, ResourceRc, SampledTexture, TextureView};
use multimap::MultiMap;
use parking_lot::{RwLock, RwLockReadGuard};
use rayon::{prelude::*, ThreadPool};
use std::{
collections::HashMap,
fmt::{Debug, Formatter},
sync::Arc,
};
pub use generational_arena::Index;
pub trait FrameGraphNode: Send + Sync + 'static {
fn execute(
&self,
_inputs: &[Option<FrameNodeValue>],
_outputs: &mut [Option<FrameNodeValue>],
_encoder: &mut CommandEncoder,
) {
}
fn execute_raw(
&self,
inputs: &[Option<FrameNodeValue>],
outputs: &mut [Option<FrameNodeValue>],
device: &wgpu::Device,
_queue: &wgpu::Queue,
_sc_frame: &wgpu::SwapChainFrame,
) -> CommandEncoderOutput {
let mut encoder = CommandEncoder::new(device, "NodeGraphEncoder");
self.execute(inputs, outputs, &mut encoder);
encoder.finish()
}
}
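// A minimal illustrative node (an assumption, not part of the original
// source): it forwards its first input to its first output unchanged, using
// the default `execute` entry point instead of `execute_raw`.
struct PassthroughNode;
impl FrameGraphNode for PassthroughNode {
fn execute(
&self,
inputs: &[Option<FrameNodeValue>],
outputs: &mut [Option<FrameNodeValue>],
_encoder: &mut CommandEncoder,
) {
outputs[0] = inputs[0].clone();
}
}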
const MAX_LAYERS: usize = 8;
const MAX_NODES_PER_LAYER: usize = 8;
const MAX_INPUT_OUTPUTS_PER_NODE: usize = 16;
struct ConnectedNode {
name: String,
node: Arc<dyn FrameGraphNode>,
inputs: [Option<Index>; MAX_INPUT_OUTPUTS_PER_NODE],
}
struct ConnectedEdges {
owner_node_index: Index,
output_index: usize,
}
pub struct FrameGraph {
node_arena: RwLock<Arena<ConnectedNode>>,
edges_arena: RwLock<Arena<ConnectedEdges>>,
end_node: Index,
output_map: Vec<Vec<Option<FrameNodeValue>>>,
levels_map: MultiMap<usize, TraversedGraphNode>,
traversed_node_cache: HashMap<Index, usize>,
}
impl FrameGraph {
/// Creates a new empty graph.
pub fn new<T: FrameGraphNode>(end_node: T) -> Self {
let mut node_arena = Arena::with_capacity(MAX_LAYERS * MAX_NODES_PER_LAYER);
let end_node = node_arena.insert(ConnectedNode {
name: "EndNode".to_string(),
node: Arc::new(end_node),
inputs: [None; MAX_INPUT_OUTPUTS_PER_NODE],
});
Self {
node_arena: RwLock::new(node_arena),
edges_arena: RwLock::new(Arena::with_capacity(
MAX_LAYERS * MAX_INPUT_OUTPUTS_PER_NODE * MAX_NODES_PER_LAYER,
)),
output_map: vec![vec![None; MAX_NODES_PER_LAYER * MAX_INPUT_OUTPUTS_PER_NODE]; MAX_LAYERS],
levels_map: MultiMap::with_capacity(MAX_LAYERS),
traversed_node_cache: HashMap::with_capacity(
MAX_LAYERS * MAX_INPUT_OUTPUTS_PER_NODE * MAX_NODES_PER_LAYER,
),
end_node,
}
}
/// Returns the end node.
pub fn get_end_node(&self) -> Index {
self.end_node
}
/// Resets the frame graph by removing all nodes and sets up a new end node.
pub fn reset(&mut self) {
let mut nodes = self.node_arena.write();
let end_node_impl = nodes.get(self.end_node).unwrap().node.clone();
nodes.clear();
self.traversed_node_cache.clear();
self.edges_arena.write().clear();
self.end_node = nodes.insert(ConnectedNode {
name: "EndNode".to_string(),
node: end_node_impl,
inputs: [None; MAX_INPUT_OUTPUTS_PER_NODE],
});
}
/// Add a new node into the graph.
pub fn add_node<T: FrameGraphNode>(&self, node: T, name: &str) -> Index {
self.node_arena.write().insert(ConnectedNode {
name: name.to_string(),
node: Arc::new(node),
inputs: [None; MAX_INPUT_OUTPUTS_PER_NODE],
})
}
/// Connects one node's output to another node's input.
pub fn connect(
&self,
source: Index,
source_output: usize,
destination: Index,
destination_input: usize,
) -> Result<(), GraphConnectError> {
// Validate connection parameters.
if destination_input >= MAX_INPUT_OUTPUTS_PER_NODE {
return Err(GraphConnectError::MaximumInputsReached);
};
if source_output >= MAX_INPUT_OUTPUTS_PER_NODE {
return Err(GraphConnectError::MaximumOutputsReached);
};
let mut edges = self.edges_arena.write();
let mut nodes = self.node_arena.write();
let destination_node = nodes
.get_mut(destination)
.ok_or(GraphConnectError::InvalidDestination)?;
// Target input is already connected.
if destination_node.inputs[destination_input].is_some() {
return Err(GraphConnectError::AlreadyConnected);
}
// Target input is empty so simply create the connection.
let edge = edges.insert(ConnectedEdges {
owner_node_index: source,
output_index: source_output,
});
destination_node.inputs[destination_input] = Some(edge);
Ok(())
}
fn traverse_node(
cache: &mut HashMap<Index, usize>,
levels_map: &mut MultiMap<usize, TraversedGraphNode>,
nodes: &RwLockReadGuard<Arena<ConnectedNode>>,
edges: &RwLockReadGuard<Arena<ConnectedEdges>>,
node_index: Index,
level: usize,
) {
// Build the traversed node with input/output mapping info.
let mut traversed_node = TraversedGraphNode {
index: node_index,
inputs: [None; MAX_INPUT_OUTPUTS_PER_NODE],
};
// Remove this node from any lower level it was placed on earlier, so that
// it only appears once, at the deepest level that depends on it.
let mut has_retained = false;
for l in 0..level {
// Remove the previously traversed node from this level, if present.
if let Some(vec) = levels_map.get_vec_mut(&l) {
let before_len = vec.len();
vec.retain(|x| x.index != node_index);
if before_len != vec.len() {
has_retained = true;
}
}
}
// Update all inputs that still reference kicked out node.
if has_retained {
for l in 0..level {
if let Some(vec) = levels_map.get_vec_mut(&l) {
for node in vec {
for input in &mut node.inputs {
if let Some((nlevel, _, index)) = input {
if index == &node_index {
*nlevel = level;
}
}
}
}
}
}
}
// Loop through all inputs
let next_level = level + 1;
let node = nodes.get(node_index).unwrap();
for (input_index, input) in node.inputs.iter().enumerate() {
if let Some(input) = input {
let edge = edges.get(*input).unwrap();
let inner_node = edge.owner_node_index;
traversed_node.inputs[input_index] = Some((next_level, edge.output_index, inner_node));
Self::traverse_node(cache, levels_map, nodes, edges, inner_node, next_level);
}
}
// Store traversed node at level.
//let traversed_index = levels_map.get_vec(&level).map(|x| x.len()).unwrap_or(0);
//cache.insert(node_index, traversed_index);
// TODO: Due to retaining this index breaks currently :'(
levels_map.insert(level, traversed_node);
}
/// Executes the graph using the given scheduler.
pub fn execute<T: DeviceHost>(
&mut self,
sc_frame: Arc<wgpu::SwapChainFrame>,
device_host: &'static T,
pool: &ThreadPool,
) {
{
{
optick::event!("FrameGraph::traverse");
// Gain read access to nodes and connections.
let nodes = self.node_arena.read();
let edges = self.edges_arena.read();
// Start traversing from end.
self.levels_map.clear();
Self::traverse_node(
&mut self.traversed_node_cache,
&mut self.levels_map,
&nodes,
&edges,
self.end_node,
0,
);
}
let cache = &mut self.traversed_node_cache;
// Create async executor.
let mut local_pool = futures::executor::LocalPool::new();
let local_spawner = local_pool.spawner();
// Execute in levels order
let mut all_levels = self.levels_map.keys().cloned().collect::<Vec<_>>();
all_levels.sort_unstable();
let max_levels = all_levels.len();
for level in all_levels.into_iter().rev() {
optick::event!("FrameGraph::execute_level");
optick::tag!("level", level as u32);
// Get rid of duplicated nodes.
let mut nodes_in_level = self.levels_map.get_vec_mut(&level).unwrap().clone();
nodes_in_level.sort_unstable_by_key(|x| x.index);
nodes_in_level.dedup_by_key(|x| x.index);
// Build cache for this level
for (index, node) in nodes_in_level.iter().enumerate() {
cache.insert(node.index, index);
}
// Get chunks
let nodes = self.node_arena.read();
let read_nodes = nodes_in_level
.iter()
.map(|node| (nodes.get(node.index).unwrap(), node.inputs))
.collect::<Vec<_>>();
let mut empty = [Vec::with_capacity(0)];
#[allow(clippy::type_complexity)]
let (outputs, previous_outputs): (
&mut [Vec<Option<FrameNodeValue>>],
&mut [Vec<Option<FrameNodeValue>>],
) = if level == (max_levels - 1) {
(&mut self.output_map, &mut empty)
} else {
self.output_map.split_at_mut(level + 1)
};
let outputs_per_node = outputs[outputs.len() - 1]
.chunks_mut(MAX_INPUT_OUTPUTS_PER_NODE)
.enumerate()
.collect::<Vec<_>>();
// Execute
let encoder_outputs = pool.install(|| {
read_nodes
.par_iter()
.zip(outputs_per_node)
.enumerate()
.map(|(_i, ((node, inputs), (_oi, outputs)))| {
optick::event!("FrameGraph::node");
// Prepare node execution
optick::tag!("name", node.name);
let node_trait = node.node.clone();
let label = format!("NodeCommandEncoder_{}", node.name);
// Map outputs -> inputs.
/*
for (idx, input) in inputs.iter().enumerate() {
if let Some((target_level, output_index, node_index)) = input {
let i = cache.get(&node_index).unwrap();
println!(
"Mapping input #{} to level = {} ({}) and index = {} ({}, {})",
idx,
target_level,
previous_outputs.len() - (target_level - level),
i * MAX_INPUT_OUTPUTS_PER_NODE + output_index,
i,
output_index
);
} else {
println!("Mapping input #{} to None", i);
}
}
*/
let inputs = inputs
.iter()
.map(|input| {
input.map(|(target_level, output_index, node_index)| {
let i = cache.get(&node_index).unwrap();
&previous_outputs[previous_outputs.len() - (target_level - level)]
[i * MAX_INPUT_OUTPUTS_PER_NODE + output_index]
})
})
.map(|input| match input {
Some(Some(rf)) => Some(rf.clone()),
_ => None,
})
.collect::<Vec<_>>();
let sc_cloned = sc_frame.clone();
let out = {
optick::event!("FrameGraph::record_commands");
optick::tag!("name", label);
// Execute node asynchronously.
node_trait.execute_raw(
&inputs,
outputs,
device_host.get_device(),
device_host.get_queue(),
&*sc_cloned,
)
};
out
})
.collect::<Vec<_>>()
});
{
optick::event!("FrameGraph::submit_level");
optick::tag!("level", level as u32);
let mut buffers = Vec::with_capacity(encoder_outputs.len());
for out in encoder_outputs {
if let Some(buffer) = out.command_buffer {
buffers.push(buffer);
}
}
device_host.get_queue().submit(buffers);
}
}
}
// Reset
optick::event!("FrameGraph::reset");
self.reset();
}
}
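// Illustrative wiring sketch (uses the hypothetical `PassthroughNode` from
// above): route a node's output 0 into the end node's input 0.
fn wire_passthrough(graph: &FrameGraph) -> Result<(), GraphConnectError> {
let node = graph.add_node(PassthroughNode, "Passthrough");
graph.connect(node, 0, graph.get_end_node(), 0)
}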
#[derive(Clone)]
pub enum FrameNodeValue {
Buffer(ResourceRc<Buffer>),
BindGroup(ResourceRc<BindGroup>),
TextureView(ResourceRc<TextureView>),
SampledTexture(SampledTexture),
}
impl std::fmt::Debug for FrameNodeValue {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match self {
Self::Buffer(_) => f.write_str("Buffer"),
Self::BindGroup(_) => f.write_str("BindGroup"),
Self::TextureView(_) => f.write_str("Texture"),
Self::SampledTexture(_) => f.write_str("SampledTexture"),
}
}
}
use thiserror::Error;
#[derive(Error, Debug)]
pub enum GraphConnectError {
#[error("The target node has reached its input limit")]
MaximumInputsReached,
#[error("The source node has reached its outputs limit")]
MaximumOutputsReached,
#[error("The target node does not exist")]
InvalidDestination,
#[error("The target nodes input is already connected")]
AlreadyConnected,
}
#[derive(Clone)]
struct TraversedGraphNode {
index: Index,
inputs: [Option<(usize, usize, Index)>; MAX_INPUT_OUTPUTS_PER_NODE],
}
pub trait DeviceHost: Send + Sync + 'static {
fn get_device(&self) -> &wgpu::Device;
fn get_queue(&self) -> &wgpu::Queue;
}
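// Sketch of a `DeviceHost` implementation (struct and field names are
// assumptions; the engine's real host type is not shown in this file).
struct SimpleHost {
device: wgpu::Device,
queue: wgpu::Queue,
}
impl DeviceHost for SimpleHost {
fn get_device(&self) -> &wgpu::Device {
&self.device
}
fn get_queue(&self) -> &wgpu::Queue {
&self.queue
}
}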
macro_rules! impl_get_node_specific {
($getter:ident, $ty:ident, $rty:ty) => {
impl FrameNodeValue {
pub fn $getter(&self) -> &$rty {
match self {
FrameNodeValue::$ty(group) => group,
_ => panic!(
"Unexpected frame node value, expected '{}' but received '{:?}'",
stringify!($ty),
self
),
}
}
}
};
}
impl_get_node_specific!(get_bind_group, BindGroup, ResourceRc<BindGroup>);
impl_get_node_specific!(get_texture_view, TextureView, ResourceRc<TextureView>);
impl_get_node_specific!(get_sampled_texture, SampledTexture, SampledTexture);
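// Note that no accessor is generated for the `Buffer` variant; if one were
// needed, the same macro could provide it:
// `impl_get_node_specific!(get_buffer, Buffer, ResourceRc<Buffer>);`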
graph.rs
use crate::{CommandEncoder, CommandEncoderOutput};
use generational_arena::Arena;
use moonwave_resources::{BindGroup, Buffer, ResourceRc, SampledTexture, TextureView};
use multimap::MultiMap;
use parking_lot::{RwLock, RwLockReadGuard};
use rayon::{prelude::*, ThreadPool};
use std::{
collections::HashMap,
fmt::{Debug, Formatter},
sync::Arc,
};
pub use generational_arena::Index;
pub trait FrameGraphNode: Send + Sync + 'static {
fn execute(
&self,
_inputs: &[Option<FrameNodeValue>],
_outputs: &mut [Option<FrameNodeValue>],
_encoder: &mut CommandEncoder,
) {
}
fn execute_raw(
&self,
inputs: &[Option<FrameNodeValue>],
outputs: &mut [Option<FrameNodeValue>],
device: &wgpu::Device,
_queue: &wgpu::Queue,
_sc_frame: &wgpu::SwapChainFrame,
) -> CommandEncoderOutput {
let mut encoder = CommandEncoder::new(device, "NodeGraphEncoder");
self.execute(inputs, outputs, &mut encoder);
encoder.finish()
}
}
const MAX_LAYERS: usize = 8;
const MAX_NODES_PER_LAYER: usize = 8;
const MAX_INPUT_OUTPUTS_PER_NODE: usize = 16;
struct ConnectedNode {
name: String,
node: Arc<dyn FrameGraphNode>,
inputs: [Option<Index>; MAX_INPUT_OUTPUTS_PER_NODE],
}
struct ConnectedEdges {
owner_node_index: Index,
output_index: usize,
}
pub struct FrameGraph {
node_arena: RwLock<Arena<ConnectedNode>>,
edges_arena: RwLock<Arena<ConnectedEdges>>,
end_node: Index,
output_map: Vec<Vec<Option<FrameNodeValue>>>,
levels_map: MultiMap<usize, TraversedGraphNode>,
traversed_node_cache: HashMap<Index, usize>,
}
impl FrameGraph {
/// Creates a new empty graph.
pub fn new<T: FrameGraphNode>(end_node: T) -> Self {
let mut node_arena = Arena::with_capacity(MAX_LAYERS * MAX_NODES_PER_LAYER);
let end_node = node_arena.insert(ConnectedNode {
name: "EndNode".to_string(),
node: Arc::new(end_node),
inputs: [None; MAX_INPUT_OUTPUTS_PER_NODE],
});
Self {
node_arena: RwLock::new(node_arena),
edges_arena: RwLock::new(Arena::with_capacity(
MAX_LAYERS * MAX_INPUT_OUTPUTS_PER_NODE * MAX_NODES_PER_LAYER,
)),
output_map: vec![vec![None; MAX_NODES_PER_LAYER * MAX_INPUT_OUTPUTS_PER_NODE]; MAX_LAYERS],
levels_map: MultiMap::with_capacity(MAX_LAYERS),
traversed_node_cache: HashMap::with_capacity(
MAX_LAYERS * MAX_INPUT_OUTPUTS_PER_NODE * MAX_NODES_PER_LAYER,
),
end_node,
}
}
/// Returns the end node.
pub fn get_end_node(&self) -> Index {
self.end_node
}
/// Resets the frame graph by removing all nodes and sets up a new end node.
pub fn reset(&mut self) {
let mut nodes = self.node_arena.write();
let end_node_impl = nodes.get(self.end_node).unwrap().node.clone();
nodes.clear();
self.traversed_node_cache.clear();
self.edges_arena.write().clear();
self.end_node = nodes.insert(ConnectedNode {
name: "EndNode".to_string(),
node: end_node_impl,
inputs: [None; MAX_INPUT_OUTPUTS_PER_NODE],
});
}
/// Add a new node into the graph.
pub fn add_node<T: FrameGraphNode>(&self, node: T, name: &str) -> Index {
self.node_arena.write().insert(ConnectedNode {
name: name.to_string(),
node: Arc::new(node),
inputs: [None; MAX_INPUT_OUTPUTS_PER_NODE],
})
}
/// Connects one node's output to another node's input.
pub fn connect(
&self,
source: Index,
source_output: usize,
destination: Index,
destination_input: usize,
) -> Result<(), GraphConnectError> {
// Validate connection parameters.
if destination_input >= MAX_INPUT_OUTPUTS_PER_NODE {
return Err(GraphConnectError::MaximumInputsReached);
};
if source_output >= MAX_INPUT_OUTPUTS_PER_NODE {
return Err(GraphConnectError::MaximumOutputsReached);
};
let mut edges = self.edges_arena.write();
let mut nodes = self.node_arena.write();
let destination_node = nodes
.get_mut(destination)
.ok_or(GraphConnectError::InvalidDestination)?;
// Target input is already connected.
if destination_node.inputs[destination_input].is_some() {
return Err(GraphConnectError::AlreadyConnected);
}
// Target input is empty so simply create the connection.
let edge = edges.insert(ConnectedEdges {
owner_node_index: source,
output_index: source_output,
});
destination_node.inputs[destination_input] = Some(edge);
Ok(())
}
fn traverse_node(
cache: &mut HashMap<Index, usize>,
levels_map: &mut MultiMap<usize, TraversedGraphNode>,
nodes: &RwLockReadGuard<Arena<ConnectedNode>>,
edges: &RwLockReadGuard<Arena<ConnectedEdges>>,
node_index: Index,
level: usize,
) {
// Build the traversed node with input/output mapping info.
let mut traversed_node = TraversedGraphNode {
index: node_index,
inputs: [None; MAX_INPUT_OUTPUTS_PER_NODE],
};
// Remove this node from any lower level it was placed on earlier, so that
// it only appears once, at the deepest level that depends on it.
let mut has_retained = false;
for l in 0..level {
// Remove the previously traversed node from this level, if present.
if let Some(vec) = levels_map.get_vec_mut(&l) {
let before_len = vec.len();
vec.retain(|x| x.index != node_index);
if before_len != vec.len() {
has_retained = true;
}
}
}
// Update all inputs that still reference kicked out node.
if has_retained {
for l in 0..level {
if let Some(vec) = levels_map.get_vec_mut(&l) {
for node in vec {
for input in &mut node.inputs {
if let Some((nlevel, _, index)) = input {
if index == &node_index {
*nlevel = level;
}
}
}
}
}
}
}
// Loop through all inputs
let next_level = level + 1;
let node = nodes.get(node_index).unwrap();
for (input_index, input) in node.inputs.iter().enumerate() {
if let Some(input) = input {
let edge = edges.get(*input).unwrap();
let inner_node = edge.owner_node_index;
traversed_node.inputs[input_index] = Some((next_level, edge.output_index, inner_node));
Self::traverse_node(cache, levels_map, nodes, edges, inner_node, next_level);
}
}
// Store traversed node at level.
//let traversed_index = levels_map.get_vec(&level).map(|x| x.len()).unwrap_or(0);
//cache.insert(node_index, traversed_index);
// TODO: Due to retaining this index breaks currently :'(
levels_map.insert(level, traversed_node);
}
/// Executes the graph using the given scheduler.
pub fn execute<T: DeviceHost>(
&mut self,
sc_frame: Arc<wgpu::SwapChainFrame>,
device_host: &'static T,
pool: &ThreadPool,
) {
{
{
optick::event!("FrameGraph::traverse");
// Gain read access to nodes and connections.
let nodes = self.node_arena.read();
let edges = self.edges_arena.read();
// Start traversing from end.
self.levels_map.clear();
Self::traverse_node(
&mut self.traversed_node_cache,
&mut self.levels_map,
&nodes,
&edges,
self.end_node,
0,
);
}
let cache = &mut self.traversed_node_cache;
// Create async executor.
let mut local_pool = futures::executor::LocalPool::new();
let local_spawner = local_pool.spawner();
// Execute in levels order
let mut all_levels = self.levels_map.keys().cloned().collect::<Vec<_>>();
all_levels.sort_unstable();
let max_levels = all_levels.len();
for level in all_levels.into_iter().rev() {
optick::event!("FrameGraph::execute_level");
optick::tag!("level", level as u32);
// Get rid of duplicated nodes.
let mut nodes_in_level = self.levels_map.get_vec_mut(&level).unwrap().clone();
nodes_in_level.sort_unstable_by_key(|x| x.index);
nodes_in_level.dedup_by_key(|x| x.index);
// Build cache for this level
for (index, node) in nodes_in_level.iter().enumerate() {
cache.insert(node.index, index);
}
// Get chunks
let nodes = self.node_arena.read();
let read_nodes = nodes_in_level
.iter()
.map(|node| (nodes.get(node.index).unwrap(), node.inputs))
.collect::<Vec<_>>();
let mut empty = [Vec::with_capacity(0)];
#[allow(clippy::type_complexity)]
let (outputs, previous_outputs): (
&mut [Vec<Option<FrameNodeValue>>],
&mut [Vec<Option<FrameNodeValue>>],
) = if level == (max_levels - 1) {
(&mut self.output_map, &mut empty)
} else {
self.output_map.split_at_mut(level + 1)
};
let outputs_per_node = outputs[outputs.len() - 1]
.chunks_mut(MAX_INPUT_OUTPUTS_PER_NODE)
.enumerate()
.collect::<Vec<_>>();
// Execute
let encoder_outputs = pool.install(|| {
read_nodes
.par_iter()
.zip(outputs_per_node)
.enumerate()
.map(|(_i, ((node, inputs), (_oi, outputs)))| {
optick::event!("FrameGraph::node");
// Prepare node execution
optick::tag!("name", node.name);
let node_trait = node.node.clone();
let label = format!("NodeCommandEncoder_{}", node.name);
// Map outputs -> inputs.
/*
for (idx, input) in inputs.iter().enumerate() {
if let Some((target_level, output_index, node_index)) = input {
let i = cache.get(&node_index).unwrap();
println!(
"Mapping input #{} to level = {} ({}) and index = {} ({}, {})",
idx,
target_level,
previous_outputs.len() - (target_level - level),
i * MAX_INPUT_OUTPUTS_PER_NODE + output_index,
i,
output_index
);
} else {
println!("Mapping input #{} to None", i);
}
}
*/
let inputs = inputs
.iter()
.map(|input| {
input.map(|(target_level, output_index, node_index)| {
let i = cache.get(&node_index).unwrap();
&previous_outputs[previous_outputs.len() - (target_level - level)]
[i * MAX_INPUT_OUTPUTS_PER_NODE + output_index]
})
})
.map(|input| match input {
Some(Some(rf)) => Some(rf.clone()),
_ => None,
})
.collect::<Vec<_>>();
let sc_cloned = sc_frame.clone();
let out = {
optick::event!("FrameGraph::record_commands");
optick::tag!("name", label);
// Execute node asynchronously.
node_trait.execute_raw(
&inputs,
outputs,
device_host.get_device(),
device_host.get_queue(),
&*sc_cloned,
)
};
out
})
.collect::<Vec<_>>()
});
{
optick::event!("FrameGraph::submit_level");
optick::tag!("level", level as u32);
let mut buffers = Vec::with_capacity(encoder_outputs.len());
for out in encoder_outputs {
if let Some(buffer) = out.command_buffer {
buffers.push(buffer);
}
}
device_host.get_queue().submit(buffers);
}
}
}
// Reset
optick::event!("FrameGraph::reset");
self.reset();
}
}
#[derive(Clone)]
pub enum FrameNodeValue {
Buffer(ResourceRc<Buffer>),
BindGroup(ResourceRc<BindGroup>),
TextureView(ResourceRc<TextureView>),
SampledTexture(SampledTexture),
}
impl std::fmt::Debug for FrameNodeValue {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match self {
Self::Buffer(_) => f.write_str("Buffer"),
Self::BindGroup(_) => f.write_str("BindGroup"),
Self::TextureView(_) => f.write_str("Texture"),
Self::SampledTexture(_) => f.write_str("SampledTexture"),
}
}
}
use thiserror::Error;
#[derive(Error, Debug)]
pub enum GraphConnectError {
#[error("The target node has reached its input limit")]
MaximumInputsReached,
#[error("The source node has reached its outputs limit")]
MaximumOutputsReached,
#[error("The target node does not exist")]
InvalidDestination,
#[error("The target nodes input is already connected")]
AlreadyConnected,
}
#[derive(Clone)]
struct TraversedGraphNode {
index: Index,
inputs: [Option<(usize, usize, Index)>; MAX_INPUT_OUTPUTS_PER_NODE],
}
pub trait DeviceHost: Send + Sync + 'static {
fn get_device(&self) -> &wgpu::Device;
fn get_queue(&self) -> &wgpu::Queue;
}
macro_rules! impl_get_node_specific {
($getter:ident, $ty:ident, $rty:ty) => {
impl FrameNodeValue {
pub fn $getter(&self) -> &$rty {
match self {
FrameNodeValue::$ty(group) => group,
_ => panic!(
"Unexpected frame node value, expected '{}' but received '{:?}'",
stringify!($ty),
self
),
}
}
}
};
}
impl_get_node_specific!(get_bind_group, BindGroup, ResourceRc<BindGroup>);
impl_get_node_specific!(get_texture_view, TextureView, ResourceRc<TextureView>);
impl_get_node_specific!(get_sampled_texture, SampledTexture, SampledTexture);
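// End-to-end sketch (assumptions: the hypothetical `PassthroughNode` and
// `SimpleHost` from the earlier copy; the real per-frame driver is not shown):
fn run_frame(
graph: &mut FrameGraph,
host: &'static SimpleHost,
pool: &rayon::ThreadPool,
frame: std::sync::Arc<wgpu::SwapChainFrame>,
) {
let node = graph.add_node(PassthroughNode, "Passthrough");
graph
.connect(node, 0, graph.get_end_node(), 0)
.expect("valid connection");
graph.execute(frame, host, pool); // executes, submits, then resets the graph
}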