file_name (large_string, lengths 4–140) | prefix (large_string, lengths 0–39k) | suffix (large_string, lengths 0–36.1k) | middle (large_string, lengths 0–29.4k) | fim_type (large_string, 4 values)
---|---|---|---|---|
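The rows below are raw dumps of fill-in-the-middle (FIM) samples: each record stores a source file split into a `prefix`, a removed `middle` span, and a `suffix`, together with a `fim_type` label (`random_line_split`, `conditional_block`, `identifier_name`, or `identifier_body`) describing how the middle span was chosen; the `|` characters inside the rows mark the column boundaries. A minimal Python sketch of how the columns fit together is shown below; the dataclass and the FIM sentinel tokens are illustrative assumptions, not something the dataset itself defines.

```python
# Minimal sketch (not part of the dataset) of how one row's columns relate.
# The FIM sentinel token names below are illustrative assumptions.
from dataclasses import dataclass

@dataclass
class FimRow:
    file_name: str
    prefix: str
    suffix: str
    middle: str
    fim_type: str  # "random_line_split", "conditional_block", "identifier_name", or "identifier_body"

def reconstruct_source(row: FimRow) -> str:
    # Concatenating the three spans in file order restores the original file text.
    return row.prefix + row.middle + row.suffix

def to_psm_training_text(row: FimRow) -> str:
    # One common prefix-suffix-middle (PSM) arrangement for FIM training;
    # the sentinel strings are placeholders, not defined by this dataset.
    return f"<fim_prefix>{row.prefix}<fim_suffix>{row.suffix}<fim_middle>{row.middle}"
```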
sha1.rs | // Implements http://rosettacode.org/wiki/SHA-1
// straight port from golang crypto/sha1
// library implementation
#![feature(core)]
use std::num::Wrapping as wr;
use std::slice::bytes::copy_memory;
use std::io::{Write, Result};
// The size of a SHA1 checksum in bytes.
const SIZE: usize = 20;
// The blocksize of SHA1 in bytes.
const CHUNK:usize = 64;
const INIT:[wr<u32>; 5] = [wr(0x67452301),wr(0xEFCDAB89), wr(0x98BADCFE),
wr(0x10325476), wr(0xC3D2E1F0)];
#[cfg(not(test))]
fn main() {
let mut d = Digest::new();
let _ = write!(&mut d, "The quick brown fox jumps over the lazy dog");
let sha1=d.sha1();
for h in &sha1 {
print!("{:x} ", *h);
}
}
// digest represents the partial evaluation of a checksum.
struct Digest {
h: [wr<u32>; 5],
x: [u8; CHUNK],
nx: usize,
len: u64
}
impl Digest {
fn new() -> Digest {
Digest {
h: INIT,
x: [0u8; CHUNK],
nx: 0,
len:0u64
}
}
fn sha1(&mut self) -> [u8; SIZE] {
let mut len = self.len;
// Padding. Add a 1 bit and 0 bits until 56 bytes mod 64.
let mut tmp : [u8; 64] = [0u8; 64];
tmp[0] = 0x80u8;
let m:usize=(len%64u64) as usize;
if m < 56 {
self.write_all(&tmp[0..56-m]).unwrap();
} else {
self.write_all(&tmp[0..64+56-m]).unwrap();
}
// Length in bits (= length in bytes * 8, i.e. shift left by 3 bits).
len = len << 3;
for i in (0..8) {
tmp[i] = (len >> (56 - 8*i)) as u8;
}
self.write_all(&tmp[0..8]).unwrap();
assert!(self.nx == 0);
let mut digest : [u8; SIZE]=[0u8; SIZE];
for (i, s) in self.h.iter().enumerate() {
digest[i*4] = (*s >> 24).0 as u8;
digest[i*4+1] = (*s >> 16).0 as u8;
digest[i*4+2] = (*s >> 8).0 as u8;
digest[i*4+3] = s.0 as u8;
}
digest
}
fn process_block(&self, data:&[u8]) -> [wr<u32>; 5]{
let k:[u32; 4] = [0x5A827999, 0x6ED9EBA1, 0x8F1BBCDC, 0xCA62C1D6];
#[inline]
fn part(a: wr<u32>, b: wr<u32>) -> (wr<u32>, wr<u32>) {
((a<<5 | a>>(32-5)), (b<<30 | b>>(32-30)))
}
let mut w :[u32; 16] = [0u32; 16];
let (mut h0, mut h1, mut h2, mut h3, mut h4) =
(self.h[0], self.h[1], self.h[2], self.h[3], self.h[4]);
let mut p = data;
while p.len() >= CHUNK {
for i in (0..16) {
let j = i * 4;
w[i] = (p[j] as u32)<<24 |
(p[j+1] as u32)<<16 |
(p[j+2] as u32) <<8 |
p[j+3] as u32;
}
let (mut a, mut b, mut c, mut d, mut e) = (h0, h1, h2, h3, h4);
for i in (0..16) {
let f = b & c | (!b) & d;
let (a5, b30) = part(a, b);
let t = a5 + f + e + wr(w[i&0xf]) + wr(k[0]);
b=a; a=t; e=d; d=c; c=b30;
}
for i in (16..20) {
let tmp = w[(i-3)&0xf] ^ w[(i-8)&0xf] ^ w[(i-14)&0xf] ^ w[(i)&0xf];
w[i&0xf] = tmp<<1 | tmp>>(32-1);
let f = b & c | (!b) & d;
let (a5, b30) = part(a, b);
let t = a5 + f + e + wr(w[i&0xf]) + wr(k[0]);
b=a; a=t; e=d; d=c; c=b30;
}
for i in (20..40) {
let tmp = w[(i-3)&0xf] ^ w[(i-8)&0xf] ^ w[(i-14)&0xf] ^ w[(i)&0xf];
w[i&0xf] = tmp<<1 | tmp>>(32-1);
let f = b ^ c ^ d;
let (a5, b30) = part(a, b);
let t = a5 + f + e + wr(w[i&0xf]) + wr(k[1]);
b=a; a=t; e=d; d=c; c=b30;
}
for i in (40..60) {
let tmp = w[(i-3)&0xf] ^ w[(i-8)&0xf] ^ w[(i-14)&0xf] ^ w[(i)&0xf];
w[i&0xf] = tmp<<1 | tmp>>(32-1);
let f = ((b | c) & d) | (b & c);
let (a5, b30) = part(a, b);
let t = a5 + f + e + wr(w[i&0xf]) + wr(k[2]);
b=a; a=t; e=d; d=c; c=b30;
}
for i in (60..80) {
let tmp = w[(i-3)&0xf] ^ w[(i-8)&0xf] ^ w[(i-14)&0xf] ^ w[(i)&0xf];
w[i&0xf] = tmp<<1 | tmp>>(32-1);
let f = b ^ c ^ d;
let (a5, b30) = part(a, b);
let t = a5 + f + e + wr(w[i&0xf]) + wr(k[3]);
b=a; a=t; e=d; d=c; c=b30;
}
h0 = h0 + a;
h1 = h1 + b;
h2 = h2 + c;
h3 = h3 + d;
h4 = h4 + e;
p = &p[CHUNK..];
}
[h0, h1, h2, h3, h4]
}
}
impl Write for Digest {
fn write(&mut self, buf: &[u8]) -> Result<usize> {
try!(self.write_all(buf));
Ok(buf.len())
}
#[inline]
fn write_all(&mut self, buf: &[u8]) -> Result<()> {
let mut buf_m = buf;
self.len += buf_m.len() as u64;
if self.nx > 0 {
let mut n = buf_m.len();
if n > CHUNK - self.nx {
n = CHUNK - self.nx;
}
for i in (0..n) {
self.x[self.nx + i] = *buf_m.get(i).unwrap();
}
self.nx += n;
if self.nx == CHUNK {
let x = &(self.x[..]);
self.h=self.process_block(x);
self.nx = 0;
}
buf_m = &buf_m[n..];
}
if buf_m.len() >= CHUNK {
let n = buf_m.len() &!(CHUNK - 1);
let x = &(self.x[n..]);
self.h=self.process_block(x); | assert!(self.x.len() >= ln);
copy_memory(buf_m, &mut self.x);
self.nx = ln;
}
Ok(())
}
fn flush(&mut self) -> Result<()> { Ok(()) }
}
#[test]
fn known_sha1s() {
let input_output = [
(
"His money is twice tainted: 'taint yours and 'taint mine.",
[0x59u8, 0x7f, 0x6a, 0x54, 0x0, 0x10, 0xf9, 0x4c,
0x15, 0xd7, 0x18, 0x6, 0xa9, 0x9a, 0x2c, 0x87, 0x10,
0xe7, 0x47, 0xbd]
),
(
"The quick brown fox jumps over the lazy dog",
[0x2fu8, 0xd4, 0xe1, 0xc6, 0x7a, 0x2d,
0x28, 0xfc, 0xed, 0x84, 0x9e, 0xe1, 0xbb, 0x76
, 0xe7, 0x39, 0x1b, 0x93, 0xeb, 0x12]
),
(
"The quick brown fox jumps over the lazy cog",
[0xdeu8 ,0x9f ,0x2c ,0x7f ,0xd2 ,0x5e ,0x1b ,0x3a
,0xfa ,0xd3 ,0xe8 ,0x5a ,0x0b ,0xd1 ,0x7d ,0x9b
,0x10 ,0x0d ,0xb4,0xb3]
)];
for &(i, o) in &input_output {
let mut d = Digest::new();
let _ = write!(&mut d, "{}", i);
let sha1=d.sha1();
assert_eq!(sha1, o);
}
} | buf_m = &buf_m[n..];
}
let ln=buf_m.len();
if ln > 0 { | random_line_split |
sha1.rs | // Implements http://rosettacode.org/wiki/SHA-1
// straight port from golang crypto/sha1
// library implementation
#![feature(core)]
use std::num::Wrapping as wr;
use std::slice::bytes::copy_memory;
use std::io::{Write, Result};
// The size of a SHA1 checksum in bytes.
const SIZE: usize = 20;
// The blocksize of SHA1 in bytes.
const CHUNK:usize = 64;
const INIT:[wr<u32>; 5] = [wr(0x67452301),wr(0xEFCDAB89), wr(0x98BADCFE),
wr(0x10325476), wr(0xC3D2E1F0)];
#[cfg(not(test))]
fn main() {
let mut d = Digest::new();
let _ = write!(&mut d, "The quick brown fox jumps over the lazy dog");
let sha1=d.sha1();
for h in &sha1 {
print!("{:x} ", *h);
}
}
// digest represents the partial evaluation of a checksum.
struct Digest {
h: [wr<u32>; 5],
x: [u8; CHUNK],
nx: usize,
len: u64
}
impl Digest {
fn new() -> Digest {
Digest {
h: INIT,
x: [0u8; CHUNK],
nx: 0,
len:0u64
}
}
fn sha1(&mut self) -> [u8; SIZE] {
let mut len = self.len;
// Padding. Add a 1 bit and 0 bits until 56 bytes mod 64.
let mut tmp : [u8; 64] = [0u8; 64];
tmp[0] = 0x80u8;
let m:usize=(len%64u64) as usize;
if m < 56 {
self.write_all(&tmp[0..56-m]).unwrap();
} else {
self.write_all(&tmp[0..64+56-m]).unwrap();
}
// Length in bits (= length in bytes * 8, i.e. shift left by 3 bits).
len = len << 3;
for i in (0..8) {
tmp[i] = (len >> (56 - 8*i)) as u8;
}
self.write_all(&tmp[0..8]).unwrap();
assert!(self.nx == 0);
let mut digest : [u8; SIZE]=[0u8; SIZE];
for (i, s) in self.h.iter().enumerate() {
digest[i*4] = (*s >> 24).0 as u8;
digest[i*4+1] = (*s >> 16).0 as u8;
digest[i*4+2] = (*s >> 8).0 as u8;
digest[i*4+3] = s.0 as u8;
}
digest
}
fn process_block(&self, data:&[u8]) -> [wr<u32>; 5]{
let k:[u32; 4] = [0x5A827999, 0x6ED9EBA1, 0x8F1BBCDC, 0xCA62C1D6];
#[inline]
fn part(a: wr<u32>, b: wr<u32>) -> (wr<u32>, wr<u32>) {
((a<<5 | a>>(32-5)), (b<<30 | b>>(32-30)))
}
let mut w :[u32; 16] = [0u32; 16];
let (mut h0, mut h1, mut h2, mut h3, mut h4) =
(self.h[0], self.h[1], self.h[2], self.h[3], self.h[4]);
let mut p = data;
while p.len() >= CHUNK {
for i in (0..16) {
let j = i * 4;
w[i] = (p[j] as u32)<<24 |
(p[j+1] as u32)<<16 |
(p[j+2] as u32) <<8 |
p[j+3] as u32;
}
let (mut a, mut b, mut c, mut d, mut e) = (h0, h1, h2, h3, h4);
for i in (0..16) {
let f = b & c | (!b) & d;
let (a5, b30) = part(a, b);
let t = a5 + f + e + wr(w[i&0xf]) + wr(k[0]);
b=a; a=t; e=d; d=c; c=b30;
}
for i in (16..20) {
let tmp = w[(i-3)&0xf] ^ w[(i-8)&0xf] ^ w[(i-14)&0xf] ^ w[(i)&0xf];
w[i&0xf] = tmp<<1 | tmp>>(32-1);
let f = b & c | (!b) & d;
let (a5, b30) = part(a, b);
let t = a5 + f + e + wr(w[i&0xf]) + wr(k[0]);
b=a; a=t; e=d; d=c; c=b30;
}
for i in (20..40) {
let tmp = w[(i-3)&0xf] ^ w[(i-8)&0xf] ^ w[(i-14)&0xf] ^ w[(i)&0xf];
w[i&0xf] = tmp<<1 | tmp>>(32-1);
let f = b ^ c ^ d;
let (a5, b30) = part(a, b);
let t = a5 + f + e + wr(w[i&0xf]) + wr(k[1]);
b=a; a=t; e=d; d=c; c=b30;
}
for i in (40..60) {
let tmp = w[(i-3)&0xf] ^ w[(i-8)&0xf] ^ w[(i-14)&0xf] ^ w[(i)&0xf];
w[i&0xf] = tmp<<1 | tmp>>(32-1);
let f = ((b | c) & d) | (b & c);
let (a5, b30) = part(a, b);
let t = a5 + f + e + wr(w[i&0xf]) + wr(k[2]);
b=a; a=t; e=d; d=c; c=b30;
}
for i in (60..80) {
let tmp = w[(i-3)&0xf] ^ w[(i-8)&0xf] ^ w[(i-14)&0xf] ^ w[(i)&0xf];
w[i&0xf] = tmp<<1 | tmp>>(32-1);
let f = b ^ c ^ d;
let (a5, b30) = part(a, b);
let t = a5 + f + e + wr(w[i&0xf]) + wr(k[3]);
b=a; a=t; e=d; d=c; c=b30;
}
h0 = h0 + a;
h1 = h1 + b;
h2 = h2 + c;
h3 = h3 + d;
h4 = h4 + e;
p = &p[CHUNK..];
}
[h0, h1, h2, h3, h4]
}
}
impl Write for Digest {
fn write(&mut self, buf: &[u8]) -> Result<usize> {
try!(self.write_all(buf));
Ok(buf.len())
}
#[inline]
fn write_all(&mut self, buf: &[u8]) -> Result<()> {
let mut buf_m = buf;
self.len += buf_m.len() as u64;
if self.nx > 0 {
let mut n = buf_m.len();
if n > CHUNK - self.nx {
n = CHUNK - self.nx;
}
for i in (0..n) {
self.x[self.nx + i] = *buf_m.get(i).unwrap();
}
self.nx += n;
if self.nx == CHUNK {
let x = &(self.x[..]);
self.h=self.process_block(x);
self.nx = 0;
}
buf_m = &buf_m[n..];
}
if buf_m.len() >= CHUNK {
let n = buf_m.len() &!(CHUNK - 1);
let x = &(self.x[n..]);
self.h=self.process_block(x);
buf_m = &buf_m[n..];
}
let ln=buf_m.len();
if ln > 0 |
Ok(())
}
fn flush(&mut self) -> Result<()> { Ok(()) }
}
#[test]
fn known_sha1s() {
let input_output = [
(
"His money is twice tainted: 'taint yours and 'taint mine.",
[0x59u8, 0x7f, 0x6a, 0x54, 0x0, 0x10, 0xf9, 0x4c,
0x15, 0xd7, 0x18, 0x6, 0xa9, 0x9a, 0x2c, 0x87, 0x10,
0xe7, 0x47, 0xbd]
),
(
"The quick brown fox jumps over the lazy dog",
[0x2fu8, 0xd4, 0xe1, 0xc6, 0x7a, 0x2d,
0x28, 0xfc, 0xed, 0x84, 0x9e, 0xe1, 0xbb, 0x76
, 0xe7, 0x39, 0x1b, 0x93, 0xeb, 0x12]
),
(
"The quick brown fox jumps over the lazy cog",
[0xdeu8 ,0x9f ,0x2c ,0x7f ,0xd2 ,0x5e ,0x1b ,0x3a
,0xfa ,0xd3 ,0xe8 ,0x5a ,0x0b ,0xd1 ,0x7d ,0x9b
,0x10 ,0x0d ,0xb4,0xb3]
)];
for &(i, o) in &input_output {
let mut d = Digest::new();
let _ = write!(&mut d, "{}", i);
let sha1=d.sha1();
assert_eq!(sha1, o);
}
}
| {
assert!(self.x.len() >= ln);
copy_memory(buf_m, &mut self.x);
self.nx = ln;
} | conditional_block |
sha1.rs | // Implements http://rosettacode.org/wiki/SHA-1
// straight port from golang crypto/sha1
// library implementation
#![feature(core)]
use std::num::Wrapping as wr;
use std::slice::bytes::copy_memory;
use std::io::{Write, Result};
// The size of a SHA1 checksum in bytes.
const SIZE: usize = 20;
// The blocksize of SHA1 in bytes.
const CHUNK:usize = 64;
const INIT:[wr<u32>; 5] = [wr(0x67452301),wr(0xEFCDAB89), wr(0x98BADCFE),
wr(0x10325476), wr(0xC3D2E1F0)];
#[cfg(not(test))]
fn main() {
let mut d = Digest::new();
let _ = write!(&mut d, "The quick brown fox jumps over the lazy dog");
let sha1=d.sha1();
for h in &sha1 {
print!("{:x} ", *h);
}
}
// digest represents the partial evaluation of a checksum.
struct Digest {
h: [wr<u32>; 5],
x: [u8; CHUNK],
nx: usize,
len: u64
}
impl Digest {
fn new() -> Digest {
Digest {
h: INIT,
x: [0u8; CHUNK],
nx: 0,
len:0u64
}
}
fn sha1(&mut self) -> [u8; SIZE] {
let mut len = self.len;
// Padding. Add a 1 bit and 0 bits until 56 bytes mod 64.
let mut tmp : [u8; 64] = [0u8; 64];
tmp[0] = 0x80u8;
let m:usize=(len%64u64) as usize;
if m < 56 {
self.write_all(&tmp[0..56-m]).unwrap();
} else {
self.write_all(&tmp[0..64+56-m]).unwrap();
}
// Length in bits (= length in bytes * 8, i.e. shift left by 3 bits).
len = len << 3;
for i in (0..8) {
tmp[i] = (len >> (56 - 8*i)) as u8;
}
self.write_all(&tmp[0..8]).unwrap();
assert!(self.nx == 0);
let mut digest : [u8; SIZE]=[0u8; SIZE];
for (i, s) in self.h.iter().enumerate() {
digest[i*4] = (*s >> 24).0 as u8;
digest[i*4+1] = (*s >> 16).0 as u8;
digest[i*4+2] = (*s >> 8).0 as u8;
digest[i*4+3] = s.0 as u8;
}
digest
}
fn process_block(&self, data:&[u8]) -> [wr<u32>; 5]{
let k:[u32; 4] = [0x5A827999, 0x6ED9EBA1, 0x8F1BBCDC, 0xCA62C1D6];
#[inline]
fn part(a: wr<u32>, b: wr<u32>) -> (wr<u32>, wr<u32>) {
((a<<5 | a>>(32-5)), (b<<30 | b>>(32-30)))
}
let mut w :[u32; 16] = [0u32; 16];
let (mut h0, mut h1, mut h2, mut h3, mut h4) =
(self.h[0], self.h[1], self.h[2], self.h[3], self.h[4]);
let mut p = data;
while p.len() >= CHUNK {
for i in (0..16) {
let j = i * 4;
w[i] = (p[j] as u32)<<24 |
(p[j+1] as u32)<<16 |
(p[j+2] as u32) <<8 |
p[j+3] as u32;
}
let (mut a, mut b, mut c, mut d, mut e) = (h0, h1, h2, h3, h4);
for i in (0..16) {
let f = b & c | (!b) & d;
let (a5, b30) = part(a, b);
let t = a5 + f + e + wr(w[i&0xf]) + wr(k[0]);
b=a; a=t; e=d; d=c; c=b30;
}
for i in (16..20) {
let tmp = w[(i-3)&0xf] ^ w[(i-8)&0xf] ^ w[(i-14)&0xf] ^ w[(i)&0xf];
w[i&0xf] = tmp<<1 | tmp>>(32-1);
let f = b & c | (!b) & d;
let (a5, b30) = part(a, b);
let t = a5 + f + e + wr(w[i&0xf]) + wr(k[0]);
b=a; a=t; e=d; d=c; c=b30;
}
for i in (20..40) {
let tmp = w[(i-3)&0xf] ^ w[(i-8)&0xf] ^ w[(i-14)&0xf] ^ w[(i)&0xf];
w[i&0xf] = tmp<<1 | tmp>>(32-1);
let f = b ^ c ^ d;
let (a5, b30) = part(a, b);
let t = a5 + f + e + wr(w[i&0xf]) + wr(k[1]);
b=a; a=t; e=d; d=c; c=b30;
}
for i in (40..60) {
let tmp = w[(i-3)&0xf] ^ w[(i-8)&0xf] ^ w[(i-14)&0xf] ^ w[(i)&0xf];
w[i&0xf] = tmp<<1 | tmp>>(32-1);
let f = ((b | c) & d) | (b & c);
let (a5, b30) = part(a, b);
let t = a5 + f + e + wr(w[i&0xf]) + wr(k[2]);
b=a; a=t; e=d; d=c; c=b30;
}
for i in (60..80) {
let tmp = w[(i-3)&0xf] ^ w[(i-8)&0xf] ^ w[(i-14)&0xf] ^ w[(i)&0xf];
w[i&0xf] = tmp<<1 | tmp>>(32-1);
let f = b ^ c ^ d;
let (a5, b30) = part(a, b);
let t = a5 + f + e + wr(w[i&0xf]) + wr(k[3]);
b=a; a=t; e=d; d=c; c=b30;
}
h0 = h0 + a;
h1 = h1 + b;
h2 = h2 + c;
h3 = h3 + d;
h4 = h4 + e;
p = &p[CHUNK..];
}
[h0, h1, h2, h3, h4]
}
}
impl Write for Digest {
fn write(&mut self, buf: &[u8]) -> Result<usize> {
try!(self.write_all(buf));
Ok(buf.len())
}
#[inline]
fn write_all(&mut self, buf: &[u8]) -> Result<()> {
let mut buf_m = buf;
self.len += buf_m.len() as u64;
if self.nx > 0 {
let mut n = buf_m.len();
if n > CHUNK - self.nx {
n = CHUNK - self.nx;
}
for i in (0..n) {
self.x[self.nx + i] = *buf_m.get(i).unwrap();
}
self.nx += n;
if self.nx == CHUNK {
let x = &(self.x[..]);
self.h=self.process_block(x);
self.nx = 0;
}
buf_m = &buf_m[n..];
}
if buf_m.len() >= CHUNK {
let n = buf_m.len() &!(CHUNK - 1);
let x = &(self.x[n..]);
self.h=self.process_block(x);
buf_m = &buf_m[n..];
}
let ln=buf_m.len();
if ln > 0 {
assert!(self.x.len() >= ln);
copy_memory(buf_m, &mut self.x);
self.nx = ln;
}
Ok(())
}
fn | (&mut self) -> Result<()> { Ok(()) }
}
#[test]
fn known_sha1s() {
let input_output = [
(
"His money is twice tainted: 'taint yours and 'taint mine.",
[0x59u8, 0x7f, 0x6a, 0x54, 0x0, 0x10, 0xf9, 0x4c,
0x15, 0xd7, 0x18, 0x6, 0xa9, 0x9a, 0x2c, 0x87, 0x10,
0xe7, 0x47, 0xbd]
),
(
"The quick brown fox jumps over the lazy dog",
[0x2fu8, 0xd4, 0xe1, 0xc6, 0x7a, 0x2d,
0x28, 0xfc, 0xed, 0x84, 0x9e, 0xe1, 0xbb, 0x76
, 0xe7, 0x39, 0x1b, 0x93, 0xeb, 0x12]
),
(
"The quick brown fox jumps over the lazy cog",
[0xdeu8 ,0x9f ,0x2c ,0x7f ,0xd2 ,0x5e ,0x1b ,0x3a
,0xfa ,0xd3 ,0xe8 ,0x5a ,0x0b ,0xd1 ,0x7d ,0x9b
,0x10 ,0x0d ,0xb4,0xb3]
)];
for &(i, o) in &input_output {
let mut d = Digest::new();
let _ = write!(&mut d, "{}", i);
let sha1=d.sha1();
assert_eq!(sha1, o);
}
}
| flush | identifier_name |
alignment-gep-tup-like-1.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct | <A,B> {
a: A, b: B
}
fn f<A:Copy + 'static>(a: A, b: u16) -> @fn() -> (A, u16) {
let result: @fn() -> (A, u16) = || (copy a, b);
result
}
pub fn main() {
let (a, b) = f(22_u64, 44u16)();
info!("a=%? b=%?", a, b);
assert_eq!(a, 22u64);
assert_eq!(b, 44u16);
}
| pair | identifier_name |
alignment-gep-tup-like-1.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct pair<A,B> {
a: A, b: B
}
fn f<A:Copy + 'static>(a: A, b: u16) -> @fn() -> (A, u16) {
let result: @fn() -> (A, u16) = || (copy a, b);
result | pub fn main() {
let (a, b) = f(22_u64, 44u16)();
info!("a=%? b=%?", a, b);
assert_eq!(a, 22u64);
assert_eq!(b, 44u16);
} | }
| random_line_split |
CreateSelectable.tsx | import React, { Component, ComponentType } from 'react'
import { getBoundsForNode, TComputedBounds, TGetBoundsForNodeArgs } from './utils'
import { TSelectableItemState, TSelectableItemProps } from './Selectable.types'
import { SelectableGroupContext } from './SelectableGroup.context'
type TAddedProps = Partial<Pick<TSelectableItemProps, 'isSelected'>>
export const createSelectable = <T extends any>(
WrappedComponent: ComponentType<TSelectableItemProps & T>
): ComponentType<T & TAddedProps> =>
class SelectableItem extends Component<T & TAddedProps, TSelectableItemState> {
static contextType = SelectableGroupContext
static defaultProps = {
isSelected: false,
}
state = {
isSelected: this.props.isSelected,
isSelecting: false,
}
node: HTMLElement | null = null
bounds: TComputedBounds[] | null = null
componentDidMount() {
this.updateBounds()
this.context.selectable.register(this)
}
| () {
this.context.selectable.unregister(this)
}
updateBounds = (containerScroll?: TGetBoundsForNodeArgs) => {
this.bounds = getBoundsForNode(this.node!, containerScroll)
}
getSelectableRef = (ref: HTMLElement | null) => {
this.node = ref
}
render() {
return (
<WrappedComponent {...this.props} {...this.state} selectableRef={this.getSelectableRef} />
)
}
}
| componentWillUnmount | identifier_name |
CreateSelectable.tsx | import React, { Component, ComponentType } from 'react'
import { getBoundsForNode, TComputedBounds, TGetBoundsForNodeArgs } from './utils'
import { TSelectableItemState, TSelectableItemProps } from './Selectable.types'
import { SelectableGroupContext } from './SelectableGroup.context'
type TAddedProps = Partial<Pick<TSelectableItemProps, 'isSelected'>>
export const createSelectable = <T extends any>(
WrappedComponent: ComponentType<TSelectableItemProps & T>
): ComponentType<T & TAddedProps> =>
class SelectableItem extends Component<T & TAddedProps, TSelectableItemState> {
static contextType = SelectableGroupContext
static defaultProps = {
isSelected: false,
}
state = {
isSelected: this.props.isSelected,
isSelecting: false,
}
node: HTMLElement | null = null
bounds: TComputedBounds[] | null = null
componentDidMount() {
this.updateBounds()
this.context.selectable.register(this)
}
componentWillUnmount() |
updateBounds = (containerScroll?: TGetBoundsForNodeArgs) => {
this.bounds = getBoundsForNode(this.node!, containerScroll)
}
getSelectableRef = (ref: HTMLElement | null) => {
this.node = ref
}
render() {
return (
<WrappedComponent {...this.props} {...this.state} selectableRef={this.getSelectableRef} />
)
}
}
| {
this.context.selectable.unregister(this)
} | identifier_body |
CreateSelectable.tsx | import React, { Component, ComponentType } from 'react'
import { getBoundsForNode, TComputedBounds, TGetBoundsForNodeArgs } from './utils'
import { TSelectableItemState, TSelectableItemProps } from './Selectable.types'
import { SelectableGroupContext } from './SelectableGroup.context'
type TAddedProps = Partial<Pick<TSelectableItemProps, 'isSelected'>>
export const createSelectable = <T extends any>(
WrappedComponent: ComponentType<TSelectableItemProps & T>
): ComponentType<T & TAddedProps> =>
class SelectableItem extends Component<T & TAddedProps, TSelectableItemState> {
static contextType = SelectableGroupContext
static defaultProps = {
isSelected: false,
}
state = { | isSelecting: false,
}
node: HTMLElement | null = null
bounds: TComputedBounds[] | null = null
componentDidMount() {
this.updateBounds()
this.context.selectable.register(this)
}
componentWillUnmount() {
this.context.selectable.unregister(this)
}
updateBounds = (containerScroll?: TGetBoundsForNodeArgs) => {
this.bounds = getBoundsForNode(this.node!, containerScroll)
}
getSelectableRef = (ref: HTMLElement | null) => {
this.node = ref
}
render() {
return (
<WrappedComponent {...this.props} {...this.state} selectableRef={this.getSelectableRef} />
)
}
} | isSelected: this.props.isSelected, | random_line_split |
main.py | # https://github.com/Naish21/themostat
'''
* The MIT License (MIT)
*
* Copyright (c) 2016 Jorge Aranda Moro
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
'''
#This part is to connect to the WiFi
#In this case: SSID: TP-LINK_F3D4B2 & PASS: 90546747
WIFISSID='koen'
WIFIPASS='/*Casa*/'
def do_connect():
from network import WLAN
sta_if = WLAN(network.STA_IF)
if not sta_if.isconnected():
print('connecting to network...')
sta_if.active(True)
sta_if.connect(WIFISSID, WIFIPASS)
while not sta_if.isconnected():
pass
print('network config:', sta_if.ifconfig())
#---End Wifi Config---
from machine import Pin
led = Pin(2, Pin.OUT, value=1)
#---MQTT Sending---
from time import sleep_ms
from ubinascii import hexlify
from machine import unique_id
#import socket
from umqtt import MQTTClient
SERVER = "192.168.31.16"
CLIENT_ID = hexlify(unique_id())
TOPIC1 = b"/cultivo/temp"
TOPIC2 = b"/scultivo/hum"
TOPIC3 = b"/cultivo/alarma"
def | (server=SERVER, topic="/cultivo", dato=None):
try:
c = MQTTClient(CLIENT_ID, server)
c.connect()
c.publish(topic, dato)
sleep_ms(200)
c.disconnect()
#led.value(1)
except Exception as e:
pass
#led.value(0)
state = 0
def sub_cb(topic, msg):
global state
print((topic, msg))
if msg == b"on":
led.value(0)
state = 1
elif msg == b"off":
led.value(1)
state = 0
def recepcionMQTT(server=SERVER, topic=TOPIC3):
c = MQTTClient(CLIENT_ID, server)
# Subscribed messages will be delivered to this callback
c.set_callback(sub_cb)
c.connect()
c.subscribe(topic)
print("Connected to %s, subscribed to %s topic" % (server, topic))
try:
c.wait_msg()
finally:
c.disconnect()
#---End MQTT Sending---
#---DHT22---
from dht import DHT22
ds = DHT22(Pin(4)) #DHT22 connected to GPIO4
def medirTemHum():
try:
ds.measure()
tem = ds.temperature()
hum = ds.humidity()
#ed.value(1)
return (tem,hum)
except Exception as e:
#led.value(0)
return (-1,-1)
#---End DHT22---
#---Main Program---
sleep_ms(10000)
while True:
(tem,hum) = medirTemHum()
envioMQTT(SERVER,TOPIC1,str(tem))
envioMQTT(SERVER,TOPIC2,str(hum))
recepcionMQTT()
sleep_ms(10000)
#---END Main Program---
| envioMQTT | identifier_name |
main.py | # https://github.com/Naish21/themostat
'''
* The MIT License (MIT)
*
* Copyright (c) 2016 Jorge Aranda Moro
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
'''
#This part is to connect to the WiFi
#In this case: SSID: TP-LINK_F3D4B2 & PASS: 90546747
WIFISSID='koen'
WIFIPASS='/*Casa*/'
def do_connect():
from network import WLAN
sta_if = WLAN(network.STA_IF)
if not sta_if.isconnected():
print('connecting to network...')
sta_if.active(True)
sta_if.connect(WIFISSID, WIFIPASS)
while not sta_if.isconnected():
pass
print('network config:', sta_if.ifconfig())
#---End Wifi Config---
from machine import Pin
led = Pin(2, Pin.OUT, value=1)
#---MQTT Sending---
from time import sleep_ms
from ubinascii import hexlify
from machine import unique_id
#import socket
from umqtt import MQTTClient
SERVER = "192.168.31.16"
CLIENT_ID = hexlify(unique_id())
TOPIC1 = b"/cultivo/temp"
TOPIC2 = b"/scultivo/hum"
TOPIC3 = b"/cultivo/alarma"
def envioMQTT(server=SERVER, topic="/cultivo", dato=None):
try:
c = MQTTClient(CLIENT_ID, server)
c.connect()
c.publish(topic, dato)
sleep_ms(200)
c.disconnect()
#led.value(1)
except Exception as e:
pass
#led.value(0)
state = 0
def sub_cb(topic, msg):
global state
print((topic, msg))
if msg == b"on":
|
elif msg == b"off":
led.value(1)
state = 0
def recepcionMQTT(server=SERVER, topic=TOPIC3):
c = MQTTClient(CLIENT_ID, server)
# Subscribed messages will be delivered to this callback
c.set_callback(sub_cb)
c.connect()
c.subscribe(topic)
print("Connected to %s, subscribed to %s topic" % (server, topic))
try:
c.wait_msg()
finally:
c.disconnect()
#---End MQTT Sending---
#---DHT22---
from dht import DHT22
ds = DHT22(Pin(4)) #DHT22 connected to GPIO4
def medirTemHum():
try:
ds.measure()
tem = ds.temperature()
hum = ds.humidity()
#ed.value(1)
return (tem,hum)
except Exception as e:
#led.value(0)
return (-1,-1)
#---End DHT22---
#---Main Program---
sleep_ms(10000)
while True:
(tem,hum) = medirTemHum()
envioMQTT(SERVER,TOPIC1,str(tem))
envioMQTT(SERVER,TOPIC2,str(hum))
recepcionMQTT()
sleep_ms(10000)
#---END Main Program---
| led.value(0)
state = 1 | conditional_block |
main.py | # https://github.com/Naish21/themostat
'''
* The MIT License (MIT)
*
* Copyright (c) 2016 Jorge Aranda Moro
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
'''
#This part is to connect to the WiFi
#In this case: SSID: TP-LINK_F3D4B2 & PASS: 90546747
WIFISSID='koen'
WIFIPASS='/*Casa*/'
def do_connect():
from network import WLAN
sta_if = WLAN(network.STA_IF)
if not sta_if.isconnected():
print('connecting to network...')
sta_if.active(True)
sta_if.connect(WIFISSID, WIFIPASS)
while not sta_if.isconnected():
pass
print('network config:', sta_if.ifconfig())
#---End Wifi Config---
from machine import Pin
led = Pin(2, Pin.OUT, value=1)
#---MQTT Sending---
from time import sleep_ms
from ubinascii import hexlify
from machine import unique_id
#import socket
from umqtt import MQTTClient
SERVER = "192.168.31.16"
CLIENT_ID = hexlify(unique_id())
TOPIC1 = b"/cultivo/temp"
TOPIC2 = b"/scultivo/hum"
TOPIC3 = b"/cultivo/alarma"
def envioMQTT(server=SERVER, topic="/cultivo", dato=None):
try:
c = MQTTClient(CLIENT_ID, server)
c.connect()
c.publish(topic, dato)
sleep_ms(200)
c.disconnect()
#led.value(1)
except Exception as e:
pass
#led.value(0)
state = 0
def sub_cb(topic, msg):
global state
print((topic, msg))
if msg == b"on":
led.value(0)
state = 1
elif msg == b"off":
led.value(1)
state = 0
def recepcionMQTT(server=SERVER, topic=TOPIC3):
c = MQTTClient(CLIENT_ID, server)
# Subscribed messages will be delivered to this callback
c.set_callback(sub_cb)
c.connect()
c.subscribe(topic)
print("Connected to %s, subscribed to %s topic" % (server, topic))
try:
c.wait_msg()
finally:
c.disconnect()
#---End MQTT Sending---
#---DHT22---
from dht import DHT22
ds = DHT22(Pin(4)) #DHT22 connected to GPIO4
def medirTemHum():
|
#---End DHT22---
#---Main Program---
sleep_ms(10000)
while True:
(tem,hum) = medirTemHum()
envioMQTT(SERVER,TOPIC1,str(tem))
envioMQTT(SERVER,TOPIC2,str(hum))
recepcionMQTT()
sleep_ms(10000)
#---END Main Program---
| try:
ds.measure()
tem = ds.temperature()
hum = ds.humidity()
#ed.value(1)
return (tem,hum)
except Exception as e:
#led.value(0)
return (-1,-1) | identifier_body |
main.py | # https://github.com/Naish21/themostat
'''
* The MIT License (MIT)
*
* Copyright (c) 2016 Jorge Aranda Moro
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
'''
#This part is to connect to the WiFi
#In this case: SSID: TP-LINK_F3D4B2 & PASS: 90546747
WIFISSID='koen'
WIFIPASS='/*Casa*/'
def do_connect():
from network import WLAN
sta_if = WLAN(network.STA_IF)
if not sta_if.isconnected():
print('connecting to network...')
sta_if.active(True)
sta_if.connect(WIFISSID, WIFIPASS)
while not sta_if.isconnected():
pass
print('network config:', sta_if.ifconfig())
#---End Wifi Config---
from machine import Pin
led = Pin(2, Pin.OUT, value=1)
#---MQTT Sending---
from time import sleep_ms
from ubinascii import hexlify
from machine import unique_id
#import socket
from umqtt import MQTTClient
SERVER = "192.168.31.16"
CLIENT_ID = hexlify(unique_id())
TOPIC1 = b"/cultivo/temp"
TOPIC2 = b"/scultivo/hum"
TOPIC3 = b"/cultivo/alarma"
def envioMQTT(server=SERVER, topic="/cultivo", dato=None):
try:
c = MQTTClient(CLIENT_ID, server)
c.connect()
c.publish(topic, dato)
sleep_ms(200)
c.disconnect()
#led.value(1)
except Exception as e:
pass
#led.value(0)
state = 0
def sub_cb(topic, msg):
global state
print((topic, msg))
if msg == b"on":
led.value(0)
state = 1
elif msg == b"off":
led.value(1)
state = 0
def recepcionMQTT(server=SERVER, topic=TOPIC3):
c = MQTTClient(CLIENT_ID, server)
# Subscribed messages will be delivered to this callback
c.set_callback(sub_cb)
c.connect()
c.subscribe(topic)
print("Connected to %s, subscribed to %s topic" % (server, topic))
try:
c.wait_msg()
finally:
c.disconnect()
#---End MQTT Sending---
#---DHT22---
from dht import DHT22
ds = DHT22(Pin(4)) #DHT22 connected to GPIO4
def medirTemHum():
try:
ds.measure()
tem = ds.temperature()
hum = ds.humidity()
#ed.value(1)
return (tem,hum)
except Exception as e: | #---End DHT22---
#---Main Program---
sleep_ms(10000)
while True:
(tem,hum) = medirTemHum()
envioMQTT(SERVER,TOPIC1,str(tem))
envioMQTT(SERVER,TOPIC2,str(hum))
recepcionMQTT()
sleep_ms(10000)
#---END Main Program--- | #led.value(0)
return (-1,-1)
| random_line_split |
CSM.js | /**
* @author vHawk / https://github.com/vHawk/
*/
import {
Vector2,
Vector3,
DirectionalLight,
MathUtils,
ShaderChunk,
Matrix4,
Box3
} from '../../../build/three.module.js';
import Frustum from './Frustum.js';
import Shader from './Shader.js';
const _cameraToLightMatrix = new Matrix4();
const _lightSpaceFrustum = new Frustum();
const _center = new Vector3();
const _bbox = new Box3();
const _uniformArray = [];
const _logArray = [];
export class CSM {
constructor( data ) {
data = data || {};
this.camera = data.camera;
this.parent = data.parent;
this.cascades = data.cascades || 3;
this.maxFar = data.maxFar || 100000;
this.mode = data.mode || 'practical';
this.shadowMapSize = data.shadowMapSize || 2048;
this.shadowBias = data.shadowBias || 0.000001;
this.lightDirection = data.lightDirection || new Vector3( 1, - 1, 1 ).normalize();
this.lightIntensity = data.lightIntensity || 1;
this.lightNear = data.lightNear || 1;
this.lightFar = data.lightFar || 2000;
this.lightMargin = data.lightMargin || 200;
this.customSplitsCallback = data.customSplitsCallback;
this.fade = false;
this.mainFrustum = new Frustum();
this.frustums = [];
this.breaks = [];
this.lights = [];
this.shaders = new Map();
this.createLights();
this.updateFrustums();
this.injectInclude();
}
createLights() {
for ( let i = 0; i < this.cascades; i ++ ) {
const light = new DirectionalLight( 0xffffff, this.lightIntensity );
light.castShadow = true;
light.shadow.mapSize.width = this.shadowMapSize;
light.shadow.mapSize.height = this.shadowMapSize;
light.shadow.camera.near = this.lightNear;
light.shadow.camera.far = this.lightFar;
light.shadow.bias = this.shadowBias;
this.parent.add( light );
this.parent.add( light.target );
this.lights.push( light );
}
}
initCascades() {
const camera = this.camera;
camera.updateProjectionMatrix();
this.mainFrustum.setFromProjectionMatrix( camera.projectionMatrix, this.maxFar );
this.mainFrustum.split( this.breaks, this.frustums );
}
updateShadowBounds() {
const frustums = this.frustums;
for ( let i = 0; i < frustums.length; i ++ ) {
const light = this.lights[ i ];
const shadowCam = light.shadow.camera;
const frustum = this.frustums[ i ];
// Get the two points that represent that furthest points on the frustum assuming
// that's either the diagonal across the far plane or the diagonal across the whole
// frustum itself.
const nearVerts = frustum.vertices.near;
const farVerts = frustum.vertices.far;
const point1 = farVerts[ 0 ];
let point2;
if ( point1.distanceTo( farVerts[ 2 ] ) > point1.distanceTo( nearVerts[ 2 ] ) ) {
point2 = farVerts[ 2 ];
} else {
point2 = nearVerts[ 2 ];
}
let squaredBBWidth = point1.distanceTo( point2 );
if ( this.fade ) {
// expand the shadow extents by the fade margin if fade is enabled.
const camera = this.camera;
const far = Math.max( camera.far, this.maxFar );
const linearDepth = frustum.vertices.far[ 0 ].z / ( far - camera.near );
const margin = 0.25 * Math.pow( linearDepth, 2.0 ) * ( far - camera.near );
squaredBBWidth += margin;
}
shadowCam.left = - squaredBBWidth / 2;
shadowCam.right = squaredBBWidth / 2;
shadowCam.top = squaredBBWidth / 2;
shadowCam.bottom = - squaredBBWidth / 2;
shadowCam.updateProjectionMatrix();
}
}
getBreaks() {
const camera = this.camera;
const far = Math.min( camera.far, this.maxFar );
this.breaks.length = 0;
switch ( this.mode ) {
case 'uniform':
uniformSplit( this.cascades, camera.near, far, this.breaks );
break;
case 'logarithmic':
logarithmicSplit( this.cascades, camera.near, far, this.breaks );
break;
case 'practical':
practicalSplit( this.cascades, camera.near, far, 0.5, this.breaks );
break;
case 'custom':
if ( this.customSplitsCallback === undefined ) console.error( 'CSM: Custom split scheme callback not defined.' );
this.customSplitsCallback( this.cascades, camera.near, far, this.breaks );
break;
}
function uniformSplit( amount, near, far, target ) {
for ( let i = 1; i < amount; i ++ ) {
target.push( ( near + ( far - near ) * i / amount ) / far );
}
target.push( 1 );
}
function logarithmicSplit( amount, near, far, target ) {
for ( let i = 1; i < amount; i ++ ) {
target.push( ( near * ( far / near ) ** ( i / amount ) ) / far );
}
target.push( 1 );
}
function practicalSplit( amount, near, far, lambda, target ) {
_uniformArray.length = 0;
_logArray.length = 0;
logarithmicSplit( amount, near, far, _logArray );
uniformSplit( amount, near, far, _uniformArray );
for ( let i = 1; i < amount; i ++ ) {
target.push( MathUtils.lerp( _uniformArray[ i - 1 ], _logArray[ i - 1 ], lambda ) );
}
target.push( 1 );
}
}
update() {
const camera = this.camera;
const frustums = this.frustums;
for ( let i = 0; i < frustums.length; i ++ ) {
const light = this.lights[ i ];
const shadowCam = light.shadow.camera;
const texelWidth = ( shadowCam.right - shadowCam.left ) / this.shadowMapSize;
const texelHeight = ( shadowCam.top - shadowCam.bottom ) / this.shadowMapSize;
light.shadow.camera.updateMatrixWorld( true );
_cameraToLightMatrix.multiplyMatrices( light.shadow.camera.matrixWorldInverse, camera.matrixWorld );
frustums[ i ].toSpace( _cameraToLightMatrix, _lightSpaceFrustum );
const nearVerts = _lightSpaceFrustum.vertices.near;
const farVerts = _lightSpaceFrustum.vertices.far;
_bbox.makeEmpty();
for ( let j = 0; j < 4; j ++ ) {
_bbox.expandByPoint( nearVerts[ j ] );
_bbox.expandByPoint( farVerts[ j ] );
}
_bbox.getCenter( _center );
_center.z = _bbox.max.z + this.lightMargin;
_center.x = Math.floor( _center.x / texelWidth ) * texelWidth;
_center.y = Math.floor( _center.y / texelHeight ) * texelHeight;
_center.applyMatrix4( light.shadow.camera.matrixWorld );
light.position.copy( _center );
light.target.position.copy( _center );
light.target.position.x += this.lightDirection.x;
light.target.position.y += this.lightDirection.y;
light.target.position.z += this.lightDirection.z;
}
}
injectInclude() {
ShaderChunk.lights_fragment_begin = Shader.lights_fragment_begin;
ShaderChunk.lights_pars_begin = Shader.lights_pars_begin;
}
setupMaterial( material ) {
material.defines = material.defines || {};
material.defines.USE_CSM = 1;
material.defines.CSM_CASCADES = this.cascades;
if ( this.fade ) {
material.defines.CSM_FADE = '';
}
const breaksVec2 = [];
const scope = this;
const shaders = this.shaders;
material.onBeforeCompile = function ( shader ) {
const far = Math.min( scope.camera.far, scope.maxFar );
scope.getExtendedBreaks( breaksVec2 );
shader.uniforms.CSM_cascades = { value: breaksVec2 };
shader.uniforms.cameraNear = { value: scope.camera.near };
shader.uniforms.shadowFar = { value: far };
shaders.set( material, shader );
};
shaders.set( material, null );
}
updateUniforms() {
const far = Math.min( this.camera.far, this.maxFar );
const shaders = this.shaders;
shaders.forEach( function ( shader, material ) {
if ( shader !== null ) {
const uniforms = shader.uniforms;
this.getExtendedBreaks( uniforms.CSM_cascades.value );
uniforms.cameraNear.value = this.camera.near;
uniforms.shadowFar.value = far;
}
if ( ! this.fade && 'CSM_FADE' in material.defines ) {
delete material.defines.CSM_FADE;
material.needsUpdate = true;
} else if ( this.fade && ! ( 'CSM_FADE' in material.defines ) ) {
material.defines.CSM_FADE = '';
material.needsUpdate = true;
}
}, this );
}
getExtendedBreaks( target ) |
updateFrustums() {
this.getBreaks();
this.initCascades();
this.updateShadowBounds();
this.updateUniforms();
}
remove() {
for ( let i = 0; i < this.lights.length; i ++ ) {
this.parent.remove( this.lights[ i ] );
}
}
dispose() {
const shaders = this.shaders;
shaders.forEach( function ( shader, material ) {
delete material.onBeforeCompile;
delete material.defines.USE_CSM;
delete material.defines.CSM_CASCADES;
delete material.defines.CSM_FADE;
if ( shader !== null ) {
delete shader.uniforms.CSM_cascades;
delete shader.uniforms.cameraNear;
delete shader.uniforms.shadowFar;
}
material.needsUpdate = true;
} );
shaders.clear();
}
}
| {
while ( target.length < this.breaks.length ) {
target.push( new Vector2() );
}
target.length = this.breaks.length;
for ( let i = 0; i < this.cascades; i ++ ) {
let amount = this.breaks[ i ];
let prev = this.breaks[ i - 1 ] || 0;
target[ i ].x = prev;
target[ i ].y = amount;
}
} | identifier_body |
CSM.js | /**
* @author vHawk / https://github.com/vHawk/
*/
import {
Vector2,
Vector3,
DirectionalLight,
MathUtils,
ShaderChunk,
Matrix4,
Box3
} from '../../../build/three.module.js';
import Frustum from './Frustum.js';
import Shader from './Shader.js';
const _cameraToLightMatrix = new Matrix4();
const _lightSpaceFrustum = new Frustum();
const _center = new Vector3();
const _bbox = new Box3();
const _uniformArray = [];
const _logArray = [];
export class CSM {
constructor( data ) {
data = data || {};
this.camera = data.camera;
this.parent = data.parent;
this.cascades = data.cascades || 3;
this.maxFar = data.maxFar || 100000;
this.mode = data.mode || 'practical';
this.shadowMapSize = data.shadowMapSize || 2048;
this.shadowBias = data.shadowBias || 0.000001;
this.lightDirection = data.lightDirection || new Vector3( 1, - 1, 1 ).normalize();
this.lightIntensity = data.lightIntensity || 1;
this.lightNear = data.lightNear || 1;
this.lightFar = data.lightFar || 2000;
this.lightMargin = data.lightMargin || 200;
this.customSplitsCallback = data.customSplitsCallback;
this.fade = false;
this.mainFrustum = new Frustum();
this.frustums = [];
this.breaks = [];
this.lights = [];
this.shaders = new Map();
this.createLights();
this.updateFrustums();
this.injectInclude();
}
createLights() {
for ( let i = 0; i < this.cascades; i ++ ) {
const light = new DirectionalLight( 0xffffff, this.lightIntensity );
light.castShadow = true;
light.shadow.mapSize.width = this.shadowMapSize;
light.shadow.mapSize.height = this.shadowMapSize;
light.shadow.camera.near = this.lightNear;
light.shadow.camera.far = this.lightFar;
light.shadow.bias = this.shadowBias;
this.parent.add( light );
this.parent.add( light.target );
this.lights.push( light );
}
}
initCascades() {
const camera = this.camera;
camera.updateProjectionMatrix();
this.mainFrustum.setFromProjectionMatrix( camera.projectionMatrix, this.maxFar );
this.mainFrustum.split( this.breaks, this.frustums );
}
| () {
const frustums = this.frustums;
for ( let i = 0; i < frustums.length; i ++ ) {
const light = this.lights[ i ];
const shadowCam = light.shadow.camera;
const frustum = this.frustums[ i ];
// Get the two points that represent that furthest points on the frustum assuming
// that's either the diagonal across the far plane or the diagonal across the whole
// frustum itself.
const nearVerts = frustum.vertices.near;
const farVerts = frustum.vertices.far;
const point1 = farVerts[ 0 ];
let point2;
if ( point1.distanceTo( farVerts[ 2 ] ) > point1.distanceTo( nearVerts[ 2 ] ) ) {
point2 = farVerts[ 2 ];
} else {
point2 = nearVerts[ 2 ];
}
let squaredBBWidth = point1.distanceTo( point2 );
if ( this.fade ) {
// expand the shadow extents by the fade margin if fade is enabled.
const camera = this.camera;
const far = Math.max( camera.far, this.maxFar );
const linearDepth = frustum.vertices.far[ 0 ].z / ( far - camera.near );
const margin = 0.25 * Math.pow( linearDepth, 2.0 ) * ( far - camera.near );
squaredBBWidth += margin;
}
shadowCam.left = - squaredBBWidth / 2;
shadowCam.right = squaredBBWidth / 2;
shadowCam.top = squaredBBWidth / 2;
shadowCam.bottom = - squaredBBWidth / 2;
shadowCam.updateProjectionMatrix();
}
}
getBreaks() {
const camera = this.camera;
const far = Math.min( camera.far, this.maxFar );
this.breaks.length = 0;
switch ( this.mode ) {
case 'uniform':
uniformSplit( this.cascades, camera.near, far, this.breaks );
break;
case 'logarithmic':
logarithmicSplit( this.cascades, camera.near, far, this.breaks );
break;
case 'practical':
practicalSplit( this.cascades, camera.near, far, 0.5, this.breaks );
break;
case 'custom':
if ( this.customSplitsCallback === undefined ) console.error( 'CSM: Custom split scheme callback not defined.' );
this.customSplitsCallback( this.cascades, camera.near, far, this.breaks );
break;
}
function uniformSplit( amount, near, far, target ) {
for ( let i = 1; i < amount; i ++ ) {
target.push( ( near + ( far - near ) * i / amount ) / far );
}
target.push( 1 );
}
function logarithmicSplit( amount, near, far, target ) {
for ( let i = 1; i < amount; i ++ ) {
target.push( ( near * ( far / near ) ** ( i / amount ) ) / far );
}
target.push( 1 );
}
function practicalSplit( amount, near, far, lambda, target ) {
_uniformArray.length = 0;
_logArray.length = 0;
logarithmicSplit( amount, near, far, _logArray );
uniformSplit( amount, near, far, _uniformArray );
for ( let i = 1; i < amount; i ++ ) {
target.push( MathUtils.lerp( _uniformArray[ i - 1 ], _logArray[ i - 1 ], lambda ) );
}
target.push( 1 );
}
}
update() {
const camera = this.camera;
const frustums = this.frustums;
for ( let i = 0; i < frustums.length; i ++ ) {
const light = this.lights[ i ];
const shadowCam = light.shadow.camera;
const texelWidth = ( shadowCam.right - shadowCam.left ) / this.shadowMapSize;
const texelHeight = ( shadowCam.top - shadowCam.bottom ) / this.shadowMapSize;
light.shadow.camera.updateMatrixWorld( true );
_cameraToLightMatrix.multiplyMatrices( light.shadow.camera.matrixWorldInverse, camera.matrixWorld );
frustums[ i ].toSpace( _cameraToLightMatrix, _lightSpaceFrustum );
const nearVerts = _lightSpaceFrustum.vertices.near;
const farVerts = _lightSpaceFrustum.vertices.far;
_bbox.makeEmpty();
for ( let j = 0; j < 4; j ++ ) {
_bbox.expandByPoint( nearVerts[ j ] );
_bbox.expandByPoint( farVerts[ j ] );
}
_bbox.getCenter( _center );
_center.z = _bbox.max.z + this.lightMargin;
_center.x = Math.floor( _center.x / texelWidth ) * texelWidth;
_center.y = Math.floor( _center.y / texelHeight ) * texelHeight;
_center.applyMatrix4( light.shadow.camera.matrixWorld );
light.position.copy( _center );
light.target.position.copy( _center );
light.target.position.x += this.lightDirection.x;
light.target.position.y += this.lightDirection.y;
light.target.position.z += this.lightDirection.z;
}
}
injectInclude() {
ShaderChunk.lights_fragment_begin = Shader.lights_fragment_begin;
ShaderChunk.lights_pars_begin = Shader.lights_pars_begin;
}
setupMaterial( material ) {
material.defines = material.defines || {};
material.defines.USE_CSM = 1;
material.defines.CSM_CASCADES = this.cascades;
if ( this.fade ) {
material.defines.CSM_FADE = '';
}
const breaksVec2 = [];
const scope = this;
const shaders = this.shaders;
material.onBeforeCompile = function ( shader ) {
const far = Math.min( scope.camera.far, scope.maxFar );
scope.getExtendedBreaks( breaksVec2 );
shader.uniforms.CSM_cascades = { value: breaksVec2 };
shader.uniforms.cameraNear = { value: scope.camera.near };
shader.uniforms.shadowFar = { value: far };
shaders.set( material, shader );
};
shaders.set( material, null );
}
updateUniforms() {
const far = Math.min( this.camera.far, this.maxFar );
const shaders = this.shaders;
shaders.forEach( function ( shader, material ) {
if ( shader !== null ) {
const uniforms = shader.uniforms;
this.getExtendedBreaks( uniforms.CSM_cascades.value );
uniforms.cameraNear.value = this.camera.near;
uniforms.shadowFar.value = far;
}
if ( ! this.fade && 'CSM_FADE' in material.defines ) {
delete material.defines.CSM_FADE;
material.needsUpdate = true;
} else if ( this.fade && ! ( 'CSM_FADE' in material.defines ) ) {
material.defines.CSM_FADE = '';
material.needsUpdate = true;
}
}, this );
}
getExtendedBreaks( target ) {
while ( target.length < this.breaks.length ) {
target.push( new Vector2() );
}
target.length = this.breaks.length;
for ( let i = 0; i < this.cascades; i ++ ) {
let amount = this.breaks[ i ];
let prev = this.breaks[ i - 1 ] || 0;
target[ i ].x = prev;
target[ i ].y = amount;
}
}
updateFrustums() {
this.getBreaks();
this.initCascades();
this.updateShadowBounds();
this.updateUniforms();
}
remove() {
for ( let i = 0; i < this.lights.length; i ++ ) {
this.parent.remove( this.lights[ i ] );
}
}
dispose() {
const shaders = this.shaders;
shaders.forEach( function ( shader, material ) {
delete material.onBeforeCompile;
delete material.defines.USE_CSM;
delete material.defines.CSM_CASCADES;
delete material.defines.CSM_FADE;
if ( shader !== null ) {
delete shader.uniforms.CSM_cascades;
delete shader.uniforms.cameraNear;
delete shader.uniforms.shadowFar;
}
material.needsUpdate = true;
} );
shaders.clear();
}
}
| updateShadowBounds | identifier_name |
CSM.js | /**
* @author vHawk / https://github.com/vHawk/
*/
import {
Vector2,
Vector3,
DirectionalLight,
MathUtils,
ShaderChunk,
Matrix4,
Box3
} from '../../../build/three.module.js';
import Frustum from './Frustum.js';
import Shader from './Shader.js';
const _cameraToLightMatrix = new Matrix4();
const _lightSpaceFrustum = new Frustum();
const _center = new Vector3();
const _bbox = new Box3();
const _uniformArray = [];
const _logArray = [];
export class CSM {
constructor( data ) {
data = data || {};
this.camera = data.camera;
this.parent = data.parent;
this.cascades = data.cascades || 3;
this.maxFar = data.maxFar || 100000;
this.mode = data.mode || 'practical';
this.shadowMapSize = data.shadowMapSize || 2048;
this.shadowBias = data.shadowBias || 0.000001;
this.lightDirection = data.lightDirection || new Vector3( 1, - 1, 1 ).normalize();
this.lightIntensity = data.lightIntensity || 1;
this.lightNear = data.lightNear || 1;
this.lightFar = data.lightFar || 2000;
this.lightMargin = data.lightMargin || 200;
this.customSplitsCallback = data.customSplitsCallback;
this.fade = false;
this.mainFrustum = new Frustum();
this.frustums = [];
this.breaks = [];
this.lights = [];
this.shaders = new Map();
this.createLights();
this.updateFrustums();
this.injectInclude();
}
createLights() {
for ( let i = 0; i < this.cascades; i ++ ) {
const light = new DirectionalLight( 0xffffff, this.lightIntensity );
light.castShadow = true;
light.shadow.mapSize.width = this.shadowMapSize;
light.shadow.mapSize.height = this.shadowMapSize;
light.shadow.camera.near = this.lightNear;
light.shadow.camera.far = this.lightFar;
light.shadow.bias = this.shadowBias;
this.parent.add( light );
this.parent.add( light.target );
this.lights.push( light );
}
}
initCascades() {
const camera = this.camera;
camera.updateProjectionMatrix();
this.mainFrustum.setFromProjectionMatrix( camera.projectionMatrix, this.maxFar );
this.mainFrustum.split( this.breaks, this.frustums );
}
updateShadowBounds() {
const frustums = this.frustums;
for ( let i = 0; i < frustums.length; i ++ ) {
const light = this.lights[ i ];
const shadowCam = light.shadow.camera;
const frustum = this.frustums[ i ];
// Get the two points that represent that furthest points on the frustum assuming
// that's either the diagonal across the far plane or the diagonal across the whole
// frustum itself.
const nearVerts = frustum.vertices.near;
const farVerts = frustum.vertices.far;
const point1 = farVerts[ 0 ];
let point2;
if ( point1.distanceTo( farVerts[ 2 ] ) > point1.distanceTo( nearVerts[ 2 ] ) ) {
point2 = farVerts[ 2 ];
} else {
point2 = nearVerts[ 2 ];
}
let squaredBBWidth = point1.distanceTo( point2 );
if ( this.fade ) {
// expand the shadow extents by the fade margin if fade is enabled.
const camera = this.camera;
const far = Math.max( camera.far, this.maxFar );
const linearDepth = frustum.vertices.far[ 0 ].z / ( far - camera.near );
const margin = 0.25 * Math.pow( linearDepth, 2.0 ) * ( far - camera.near );
squaredBBWidth += margin;
}
shadowCam.left = - squaredBBWidth / 2;
shadowCam.right = squaredBBWidth / 2;
shadowCam.top = squaredBBWidth / 2;
shadowCam.bottom = - squaredBBWidth / 2;
shadowCam.updateProjectionMatrix();
}
}
getBreaks() {
const camera = this.camera;
const far = Math.min( camera.far, this.maxFar );
this.breaks.length = 0;
switch ( this.mode ) {
case 'uniform':
uniformSplit( this.cascades, camera.near, far, this.breaks );
break;
case 'logarithmic':
logarithmicSplit( this.cascades, camera.near, far, this.breaks );
break;
case 'practical':
practicalSplit( this.cascades, camera.near, far, 0.5, this.breaks );
break;
case 'custom':
if ( this.customSplitsCallback === undefined ) console.error( 'CSM: Custom split scheme callback not defined.' );
this.customSplitsCallback( this.cascades, camera.near, far, this.breaks );
break;
}
function uniformSplit( amount, near, far, target ) {
for ( let i = 1; i < amount; i ++ ) |
target.push( 1 );
}
function logarithmicSplit( amount, near, far, target ) {
for ( let i = 1; i < amount; i ++ ) {
target.push( ( near * ( far / near ) ** ( i / amount ) ) / far );
}
target.push( 1 );
}
function practicalSplit( amount, near, far, lambda, target ) {
_uniformArray.length = 0;
_logArray.length = 0;
logarithmicSplit( amount, near, far, _logArray );
uniformSplit( amount, near, far, _uniformArray );
for ( let i = 1; i < amount; i ++ ) {
target.push( MathUtils.lerp( _uniformArray[ i - 1 ], _logArray[ i - 1 ], lambda ) );
}
target.push( 1 );
}
}
update() {
const camera = this.camera;
const frustums = this.frustums;
for ( let i = 0; i < frustums.length; i ++ ) {
const light = this.lights[ i ];
const shadowCam = light.shadow.camera;
const texelWidth = ( shadowCam.right - shadowCam.left ) / this.shadowMapSize;
const texelHeight = ( shadowCam.top - shadowCam.bottom ) / this.shadowMapSize;
light.shadow.camera.updateMatrixWorld( true );
_cameraToLightMatrix.multiplyMatrices( light.shadow.camera.matrixWorldInverse, camera.matrixWorld );
frustums[ i ].toSpace( _cameraToLightMatrix, _lightSpaceFrustum );
const nearVerts = _lightSpaceFrustum.vertices.near;
const farVerts = _lightSpaceFrustum.vertices.far;
_bbox.makeEmpty();
for ( let j = 0; j < 4; j ++ ) {
_bbox.expandByPoint( nearVerts[ j ] );
_bbox.expandByPoint( farVerts[ j ] );
}
_bbox.getCenter( _center );
_center.z = _bbox.max.z + this.lightMargin;
_center.x = Math.floor( _center.x / texelWidth ) * texelWidth;
_center.y = Math.floor( _center.y / texelHeight ) * texelHeight;
_center.applyMatrix4( light.shadow.camera.matrixWorld );
light.position.copy( _center );
light.target.position.copy( _center );
light.target.position.x += this.lightDirection.x;
light.target.position.y += this.lightDirection.y;
light.target.position.z += this.lightDirection.z;
}
}
injectInclude() {
ShaderChunk.lights_fragment_begin = Shader.lights_fragment_begin;
ShaderChunk.lights_pars_begin = Shader.lights_pars_begin;
}
setupMaterial( material ) {
material.defines = material.defines || {};
material.defines.USE_CSM = 1;
material.defines.CSM_CASCADES = this.cascades;
if ( this.fade ) {
material.defines.CSM_FADE = '';
}
const breaksVec2 = [];
const scope = this;
const shaders = this.shaders;
material.onBeforeCompile = function ( shader ) {
const far = Math.min( scope.camera.far, scope.maxFar );
scope.getExtendedBreaks( breaksVec2 );
shader.uniforms.CSM_cascades = { value: breaksVec2 };
shader.uniforms.cameraNear = { value: scope.camera.near };
shader.uniforms.shadowFar = { value: far };
shaders.set( material, shader );
};
shaders.set( material, null );
}
updateUniforms() {
const far = Math.min( this.camera.far, this.maxFar );
const shaders = this.shaders;
shaders.forEach( function ( shader, material ) {
if ( shader !== null ) {
const uniforms = shader.uniforms;
this.getExtendedBreaks( uniforms.CSM_cascades.value );
uniforms.cameraNear.value = this.camera.near;
uniforms.shadowFar.value = far;
}
if ( ! this.fade && 'CSM_FADE' in material.defines ) {
delete material.defines.CSM_FADE;
material.needsUpdate = true;
} else if ( this.fade && ! ( 'CSM_FADE' in material.defines ) ) {
material.defines.CSM_FADE = '';
material.needsUpdate = true;
}
}, this );
}
getExtendedBreaks( target ) {
while ( target.length < this.breaks.length ) {
target.push( new Vector2() );
}
target.length = this.breaks.length;
for ( let i = 0; i < this.cascades; i ++ ) {
let amount = this.breaks[ i ];
let prev = this.breaks[ i - 1 ] || 0;
target[ i ].x = prev;
target[ i ].y = amount;
}
}
updateFrustums() {
this.getBreaks();
this.initCascades();
this.updateShadowBounds();
this.updateUniforms();
}
remove() {
for ( let i = 0; i < this.lights.length; i ++ ) {
this.parent.remove( this.lights[ i ] );
}
}
dispose() {
const shaders = this.shaders;
shaders.forEach( function ( shader, material ) {
delete material.onBeforeCompile;
delete material.defines.USE_CSM;
delete material.defines.CSM_CASCADES;
delete material.defines.CSM_FADE;
if ( shader !== null ) {
delete shader.uniforms.CSM_cascades;
delete shader.uniforms.cameraNear;
delete shader.uniforms.shadowFar;
}
material.needsUpdate = true;
} );
shaders.clear();
}
}
| {
target.push( ( near + ( far - near ) * i / amount ) / far );
} | conditional_block |
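A standalone sketch of the split formulas shown above: uniformSplit spaces the cascade breaks linearly between near and far, logarithmicSplit spaces them geometrically, and practicalSplit blends the two with a lambda weight (getBreaks passes 0.5). The near/far/cascade values below are illustrative only and are not taken from the source.

// Standalone illustration of the CSM split schemes (not part of CSM.js itself).
function computeBreaks( mode, cascades, near, far, lambda = 0.5 ) {
	const uniform = [];
	const logarithmic = [];
	for ( let i = 1; i < cascades; i ++ ) {
		uniform.push( ( near + ( far - near ) * i / cascades ) / far );
		logarithmic.push( ( near * ( far / near ) ** ( i / cascades ) ) / far );
	}
	uniform.push( 1 );
	logarithmic.push( 1 );
	if ( mode === 'uniform' ) return uniform;
	if ( mode === 'logarithmic' ) return logarithmic;
	// 'practical': the same blend MathUtils.lerp applies in practicalSplit.
	return uniform.map( ( u, i ) => u + ( logarithmic[ i ] - u ) * lambda );
}
// Each entry is where a cascade ends, expressed as a fraction of the far plane.
console.log( computeBreaks( 'practical', 3, 0.1, 1000 ) );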
CSM.js | /**
* @author vHawk / https://github.com/vHawk/
*/
import {
Vector2,
Vector3,
DirectionalLight,
MathUtils,
ShaderChunk,
Matrix4,
Box3
} from '../../../build/three.module.js';
import Frustum from './Frustum.js';
import Shader from './Shader.js';
const _cameraToLightMatrix = new Matrix4();
const _lightSpaceFrustum = new Frustum();
const _center = new Vector3();
const _bbox = new Box3();
const _uniformArray = [];
const _logArray = [];
export class CSM {
constructor( data ) {
data = data || {};
this.camera = data.camera;
this.parent = data.parent;
this.cascades = data.cascades || 3;
this.maxFar = data.maxFar || 100000;
this.mode = data.mode || 'practical';
this.shadowMapSize = data.shadowMapSize || 2048;
this.shadowBias = data.shadowBias || 0.000001;
this.lightDirection = data.lightDirection || new Vector3( 1, - 1, 1 ).normalize();
this.lightIntensity = data.lightIntensity || 1;
this.lightNear = data.lightNear || 1;
this.lightFar = data.lightFar || 2000;
this.lightMargin = data.lightMargin || 200;
this.customSplitsCallback = data.customSplitsCallback;
this.fade = false;
this.mainFrustum = new Frustum();
this.frustums = [];
this.breaks = [];
this.lights = [];
this.shaders = new Map();
this.createLights();
this.updateFrustums();
this.injectInclude();
}
createLights() {
for ( let i = 0; i < this.cascades; i ++ ) {
const light = new DirectionalLight( 0xffffff, this.lightIntensity );
light.castShadow = true;
light.shadow.mapSize.width = this.shadowMapSize;
light.shadow.mapSize.height = this.shadowMapSize;
light.shadow.camera.near = this.lightNear;
light.shadow.camera.far = this.lightFar;
light.shadow.bias = this.shadowBias;
this.parent.add( light );
this.parent.add( light.target );
this.lights.push( light );
}
}
initCascades() {
const camera = this.camera;
camera.updateProjectionMatrix();
this.mainFrustum.setFromProjectionMatrix( camera.projectionMatrix, this.maxFar );
this.mainFrustum.split( this.breaks, this.frustums );
}
updateShadowBounds() {
const frustums = this.frustums;
for ( let i = 0; i < frustums.length; i ++ ) {
const light = this.lights[ i ];
const shadowCam = light.shadow.camera;
const frustum = this.frustums[ i ];
// Get the two points that represent the furthest points on the frustum, assuming
// that's either the diagonal across the far plane or the diagonal across the whole
// frustum itself.
const nearVerts = frustum.vertices.near;
const farVerts = frustum.vertices.far;
const point1 = farVerts[ 0 ];
let point2;
if ( point1.distanceTo( farVerts[ 2 ] ) > point1.distanceTo( nearVerts[ 2 ] ) ) {
point2 = farVerts[ 2 ];
} else {
point2 = nearVerts[ 2 ];
}
let squaredBBWidth = point1.distanceTo( point2 );
if ( this.fade ) {
// expand the shadow extents by the fade margin if fade is enabled.
const camera = this.camera;
const far = Math.max( camera.far, this.maxFar );
const linearDepth = frustum.vertices.far[ 0 ].z / ( far - camera.near );
const margin = 0.25 * Math.pow( linearDepth, 2.0 ) * ( far - camera.near ); | squaredBBWidth += margin;
}
shadowCam.left = - squaredBBWidth / 2;
shadowCam.right = squaredBBWidth / 2;
shadowCam.top = squaredBBWidth / 2;
shadowCam.bottom = - squaredBBWidth / 2;
shadowCam.updateProjectionMatrix();
}
}
getBreaks() {
const camera = this.camera;
const far = Math.min( camera.far, this.maxFar );
this.breaks.length = 0;
switch ( this.mode ) {
case 'uniform':
uniformSplit( this.cascades, camera.near, far, this.breaks );
break;
case 'logarithmic':
logarithmicSplit( this.cascades, camera.near, far, this.breaks );
break;
case 'practical':
practicalSplit( this.cascades, camera.near, far, 0.5, this.breaks );
break;
case 'custom':
if ( this.customSplitsCallback === undefined ) console.error( 'CSM: Custom split scheme callback not defined.' );
this.customSplitsCallback( this.cascades, camera.near, far, this.breaks );
break;
}
function uniformSplit( amount, near, far, target ) {
for ( let i = 1; i < amount; i ++ ) {
target.push( ( near + ( far - near ) * i / amount ) / far );
}
target.push( 1 );
}
function logarithmicSplit( amount, near, far, target ) {
for ( let i = 1; i < amount; i ++ ) {
target.push( ( near * ( far / near ) ** ( i / amount ) ) / far );
}
target.push( 1 );
}
function practicalSplit( amount, near, far, lambda, target ) {
_uniformArray.length = 0;
_logArray.length = 0;
logarithmicSplit( amount, near, far, _logArray );
uniformSplit( amount, near, far, _uniformArray );
for ( let i = 1; i < amount; i ++ ) {
target.push( MathUtils.lerp( _uniformArray[ i - 1 ], _logArray[ i - 1 ], lambda ) );
}
target.push( 1 );
}
}
update() {
const camera = this.camera;
const frustums = this.frustums;
for ( let i = 0; i < frustums.length; i ++ ) {
const light = this.lights[ i ];
const shadowCam = light.shadow.camera;
const texelWidth = ( shadowCam.right - shadowCam.left ) / this.shadowMapSize;
const texelHeight = ( shadowCam.top - shadowCam.bottom ) / this.shadowMapSize;
light.shadow.camera.updateMatrixWorld( true );
_cameraToLightMatrix.multiplyMatrices( light.shadow.camera.matrixWorldInverse, camera.matrixWorld );
frustums[ i ].toSpace( _cameraToLightMatrix, _lightSpaceFrustum );
const nearVerts = _lightSpaceFrustum.vertices.near;
const farVerts = _lightSpaceFrustum.vertices.far;
_bbox.makeEmpty();
for ( let j = 0; j < 4; j ++ ) {
_bbox.expandByPoint( nearVerts[ j ] );
_bbox.expandByPoint( farVerts[ j ] );
}
_bbox.getCenter( _center );
_center.z = _bbox.max.z + this.lightMargin;
_center.x = Math.floor( _center.x / texelWidth ) * texelWidth;
_center.y = Math.floor( _center.y / texelHeight ) * texelHeight;
_center.applyMatrix4( light.shadow.camera.matrixWorld );
light.position.copy( _center );
light.target.position.copy( _center );
light.target.position.x += this.lightDirection.x;
light.target.position.y += this.lightDirection.y;
light.target.position.z += this.lightDirection.z;
}
}
injectInclude() {
ShaderChunk.lights_fragment_begin = Shader.lights_fragment_begin;
ShaderChunk.lights_pars_begin = Shader.lights_pars_begin;
}
setupMaterial( material ) {
material.defines = material.defines || {};
material.defines.USE_CSM = 1;
material.defines.CSM_CASCADES = this.cascades;
if ( this.fade ) {
material.defines.CSM_FADE = '';
}
const breaksVec2 = [];
const scope = this;
const shaders = this.shaders;
material.onBeforeCompile = function ( shader ) {
const far = Math.min( scope.camera.far, scope.maxFar );
scope.getExtendedBreaks( breaksVec2 );
shader.uniforms.CSM_cascades = { value: breaksVec2 };
shader.uniforms.cameraNear = { value: scope.camera.near };
shader.uniforms.shadowFar = { value: far };
shaders.set( material, shader );
};
shaders.set( material, null );
}
updateUniforms() {
const far = Math.min( this.camera.far, this.maxFar );
const shaders = this.shaders;
shaders.forEach( function ( shader, material ) {
if ( shader !== null ) {
const uniforms = shader.uniforms;
this.getExtendedBreaks( uniforms.CSM_cascades.value );
uniforms.cameraNear.value = this.camera.near;
uniforms.shadowFar.value = far;
}
if ( ! this.fade && 'CSM_FADE' in material.defines ) {
delete material.defines.CSM_FADE;
material.needsUpdate = true;
} else if ( this.fade && ! ( 'CSM_FADE' in material.defines ) ) {
material.defines.CSM_FADE = '';
material.needsUpdate = true;
}
}, this );
}
getExtendedBreaks( target ) {
while ( target.length < this.breaks.length ) {
target.push( new Vector2() );
}
target.length = this.breaks.length;
for ( let i = 0; i < this.cascades; i ++ ) {
let amount = this.breaks[ i ];
let prev = this.breaks[ i - 1 ] || 0;
target[ i ].x = prev;
target[ i ].y = amount;
}
}
updateFrustums() {
this.getBreaks();
this.initCascades();
this.updateShadowBounds();
this.updateUniforms();
}
remove() {
for ( let i = 0; i < this.lights.length; i ++ ) {
this.parent.remove( this.lights[ i ] );
}
}
dispose() {
const shaders = this.shaders;
shaders.forEach( function ( shader, material ) {
delete material.onBeforeCompile;
delete material.defines.USE_CSM;
delete material.defines.CSM_CASCADES;
delete material.defines.CSM_FADE;
if ( shader !== null ) {
delete shader.uniforms.CSM_cascades;
delete shader.uniforms.cameraNear;
delete shader.uniforms.shadowFar;
}
material.needsUpdate = true;
} );
shaders.clear();
}
} | random_line_split |
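The update() method above recomputes each cascade's bounds in light space, then snaps the box center to the shadow map's texel grid before repositioning the light; the usual motivation for this kind of snapping is to keep shadow edges from shimmering as the camera moves by sub-texel amounts. A standalone sketch of just the snapping step, with illustrative values:

// Illustrative stand-alone version of the texel snapping performed in update().
function snapToShadowTexels( center, shadowCam, shadowMapSize ) {
	const texelWidth = ( shadowCam.right - shadowCam.left ) / shadowMapSize;
	const texelHeight = ( shadowCam.top - shadowCam.bottom ) / shadowMapSize;
	return {
		x: Math.floor( center.x / texelWidth ) * texelWidth,
		y: Math.floor( center.y / texelHeight ) * texelHeight,
		z: center.z
	};
}
console.log( snapToShadowTexels(
	{ x: 3.37, y: -1.02, z: 10 },
	{ left: -50, right: 50, top: 50, bottom: -50 },
	2048
) );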
|
glyph.rs | // This whole file is strongly inspired by: https://github.com/jeaye/q3/blob/master/src/client/ui/ttf/glyph.rs
// available under the BSD-3 licence.
// It has been modified to work with gl-rs, nalgebra, and rust-freetype
use na::Vector2;
/// A ttf glyph.
pub struct | {
#[doc(hidden)]
pub tex: Vector2<f32>,
#[doc(hidden)]
pub advance: Vector2<f32>,
#[doc(hidden)]
pub dimensions: Vector2<f32>,
#[doc(hidden)]
pub offset: Vector2<f32>,
#[doc(hidden)]
pub buffer: Vec<u8>,
}
impl Glyph {
/// Creates a new empty glyph.
pub fn new(
tex: Vector2<f32>,
advance: Vector2<f32>,
dimensions: Vector2<f32>,
offset: Vector2<f32>,
buffer: Vec<u8>,
) -> Glyph {
Glyph {
tex,
advance,
dimensions,
offset,
buffer,
}
}
}
| Glyph | identifier_name |
glyph.rs | // This whole file is strongly inspired by: https://github.com/jeaye/q3/blob/master/src/client/ui/ttf/glyph.rs
// available under the BSD-3 licence.
// It has been modified to work with gl-rs, nalgebra, and rust-freetype
use na::Vector2;
/// A ttf glyph.
pub struct Glyph {
#[doc(hidden)]
pub tex: Vector2<f32>,
#[doc(hidden)]
pub advance: Vector2<f32>,
#[doc(hidden)]
pub dimensions: Vector2<f32>,
#[doc(hidden)]
pub offset: Vector2<f32>,
#[doc(hidden)]
pub buffer: Vec<u8>,
}
impl Glyph {
/// Creates a new empty glyph.
pub fn new(
tex: Vector2<f32>,
advance: Vector2<f32>,
dimensions: Vector2<f32>,
offset: Vector2<f32>,
buffer: Vec<u8>,
) -> Glyph |
}
| {
Glyph {
tex,
advance,
dimensions,
offset,
buffer,
}
} | identifier_body |
glyph.rs | // This whole file is strongly inspired by: https://github.com/jeaye/q3/blob/master/src/client/ui/ttf/glyph.rs
// available under the BSD-3 licence.
// It has been modified to work with gl-rs, nalgebra, and rust-freetype
use na::Vector2;
/// A ttf glyph.
pub struct Glyph {
#[doc(hidden)]
pub tex: Vector2<f32>,
#[doc(hidden)]
pub advance: Vector2<f32>,
#[doc(hidden)]
pub dimensions: Vector2<f32>,
#[doc(hidden)]
pub offset: Vector2<f32>,
#[doc(hidden)]
pub buffer: Vec<u8>,
}
impl Glyph {
/// Creates a new empty glyph.
pub fn new(
tex: Vector2<f32>,
advance: Vector2<f32>,
dimensions: Vector2<f32>,
offset: Vector2<f32>,
buffer: Vec<u8>,
) -> Glyph {
Glyph {
tex,
advance,
dimensions,
offset,
buffer, | }
}
} | random_line_split |
|
ChartBtnView.js | define([ 'backbone', 'metro', 'highcharts' ], function(Backbone, Metro, Highcharts) {
var ChartBtnView = Backbone.View.extend({
className: 'chart-btn-view menu-btn',
events: {
'click': 'toggle',
'mouseover': 'over',
'mouseout': 'out',
},
initialize: function(){
//ensure correct scope
_.bindAll(this, 'render', 'unrender', 'toggle', 'over', 'out', 'setInfo');
//info dialog
this.dialog = $('<div data-role="dialog" id="chart_dialog">');
this.dialog.addClass('padding20');
this.dialog.attr('data-close-button', 'true');
this.dialog.append('<div id="chart" style="min-width: 1200px; height: 500px; margin: 0 auto">');
//add to page
this.render();
},
render: function() {
var $button = $('<span class="mif-chart-dots">');
$(this.el).html($button);
$(this.el).attr('title', 'chart plot...');
$('body > .container').append($(this.el));
$('body > .container').append(this.dialog);
return this;
},
unrender: function() {
$(this.el).remove();
this.dialog.remove();
}, | toggleMetroDialog('#chart_dialog');
},
over: function() {
$(this.el).addClass('expand');
},
out: function() {
$(this.el).removeClass('expand');
},
setInfo: function(content) {
this.dialog.append(content);
}
});
return ChartBtnView;
}); |
toggle: function() { | random_line_split |
config.example.js | 'use strict';
exports.port = process.env.PORT || 3000;
exports.mongodb = {
uri: process.env.MONGOLAB_URI || process.env.MONGOHQ_URL || 'localhost/lulucrawler'
};
exports.getThisUrl = '';
exports.companyName = '';
exports.projectName = 'luluCrawler';
exports.systemEmail = '[email protected]';
exports.cryptoKey = 'k3yb0ardc4t';
exports.loginAttempts = {
forIp: 50,
forIpAndUser: 7,
logExpiration: '20m'
};
exports.smtp = {
from: { | credentials: {
user: process.env.SMTP_USERNAME || '[email protected]',
password: process.env.SMTP_PASSWORD || 'bl4rg!',
host: process.env.SMTP_HOST || 'smtp.gmail.com',
ssl: true
}
}; | name: process.env.SMTP_FROM_NAME || exports.projectName +' Website',
address: process.env.SMTP_FROM_ADDRESS || '[email protected]'
}, | random_line_split |
address.ts | import { ADDRESS_STATUS, ADDRESS_TYPE, RECEIVE_ADDRESS, SEND_ADDRESS } from '../constants';
import { Address, Recipient } from '../interfaces';
import { ContactEmail } from '../interfaces/contacts';
import { canonizeInternalEmail } from './email';
import { unary } from './function';
export const getIsAddressDisabled = (address: Address) => {
return address.Status === ADDRESS_STATUS.STATUS_DISABLED;
};
export const getIsAddressActive = (address: Address) => {
return (
address.Status === ADDRESS_STATUS.STATUS_ENABLED &&
address.Receive === RECEIVE_ADDRESS.RECEIVE_YES && | address.Send === SEND_ADDRESS.SEND_YES
);
};
export const getActiveAddresses = (addresses: Address[]): Address[] => {
return addresses.filter(unary(getIsAddressActive));
};
export const hasAddresses = (addresses: Address[] | undefined): boolean => {
return Array.isArray(addresses) && addresses.length > 0;
};
export const getHasOnlyExternalAddresses = (addresses: Address[]) => {
return addresses.every(({ Type }) => Type === ADDRESS_TYPE.TYPE_EXTERNAL);
};
export const contactToRecipient = (contact: Partial<ContactEmail> = {}, groupPath?: string): Partial<Recipient> => ({
Name: contact.Name,
Address: contact.Email,
ContactID: contact.ContactID,
Group: groupPath,
});
export const findUserAddress = (userEmail?: string, addresses: Address[] = []) => {
if (!userEmail) {
return undefined;
}
const canonicalUserEmail = canonizeInternalEmail(userEmail);
return addresses.find(({ Email }) => canonizeInternalEmail(Email) === canonicalUserEmail);
}; | random_line_split |
|
address.ts | import { ADDRESS_STATUS, ADDRESS_TYPE, RECEIVE_ADDRESS, SEND_ADDRESS } from '../constants';
import { Address, Recipient } from '../interfaces';
import { ContactEmail } from '../interfaces/contacts';
import { canonizeInternalEmail } from './email';
import { unary } from './function';
export const getIsAddressDisabled = (address: Address) => {
return address.Status === ADDRESS_STATUS.STATUS_DISABLED;
};
export const getIsAddressActive = (address: Address) => {
return (
address.Status === ADDRESS_STATUS.STATUS_ENABLED &&
address.Receive === RECEIVE_ADDRESS.RECEIVE_YES &&
address.Send === SEND_ADDRESS.SEND_YES
);
};
export const getActiveAddresses = (addresses: Address[]): Address[] => {
return addresses.filter(unary(getIsAddressActive));
};
export const hasAddresses = (addresses: Address[] | undefined): boolean => {
return Array.isArray(addresses) && addresses.length > 0;
};
export const getHasOnlyExternalAddresses = (addresses: Address[]) => {
return addresses.every(({ Type }) => Type === ADDRESS_TYPE.TYPE_EXTERNAL);
};
export const contactToRecipient = (contact: Partial<ContactEmail> = {}, groupPath?: string): Partial<Recipient> => ({
Name: contact.Name,
Address: contact.Email,
ContactID: contact.ContactID,
Group: groupPath,
});
export const findUserAddress = (userEmail?: string, addresses: Address[] = []) => {
if (!userEmail) |
const canonicalUserEmail = canonizeInternalEmail(userEmail);
return addresses.find(({ Email }) => canonizeInternalEmail(Email) === canonicalUserEmail);
};
| {
return undefined;
} | conditional_block |
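A small usage sketch for the helpers above. The relative import paths, the './address' module name and the partial Address object are assumptions made for illustration; the real Address interface carries more fields than shown here.

// Hypothetical usage of getIsAddressActive / getActiveAddresses.
import { ADDRESS_STATUS, RECEIVE_ADDRESS, SEND_ADDRESS } from '../constants';
import { Address } from '../interfaces';
import { getIsAddressActive, getActiveAddresses } from './address';

const addr = {
    Status: ADDRESS_STATUS.STATUS_ENABLED,
    Receive: RECEIVE_ADDRESS.RECEIVE_YES,
    Send: SEND_ADDRESS.SEND_YES,
} as unknown as Address; // partial stand-in, cast only for the sketch

console.log(getIsAddressActive(addr)); // true only when enabled, receiving and sending
console.log(getActiveAddresses([addr]).length); // 1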
admin.py | # -*- coding: utf-8 -*-
import os
from django.contrib import admin
from django import forms
from django.utils import simplejson
from django.utils.safestring import mark_safe
from django.core.urlresolvers import reverse
from django.conf.urls import url, patterns
from django.shortcuts import render
from django.contrib.auth.models import User
from django.http import HttpResponse
from django.template.loader import render_to_string
from .models import Submission, Assignment, Attachment, exercises
from .middleware import get_current_request
def get_user_exercises(user):
try:
assignment = Assignment.objects.get(user = user)
return [e for e in exercises if e['id'] in assignment.exercises]
except Assignment.DoesNotExist:
return []
readonly_fields = ('submitted_by', 'first_name', 'last_name', 'email', 'key', 'key_sent')
class AttachmentWidget(forms.Widget):
def render(self, name, value, *args, **kwargs):
if value:
a_tag = '<a href="%s">%s</a>' % (value, value)
else:
a_tag = 'brak'
return mark_safe(('<input type="hidden" name="%s" value="%s"/>' % (name, value)) + a_tag)
class SubmissionFormBase(forms.ModelForm):
class Meta:
model = Submission
exclude = ('answers', 'marks', 'contact', 'end_time') + readonly_fields
def get_open_answer(answers, exercise):
def get_option(options, id):
for option in options:
if option['id'] == int(id):
return option
exercise_id = str(exercise['id'])
answer = answers[exercise_id]
if exercise['type'] == 'open':
if isinstance(answer, list):
toret = ''
for part in answer:
field = get_option(exercise['fields'], part['id'])
toret += '- %s:\n\n%s\n\n' % (field['caption'], part['text'])
else:
toret = answer
if exercise['type'] == 'edumed_wybor':
ok = set(map(str, exercise['answer'])) == set(map(str,answer['closed_part']))
toret = u'Czesc testowa [%s]:\n' % ('poprawna' if ok else 'niepoprawna')
if len(answer['closed_part']):
for selected in answer['closed_part']:
option = get_option(exercise['options'], selected)
toret += '%s: %s\n' % (selected, option['text'])
else:
toret += u'<nie wybrano odpowiedzi>\n'
toret += u'\nCzesc otwarta (%s):\n\n' % ' '.join(exercise['open_part'])
toret += answer['open_part']
return toret
def get_form(request, submission):
fields = dict()
if submission.answers:
answers = simplejson.loads(submission.answers)
user_exercises = get_user_exercises(request.user)
for exercise in exercises:
if exercise not in user_exercises:
continue
answer_field_name = 'exercise_%s' % exercise['id']
mark_field_name = 'markof_%s_by_%s' % (exercise['id'], request.user.id)
if exercise['type'] in ('open', 'file_upload') or exercise.get('open_part', None):
if exercise['type'] == 'file_upload':
try:
attachment = Attachment.objects.get(submission = submission, exercise_id = exercise['id'])
except Attachment.DoesNotExist:
attachment = None
widget = AttachmentWidget
initial = attachment.file.url if attachment else None
else:
widget = forms.Textarea(attrs={'readonly':True})
initial = get_open_answer(answers, exercise)
fields[answer_field_name] = forms.CharField(
widget = widget,
initial = initial,
label = 'Rozwiązanie zadania %s' % exercise['id']
)
fields[mark_field_name] = forms.ChoiceField(
choices = [(None, '-')] + [(i,i) for i in range(exercise['max_points']+1)],
initial = submission.get_mark(user_id = request.user.id, exercise_id = exercise['id']),
label = u'Twoja ocena zadania %s' % exercise['id']
)
if not request.user.is_superuser:
class Meta(SubmissionFormBase.Meta):
pass
Meta.exclude += ('examiners',)
fields['Meta'] = Meta
return type('SubmissionForm', (SubmissionFormBase,), fields)
class SubmissionAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'todo', 'examiners_repr')
readonly_fields = readonly_fields
def get_form(self, request, obj, **kwargs):
return get_form(request, obj)
def submitted_by(self, instance):
if instance.contact:
return '<a href="%s">%s</a>' % (
reverse('admin:contact_contact_change', args = [instance.contact.id]),
instance.contact.contact
)
return '-'
submitted_by.allow_tags = True
submitted_by.short_description = "Zgłoszony/a przez"
def todo(self, submission):
user = get_current_request().user
user_exercises = get_user_exercises(user)
user_marks = submission.marks.get(str(user.id), {})
return ','.join([str(e['id']) for e in user_exercises if str(e['id']) not in user_marks.keys()])
todo.short_description = 'Twoje nieocenione zadania'
def examiners_repr(self, submission):
return ', '.join([u.username for u in submission.examiners.all()])
examiners_repr.short_description = 'Przypisani do zgłoszenia'
def save_model(self, request, submission, form, change):
for name, value in form.cleaned_data.items():
if name.startswith('markof_'):
parts = name.split('_')
exercise_id = parts[1]
user_id = parts[3]
submission.set_mark(user_id = user_id, exercise_id = exercise_id, mark = value)
submission.save()
def changelist_view(self, request, extra_context=None):
context = dict(examiners = [])
assignments = Assignment.objects.all()
if not request.user.is_superuser:
assignments = assignments.filter(user = request.user)
for assignment in assignments:
examiner = dict(name = assignment.user.username, todo = 0)
for submission in Submission.objects.filter(examiners = assignment.user):
for | context['examiners'].append(examiner)
return super(SubmissionAdmin, self).changelist_view(request, extra_context = context)
def queryset(self, request):
qs = super(SubmissionAdmin, self).queryset(request)
if not request.user.is_superuser:
qs = qs.filter(examiners = request.user)
return qs
def get_urls(self):
urls = super(SubmissionAdmin, self).get_urls()
return patterns('',
url(r'^report/$', self.admin_site.admin_view(report_view), name='wtem_admin_report')
) + super(SubmissionAdmin, self).get_urls()
class SubmissionsSet:
def __init__(self, submissions):
self.submissions = submissions
self.examiners_by_exercise = dict()
for submission in submissions:
for user_id, marks in submission.marks.items():
user = User.objects.get(pk=user_id)
for exercise_id in marks.keys():
examiners = self.examiners_by_exercise.setdefault(exercise_id, [])
if not user in examiners:
examiners.append(user)
def report_view(request):
submissions = sorted(Submission.objects.all(), key = lambda s: -s.final_result)
toret = render_to_string('wtem/admin_report.csv', dict(
submissionsSet = SubmissionsSet(submissions),
exercise_ids = map(str, range(1,len(exercises)+1))
))
response = HttpResponse(toret, content_type = 'text/csv')
response['Content-Disposition'] = 'attachment; filename="wyniki.csv"'
return response
admin.site.register(Submission, SubmissionAdmin)
admin.site.register(Assignment) | exercise_id in assignment.exercises:
if submission.get_mark(user_id = assignment.user.id, exercise_id = exercise_id) is None:
examiner['todo'] += 1
| conditional_block |
admin.py | # -*- coding: utf-8 -*-
import os
from django.contrib import admin
from django import forms
from django.utils import simplejson
from django.utils.safestring import mark_safe
from django.core.urlresolvers import reverse
from django.conf.urls import url, patterns
from django.shortcuts import render
from django.contrib.auth.models import User
from django.http import HttpResponse
from django.template.loader import render_to_string
from .models import Submission, Assignment, Attachment, exercises
from .middleware import get_current_request
def get_user_exercises(user):
try:
assignment = Assignment.objects.get(user = user)
return [e for e in exercises if e['id'] in assignment.exercises]
except Assignment.DoesNotExist:
return []
readonly_fields = ('submitted_by', 'first_name', 'last_name', 'email', 'key', 'key_sent')
class AttachmentWidget(forms.Widget):
def render(self, name, value, *args, **kwargs):
if value:
a_tag = '<a href="%s">%s</a>' % (value, value)
else:
a_tag = 'brak'
return mark_safe(('<input type="hidden" name="%s" value="%s"/>' % (name, value)) + a_tag)
class SubmissionFormBase(forms.ModelForm):
class Meta:
model = Submission
exclude = ('answers', 'marks', 'contact', 'end_time') + readonly_fields
def get_open_answer(answers, exercise):
def get_option(options, id):
|
exercise_id = str(exercise['id'])
answer = answers[exercise_id]
if exercise['type'] == 'open':
if isinstance(answer, list):
toret = ''
for part in answer:
field = get_option(exercise['fields'], part['id'])
toret += '- %s:\n\n%s\n\n' % (field['caption'], part['text'])
else:
toret = answer
if exercise['type'] == 'edumed_wybor':
ok = set(map(str, exercise['answer'])) == set(map(str,answer['closed_part']))
toret = u'Czesc testowa [%s]:\n' % ('poprawna' if ok else 'niepoprawna')
if len(answer['closed_part']):
for selected in answer['closed_part']:
option = get_option(exercise['options'], selected)
toret += '%s: %s\n' % (selected, option['text'])
else:
toret += u'<nie wybrano odpowiedzi>\n'
toret += u'\nCzesc otwarta (%s):\n\n' % ' '.join(exercise['open_part'])
toret += answer['open_part']
return toret
def get_form(request, submission):
fields = dict()
if submission.answers:
answers = simplejson.loads(submission.answers)
user_exercises = get_user_exercises(request.user)
for exercise in exercises:
if exercise not in user_exercises:
continue
answer_field_name = 'exercise_%s' % exercise['id']
mark_field_name = 'markof_%s_by_%s' % (exercise['id'], request.user.id)
if exercise['type'] in ('open', 'file_upload') or exercise.get('open_part', None):
if exercise['type'] == 'file_upload':
try:
attachment = Attachment.objects.get(submission = submission, exercise_id = exercise['id'])
except Attachment.DoesNotExist:
attachment = None
widget = AttachmentWidget
initial = attachment.file.url if attachment else None
else:
widget = forms.Textarea(attrs={'readonly':True})
initial = get_open_answer(answers, exercise)
fields[answer_field_name] = forms.CharField(
widget = widget,
initial = initial,
label = 'Rozwiązanie zadania %s' % exercise['id']
)
fields[mark_field_name] = forms.ChoiceField(
choices = [(None, '-')] + [(i,i) for i in range(exercise['max_points']+1)],
initial = submission.get_mark(user_id = request.user.id, exercise_id = exercise['id']),
label = u'Twoja ocena zadania %s' % exercise['id']
)
if not request.user.is_superuser:
class Meta(SubmissionFormBase.Meta):
pass
Meta.exclude += ('examiners',)
fields['Meta'] = Meta
return type('SubmissionForm', (SubmissionFormBase,), fields)
class SubmissionAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'todo', 'examiners_repr')
readonly_fields = readonly_fields
def get_form(self, request, obj, **kwargs):
return get_form(request, obj)
def submitted_by(self, instance):
if instance.contact:
return '<a href="%s">%s</a>' % (
reverse('admin:contact_contact_change', args = [instance.contact.id]),
instance.contact.contact
)
return '-'
submitted_by.allow_tags = True
submitted_by.short_description = "Zgłoszony/a przez"
def todo(self, submission):
user = get_current_request().user
user_exercises = get_user_exercises(user)
user_marks = submission.marks.get(str(user.id), {})
return ','.join([str(e['id']) for e in user_exercises if str(e['id']) not in user_marks.keys()])
todo.short_description = 'Twoje nieocenione zadania'
def examiners_repr(self, submission):
return ', '.join([u.username for u in submission.examiners.all()])
examiners_repr.short_description = 'Przypisani do zgłoszenia'
def save_model(self, request, submission, form, change):
for name, value in form.cleaned_data.items():
if name.startswith('markof_'):
parts = name.split('_')
exercise_id = parts[1]
user_id = parts[3]
submission.set_mark(user_id = user_id, exercise_id = exercise_id, mark = value)
submission.save()
def changelist_view(self, request, extra_context=None):
context = dict(examiners = [])
assignments = Assignment.objects.all()
if not request.user.is_superuser:
assignments = assignments.filter(user = request.user)
for assignment in assignments:
examiner = dict(name = assignment.user.username, todo = 0)
for submission in Submission.objects.filter(examiners = assignment.user):
for exercise_id in assignment.exercises:
if submission.get_mark(user_id = assignment.user.id, exercise_id = exercise_id) is None:
examiner['todo'] += 1
context['examiners'].append(examiner)
return super(SubmissionAdmin, self).changelist_view(request, extra_context = context)
def queryset(self, request):
qs = super(SubmissionAdmin, self).queryset(request)
if not request.user.is_superuser:
qs = qs.filter(examiners = request.user)
return qs
def get_urls(self):
urls = super(SubmissionAdmin, self).get_urls()
return patterns('',
url(r'^report/$', self.admin_site.admin_view(report_view), name='wtem_admin_report')
) + super(SubmissionAdmin, self).get_urls()
class SubmissionsSet:
def __init__(self, submissions):
self.submissions = submissions
self.examiners_by_exercise = dict()
for submission in submissions:
for user_id, marks in submission.marks.items():
user = User.objects.get(pk=user_id)
for exercise_id in marks.keys():
examiners = self.examiners_by_exercise.setdefault(exercise_id, [])
if not user in examiners:
examiners.append(user)
def report_view(request):
submissions = sorted(Submission.objects.all(), key = lambda s: -s.final_result)
toret = render_to_string('wtem/admin_report.csv', dict(
submissionsSet = SubmissionsSet(submissions),
exercise_ids = map(str, range(1,len(exercises)+1))
))
response = HttpResponse(toret, content_type = 'text/csv')
response['Content-Disposition'] = 'attachment; filename="wyniki.csv"'
return response
admin.site.register(Submission, SubmissionAdmin)
admin.site.register(Assignment) | for option in options:
if option['id'] == int(id):
return option | identifier_body |
admin.py | # -*- coding: utf-8 -*-
import os
from django.contrib import admin
from django import forms
from django.utils import simplejson
from django.utils.safestring import mark_safe
from django.core.urlresolvers import reverse
from django.conf.urls import url, patterns
from django.shortcuts import render
from django.contrib.auth.models import User
from django.http import HttpResponse
from django.template.loader import render_to_string
from .models import Submission, Assignment, Attachment, exercises
from .middleware import get_current_request
def get_user_exercises(user):
try:
assignment = Assignment.objects.get(user = user)
return [e for e in exercises if e['id'] in assignment.exercises]
except Assignment.DoesNotExist:
return []
readonly_fields = ('submitted_by', 'first_name', 'last_name', 'email', 'key', 'key_sent')
class AttachmentWidget(forms.Widget):
def render(self, name, value, *args, **kwargs):
if value:
a_tag = '<a href="%s">%s</a>' % (value, value)
else:
a_tag = 'brak'
return mark_safe(('<input type="hidden" name="%s" value="%s"/>' % (name, value)) + a_tag)
class SubmissionFormBase(forms.ModelForm):
class Meta:
model = Submission
exclude = ('answers', 'marks', 'contact', 'end_time') + readonly_fields
def | (answers, exercise):
def get_option(options, id):
for option in options:
if option['id'] == int(id):
return option
exercise_id = str(exercise['id'])
answer = answers[exercise_id]
if exercise['type'] == 'open':
if isinstance(answer, list):
toret = ''
for part in answer:
field = get_option(exercise['fields'], part['id'])
toret += '- %s:\n\n%s\n\n' % (field['caption'], part['text'])
else:
toret = answer
if exercise['type'] == 'edumed_wybor':
ok = set(map(str, exercise['answer'])) == set(map(str,answer['closed_part']))
toret = u'Czesc testowa [%s]:\n' % ('poprawna' if ok else 'niepoprawna')
if len(answer['closed_part']):
for selected in answer['closed_part']:
option = get_option(exercise['options'], selected)
toret += '%s: %s\n' % (selected, option['text'])
else:
toret += u'<nie wybrano odpowiedzi>\n'
toret += u'\nCzesc otwarta (%s):\n\n' % ' '.join(exercise['open_part'])
toret += answer['open_part']
return toret
def get_form(request, submission):
fields = dict()
if submission.answers:
answers = simplejson.loads(submission.answers)
user_exercises = get_user_exercises(request.user)
for exercise in exercises:
if exercise not in user_exercises:
continue
answer_field_name = 'exercise_%s' % exercise['id']
mark_field_name = 'markof_%s_by_%s' % (exercise['id'], request.user.id)
if exercise['type'] in ('open', 'file_upload') or exercise.get('open_part', None):
if exercise['type'] == 'file_upload':
try:
attachment = Attachment.objects.get(submission = submission, exercise_id = exercise['id'])
except Attachment.DoesNotExist:
attachment = None
widget = AttachmentWidget
initial = attachment.file.url if attachment else None
else:
widget = forms.Textarea(attrs={'readonly':True})
initial = get_open_answer(answers, exercise)
fields[answer_field_name] = forms.CharField(
widget = widget,
initial = initial,
label = 'Rozwiązanie zadania %s' % exercise['id']
)
fields[mark_field_name] = forms.ChoiceField(
choices = [(None, '-')] + [(i,i) for i in range(exercise['max_points']+1)],
initial = submission.get_mark(user_id = request.user.id, exercise_id = exercise['id']),
label = u'Twoja ocena zadania %s' % exercise['id']
)
if not request.user.is_superuser:
class Meta(SubmissionFormBase.Meta):
pass
Meta.exclude += ('examiners',)
fields['Meta'] = Meta
return type('SubmissionForm', (SubmissionFormBase,), fields)
class SubmissionAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'todo', 'examiners_repr')
readonly_fields = readonly_fields
def get_form(self, request, obj, **kwargs):
return get_form(request, obj)
def submitted_by(self, instance):
if instance.contact:
return '<a href="%s">%s</a>' % (
reverse('admin:contact_contact_change', args = [instance.contact.id]),
instance.contact.contact
)
return '-'
submitted_by.allow_tags = True
submitted_by.short_description = "Zgłoszony/a przez"
def todo(self, submission):
user = get_current_request().user
user_exercises = get_user_exercises(user)
user_marks = submission.marks.get(str(user.id), {})
return ','.join([str(e['id']) for e in user_exercises if str(e['id']) not in user_marks.keys()])
todo.short_description = 'Twoje nieocenione zadania'
def examiners_repr(self, submission):
return ', '.join([u.username for u in submission.examiners.all()])
examiners_repr.short_description = 'Przypisani do zgłoszenia'
def save_model(self, request, submission, form, change):
for name, value in form.cleaned_data.items():
if name.startswith('markof_'):
parts = name.split('_')
exercise_id = parts[1]
user_id = parts[3]
submission.set_mark(user_id = user_id, exercise_id = exercise_id, mark = value)
submission.save()
def changelist_view(self, request, extra_context=None):
context = dict(examiners = [])
assignments = Assignment.objects.all()
if not request.user.is_superuser:
assignments = assignments.filter(user = request.user)
for assignment in assignments:
examiner = dict(name = assignment.user.username, todo = 0)
for submission in Submission.objects.filter(examiners = assignment.user):
for exercise_id in assignment.exercises:
if submission.get_mark(user_id = assignment.user.id, exercise_id = exercise_id) is None:
examiner['todo'] += 1
context['examiners'].append(examiner)
return super(SubmissionAdmin, self).changelist_view(request, extra_context = context)
def queryset(self, request):
qs = super(SubmissionAdmin, self).queryset(request)
if not request.user.is_superuser:
qs = qs.filter(examiners = request.user)
return qs
def get_urls(self):
urls = super(SubmissionAdmin, self).get_urls()
return patterns('',
url(r'^report/$', self.admin_site.admin_view(report_view), name='wtem_admin_report')
) + super(SubmissionAdmin, self).get_urls()
class SubmissionsSet:
def __init__(self, submissions):
self.submissions = submissions
self.examiners_by_exercise = dict()
for submission in submissions:
for user_id, marks in submission.marks.items():
user = User.objects.get(pk=user_id)
for exercise_id in marks.keys():
examiners = self.examiners_by_exercise.setdefault(exercise_id, [])
if not user in examiners:
examiners.append(user)
def report_view(request):
submissions = sorted(Submission.objects.all(), key = lambda s: -s.final_result)
toret = render_to_string('wtem/admin_report.csv', dict(
submissionsSet = SubmissionsSet(submissions),
exercise_ids = map(str, range(1,len(exercises)+1))
))
response = HttpResponse(toret, content_type = 'text/csv')
response['Content-Disposition'] = 'attachment; filename="wyniki.csv"'
return response
admin.site.register(Submission, SubmissionAdmin)
admin.site.register(Assignment) | get_open_answer | identifier_name |
admin.py | # -*- coding: utf-8 -*-
import os
from django.contrib import admin
from django import forms
from django.utils import simplejson
from django.utils.safestring import mark_safe
from django.core.urlresolvers import reverse
from django.conf.urls import url, patterns
from django.shortcuts import render
from django.contrib.auth.models import User
from django.http import HttpResponse
from django.template.loader import render_to_string
from .models import Submission, Assignment, Attachment, exercises
from .middleware import get_current_request
def get_user_exercises(user):
try:
assignment = Assignment.objects.get(user = user)
return [e for e in exercises if e['id'] in assignment.exercises]
except Assignment.DoesNotExist:
return []
| if value:
a_tag = '<a href="%s">%s</a>' % (value, value)
else:
a_tag = 'brak'
return mark_safe(('<input type="hidden" name="%s" value="%s"/>' % (name, value)) + a_tag)
class SubmissionFormBase(forms.ModelForm):
class Meta:
model = Submission
exclude = ('answers', 'marks', 'contact', 'end_time') + readonly_fields
def get_open_answer(answers, exercise):
def get_option(options, id):
for option in options:
if option['id'] == int(id):
return option
exercise_id = str(exercise['id'])
answer = answers[exercise_id]
if exercise['type'] == 'open':
if isinstance(answer, list):
toret = ''
for part in answer:
field = get_option(exercise['fields'], part['id'])
toret += '- %s:\n\n%s\n\n' % (field['caption'], part['text'])
else:
toret = answer
if exercise['type'] == 'edumed_wybor':
ok = set(map(str, exercise['answer'])) == set(map(str,answer['closed_part']))
toret = u'Czesc testowa [%s]:\n' % ('poprawna' if ok else 'niepoprawna')
if len(answer['closed_part']):
for selected in answer['closed_part']:
option = get_option(exercise['options'], selected)
toret += '%s: %s\n' % (selected, option['text'])
else:
toret += u'<nie wybrano odpowiedzi>\n'
toret += u'\nCzesc otwarta (%s):\n\n' % ' '.join(exercise['open_part'])
toret += answer['open_part']
return toret
def get_form(request, submission):
fields = dict()
if submission.answers:
answers = simplejson.loads(submission.answers)
user_exercises = get_user_exercises(request.user)
for exercise in exercises:
if exercise not in user_exercises:
continue
answer_field_name = 'exercise_%s' % exercise['id']
mark_field_name = 'markof_%s_by_%s' % (exercise['id'], request.user.id)
if exercise['type'] in ('open', 'file_upload') or exercise.get('open_part', None):
if exercise['type'] == 'file_upload':
try:
attachment = Attachment.objects.get(submission = submission, exercise_id = exercise['id'])
except Attachment.DoesNotExist:
attachment = None
widget = AttachmentWidget
initial = attachment.file.url if attachment else None
else:
widget = forms.Textarea(attrs={'readonly':True})
initial = get_open_answer(answers, exercise)
fields[answer_field_name] = forms.CharField(
widget = widget,
initial = initial,
label = 'Rozwiązanie zadania %s' % exercise['id']
)
fields[mark_field_name] = forms.ChoiceField(
choices = [(None, '-')] + [(i,i) for i in range(exercise['max_points']+1)],
initial = submission.get_mark(user_id = request.user.id, exercise_id = exercise['id']),
label = u'Twoja ocena zadania %s' % exercise['id']
)
if not request.user.is_superuser:
class Meta(SubmissionFormBase.Meta):
pass
Meta.exclude += ('examiners',)
fields['Meta'] = Meta
return type('SubmissionForm', (SubmissionFormBase,), fields)
class SubmissionAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'todo', 'examiners_repr')
readonly_fields = readonly_fields
def get_form(self, request, obj, **kwargs):
return get_form(request, obj)
def submitted_by(self, instance):
if instance.contact:
return '<a href="%s">%s</a>' % (
reverse('admin:contact_contact_change', args = [instance.contact.id]),
instance.contact.contact
)
return '-'
submitted_by.allow_tags = True
submitted_by.short_description = "Zgłoszony/a przez"
def todo(self, submission):
user = get_current_request().user
user_exercises = get_user_exercises(user)
user_marks = submission.marks.get(str(user.id), {})
return ','.join([str(e['id']) for e in user_exercises if str(e['id']) not in user_marks.keys()])
todo.short_description = 'Twoje nieocenione zadania'
def examiners_repr(self, submission):
return ', '.join([u.username for u in submission.examiners.all()])
examiners_repr.short_description = 'Przypisani do zgłoszenia'
def save_model(self, request, submission, form, change):
for name, value in form.cleaned_data.items():
if name.startswith('markof_'):
parts = name.split('_')
exercise_id = parts[1]
user_id = parts[3]
submission.set_mark(user_id = user_id, exercise_id = exercise_id, mark = value)
submission.save()
def changelist_view(self, request, extra_context=None):
context = dict(examiners = [])
assignments = Assignment.objects.all()
if not request.user.is_superuser:
assignments = assignments.filter(user = request.user)
for assignment in assignments:
examiner = dict(name = assignment.user.username, todo = 0)
for submission in Submission.objects.filter(examiners = assignment.user):
for exercise_id in assignment.exercises:
if submission.get_mark(user_id = assignment.user.id, exercise_id = exercise_id) is None:
examiner['todo'] += 1
context['examiners'].append(examiner)
return super(SubmissionAdmin, self).changelist_view(request, extra_context = context)
def queryset(self, request):
qs = super(SubmissionAdmin, self).queryset(request)
if not request.user.is_superuser:
qs = qs.filter(examiners = request.user)
return qs
def get_urls(self):
urls = super(SubmissionAdmin, self).get_urls()
return patterns('',
url(r'^report/$', self.admin_site.admin_view(report_view), name='wtem_admin_report')
) + super(SubmissionAdmin, self).get_urls()
class SubmissionsSet:
def __init__(self, submissions):
self.submissions = submissions
self.examiners_by_exercise = dict()
for submission in submissions:
for user_id, marks in submission.marks.items():
user = User.objects.get(pk=user_id)
for exercise_id in marks.keys():
examiners = self.examiners_by_exercise.setdefault(exercise_id, [])
if not user in examiners:
examiners.append(user)
def report_view(request):
submissions = sorted(Submission.objects.all(), key = lambda s: -s.final_result)
toret = render_to_string('wtem/admin_report.csv', dict(
submissionsSet = SubmissionsSet(submissions),
exercise_ids = map(str, range(1,len(exercises)+1))
))
response = HttpResponse(toret, content_type = 'text/csv')
response['Content-Disposition'] = 'attachment; filename="wyniki.csv"'
return response
admin.site.register(Submission, SubmissionAdmin)
admin.site.register(Assignment) |
readonly_fields = ('submitted_by', 'first_name', 'last_name', 'email', 'key', 'key_sent')
class AttachmentWidget(forms.Widget):
def render(self, name, value, *args, **kwargs): | random_line_split |
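The admin code above reads and writes per-examiner scores through Submission.get_mark / set_mark and a marks mapping keyed first by user id and then by exercise id, both as strings (see submission.marks.get(str(user.id), {}) in todo()). A framework-free sketch of that implied layout follows; the real model lives in .models and may store it differently.

# Framework-free sketch of the marks layout implied by the admin code above.
# The actual Submission model may differ; user and exercise ids are illustrative.
marks = {
    "7": {"1": 3, "2": 0},   # examiner with user id 7
    "9": {"1": 2},           # examiner with user id 9
}

def get_mark(marks, user_id, exercise_id):
    return marks.get(str(user_id), {}).get(str(exercise_id))

def set_mark(marks, user_id, exercise_id, mark):
    marks.setdefault(str(user_id), {})[str(exercise_id)] = mark

set_mark(marks, 9, 2, 4)
assert get_mark(marks, 9, 2) == 4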
index.ts | import * as S from './addTodo.css';
import { ITodo } from '../../../../interfaces/ITodo';
export const AddTodoComponent: angular.IComponentOptions = {
template: `
<form ng-submit="$ctrl.addTodo()">
<input class="${S['new-todo']}" placeholder="What needs to be done?" ng-model="$ctrl.newTodo" autofocus>
<input class="${S['toggle-all']}" type="checkbox" ng-checked="$ctrl.isAllCompleted" ng-click="$ctrl.toggleAll($event)">
</form>
`,
bindings: {
list: '<',
onAdd: '&',
onToggleAll: '&'
},
controller: class implements angular.IController {
onAdd: { (todo: { todo: string; }): void; };
onToggleAll: { (completed: { completed: boolean; }): void; };
newTodo: string = '';
isAllCompleted: boolean;
constructor() { }
$onChanges(changes: { list: angular.IChangesObject<Array<ITodo>> }) {
this.isAllCompleted = changes.list.currentValue ? changes.list.currentValue.every(t => t.completed) : false;
}
addTodo() |
toggleAll(e: MouseEvent) {
this.onToggleAll({ completed: (<HTMLInputElement>e.target).checked });
}
}
};
| {
this.onAdd({ todo: this.newTodo });
this.newTodo = '';
} | identifier_body |
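A hypothetical wiring sketch for the component above; the module name, file paths and the parent template are assumptions, not part of the source.

// Hypothetical registration of AddTodoComponent in an AngularJS 1.5+ module.
import * as angular from 'angular';
import { AddTodoComponent } from './components/addTodo';

angular.module('todoApp', [])
    .component('addTodo', AddTodoComponent);

// In a parent template the '&' and '<' bindings map to attributes, e.g.:
//   <add-todo list="$ctrl.todos"
//             on-add="$ctrl.add(todo)"
//             on-toggle-all="$ctrl.toggleAll(completed)"></add-todo>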
index.ts | import * as S from './addTodo.css';
import { ITodo } from '../../../../interfaces/ITodo';
export const AddTodoComponent: angular.IComponentOptions = {
template: `
<form ng-submit="$ctrl.addTodo()">
<input class="${S['new-todo']}" placeholder="What needs to be done?" ng-model="$ctrl.newTodo" autofocus>
<input class="${S['toggle-all']}" type="checkbox" ng-checked="$ctrl.isAllCompleted" ng-click="$ctrl.toggleAll($event)">
</form>
`,
bindings: {
list: '<', |
onAdd: { (todo: { todo: string; }): void; };
onToggleAll: { (completed: { completed: boolean; }): void; };
newTodo: string = '';
isAllCompleted: boolean;
constructor() { }
$onChanges(changes: { list: angular.IChangesObject<Array<ITodo>> }) {
this.isAllCompleted = changes.list.currentValue ? changes.list.currentValue.every(t => t.completed) : false;
}
addTodo() {
this.onAdd({ todo: this.newTodo });
this.newTodo = '';
}
toggleAll(e: MouseEvent) {
this.onToggleAll({ completed: (<HTMLInputElement>e.target).checked });
}
}
}; | onAdd: '&',
onToggleAll: '&'
},
controller: class implements angular.IController { | random_line_split |
index.ts | import * as S from './addTodo.css';
import { ITodo } from '../../../../interfaces/ITodo';
export const AddTodoComponent: angular.IComponentOptions = {
template: `
<form ng-submit="$ctrl.addTodo()">
<input class="${S['new-todo']}" placeholder="What needs to be done?" ng-model="$ctrl.newTodo" autofocus>
<input class="${S['toggle-all']}" type="checkbox" ng-checked="$ctrl.isAllCompleted" ng-click="$ctrl.toggleAll($event)">
</form>
`,
bindings: {
list: '<',
onAdd: '&',
onToggleAll: '&'
},
controller: class implements angular.IController {
onAdd: { (todo: { todo: string; }): void; };
onToggleAll: { (completed: { completed: boolean; }): void; };
newTodo: string = '';
isAllCompleted: boolean;
constructor() { }
$onChanges(changes: { list: angular.IChangesObject<Array<ITodo>> }) {
this.isAllCompleted = changes.list.currentValue ? changes.list.currentValue.every(t => t.completed) : false;
}
| () {
this.onAdd({ todo: this.newTodo });
this.newTodo = '';
}
toggleAll(e: MouseEvent) {
this.onToggleAll({ completed: (<HTMLInputElement>e.target).checked });
}
}
};
| addTodo | identifier_name |
main_with_threads.py | #!/usr/bin/env python3
import re
import json
from threading import Thread
from rx import Observable
import APIReaderTwitter as Twitter
emoji_re = re.compile(u'['
u'\U0001F300-\U0001F64F'
u'\U0001F680-\U0001F6FF'
u'\u2600-\u26FF\u2700-\u27BF]+',
re.UNICODE)
# Util
def process_stream(s, f):
return s.subscribe(f)
def count_emoji(text):
return len(re.findall(emoji_re, text))
# Filters
def is_tweet(element):
return set(('favorited', 'favorite_count', 'retweeted', 'retweet_count')) <= element.keys()
def is_retweet(element):
return 'retweeted_status' in element
def is_spanish_tweet(element):
return element['lang'] == 'es'
def is_japanese_tweet(element):
return element['lang'] == 'ja'
def | (element):
return len(element['entities']['hashtags']) > 0
# Tests
def tweet_has_more_than_two_emojis(element):
return count_emoji(element['text']) > 2
# https://dev.twitter.com/overview/api/tweets
def original_tweet_has_less_than_50_retweets(element):
return element['retweeted_status']['retweet_count'] <= 50
def hashtag_longer_than_twelve_letters(element):
return all(len(h['text']) > 12 for h in element['entities']['hashtags'])
# Expects
# All retweeted tweets should have less than 50 retweets
def retweet_test(element):
if not original_tweet_has_less_than_50_retweets(element):
print('=> Retweet:', json.dumps(element['retweeted_status']['retweet_count'], indent=4), "retweets")
# All elements should have no more than two emojis
def emoji_test(element):
if tweet_has_more_than_two_emojis(element):
print("=> Emoji:", count_emoji(element['text']))
# All elements should have hashtags shorter than 12 characters
def hashtag_test(element):
if hashtag_longer_than_twelve_letters(element):
print('=> Hashtag:', json.dumps(element['entities']['hashtags'], indent=4))
if __name__ == "__main__":
stream = Observable.from_(Twitter.get_iterable())
tweets = stream.filter(is_tweet)
# Keep only retweets
retweeted_tweets = tweets.filter(is_retweet)
# Keep only tweets in japanese
in_japanese_tweets = tweets.filter(is_japanese_tweet)
# Keep all tweets in spanish that contain a hashtag
spanish_hashtags = tweets.filter(is_spanish_tweet).filter(has_hashtags)
threads = [
Thread(target=process_stream, args=(in_japanese_tweets, emoji_test)),
Thread(target=process_stream, args=(spanish_hashtags, hashtag_test)),
Thread(target=process_stream, args=(retweeted_tweets, retweet_test))
]
for t in threads:
t.start()
| has_hashtags | identifier_name |
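A standalone sketch of the same thread-per-subscription pattern driven by a plain list instead of the Twitter stream, so it can run without credentials. It assumes the RxPY 1.x API (rx.Observable.from_) that the script above uses; the sample tweets are made up.

# Standalone sketch: the filter/subscribe pattern from above, without Twitter.
from threading import Thread
from rx import Observable

fake_tweets = [
    {'lang': 'es', 'text': 'hola #unhashtagmuylargo', 'entities': {'hashtags': [{'text': 'unhashtagmuylargo'}]}},
    {'lang': 'ja', 'text': 'konnichiwa', 'entities': {'hashtags': []}},
]

stream = Observable.from_(fake_tweets)
spanish = stream.filter(lambda t: t['lang'] == 'es')

t = Thread(target=lambda: spanish.subscribe(print))
t.start()
t.join()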
main_with_threads.py | #!/usr/bin/env python3
import re
import json
from threading import Thread
from rx import Observable
import APIReaderTwitter as Twitter
emoji_re = re.compile(u'['
u'\U0001F300-\U0001F64F'
u'\U0001F680-\U0001F6FF'
u'\u2600-\u26FF\u2700-\u27BF]+',
re.UNICODE)
# Util
def process_stream(s, f):
return s.subscribe(f)
def count_emoji(text):
return len(re.findall(emoji_re, text)) |
def is_retweet(element):
return 'retweeted_status' in element
def is_spanish_tweet(element):
return element['lang'] == 'es'
def is_japanese_tweet(element):
return element['lang'] == 'ja'
def has_hashtags(element):
return len(element['entities']['hashtags']) > 0
# Tests
def tweet_has_more_than_two_emojis(element):
return count_emoji(element['text']) > 2
# https://dev.twitter.com/overview/api/tweets
def original_tweet_has_less_than_50_retweets(element):
return element['retweeted_status']['retweet_count'] <= 50
def hashtag_longer_than_twelve_letters(element):
return all(len(h['text']) > 12 for h in element['entities']['hashtags'])
# Expects
# All retweeted tweets should have less than 50 retweets
def retweet_test(element):
if not original_tweet_has_less_than_50_retweets(element):
print('=> Retweet:', json.dumps(element['retweeted_status']['retweet_count'], indent=4), "retweets")
# All elements should have no more than two emojis
def emoji_test(element):
if tweet_has_more_than_two_emojis(element):
print("=> Emoji:", count_emoji(element['text']))
# All elements should have hashtags shorter than 12 characters
def hashtag_test(element):
if hashtag_longer_than_twelve_letters(element):
print('=> Hashtag:', json.dumps(element['entities']['hashtags'], indent=4))
if __name__ == "__main__":
stream = Observable.from_(Twitter.get_iterable())
tweets = stream.filter(is_tweet)
# Keep only retweets
retweeted_tweets = tweets.filter(is_retweet)
# Keep only tweets in japanese
in_japanese_tweets = tweets.filter(is_japanese_tweet)
# Keep all tweets in spanish that contain a hashtag
spanish_hashtags = tweets.filter(is_spanish_tweet).filter(has_hashtags)
threads = [
Thread(target=process_stream, args=(in_japanese_tweets, emoji_test)),
Thread(target=process_stream, args=(spanish_hashtags, hashtag_test)),
Thread(target=process_stream, args=(retweeted_tweets, retweet_test))
]
for t in threads:
t.start() |
# Filters
def is_tweet(element):
return set(('favorited', 'favorite_count', 'retweeted', 'retweet_count')) <= element.keys() | random_line_split |
main_with_threads.py | #!/usr/bin/env python3
import re
import json
from threading import Thread
from rx import Observable
import APIReaderTwitter as Twitter
emoji_re = re.compile(u'['
u'\U0001F300-\U0001F64F'
u'\U0001F680-\U0001F6FF'
u'\u2600-\u26FF\u2700-\u27BF]+',
re.UNICODE)
# Util
def process_stream(s, f):
return s.subscribe(f)
def count_emoji(text):
return len(re.findall(emoji_re, text))
# Filters
def is_tweet(element):
return set(('favorited', 'favorite_count', 'retweeted', 'retweet_count')) <= element.keys()
def is_retweet(element):
return 'retweeted_status' in element
def is_spanish_tweet(element):
return element['lang'] == 'es'
def is_japanese_tweet(element):
return element['lang'] == 'ja'
def has_hashtags(element):
return len(element['entities']['hashtags']) > 0
# Tests
def tweet_has_more_than_two_emojis(element):
return count_emoji(element['text']) > 2
# https://dev.twitter.com/overview/api/tweets
def original_tweet_has_less_than_50_retweets(element):
return element['retweeted_status']['retweet_count'] <= 50
def hashtag_longer_than_twelve_letters(element):
return all(len(h['text']) > 12 for h in element['entities']['hashtags'])
# Expects
# All retweeted tweets should have less than 50 retweets
def retweet_test(element):
if not original_tweet_has_less_than_50_retweets(element):
print('=> Retweet:', json.dumps(element['retweeted_status']['retweet_count'], indent=4), "retweets")
# All elements should have no more than two emojis
def emoji_test(element):
if tweet_has_more_than_two_emojis(element):
print("=> Emoji:", count_emoji(element['text']))
# All elements should have hashtags shorter than 12 characters
def hashtag_test(element):
if hashtag_longer_than_twelve_letters(element):
print('=> Hashtag:', json.dumps(element['entities']['hashtags'], indent=4))
if __name__ == "__main__":
| stream = Observable.from_(Twitter.get_iterable())
tweets = stream.filter(is_tweet)
# Keep only retweets
retweeted_tweets = tweets.filter(is_retweet)
# Keep only tweets in japanese
in_japanese_tweets = tweets.filter(is_japanese_tweet)
# Keep all tweets in spanish that contain a hashtag
spanish_hashtags = tweets.filter(is_spanish_tweet).filter(has_hashtags)
threads = [
Thread(target=process_stream, args=(in_japanese_tweets, emoji_test)),
Thread(target=process_stream, args=(spanish_hashtags, hashtag_test)),
Thread(target=process_stream, args=(retweeted_tweets, retweet_test))
]
for t in threads:
t.start() | conditional_block |
|
main_with_threads.py | #!/usr/bin/env python3
import re
import json
from threading import Thread
from rx import Observable
import APIReaderTwitter as Twitter
emoji_re = re.compile(u'['
u'\U0001F300-\U0001F64F'
u'\U0001F680-\U0001F6FF'
u'\u2600-\u26FF\u2700-\u27BF]+',
re.UNICODE)
# Util
def process_stream(s, f):
return s.subscribe(f)
def count_emoji(text):
return len(re.findall(emoji_re, text))
# Filters
def is_tweet(element):
return set(('favorited', 'favorite_count', 'retweeted', 'retweet_count')) <= element.keys()
def is_retweet(element):
|
def is_spanish_tweet(element):
return element['lang'] == 'es'
def is_japanese_tweet(element):
return element['lang'] == 'ja'
def has_hashtags(element):
return len(element['entities']['hashtags']) > 0
# Tests
def tweet_has_more_than_two_emojis(element):
return count_emoji(element['text']) > 2
# https://dev.twitter.com/overview/api/tweets
def original_tweet_has_less_than_50_retweets(element):
return element['retweeted_status']['retweet_count'] <= 50
def hashtag_longer_than_twelve_letters(element):
return all(len(h['text']) > 12 for h in element['entities']['hashtags'])
# Expects
# All retweeted tweets should have less than 50 retweets
def retweet_test(element):
if not original_tweet_has_less_than_50_retweets(element):
print('=> Retweet:', json.dumps(element['retweeted_status']['retweet_count'], indent=4), "retweets")
# All elements should have no more than two emojis
def emoji_test(element):
if tweet_has_more_than_two_emojis(element):
print("=> Emoji:", count_emoji(element['text']))
# All elements should have hashtags shorter than 12 characters
def hashtag_test(element):
if hashtag_longer_than_twelve_letters(element):
print('=> Hashtag:', json.dumps(element['entities']['hashtags'], indent=4))
if __name__ == "__main__":
stream = Observable.from_(Twitter.get_iterable())
tweets = stream.filter(is_tweet)
# Keep only retweets
retweeted_tweets = tweets.filter(is_retweet)
# Keep only tweets in japanese
in_japanese_tweets = tweets.filter(is_japanese_tweet)
# Keep all tweets in spanish that contain a hashtag
spanish_hashtags = tweets.filter(is_spanish_tweet).filter(has_hashtags)
threads = [
Thread(target=process_stream, args=(in_japanese_tweets, emoji_test)),
Thread(target=process_stream, args=(spanish_hashtags, hashtag_test)),
Thread(target=process_stream, args=(retweeted_tweets, retweet_test))
]
for t in threads:
t.start()
| return 'retweeted_status' in element | identifier_body |
peripheral.rs | use bare_metal::{CriticalSection, Mutex};
use once_cell::unsync::OnceCell;
static PERIPHERALS: Mutex<OnceCell<At2XtPeripherals>> = Mutex::new(OnceCell::new());
pub struct At2XtPeripherals {
pub port: msp430g2211::PORT_1_2,
pub timer: msp430g2211::TIMER_A2,
}
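// The AsRef impls below let periph_ref() hand out a reference to either
// peripheral through a single generic accessor.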
impl AsRef<msp430g2211::PORT_1_2> for At2XtPeripherals {
fn as_ref(&self) -> &msp430g2211::PORT_1_2 {
&self.port
}
}
impl AsRef<msp430g2211::TIMER_A2> for At2XtPeripherals {
fn as_ref(&self) -> &msp430g2211::TIMER_A2 {
&self.timer |
impl At2XtPeripherals {
pub fn init(self, cs: &CriticalSection) -> Result<(), ()> {
// We want to consume our Peripherals struct so interrupts
// and the main thread can access the peripherals; OnceCell
// returns the data to you on error.
PERIPHERALS.borrow(*cs).set(self).map_err(|_e| {})
}
pub fn periph_ref<'a, T>(cs: &'a CriticalSection) -> Option<&'a T>
where
Self: AsRef<T>,
{
PERIPHERALS.borrow(*cs).get().map(|p| p.as_ref())
}
} | }
} | random_line_split |
peripheral.rs | use bare_metal::{CriticalSection, Mutex};
use once_cell::unsync::OnceCell;
static PERIPHERALS: Mutex<OnceCell<At2XtPeripherals>> = Mutex::new(OnceCell::new());
pub struct | {
pub port: msp430g2211::PORT_1_2,
pub timer: msp430g2211::TIMER_A2,
}
impl AsRef<msp430g2211::PORT_1_2> for At2XtPeripherals {
fn as_ref(&self) -> &msp430g2211::PORT_1_2 {
&self.port
}
}
impl AsRef<msp430g2211::TIMER_A2> for At2XtPeripherals {
fn as_ref(&self) -> &msp430g2211::TIMER_A2 {
&self.timer
}
}
impl At2XtPeripherals {
pub fn init(self, cs: &CriticalSection) -> Result<(), ()> {
// We want to consume our Peripherals struct so interrupts
// and the main thread can access the peripherals; OnceCell
// returns the data to you on error.
PERIPHERALS.borrow(*cs).set(self).map_err(|_e| {})
}
pub fn periph_ref<'a, T>(cs: &'a CriticalSection) -> Option<&'a T>
where
Self: AsRef<T>,
{
PERIPHERALS.borrow(*cs).get().map(|p| p.as_ref())
}
}
| At2XtPeripherals | identifier_name |
base.js | import BN from "../../../bn.js";
import * as utils from "../utils";
'use strict';
var getNAF = utils.getNAF;
var getJSF = utils.getJSF;
var assert = utils.assert;
function BaseCurve(type, conf) {
this.type = type;
this.p = new BN(conf.p, 16);
// Use Montgomery, when there is no fast reduction for the prime
this.red = conf.prime ? BN.red(conf.prime) : BN.mont(this.p);
// Useful for many curves
this.zero = new BN(0).toRed(this.red);
this.one = new BN(1).toRed(this.red);
this.two = new BN(2).toRed(this.red);
// Curve configuration, optional
this.n = conf.n && new BN(conf.n, 16);
this.g = conf.g && this.pointFromJSON(conf.g, conf.gRed);
// Temporary arrays
this._wnafT1 = new Array(4);
this._wnafT2 = new Array(4);
this._wnafT3 = new Array(4);
this._wnafT4 = new Array(4);
this._bitLength = this.n ? this.n.bitLength() : 0;
// Generalized Greg Maxwell's trick
var adjustCount = this.n && this.p.div(this.n);
if (!adjustCount || adjustCount.cmpn(100) > 0) {
this.redN = null;
}
else {
this._maxwellTrick = true;
this.redN = this.n.toRed(this.red);
}
}
BaseCurve.prototype.point = function point() {
throw new Error('Not implemented');
};
BaseCurve.prototype.validate = function validate() {
throw new Error('Not implemented');
};
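// Fixed-base multiplication: walks the precomputed table of repeated
// doublings of p (see _getDoubles), so evaluating k*p needs only point additions.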
BaseCurve.prototype._fixedNafMul = function _fixedNafMul(p, k) {
assert(p.precomputed);
var doubles = p._getDoubles();
var naf = getNAF(k, 1, this._bitLength);
var I = (1 << (doubles.step + 1)) - (doubles.step % 2 === 0 ? 2 : 1);
I /= 3;
// Translate into more windowed form
var repr = [];
var j;
var nafW;
for (j = 0; j < naf.length; j += doubles.step) {
nafW = 0;
for (var l = j + doubles.step - 1; l >= j; l--)
nafW = (nafW << 1) + naf[l];
repr.push(nafW);
}
var a = this.jpoint(null, null, null);
var b = this.jpoint(null, null, null);
for (var i = I; i > 0; i--) {
for (j = 0; j < repr.length; j++) {
nafW = repr[j];
if (nafW === i)
b = b.mixedAdd(doubles.points[j]);
else if (nafW === -i)
b = b.mixedAdd(doubles.points[j].neg());
}
a = a.add(b);
}
return a.toP();
};
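// Sliding-window w-NAF scalar multiplication of a single point.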
BaseCurve.prototype._wnafMul = function _wnafMul(p, k) {
var w = 4;
// Precompute window
var nafPoints = p._getNAFPoints(w);
w = nafPoints.wnd;
var wnd = nafPoints.points;
// Get NAF form
var naf = getNAF(k, w, this._bitLength);
// Add `this`*(N+1) for every w-NAF index
var acc = this.jpoint(null, null, null);
for (var i = naf.length - 1; i >= 0; i--) {
// Count zeroes
for (var l = 0; i >= 0 && naf[i] === 0; i--)
l++;
if (i >= 0)
l++;
acc = acc.dblp(l);
if (i < 0)
break;
var z = naf[i];
assert(z !== 0);
if (p.type === 'affine') {
// J +- P
if (z > 0)
acc = acc.mixedAdd(wnd[(z - 1) >> 1]);
else
acc = acc.mixedAdd(wnd[(-z - 1) >> 1].neg());
}
else {
// J +- J
if (z > 0)
acc = acc.add(wnd[(z - 1) >> 1]);
else
acc = acc.add(wnd[(-z - 1) >> 1].neg());
}
}
return p.type === 'affine' ? acc.toP() : acc;
};
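// Interleaved multi-scalar multiplication: computes sum(coeffs[i] * points[i]),
// combining adjacent width-1 pairs via the Joint Sparse Form (JSF).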
BaseCurve.prototype._wnafMulAdd = function _wnafMulAdd(defW, points, coeffs, len, jacobianResult) {
var wndWidth = this._wnafT1;
var wnd = this._wnafT2;
var naf = this._wnafT3;
// Fill all arrays
var max = 0;
var i;
var j;
var p;
for (i = 0; i < len; i++) {
p = points[i];
var nafPoints = p._getNAFPoints(defW);
wndWidth[i] = nafPoints.wnd;
wnd[i] = nafPoints.points;
}
// Comb small window NAFs
for (i = len - 1; i >= 1; i -= 2) {
var a = i - 1;
var b = i;
if (wndWidth[a] !== 1 || wndWidth[b] !== 1) {
naf[a] = getNAF(coeffs[a], wndWidth[a], this._bitLength);
naf[b] = getNAF(coeffs[b], wndWidth[b], this._bitLength);
max = Math.max(naf[a].length, max);
max = Math.max(naf[b].length, max);
continue;
}
var comb = [
points[a],
null,
null,
points[b],
];
// Try to avoid Projective points, if possible
if (points[a].y.cmp(points[b].y) === 0) {
comb[1] = points[a].add(points[b]);
comb[2] = points[a].toJ().mixedAdd(points[b].neg());
}
else if (points[a].y.cmp(points[b].y.redNeg()) === 0) {
comb[1] = points[a].toJ().mixedAdd(points[b]);
comb[2] = points[a].add(points[b].neg());
}
else {
comb[1] = points[a].toJ().mixedAdd(points[b]);
comb[2] = points[a].toJ().mixedAdd(points[b].neg());
}
var index = [
-3,
-1,
-5,
-7,
0,
7,
5,
1,
3,
];
var jsf = getJSF(coeffs[a], coeffs[b]);
max = Math.max(jsf[0].length, max);
naf[a] = new Array(max);
naf[b] = new Array(max);
for (j = 0; j < max; j++) {
var ja = jsf[0][j] | 0;
var jb = jsf[1][j] | 0;
naf[a][j] = index[(ja + 1) * 3 + (jb + 1)];
naf[b][j] = 0;
wnd[a] = comb;
}
}
var acc = this.jpoint(null, null, null);
var tmp = this._wnafT4;
for (i = max; i >= 0; i--) {
var k = 0;
while (i >= 0) {
var zero = true;
for (j = 0; j < len; j++) |
if (!zero)
break;
k++;
i--;
}
if (i >= 0)
k++;
acc = acc.dblp(k);
if (i < 0)
break;
for (j = 0; j < len; j++) {
var z = tmp[j];
if (z === 0)
continue;
else if (z > 0)
p = wnd[j][(z - 1) >> 1];
else if (z < 0)
p = wnd[j][(-z - 1) >> 1].neg();
if (p.type === 'affine')
acc = acc.mixedAdd(p);
else
acc = acc.add(p);
}
}
// Zeroify references
for (i = 0; i < len; i++)
wnd[i] = null;
if (jacobianResult)
return acc;
else
return acc.toP();
};
function BasePoint(curve, type) {
this.curve = curve;
this.type = type;
this.precomputed = null;
}
BaseCurve.BasePoint = BasePoint;
BasePoint.prototype.eq = function eq( /*other*/) {
throw new Error('Not implemented');
};
BasePoint.prototype.validate = function validate() {
return this.curve.validate(this);
};
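// Parses a SEC1-style encoding: 0x04 uncompressed (0x06/0x07 hybrid),
// or 0x02/0x03 compressed where the prefix carries the parity of y.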
BaseCurve.prototype.decodePoint = function decodePoint(bytes, enc) {
bytes = utils.toArray(bytes, enc);
var len = this.p.byteLength();
// uncompressed, hybrid-odd, hybrid-even
if ((bytes[0] === 0x04 || bytes[0] === 0x06 || bytes[0] === 0x07) &&
bytes.length - 1 === 2 * len) {
if (bytes[0] === 0x06)
assert(bytes[bytes.length - 1] % 2 === 0);
else if (bytes[0] === 0x07)
assert(bytes[bytes.length - 1] % 2 === 1);
var res = this.point(bytes.slice(1, 1 + len), bytes.slice(1 + len, 1 + 2 * len));
return res;
}
else if ((bytes[0] === 0x02 || bytes[0] === 0x03) &&
bytes.length - 1 === len) {
return this.pointFromX(bytes.slice(1, 1 + len), bytes[0] === 0x03);
}
throw new Error('Unknown point format');
};
BasePoint.prototype.encodeCompressed = function encodeCompressed(enc) {
return this.encode(enc, true);
};
BasePoint.prototype._encode = function _encode(compact) {
var len = this.curve.p.byteLength();
var x = this.getX().toArray('be', len);
if (compact)
return [this.getY().isEven() ? 0x02 : 0x03].concat(x);
return [0x04].concat(x, this.getY().toArray('be', len));
};
BasePoint.prototype.encode = function encode(enc, compact) {
return utils.encode(this._encode(compact), enc);
};
BasePoint.prototype.precompute = function precompute(power) {
if (this.precomputed)
return this;
var precomputed = {
doubles: null,
naf: null,
beta: null,
};
precomputed.naf = this._getNAFPoints(8);
precomputed.doubles = this._getDoubles(4, power);
precomputed.beta = this._getBeta();
this.precomputed = precomputed;
return this;
};
BasePoint.prototype._hasDoubles = function _hasDoubles(k) {
if (!this.precomputed)
return false;
var doubles = this.precomputed.doubles;
if (!doubles)
return false;
return doubles.points.length >= Math.ceil((k.bitLength() + 1) / doubles.step);
};
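// Precomputes step-spaced repeated doublings of this point for _fixedNafMul.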
BasePoint.prototype._getDoubles = function _getDoubles(step, power) {
if (this.precomputed && this.precomputed.doubles)
return this.precomputed.doubles;
var doubles = [this];
var acc = this;
for (var i = 0; i < power; i += step) {
for (var j = 0; j < step; j++)
acc = acc.dbl();
doubles.push(acc);
}
return {
step: step,
points: doubles,
};
};
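// Precomputes the odd multiples P, 3P, 5P, ... used by the w-NAF loops.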
BasePoint.prototype._getNAFPoints = function _getNAFPoints(wnd) {
if (this.precomputed && this.precomputed.naf)
return this.precomputed.naf;
var res = [this];
var max = (1 << wnd) - 1;
var dbl = max === 1 ? null : this.dbl();
for (var i = 1; i < max; i++)
res[i] = res[i - 1].add(dbl);
return {
wnd: wnd,
points: res,
};
};
BasePoint.prototype._getBeta = function _getBeta() {
return null;
};
BasePoint.prototype.dblp = function dblp(k) {
var r = this;
for (var i = 0; i < k; i++)
r = r.dbl();
return r;
};
export default BaseCurve;
| {
tmp[j] = naf[j][i] | 0;
if (tmp[j] !== 0)
zero = false;
} | conditional_block |
base.js | import BN from "../../../bn.js";
import * as utils from "../utils";
'use strict';
var getNAF = utils.getNAF;
var getJSF = utils.getJSF;
var assert = utils.assert;
function BaseCurve(type, conf) |
BaseCurve.prototype.point = function point() {
throw new Error('Not implemented');
};
BaseCurve.prototype.validate = function validate() {
throw new Error('Not implemented');
};
BaseCurve.prototype._fixedNafMul = function _fixedNafMul(p, k) {
assert(p.precomputed);
var doubles = p._getDoubles();
var naf = getNAF(k, 1, this._bitLength);
var I = (1 << (doubles.step + 1)) - (doubles.step % 2 === 0 ? 2 : 1);
I /= 3;
// Translate into more windowed form
var repr = [];
var j;
var nafW;
for (j = 0; j < naf.length; j += doubles.step) {
nafW = 0;
for (var l = j + doubles.step - 1; l >= j; l--)
nafW = (nafW << 1) + naf[l];
repr.push(nafW);
}
var a = this.jpoint(null, null, null);
var b = this.jpoint(null, null, null);
for (var i = I; i > 0; i--) {
for (j = 0; j < repr.length; j++) {
nafW = repr[j];
if (nafW === i)
b = b.mixedAdd(doubles.points[j]);
else if (nafW === -i)
b = b.mixedAdd(doubles.points[j].neg());
}
a = a.add(b);
}
return a.toP();
};
BaseCurve.prototype._wnafMul = function _wnafMul(p, k) {
var w = 4;
// Precompute window
var nafPoints = p._getNAFPoints(w);
w = nafPoints.wnd;
var wnd = nafPoints.points;
// Get NAF form
var naf = getNAF(k, w, this._bitLength);
// Add `this`*(N+1) for every w-NAF index
var acc = this.jpoint(null, null, null);
for (var i = naf.length - 1; i >= 0; i--) {
// Count zeroes
for (var l = 0; i >= 0 && naf[i] === 0; i--)
l++;
if (i >= 0)
l++;
acc = acc.dblp(l);
if (i < 0)
break;
var z = naf[i];
assert(z !== 0);
if (p.type === 'affine') {
// J +- P
if (z > 0)
acc = acc.mixedAdd(wnd[(z - 1) >> 1]);
else
acc = acc.mixedAdd(wnd[(-z - 1) >> 1].neg());
}
else {
// J +- J
if (z > 0)
acc = acc.add(wnd[(z - 1) >> 1]);
else
acc = acc.add(wnd[(-z - 1) >> 1].neg());
}
}
return p.type === 'affine' ? acc.toP() : acc;
};
BaseCurve.prototype._wnafMulAdd = function _wnafMulAdd(defW, points, coeffs, len, jacobianResult) {
var wndWidth = this._wnafT1;
var wnd = this._wnafT2;
var naf = this._wnafT3;
// Fill all arrays
var max = 0;
var i;
var j;
var p;
for (i = 0; i < len; i++) {
p = points[i];
var nafPoints = p._getNAFPoints(defW);
wndWidth[i] = nafPoints.wnd;
wnd[i] = nafPoints.points;
}
// Comb small window NAFs
for (i = len - 1; i >= 1; i -= 2) {
var a = i - 1;
var b = i;
if (wndWidth[a] !== 1 || wndWidth[b] !== 1) {
naf[a] = getNAF(coeffs[a], wndWidth[a], this._bitLength);
naf[b] = getNAF(coeffs[b], wndWidth[b], this._bitLength);
max = Math.max(naf[a].length, max);
max = Math.max(naf[b].length, max);
continue;
}
var comb = [
points[a],
null,
null,
points[b],
];
// Try to avoid Projective points, if possible
if (points[a].y.cmp(points[b].y) === 0) {
comb[1] = points[a].add(points[b]);
comb[2] = points[a].toJ().mixedAdd(points[b].neg());
}
else if (points[a].y.cmp(points[b].y.redNeg()) === 0) {
comb[1] = points[a].toJ().mixedAdd(points[b]);
comb[2] = points[a].add(points[b].neg());
}
else {
comb[1] = points[a].toJ().mixedAdd(points[b]);
comb[2] = points[a].toJ().mixedAdd(points[b].neg());
}
var index = [
-3,
-1,
-5,
-7,
0,
7,
5,
1,
3,
];
var jsf = getJSF(coeffs[a], coeffs[b]);
max = Math.max(jsf[0].length, max);
naf[a] = new Array(max);
naf[b] = new Array(max);
for (j = 0; j < max; j++) {
var ja = jsf[0][j] | 0;
var jb = jsf[1][j] | 0;
naf[a][j] = index[(ja + 1) * 3 + (jb + 1)];
naf[b][j] = 0;
wnd[a] = comb;
}
}
var acc = this.jpoint(null, null, null);
var tmp = this._wnafT4;
for (i = max; i >= 0; i--) {
var k = 0;
while (i >= 0) {
var zero = true;
for (j = 0; j < len; j++) {
tmp[j] = naf[j][i] | 0;
if (tmp[j] !== 0)
zero = false;
}
if (!zero)
break;
k++;
i--;
}
if (i >= 0)
k++;
acc = acc.dblp(k);
if (i < 0)
break;
for (j = 0; j < len; j++) {
var z = tmp[j];
if (z === 0)
continue;
else if (z > 0)
p = wnd[j][(z - 1) >> 1];
else if (z < 0)
p = wnd[j][(-z - 1) >> 1].neg();
if (p.type === 'affine')
acc = acc.mixedAdd(p);
else
acc = acc.add(p);
}
}
// Zeroify references
for (i = 0; i < len; i++)
wnd[i] = null;
if (jacobianResult)
return acc;
else
return acc.toP();
};
function BasePoint(curve, type) {
this.curve = curve;
this.type = type;
this.precomputed = null;
}
BaseCurve.BasePoint = BasePoint;
BasePoint.prototype.eq = function eq( /*other*/) {
throw new Error('Not implemented');
};
BasePoint.prototype.validate = function validate() {
return this.curve.validate(this);
};
BaseCurve.prototype.decodePoint = function decodePoint(bytes, enc) {
bytes = utils.toArray(bytes, enc);
var len = this.p.byteLength();
// uncompressed, hybrid-odd, hybrid-even
if ((bytes[0] === 0x04 || bytes[0] === 0x06 || bytes[0] === 0x07) &&
bytes.length - 1 === 2 * len) {
if (bytes[0] === 0x06)
assert(bytes[bytes.length - 1] % 2 === 0);
else if (bytes[0] === 0x07)
assert(bytes[bytes.length - 1] % 2 === 1);
var res = this.point(bytes.slice(1, 1 + len), bytes.slice(1 + len, 1 + 2 * len));
return res;
}
else if ((bytes[0] === 0x02 || bytes[0] === 0x03) &&
bytes.length - 1 === len) {
return this.pointFromX(bytes.slice(1, 1 + len), bytes[0] === 0x03);
}
throw new Error('Unknown point format');
};
BasePoint.prototype.encodeCompressed = function encodeCompressed(enc) {
return this.encode(enc, true);
};
BasePoint.prototype._encode = function _encode(compact) {
var len = this.curve.p.byteLength();
var x = this.getX().toArray('be', len);
if (compact)
return [this.getY().isEven() ? 0x02 : 0x03].concat(x);
return [0x04].concat(x, this.getY().toArray('be', len));
};
BasePoint.prototype.encode = function encode(enc, compact) {
return utils.encode(this._encode(compact), enc);
};
BasePoint.prototype.precompute = function precompute(power) {
if (this.precomputed)
return this;
var precomputed = {
doubles: null,
naf: null,
beta: null,
};
precomputed.naf = this._getNAFPoints(8);
precomputed.doubles = this._getDoubles(4, power);
precomputed.beta = this._getBeta();
this.precomputed = precomputed;
return this;
};
BasePoint.prototype._hasDoubles = function _hasDoubles(k) {
if (!this.precomputed)
return false;
var doubles = this.precomputed.doubles;
if (!doubles)
return false;
return doubles.points.length >= Math.ceil((k.bitLength() + 1) / doubles.step);
};
BasePoint.prototype._getDoubles = function _getDoubles(step, power) {
if (this.precomputed && this.precomputed.doubles)
return this.precomputed.doubles;
var doubles = [this];
var acc = this;
for (var i = 0; i < power; i += step) {
for (var j = 0; j < step; j++)
acc = acc.dbl();
doubles.push(acc);
}
return {
step: step,
points: doubles,
};
};
BasePoint.prototype._getNAFPoints = function _getNAFPoints(wnd) {
if (this.precomputed && this.precomputed.naf)
return this.precomputed.naf;
var res = [this];
var max = (1 << wnd) - 1;
var dbl = max === 1 ? null : this.dbl();
for (var i = 1; i < max; i++)
res[i] = res[i - 1].add(dbl);
return {
wnd: wnd,
points: res,
};
};
BasePoint.prototype._getBeta = function _getBeta() {
return null;
};
BasePoint.prototype.dblp = function dblp(k) {
var r = this;
for (var i = 0; i < k; i++)
r = r.dbl();
return r;
};
export default BaseCurve;
| {
this.type = type;
this.p = new BN(conf.p, 16);
// Use Montgomery, when there is no fast reduction for the prime
this.red = conf.prime ? BN.red(conf.prime) : BN.mont(this.p);
// Useful for many curves
this.zero = new BN(0).toRed(this.red);
this.one = new BN(1).toRed(this.red);
this.two = new BN(2).toRed(this.red);
// Curve configuration, optional
this.n = conf.n && new BN(conf.n, 16);
this.g = conf.g && this.pointFromJSON(conf.g, conf.gRed);
// Temporary arrays
this._wnafT1 = new Array(4);
this._wnafT2 = new Array(4);
this._wnafT3 = new Array(4);
this._wnafT4 = new Array(4);
this._bitLength = this.n ? this.n.bitLength() : 0;
// Generalized Greg Maxwell's trick
var adjustCount = this.n && this.p.div(this.n);
if (!adjustCount || adjustCount.cmpn(100) > 0) {
this.redN = null;
}
else {
this._maxwellTrick = true;
this.redN = this.n.toRed(this.red);
}
} | identifier_body |
base.js | import BN from "../../../bn.js";
import * as utils from "../utils";
'use strict';
var getNAF = utils.getNAF;
var getJSF = utils.getJSF;
var assert = utils.assert;
function | (type, conf) {
this.type = type;
this.p = new BN(conf.p, 16);
// Use Montgomery, when there is no fast reduction for the prime
this.red = conf.prime ? BN.red(conf.prime) : BN.mont(this.p);
// Useful for many curves
this.zero = new BN(0).toRed(this.red);
this.one = new BN(1).toRed(this.red);
this.two = new BN(2).toRed(this.red);
// Curve configuration, optional
this.n = conf.n && new BN(conf.n, 16);
this.g = conf.g && this.pointFromJSON(conf.g, conf.gRed);
// Temporary arrays
this._wnafT1 = new Array(4);
this._wnafT2 = new Array(4);
this._wnafT3 = new Array(4);
this._wnafT4 = new Array(4);
this._bitLength = this.n ? this.n.bitLength() : 0;
// Generalized Greg Maxwell's trick
var adjustCount = this.n && this.p.div(this.n);
if (!adjustCount || adjustCount.cmpn(100) > 0) {
this.redN = null;
}
else {
this._maxwellTrick = true;
this.redN = this.n.toRed(this.red);
}
}
BaseCurve.prototype.point = function point() {
throw new Error('Not implemented');
};
BaseCurve.prototype.validate = function validate() {
throw new Error('Not implemented');
};
BaseCurve.prototype._fixedNafMul = function _fixedNafMul(p, k) {
assert(p.precomputed);
var doubles = p._getDoubles();
var naf = getNAF(k, 1, this._bitLength);
var I = (1 << (doubles.step + 1)) - (doubles.step % 2 === 0 ? 2 : 1);
I /= 3;
// Translate into more windowed form
var repr = [];
var j;
var nafW;
for (j = 0; j < naf.length; j += doubles.step) {
nafW = 0;
for (var l = j + doubles.step - 1; l >= j; l--)
nafW = (nafW << 1) + naf[l];
repr.push(nafW);
}
var a = this.jpoint(null, null, null);
var b = this.jpoint(null, null, null);
for (var i = I; i > 0; i--) {
for (j = 0; j < repr.length; j++) {
nafW = repr[j];
if (nafW === i)
b = b.mixedAdd(doubles.points[j]);
else if (nafW === -i)
b = b.mixedAdd(doubles.points[j].neg());
}
a = a.add(b);
}
return a.toP();
};
BaseCurve.prototype._wnafMul = function _wnafMul(p, k) {
var w = 4;
// Precompute window
var nafPoints = p._getNAFPoints(w);
w = nafPoints.wnd;
var wnd = nafPoints.points;
// Get NAF form
var naf = getNAF(k, w, this._bitLength);
// Add `this`*(N+1) for every w-NAF index
var acc = this.jpoint(null, null, null);
for (var i = naf.length - 1; i >= 0; i--) {
// Count zeroes
for (var l = 0; i >= 0 && naf[i] === 0; i--)
l++;
if (i >= 0)
l++;
acc = acc.dblp(l);
if (i < 0)
break;
var z = naf[i];
assert(z !== 0);
if (p.type === 'affine') {
// J +- P
if (z > 0)
acc = acc.mixedAdd(wnd[(z - 1) >> 1]);
else
acc = acc.mixedAdd(wnd[(-z - 1) >> 1].neg());
}
else {
// J +- J
if (z > 0)
acc = acc.add(wnd[(z - 1) >> 1]);
else
acc = acc.add(wnd[(-z - 1) >> 1].neg());
}
}
return p.type === 'affine' ? acc.toP() : acc;
};
BaseCurve.prototype._wnafMulAdd = function _wnafMulAdd(defW, points, coeffs, len, jacobianResult) {
var wndWidth = this._wnafT1;
var wnd = this._wnafT2;
var naf = this._wnafT3;
// Fill all arrays
var max = 0;
var i;
var j;
var p;
for (i = 0; i < len; i++) {
p = points[i];
var nafPoints = p._getNAFPoints(defW);
wndWidth[i] = nafPoints.wnd;
wnd[i] = nafPoints.points;
}
// Comb small window NAFs
for (i = len - 1; i >= 1; i -= 2) {
var a = i - 1;
var b = i;
if (wndWidth[a] !== 1 || wndWidth[b] !== 1) {
naf[a] = getNAF(coeffs[a], wndWidth[a], this._bitLength);
naf[b] = getNAF(coeffs[b], wndWidth[b], this._bitLength);
max = Math.max(naf[a].length, max);
max = Math.max(naf[b].length, max);
continue;
}
var comb = [
points[a],
null,
null,
points[b],
];
// Try to avoid Projective points, if possible
if (points[a].y.cmp(points[b].y) === 0) {
comb[1] = points[a].add(points[b]);
comb[2] = points[a].toJ().mixedAdd(points[b].neg());
}
else if (points[a].y.cmp(points[b].y.redNeg()) === 0) {
comb[1] = points[a].toJ().mixedAdd(points[b]);
comb[2] = points[a].add(points[b].neg());
}
else {
comb[1] = points[a].toJ().mixedAdd(points[b]);
comb[2] = points[a].toJ().mixedAdd(points[b].neg());
}
var index = [
-3,
-1,
-5,
-7,
0,
7,
5,
1,
3,
];
var jsf = getJSF(coeffs[a], coeffs[b]);
max = Math.max(jsf[0].length, max);
naf[a] = new Array(max);
naf[b] = new Array(max);
for (j = 0; j < max; j++) {
var ja = jsf[0][j] | 0;
var jb = jsf[1][j] | 0;
naf[a][j] = index[(ja + 1) * 3 + (jb + 1)];
naf[b][j] = 0;
wnd[a] = comb;
}
}
var acc = this.jpoint(null, null, null);
var tmp = this._wnafT4;
for (i = max; i >= 0; i--) {
var k = 0;
while (i >= 0) {
var zero = true;
for (j = 0; j < len; j++) {
tmp[j] = naf[j][i] | 0;
if (tmp[j] !== 0)
zero = false;
}
if (!zero)
break;
k++;
i--;
}
if (i >= 0)
k++;
acc = acc.dblp(k);
if (i < 0)
break;
for (j = 0; j < len; j++) {
var z = tmp[j];
if (z === 0)
continue;
else if (z > 0)
p = wnd[j][(z - 1) >> 1];
else if (z < 0)
p = wnd[j][(-z - 1) >> 1].neg();
if (p.type === 'affine')
acc = acc.mixedAdd(p);
else
acc = acc.add(p);
}
}
// Zeroify references
for (i = 0; i < len; i++)
wnd[i] = null;
if (jacobianResult)
return acc;
else
return acc.toP();
};
function BasePoint(curve, type) {
this.curve = curve;
this.type = type;
this.precomputed = null;
}
BaseCurve.BasePoint = BasePoint;
BasePoint.prototype.eq = function eq( /*other*/) {
throw new Error('Not implemented');
};
BasePoint.prototype.validate = function validate() {
return this.curve.validate(this);
};
BaseCurve.prototype.decodePoint = function decodePoint(bytes, enc) {
bytes = utils.toArray(bytes, enc);
var len = this.p.byteLength();
// uncompressed, hybrid-odd, hybrid-even
if ((bytes[0] === 0x04 || bytes[0] === 0x06 || bytes[0] === 0x07) &&
bytes.length - 1 === 2 * len) {
if (bytes[0] === 0x06)
assert(bytes[bytes.length - 1] % 2 === 0);
else if (bytes[0] === 0x07)
assert(bytes[bytes.length - 1] % 2 === 1);
var res = this.point(bytes.slice(1, 1 + len), bytes.slice(1 + len, 1 + 2 * len));
return res;
}
else if ((bytes[0] === 0x02 || bytes[0] === 0x03) &&
bytes.length - 1 === len) {
return this.pointFromX(bytes.slice(1, 1 + len), bytes[0] === 0x03);
}
throw new Error('Unknown point format');
};
BasePoint.prototype.encodeCompressed = function encodeCompressed(enc) {
return this.encode(enc, true);
};
BasePoint.prototype._encode = function _encode(compact) {
var len = this.curve.p.byteLength();
var x = this.getX().toArray('be', len);
if (compact)
return [this.getY().isEven() ? 0x02 : 0x03].concat(x);
return [0x04].concat(x, this.getY().toArray('be', len));
};
BasePoint.prototype.encode = function encode(enc, compact) {
return utils.encode(this._encode(compact), enc);
};
BasePoint.prototype.precompute = function precompute(power) {
if (this.precomputed)
return this;
var precomputed = {
doubles: null,
naf: null,
beta: null,
};
precomputed.naf = this._getNAFPoints(8);
precomputed.doubles = this._getDoubles(4, power);
precomputed.beta = this._getBeta();
this.precomputed = precomputed;
return this;
};
BasePoint.prototype._hasDoubles = function _hasDoubles(k) {
if (!this.precomputed)
return false;
var doubles = this.precomputed.doubles;
if (!doubles)
return false;
return doubles.points.length >= Math.ceil((k.bitLength() + 1) / doubles.step);
};
BasePoint.prototype._getDoubles = function _getDoubles(step, power) {
if (this.precomputed && this.precomputed.doubles)
return this.precomputed.doubles;
var doubles = [this];
var acc = this;
for (var i = 0; i < power; i += step) {
for (var j = 0; j < step; j++)
acc = acc.dbl();
doubles.push(acc);
}
return {
step: step,
points: doubles,
};
};
BasePoint.prototype._getNAFPoints = function _getNAFPoints(wnd) {
if (this.precomputed && this.precomputed.naf)
return this.precomputed.naf;
var res = [this];
var max = (1 << wnd) - 1;
var dbl = max === 1 ? null : this.dbl();
for (var i = 1; i < max; i++)
res[i] = res[i - 1].add(dbl);
return {
wnd: wnd,
points: res,
};
};
BasePoint.prototype._getBeta = function _getBeta() {
return null;
};
BasePoint.prototype.dblp = function dblp(k) {
var r = this;
for (var i = 0; i < k; i++)
r = r.dbl();
return r;
};
export default BaseCurve;
| BaseCurve | identifier_name |
base.js | import BN from "../../../bn.js";
import * as utils from "../utils";
'use strict';
var getNAF = utils.getNAF;
var getJSF = utils.getJSF;
var assert = utils.assert;
function BaseCurve(type, conf) {
this.type = type;
this.p = new BN(conf.p, 16);
// Use Montgomery, when there is no fast reduction for the prime
this.red = conf.prime ? BN.red(conf.prime) : BN.mont(this.p);
// Useful for many curves
this.zero = new BN(0).toRed(this.red);
this.one = new BN(1).toRed(this.red);
this.two = new BN(2).toRed(this.red);
// Curve configuration, optional
this.n = conf.n && new BN(conf.n, 16);
this.g = conf.g && this.pointFromJSON(conf.g, conf.gRed);
// Temporary arrays
this._wnafT1 = new Array(4);
this._wnafT2 = new Array(4);
this._wnafT3 = new Array(4);
this._wnafT4 = new Array(4);
this._bitLength = this.n ? this.n.bitLength() : 0;
// Generalized Greg Maxwell's trick
var adjustCount = this.n && this.p.div(this.n);
if (!adjustCount || adjustCount.cmpn(100) > 0) {
this.redN = null;
}
else {
this._maxwellTrick = true;
this.redN = this.n.toRed(this.red);
}
}
BaseCurve.prototype.point = function point() {
throw new Error('Not implemented');
};
BaseCurve.prototype.validate = function validate() {
throw new Error('Not implemented');
}; | var naf = getNAF(k, 1, this._bitLength);
var I = (1 << (doubles.step + 1)) - (doubles.step % 2 === 0 ? 2 : 1);
I /= 3;
// Translate into more windowed form
var repr = [];
var j;
var nafW;
for (j = 0; j < naf.length; j += doubles.step) {
nafW = 0;
for (var l = j + doubles.step - 1; l >= j; l--)
nafW = (nafW << 1) + naf[l];
repr.push(nafW);
}
var a = this.jpoint(null, null, null);
var b = this.jpoint(null, null, null);
for (var i = I; i > 0; i--) {
for (j = 0; j < repr.length; j++) {
nafW = repr[j];
if (nafW === i)
b = b.mixedAdd(doubles.points[j]);
else if (nafW === -i)
b = b.mixedAdd(doubles.points[j].neg());
}
a = a.add(b);
}
return a.toP();
};
BaseCurve.prototype._wnafMul = function _wnafMul(p, k) {
var w = 4;
// Precompute window
var nafPoints = p._getNAFPoints(w);
w = nafPoints.wnd;
var wnd = nafPoints.points;
// Get NAF form
var naf = getNAF(k, w, this._bitLength);
// Add `this`*(N+1) for every w-NAF index
var acc = this.jpoint(null, null, null);
for (var i = naf.length - 1; i >= 0; i--) {
// Count zeroes
for (var l = 0; i >= 0 && naf[i] === 0; i--)
l++;
if (i >= 0)
l++;
acc = acc.dblp(l);
if (i < 0)
break;
var z = naf[i];
assert(z !== 0);
if (p.type === 'affine') {
// J +- P
if (z > 0)
acc = acc.mixedAdd(wnd[(z - 1) >> 1]);
else
acc = acc.mixedAdd(wnd[(-z - 1) >> 1].neg());
}
else {
// J +- J
if (z > 0)
acc = acc.add(wnd[(z - 1) >> 1]);
else
acc = acc.add(wnd[(-z - 1) >> 1].neg());
}
}
return p.type === 'affine' ? acc.toP() : acc;
};
BaseCurve.prototype._wnafMulAdd = function _wnafMulAdd(defW, points, coeffs, len, jacobianResult) {
var wndWidth = this._wnafT1;
var wnd = this._wnafT2;
var naf = this._wnafT3;
// Fill all arrays
var max = 0;
var i;
var j;
var p;
for (i = 0; i < len; i++) {
p = points[i];
var nafPoints = p._getNAFPoints(defW);
wndWidth[i] = nafPoints.wnd;
wnd[i] = nafPoints.points;
}
// Comb small window NAFs
for (i = len - 1; i >= 1; i -= 2) {
var a = i - 1;
var b = i;
if (wndWidth[a] !== 1 || wndWidth[b] !== 1) {
naf[a] = getNAF(coeffs[a], wndWidth[a], this._bitLength);
naf[b] = getNAF(coeffs[b], wndWidth[b], this._bitLength);
max = Math.max(naf[a].length, max);
max = Math.max(naf[b].length, max);
continue;
}
var comb = [
points[a],
null,
null,
points[b],
];
// Try to avoid Projective points, if possible
if (points[a].y.cmp(points[b].y) === 0) {
comb[1] = points[a].add(points[b]);
comb[2] = points[a].toJ().mixedAdd(points[b].neg());
}
else if (points[a].y.cmp(points[b].y.redNeg()) === 0) {
comb[1] = points[a].toJ().mixedAdd(points[b]);
comb[2] = points[a].add(points[b].neg());
}
else {
comb[1] = points[a].toJ().mixedAdd(points[b]);
comb[2] = points[a].toJ().mixedAdd(points[b].neg());
}
var index = [
-3,
-1,
-5,
-7,
0,
7,
5,
1,
3,
];
var jsf = getJSF(coeffs[a], coeffs[b]);
max = Math.max(jsf[0].length, max);
naf[a] = new Array(max);
naf[b] = new Array(max);
for (j = 0; j < max; j++) {
var ja = jsf[0][j] | 0;
var jb = jsf[1][j] | 0;
naf[a][j] = index[(ja + 1) * 3 + (jb + 1)];
naf[b][j] = 0;
wnd[a] = comb;
}
}
var acc = this.jpoint(null, null, null);
var tmp = this._wnafT4;
for (i = max; i >= 0; i--) {
var k = 0;
while (i >= 0) {
var zero = true;
for (j = 0; j < len; j++) {
tmp[j] = naf[j][i] | 0;
if (tmp[j] !== 0)
zero = false;
}
if (!zero)
break;
k++;
i--;
}
if (i >= 0)
k++;
acc = acc.dblp(k);
if (i < 0)
break;
for (j = 0; j < len; j++) {
var z = tmp[j];
if (z === 0)
continue;
else if (z > 0)
p = wnd[j][(z - 1) >> 1];
else if (z < 0)
p = wnd[j][(-z - 1) >> 1].neg();
if (p.type === 'affine')
acc = acc.mixedAdd(p);
else
acc = acc.add(p);
}
}
// Zeroify references
for (i = 0; i < len; i++)
wnd[i] = null;
if (jacobianResult)
return acc;
else
return acc.toP();
};
function BasePoint(curve, type) {
this.curve = curve;
this.type = type;
this.precomputed = null;
}
BaseCurve.BasePoint = BasePoint;
BasePoint.prototype.eq = function eq( /*other*/) {
throw new Error('Not implemented');
};
BasePoint.prototype.validate = function validate() {
return this.curve.validate(this);
};
BaseCurve.prototype.decodePoint = function decodePoint(bytes, enc) {
bytes = utils.toArray(bytes, enc);
var len = this.p.byteLength();
// uncompressed, hybrid-odd, hybrid-even
if ((bytes[0] === 0x04 || bytes[0] === 0x06 || bytes[0] === 0x07) &&
bytes.length - 1 === 2 * len) {
if (bytes[0] === 0x06)
assert(bytes[bytes.length - 1] % 2 === 0);
else if (bytes[0] === 0x07)
assert(bytes[bytes.length - 1] % 2 === 1);
var res = this.point(bytes.slice(1, 1 + len), bytes.slice(1 + len, 1 + 2 * len));
return res;
}
else if ((bytes[0] === 0x02 || bytes[0] === 0x03) &&
bytes.length - 1 === len) {
return this.pointFromX(bytes.slice(1, 1 + len), bytes[0] === 0x03);
}
throw new Error('Unknown point format');
};
BasePoint.prototype.encodeCompressed = function encodeCompressed(enc) {
return this.encode(enc, true);
};
BasePoint.prototype._encode = function _encode(compact) {
var len = this.curve.p.byteLength();
var x = this.getX().toArray('be', len);
if (compact)
return [this.getY().isEven() ? 0x02 : 0x03].concat(x);
return [0x04].concat(x, this.getY().toArray('be', len));
};
BasePoint.prototype.encode = function encode(enc, compact) {
return utils.encode(this._encode(compact), enc);
};
BasePoint.prototype.precompute = function precompute(power) {
if (this.precomputed)
return this;
var precomputed = {
doubles: null,
naf: null,
beta: null,
};
precomputed.naf = this._getNAFPoints(8);
precomputed.doubles = this._getDoubles(4, power);
precomputed.beta = this._getBeta();
this.precomputed = precomputed;
return this;
};
BasePoint.prototype._hasDoubles = function _hasDoubles(k) {
if (!this.precomputed)
return false;
var doubles = this.precomputed.doubles;
if (!doubles)
return false;
return doubles.points.length >= Math.ceil((k.bitLength() + 1) / doubles.step);
};
BasePoint.prototype._getDoubles = function _getDoubles(step, power) {
if (this.precomputed && this.precomputed.doubles)
return this.precomputed.doubles;
var doubles = [this];
var acc = this;
for (var i = 0; i < power; i += step) {
for (var j = 0; j < step; j++)
acc = acc.dbl();
doubles.push(acc);
}
return {
step: step,
points: doubles,
};
};
BasePoint.prototype._getNAFPoints = function _getNAFPoints(wnd) {
if (this.precomputed && this.precomputed.naf)
return this.precomputed.naf;
var res = [this];
var max = (1 << wnd) - 1;
var dbl = max === 1 ? null : this.dbl();
for (var i = 1; i < max; i++)
res[i] = res[i - 1].add(dbl);
return {
wnd: wnd,
points: res,
};
};
BasePoint.prototype._getBeta = function _getBeta() {
return null;
};
BasePoint.prototype.dblp = function dblp(k) {
var r = this;
for (var i = 0; i < k; i++)
r = r.dbl();
return r;
};
export default BaseCurve; | BaseCurve.prototype._fixedNafMul = function _fixedNafMul(p, k) {
assert(p.precomputed);
var doubles = p._getDoubles(); | random_line_split |
ocrRegion.js | /*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is
* regenerated.
*/
'use strict';
const models = require('./index');
/**
* A region consists of multiple lines (e.g. a column of text in a multi-column
* document).
*
*/
class | {
/**
* Create an OcrRegion.
* @member {string} [boundingBox] Bounding box of a recognized region. The
* four integers represent the x-coordinate of the left edge, the
* y-coordinate of the top edge, width, and height of the bounding box, in
* the coordinate system of the input image, after it has been rotated around
* its center according to the detected text angle (see textAngle property),
* with the origin at the top-left corner, and the y-axis pointing down.
* @member {array} [lines]
*/
constructor() {
}
/**
* Defines the metadata of OcrRegion
*
* @returns {object} metadata of OcrRegion
*
*/
mapper() {
return {
required: false,
serializedName: 'OcrRegion',
type: {
name: 'Composite',
className: 'OcrRegion',
modelProperties: {
boundingBox: {
required: false,
serializedName: 'boundingBox',
type: {
name: 'String'
}
},
lines: {
required: false,
serializedName: 'lines',
type: {
name: 'Sequence',
element: {
required: false,
serializedName: 'OcrLineElementType',
type: {
name: 'Composite',
className: 'OcrLine'
}
}
}
}
}
}
};
}
}
module.exports = OcrRegion;
| OcrRegion | identifier_name |
ocrRegion.js | /*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is
* regenerated.
*/
'use strict';
const models = require('./index');
/**
* A region consists of multiple lines (e.g. a column of text in a multi-column
* document).
*
*/
class OcrRegion {
/**
* Create an OcrRegion.
* @member {string} [boundingBox] Bounding box of a recognized region. The
* four integers represent the x-coordinate of the left edge, the
* y-coordinate of the top edge, width, and height of the bounding box, in
* the coordinate system of the input image, after it has been rotated around
* its center according to the detected text angle (see textAngle property),
* with the origin at the top-left corner, and the y-axis pointing down.
* @member {array} [lines]
*/
constructor() |
/**
* Defines the metadata of OcrRegion
*
* @returns {object} metadata of OcrRegion
*
*/
mapper() {
return {
required: false,
serializedName: 'OcrRegion',
type: {
name: 'Composite',
className: 'OcrRegion',
modelProperties: {
boundingBox: {
required: false,
serializedName: 'boundingBox',
type: {
name: 'String'
}
},
lines: {
required: false,
serializedName: 'lines',
type: {
name: 'Sequence',
element: {
required: false,
serializedName: 'OcrLineElementType',
type: {
name: 'Composite',
className: 'OcrLine'
}
}
}
}
}
}
};
}
}
module.exports = OcrRegion;
| {
} | identifier_body |
ocrRegion.js | /*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is
* regenerated.
*/
'use strict';
const models = require('./index');
/**
* A region consists of multiple lines (e.g. a column of text in a multi-column
* document).
*
*/
class OcrRegion {
/**
* Create an OcrRegion.
* @member {string} [boundingBox] Bounding box of a recognized region. The
* four integers represent the x-coordinate of the left edge, the
* y-coordinate of the top edge, width, and height of the bounding box, in
* the coordinate system of the input image, after it has been rotated around
* its center according to the detected text angle (see textAngle property),
* with the origin at the top-left corner, and the y-axis pointing down.
* @member {array} [lines]
*/
constructor() {
}
/**
* Defines the metadata of OcrRegion
*
* @returns {object} metadata of OcrRegion
*
*/
mapper() {
return {
required: false,
serializedName: 'OcrRegion',
type: {
name: 'Composite',
className: 'OcrRegion',
modelProperties: {
boundingBox: {
required: false,
serializedName: 'boundingBox',
type: {
name: 'String'
}
},
lines: {
required: false,
serializedName: 'lines',
type: {
name: 'Sequence',
element: {
required: false,
serializedName: 'OcrLineElementType',
type: {
name: 'Composite',
className: 'OcrLine'
}
}
}
}
}
} | };
}
}
module.exports = OcrRegion; | random_line_split |
|
paper-tests.ts | import paper = require('paper');
var canvas = document.createElement('canvas')
paper.setup(canvas);
| // Circle
var path = new paper.Path.Circle({
center: [80, 50],
radius: 35,
fillColor: 'red'
});
// Dotted Line Tool
var dottedLinePath: paper.Path;
var dottedLineTool = new paper.Tool();
dottedLineTool.onMouseDown = function(event: any) {
new paper.Layer().activate();
dottedLinePath = new paper.Path();
dottedLinePath.strokeColor = '#00';
dottedLinePath.strokeWidth = 2;
dottedLinePath.dashArray = [5, 8];
dottedLinePath.strokeCap = 'round';
dottedLinePath.strokeJoin = 'round';
dottedLinePath.add(event.point);
};
dottedLineTool.onMouseDrag = function(event: any) {
dottedLinePath.add(event.point);
};
dottedLineTool.onMouseUp = function(event: any) {
dottedLinePath.smooth();
dottedLinePath.simplify();
}; | random_line_split |
|
dst-rvalue.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Check that dynamically sized rvalues are forbidden
#![feature(box_syntax)]
pub fn | () {
let _x: Box<str> = box *"hello world";
//~^ ERROR E0161
//~^^ ERROR cannot move out of borrowed content
let array: &[isize] = &[1, 2, 3];
let _x: Box<[isize]> = box *array;
//~^ ERROR E0161
//~^^ ERROR cannot move out of borrowed content
}
| main | identifier_name |
dst-rvalue.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Check that dynamically sized rvalues are forbidden
#![feature(box_syntax)]
pub fn main() {
let _x: Box<str> = box *"hello world"; | //~^ ERROR E0161
//~^^ ERROR cannot move out of borrowed content
let array: &[isize] = &[1, 2, 3];
let _x: Box<[isize]> = box *array;
//~^ ERROR E0161
//~^^ ERROR cannot move out of borrowed content
} | random_line_split |
|
dst-rvalue.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Check that dynamically sized rvalues are forbidden
#![feature(box_syntax)]
pub fn main() | {
let _x: Box<str> = box *"hello world";
//~^ ERROR E0161
//~^^ ERROR cannot move out of borrowed content
let array: &[isize] = &[1, 2, 3];
let _x: Box<[isize]> = box *array;
//~^ ERROR E0161
//~^^ ERROR cannot move out of borrowed content
} | identifier_body |
|
main.py | # -*- coding: utf-8 -*-
#
# AWL simulator - Dummy hardware interface
#
# Copyright 2013-2014 Michael Buesch <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
from __future__ import division, absolute_import, print_function, unicode_literals
from awlsim.common.compat import *
from awlsim.core.hardware import *
from awlsim.core.operators import AwlOperator
from awlsim.core.datatypes import AwlOffset
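# A no-op backend: output writes are discarded and direct input reads simply
# mirror the CPU's current process image.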
class HardwareInterface(AbstractHardwareInterface):
name = "dummy"
def __init__(self, sim, parameters={}):
AbstractHardwareInterface.__init__(self,
sim = sim,
parameters = parameters)
def doStartup(self):
pass # Do nothing
def doShutdown(self):
pass # Do nothing
def readInputs(self):
pass # Do nothing
def | (self):
pass # Do nothing
def directReadInput(self, accessWidth, accessOffset):
if accessOffset < self.inputAddressBase:
return None
# Just read the current value from the CPU and return it.
return self.sim.cpu.fetch(AwlOperator(AwlOperator.MEM_E,
accessWidth,
AwlOffset(accessOffset)))
def directWriteOutput(self, accessWidth, accessOffset, data):
if accessOffset < self.outputAddressBase:
return False
# Just pretend we wrote it somewhere.
return True
| writeOutputs | identifier_name |
main.py | # -*- coding: utf-8 -*-
#
# AWL simulator - Dummy hardware interface
#
# Copyright 2013-2014 Michael Buesch <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
from __future__ import division, absolute_import, print_function, unicode_literals
from awlsim.common.compat import *
from awlsim.core.hardware import *
from awlsim.core.operators import AwlOperator
from awlsim.core.datatypes import AwlOffset
class HardwareInterface(AbstractHardwareInterface):
name = "dummy"
def __init__(self, sim, parameters={}):
AbstractHardwareInterface.__init__(self,
sim = sim,
parameters = parameters)
def doStartup(self):
pass # Do nothing
def doShutdown(self):
pass # Do nothing
def readInputs(self):
pass # Do nothing
def writeOutputs(self):
pass # Do nothing
def directReadInput(self, accessWidth, accessOffset):
if accessOffset < self.inputAddressBase:
|
# Just read the current value from the CPU and return it.
return self.sim.cpu.fetch(AwlOperator(AwlOperator.MEM_E,
accessWidth,
AwlOffset(accessOffset)))
def directWriteOutput(self, accessWidth, accessOffset, data):
if accessOffset < self.outputAddressBase:
return False
# Just pretend we wrote it somewhere.
return True
| return None | conditional_block |
main.py | # -*- coding: utf-8 -*-
#
# AWL simulator - Dummy hardware interface
#
# Copyright 2013-2014 Michael Buesch <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
from __future__ import division, absolute_import, print_function, unicode_literals
from awlsim.common.compat import *
from awlsim.core.hardware import *
from awlsim.core.operators import AwlOperator
from awlsim.core.datatypes import AwlOffset
class HardwareInterface(AbstractHardwareInterface):
name = "dummy"
def __init__(self, sim, parameters={}):
AbstractHardwareInterface.__init__(self,
sim = sim,
parameters = parameters)
def doStartup(self):
pass # Do nothing
def doShutdown(self):
|
def readInputs(self):
pass # Do nothing
def writeOutputs(self):
pass # Do nothing
def directReadInput(self, accessWidth, accessOffset):
if accessOffset < self.inputAddressBase:
return None
# Just read the current value from the CPU and return it.
return self.sim.cpu.fetch(AwlOperator(AwlOperator.MEM_E,
accessWidth,
AwlOffset(accessOffset)))
def directWriteOutput(self, accessWidth, accessOffset, data):
if accessOffset < self.outputAddressBase:
return False
# Just pretend we wrote it somewhere.
return True
| pass # Do nothing | identifier_body |
main.py | # -*- coding: utf-8 -*-
#
# AWL simulator - Dummy hardware interface
#
# Copyright 2013-2014 Michael Buesch <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
from __future__ import division, absolute_import, print_function, unicode_literals
from awlsim.common.compat import *
from awlsim.core.hardware import *
from awlsim.core.operators import AwlOperator
from awlsim.core.datatypes import AwlOffset
class HardwareInterface(AbstractHardwareInterface):
name = "dummy"
| parameters = parameters)
def doStartup(self):
pass # Do nothing
def doShutdown(self):
pass # Do nothing
def readInputs(self):
pass # Do nothing
def writeOutputs(self):
pass # Do nothing
def directReadInput(self, accessWidth, accessOffset):
if accessOffset < self.inputAddressBase:
return None
# Just read the current value from the CPU and return it.
return self.sim.cpu.fetch(AwlOperator(AwlOperator.MEM_E,
accessWidth,
AwlOffset(accessOffset)))
def directWriteOutput(self, accessWidth, accessOffset, data):
if accessOffset < self.outputAddressBase:
return False
# Just pretend we wrote it somewhere.
return True | def __init__(self, sim, parameters={}):
AbstractHardwareInterface.__init__(self,
sim = sim, | random_line_split |
textattributes.rs | // This file is part of rgtk.
//
// rgtk is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// rgtk is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with rgtk. If not, see <http://www.gnu.org/licenses/>. | use gtk::ffi;
pub struct TextAttributes {
pointer: *mut ffi::C_GtkTextAttributes
}
impl TextAttributes {
pub fn new() -> Option<TextAttributes> {
let tmp_pointer = unsafe { ffi::gtk_text_attributes_new() };
if tmp_pointer.is_null() {
None
} else {
Some(TextAttributes { pointer : tmp_pointer })
}
}
pub fn copy(&self) -> Option<TextAttributes> {
let tmp_pointer = unsafe { ffi::gtk_text_attributes_copy(self.pointer) };
if tmp_pointer.is_null() {
None
} else {
Some(TextAttributes { pointer : tmp_pointer })
}
}
pub fn copy_values_from(&self, src: &TextAttributes) {
unsafe { ffi::gtk_text_attributes_copy_values(src.pointer, self.pointer) }
}
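// unref()/_ref() forward to the underlying GTK reference-counting calls.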
pub fn unref(&self) {
unsafe { ffi::gtk_text_attributes_unref(self.pointer) }
}
pub fn _ref(&self) -> Option<TextAttributes> {
let tmp_pointer = unsafe { ffi::gtk_text_attributes_ref(self.pointer) };
if tmp_pointer.is_null() {
None
} else {
Some(TextAttributes { pointer : tmp_pointer })
}
}
}
impl_GObjectFunctions!(TextAttributes, C_GtkTextAttributes) |
//! GtkTextTag — A tag that can be applied to text in a GtkTextBuffer
| random_line_split |
textattributes.rs | // This file is part of rgtk.
//
// rgtk is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// rgtk is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with rgtk. If not, see <http://www.gnu.org/licenses/>.
//! GtkTextTag — A tag that can be applied to text in a GtkTextBuffer
use gtk::ffi;
pub struct TextAttributes {
pointer: *mut ffi::C_GtkTextAttributes
}
impl TextAttributes {
pub fn new() -> Option<TextAttributes> {
let tmp_pointer = unsafe { ffi::gtk_text_attributes_new() };
if tmp_pointer.is_null() {
None
} else {
Some(TextAttributes { pointer : tmp_pointer })
}
}
pub fn copy(&self) -> Option<TextAttributes> {
let tmp_pointer = unsafe { ffi::gtk_text_attributes_copy(self.pointer) };
if tmp_pointer.is_null() {
None
} else {
Some(TextAttributes { pointer : tmp_pointer })
}
}
pub fn copy_values_from(&self, src: &TextAttributes) {
unsafe { ffi::gtk_text_attributes_copy_values(src.pointer, self.pointer) }
}
pub fn unref(&self) {
unsafe { ffi::gtk_text_attributes_unref(self.pointer) }
}
pub fn _ref(&self) -> Option<TextAttributes> {
let tmp_pointer = unsafe { ffi::gtk_text_attributes_ref(self.pointer) };
if tmp_pointer.is_null() {
None
} else {
| }
}
impl_GObjectFunctions!(TextAttributes, C_GtkTextAttributes) | Some(TextAttributes { pointer : tmp_pointer })
}
| conditional_block |
textattributes.rs | // This file is part of rgtk.
//
// rgtk is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// rgtk is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with rgtk. If not, see <http://www.gnu.org/licenses/>.
//! GtkTextTag — A tag that can be applied to text in a GtkTextBuffer
use gtk::ffi;
pub struct TextAttributes {
pointer: *mut ffi::C_GtkTextAttributes
}
impl TextAttributes {
pub fn new() -> Option<TextAttributes> {
let tmp_pointer = unsafe { ffi::gtk_text_attributes_new() };
if tmp_pointer.is_null() {
None
} else {
Some(TextAttributes { pointer : tmp_pointer })
}
}
pub fn copy(&self) -> Option<TextAttributes> {
let tmp_pointer = unsafe { ffi::gtk_text_attributes_copy(self.pointer) };
if tmp_pointer.is_null() {
None
} else {
Some(TextAttributes { pointer : tmp_pointer })
}
}
pub fn copy_values_from(&self, src: &TextAttributes) {
unsafe { ffi::gtk_text_attributes_copy_values(src.pointer, self.pointer) }
}
pub fn unref(&self) {
unsafe { ffi::gtk_text_attributes_unref(self.pointer) }
}
pub fn _r | self) -> Option<TextAttributes> {
let tmp_pointer = unsafe { ffi::gtk_text_attributes_ref(self.pointer) };
if tmp_pointer.is_null() {
None
} else {
Some(TextAttributes { pointer : tmp_pointer })
}
}
}
impl_GObjectFunctions!(TextAttributes, C_GtkTextAttributes) | ef(& | identifier_name |
validator.py | # -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from synapse.types import EventID, RoomID, UserID
from synapse.api.errors import SynapseError
from synapse.api.constants import EventTypes, Membership
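# Structural checks: required event fields are present and have the expected types.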
class EventValidator(object):
def validate(self, event):
EventID.from_string(event.event_id)
RoomID.from_string(event.room_id)
required = [
# "auth_events",
"content",
# "hashes",
"origin",
# "prev_events",
"sender",
"type",
]
for k in required:
if not hasattr(event, k):
raise SynapseError(400, "Event does not have key %s" % (k,))
# Check that the following keys have string values
strings = [
"origin",
"sender",
"type",
]
if hasattr(event, "state_key"):
|
for s in strings:
if not isinstance(getattr(event, s), basestring):
raise SynapseError(400, "Not '%s' a string type" % (s,))
if event.type == EventTypes.Member:
if "membership" not in event.content:
raise SynapseError(400, "Content has not membership key")
if event.content["membership"] not in Membership.LIST:
raise SynapseError(400, "Invalid membership key")
# Check that the following keys have dictionary values
# TODO
# Check that the following keys have the correct format for DAGs
# TODO
def validate_new(self, event):
self.validate(event)
UserID.from_string(event.sender)
if event.type == EventTypes.Message:
strings = [
"body",
"msgtype",
]
self._ensure_strings(event.content, strings)
elif event.type == EventTypes.Topic:
self._ensure_strings(event.content, ["topic"])
elif event.type == EventTypes.Name:
self._ensure_strings(event.content, ["name"])
def _ensure_strings(self, d, keys):
for s in keys:
if s not in d:
raise SynapseError(400, "'%s' not in content" % (s,))
if not isinstance(d[s], basestring):
raise SynapseError(400, "Not '%s' a string type" % (s,))
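# A standalone sketch of the validation pattern above (the real validator needs
# Synapse event objects; the stub class and all values below are hypothetical):
class _StubEvent(object):
    """Hypothetical stand-in carrying the attributes EventValidator checks."""
    def __init__(self):
        self.event_id = "$abc123:example.org"
        self.room_id = "!room:example.org"
        self.content = {"msgtype": "m.text", "body": "hello"}
        self.origin = "example.org"
        self.sender = "@alice:example.org"
        self.type = "m.room.message"

def _check_like_validator(event):
    # Mirrors validate(): required keys must exist and string fields must be strings.
    for k in ("content", "origin", "sender", "type"):
        if not hasattr(event, k):
            raise ValueError("Event does not have key %s" % (k,))
    for s in ("origin", "sender", "type"):
        if not isinstance(getattr(event, s), str):  # basestring on Python 2
            raise ValueError("'%s' is not a string" % (s,))

_check_like_validator(_StubEvent())  # passes without raising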
FastBloomFilter.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
from FastBitSet import FastBitSet
import math
import mmh3
class FastBloomFilter(object):
mask32 = 0xffffffff
mask64 = 0xffffffffffffffff
mask128 = 0xffffffffffffffffffffffffffffffff
seeds = [2, 3, 5, 7, 11,
13, 17, 19, 23, 29,
31, 37, 41, 43, 47,
53, 59, 61, 67, 71,
73, 79, 83, 89, 97,
101, 103, 107, 109, 113,
127, 131, 137, 139, 149,
151, 157, 163, 167, 173,
179, 181, 191, 193, 197,
199, 211, 223, 227, 229,
233, 239, 241, 251, 257,
263, 269, 271, 277, 281,
283, 293, 307, 311, 313,
317, 331, 337, 347, 349,
353, 359, 367, 373, 379,
383, 389, 397, 401, 409,
419, 421, 431, 433, 439,
443, 449, 457, 461, 463,
467, 479, 487, 491, 499,
503, 509, 521, 523, 541,
547, 557, 563, 569, 571,
577, 587, 593, 599, 601,
607, 613, 617, 619, 631,
641, 643, 647, 653, 659,
661, 673, 677, 683, 691]
def __init__(self, n, fpr=0.00001):
m = -1 * math.log(fpr, math.e) * n / math.pow(math.log(2, math.e), 2)
k = (m / n) * math.log(2, math.e)
self.n = int(math.ceil(n))
self.fpr = fpr
self.m = int(math.ceil(m))
self.k = int(k)
self.bsUnitSize = 64
self.bsCap = int(math.ceil(self.m / 64))
self.bitSet = FastBitSet(self.bsCap, self.bsUnitSize)
self.bitSetLength = self.bitSet.length
def append(self, s):
self.bitSet.setList(self.hashs(s, self.k))
def exists(self, s):
bites = self.bitSet.getList(self.hashs(s, self.k))
return not (0 in bites)
def remove(self, s):
self.bitSet.setList(self.hashs(s, self.k), False)
def clear(self):
self.bitSet.clear()
def hashs(self, s, k):
bitSetLength = self.bitSetLength
#mask = self.mask32
mask = self.mask128
seeds = self.seeds
hashs = []
for i in range(k):
#print(mmh3.hash64(s, seeds[i]))
#hashs.append((mmh3.hash(s, seeds[i]) & mask) % bitSetLength)
hashs.append((mmh3.hash128(s, seeds[i]) & mask) % bitSetLength)
return hashs
def hashs2(self, s, k):
bitSetLength = self.bitSetLength
mask = self.mask32
hashs = []
# mmh3.hash64 returns two 64-bit halves; use them as the double-hashing pair
hash1, hash2 = mmh3.hash64(s, 0)
for i in range(k):
hashs.append(((hash1 + i * hash2) % bitSetLength) & mask)
return hashs
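# A minimal usage sketch (assumes the mmh3 package and the local FastBitSet
# module are importable; the numbers are illustrative, not exact):
if __name__ == "__main__":
    bf = FastBloomFilter(100000, fpr=0.001)
    # For n=100000, p=0.001 the constructor sizes the filter with
    # m = -n*ln(p)/ln(2)^2 ~ 1.44 million bits and k = (m/n)*ln(2) ~ 9 hash functions.
    bf.append("hello")
    print(bf.exists("hello"))   # True
    print(bf.exists("world"))   # False with probability ~(1 - fpr)
    bf.remove("hello")          # caution: clearing bits can also disturb other keys
    print(bf.exists("hello"))   # False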
react-docgen-test.js | /*
* Copyright (c) 2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
*/
/*global jasmine, jest, describe, pit, expect, afterEach*/
// Increase default timeout (5000ms) for Travis
jasmine.getEnv().defaultTimeoutInterval = 10000;
jest.autoMockOff();
var fs = require('fs');
var path = require('path');
var rimraf = require('rimraf');
var temp = require('temp');
var spawn = require('cross-spawn-async');
function run(args, stdin) {
return new Promise(resolve => {
var docgen = spawn( // eslint-disable-line camelcase
path.join(__dirname, '../react-docgen.js'),
args
);
var stdout = '';
var stderr = '';
docgen.stdout.on('data', data => stdout += data);
docgen.stderr.on('data', data => stderr += data);
docgen.on('close', () => resolve([stdout, stderr]));
if (stdin) {
docgen.stdin.write(stdin);
}
docgen.stdin.end();
});
}
var component = fs.readFileSync(
path.join(__dirname, '../../example/components/Component.js')
);
describe('react-docgen CLI', () => {
var tempDir;
var tempComponents = [];
var tempNoComponents = [];
function createTempfiles(suffix, dir) {
if (!tempDir) {
tempDir = temp.mkdirSync();
}
if (!dir) {
dir = tempDir;
} else {
dir = path.join(tempDir, dir);
try {
fs.mkdirSync(dir);
} catch(error) {
if (error.message.indexOf('EEXIST') === -1) {
throw error;
}
}
}
if (!suffix) {
suffix = 'js';
}
var componentPath = path.join(dir, 'Component.' + suffix);
var componentFile = fs.openSync(componentPath, 'w');
fs.writeSync(componentFile, component.toString());
fs.closeSync(componentFile);
var noComponentPath = path.join(dir, 'NoComponent.' + suffix);
var noComponentFile = fs.openSync(noComponentPath, 'w');
fs.writeSync(noComponentFile, '{}');
fs.closeSync(noComponentFile);
tempComponents.push(componentPath);
tempNoComponents.push(noComponentPath);
return dir;
}
afterEach(() => {
if (tempDir) {
rimraf.sync(tempDir);
}
tempDir = null;
tempComponents.length = 0;
tempNoComponents.length = 0;
});
pit('reads from stdin', () => {
return run([], component).then(([stdout, stderr]) => {
expect(stdout.length > 0).toBe(true);
expect(stderr.length).toBe(0);
});
});
pit('reads files provided as command line arguments', () => {
createTempfiles();
return run(tempComponents.concat(tempNoComponents)).then(
([stdout, stderr]) => {
expect(stdout).toContain('Component');
expect(stderr).toContain('NoComponent');
}
);
});
pit('reads directories provided as command line arguments', () => {
tempDir = createTempfiles();
return run([tempDir]).then(([stdout, stderr]) => {
expect(stdout).toContain('Component');
expect(stderr).toContain('NoComponent');
});
});
pit('considers js and jsx by default', () => {
tempDir = createTempfiles();
createTempfiles('jsx');
createTempfiles('foo');
return run([tempDir]).then(([stdout, stderr]) => {
expect(stdout).toContain('Component.js');
expect(stdout).toContain('Component.jsx');
expect(stdout).not.toContain('Component.foo');
expect(stderr).toContain('NoComponent.js');
expect(stderr).toContain('NoComponent.jsx');
expect(stderr).not.toContain('NoComponent.foo');
});
});
pit('considers files with the specified extension', () => {
createTempfiles('foo');
createTempfiles('bar');
var verify = ([stdout, stderr]) => {
expect(stdout).toContain('Component.foo');
expect(stdout).toContain('Component.bar');
expect(stderr).toContain('NoComponent.foo');
expect(stderr).toContain('NoComponent.bar');
};
return Promise.all([
run(['--extension=foo', '--extension=bar', tempDir]).then(verify),
run(['-x', 'foo', '-x', 'bar', tempDir]).then(verify),
]);
});
pit('ignores files in node_modules and __tests__ by default', () => {
createTempfiles(null, 'node_modules');
createTempfiles(null, '__tests__');
return run([tempDir]).then(([stdout, stderr]) => {
expect(stdout).toBe('');
expect(stderr).toBe('');
});
});
pit('ignores specified folders', () => {
createTempfiles(null, 'foo');
var verify = ([stdout, stderr]) => {
expect(stdout).toBe('');
expect(stderr).toBe('');
};
return Promise.all([
run(['--ignore=foo', tempDir]).then(verify),
run(['-i', 'foo', tempDir]).then(verify),
]);
});
pit('writes to stdout', () => {
return run([], component).then(([stdout, stderr]) => {
expect(stdout.length > 0).toBe(true);
expect(stderr.length).toBe(0);
});
});
pit('writes to stderr', () => {
return run([], '{}').then(([stdout, stderr]) => {
expect(stderr.length > 0).toBe(true);
expect(stdout.length).toBe(0);
});
});
pit('writes to a file if provided', function() {
var outFile = temp.openSync();
createTempfiles();
var verify = ([stdout]) => {
expect(fs.readFileSync(outFile.path)).not.toBe('');
expect(stdout).toBe('');
};
return Promise.all([
run(['--out=' + outFile.path, tempDir]).then(verify),
run(['-o', outFile.path, tempDir]).then(verify),
]);
});
describe('--resolver', () => {
pit('accepts the names of built in resolvers', () => {
return Promise.all([
// No option passed: same as --resolver=findExportedComponentDefinition
run([
path.join(__dirname, '../../example/components/Component.js'),
]).then(([stdout]) => {
expect(stdout).toContain('Component');
}),
run([
'--resolver=findExportedComponentDefinition',
path.join(__dirname, '../../example/components/Component.js'),
]).then(([stdout]) => {
expect(stdout).toContain('Component');
}),
run([
'--resolver=findAllComponentDefinitions',
path.join(__dirname, './example/MultipleComponents.js'),
]).then(([stdout]) => {
expect(stdout).toContain('ComponentA');
expect(stdout).toContain('ComponentB');
}),
]);
});
pit('accepts a path to a resolver function', () => {
return Promise.all([
run([
'--resolver='+path.join(__dirname, './example/customResolver.js'),
path.join(__dirname, '../../example/components/Component.js'),
]).then(([stdout, stderr]) => {
console.log(stderr);
expect(stdout).toContain('Custom');
}),
]);
});
});
});
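// The tests above exercise the CLI roughly like this (hypothetical paths):
//   react-docgen src/components                    -> JSON docs on stdout
//   react-docgen -x jsx -i foo -o out.json src      -> extension filter, ignored dir, output file
//   react-docgen --resolver=findAllComponentDefinitions src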
managers.py | # -*- coding: utf-8 -*-
# This technical data was produced for the U. S. Government under Contract No. W15P7T-13-C-F600, and
# is subject to the Rights in Technical Data-Noncommercial Items clause at DFARS 252.227-7013 (FEB 2012)
from django.contrib.gis.db import models
class AOIManager(models.GeoManager):
def add_filters(self, **kwargs):
"""
Returns the queryset with new filters
"""
return super(AOIManager, self).get_query_set().filter(**kwargs)
def unassigned(self):
"""
Returns unassigned AOIs.
"""
return self.add_filters(status='Unassigned')
def assigned(self):
"""
Returns assigned AOIs.
"""
return self.add_filters(status='Assigned')
def in_work(self):
"""
Returns AOIs in work.
"""
return self.add_filters(status='In Work')
def submitted(self):
"""
Returns submitted AOIs.
"""
return self.add_filters(status='Submitted')
def completed(self):
"""
Returns completed AOIs.
"""
return self.add_filters(status='Completed')
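# A minimal usage sketch (hypothetical model; assumes a configured GeoDjango
# project). The manager only adds status-based shortcuts on top of filter():
#
#     class AOI(models.Model):
#         status = models.CharField(max_length=32, default='Unassigned')
#         objects = AOIManager()
#
#     AOI.objects.unassigned()  # == AOI.objects.filter(status='Unassigned')
#     AOI.objects.in_work()     # == AOI.objects.filter(status='In Work')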
collection-test.js | /* file : collection-test.js
MIT License
Copyright (c) 2016 Thomas Minier
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
'use strict';
require('chai').should();
const Collection = require('../../src/collections/collection.js');
describe('Collection', () => {
it('should be abstract', () => {
(() => new Collection()).should.throw(TypeError);
});
});
models.py | import logging
import os
import datetime
import six
import humanfriendly
from pathlib import Path
from django.db import models
from django.utils.html import format_html
from django.utils.encoding import uri_to_iri
from django.core.management import call_command
from django.utils.safestring import mark_safe
from django.conf import settings
from django.contrib.auth.models import User
from django.db.models.signals import pre_save, post_init, post_save
from django.dispatch import receiver
from django.core.urlresolvers import reverse
from django.core.files import File
from sortedm2m.fields import SortedManyToManyField
from uuslug import uuslug
# from moviepy.editor import VideoFileClip # get video duration
from .my_storage import VodStorage
from admin_resumable.fields import (
ModelAdminResumableFileField, ModelAdminResumableImageField,
ModelAdminResumableMultiFileField, ModelAdminResumableRestoreFileField
)
from xpinyin import Pinyin # for pinyin search
if six.PY3:
from django.utils.encoding import smart_str
else:
from django.utils.encoding import smart_unicode as smart_str
"""
Copy data in XXX model:
>>>
from vodmanagement.models import *
objs=Vod.objects.all()
for i in range(0,10):
newobj=objs[0]
newobj.pk=None
newobj.save()
>>>
This script will copy 10 objs[0] in database
"""
class UserPermission(models.Model):
user = models.OneToOneField(User)
permission = models.CharField(max_length=100, blank=True, null=True)
end_date = models.DateTimeField(blank=True, null=True)
def __str__(self):
return str(self.user)
def has_permision(self):
delta = self.end_date.date() - datetime.date.today()
print(delta.days)
if delta.days >= 0:
return True
return False
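# Worked example of the expiry rule above (hypothetical dates): with
# end_date = 2017-01-10 and today = 2017-01-08, delta.days == 2 and
# has_permision() returns True; from 2017-01-11 onward delta.days < 0,
# so it returns False.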
class VodManager(models.Manager):
def active(self, *args, **kwargs):
return super(VodManager, self) # .filter(draft=False).filter(publish__lte=timezone.now())
def upload_location(instance, filename):
# filebase, extension = filename.split(".")
# return "%s/%s.%s" %(instance.id, instance.id, extension)
VodModel = instance.__class__
print('save')
if VodModel.objects.count() != 0:
new_id = VodModel.objects.order_by("id").last().id - 1
else:
new_id = 0
"""
instance.__class__ gets the model Post. We must use this method because the model is defined below.
Then create a queryset ordered by the "id"s of each object,
Then we get the last object in the queryset with `.last()`
Which will give us the most recently created Model instance
We add 1 to it, so we get what should be the same id as the the post we are creating.
"""
print('save image')
return "%s/%s" % (new_id, filename)
def upload_image_location(instance, filename):
VodModel = instance.__class__
if VodModel.objects.count() is not 0:
new_id = VodModel.objects.order_by("id").last().id + 1
else:
new_id = 0
folder = instance.save_path
if folder == "default":
category = instance.category.name
else:
category = instance.category.name + '_' + folder
return "%s/images/%s/%s" % (category, new_id, filename)
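# For illustration (hypothetical values): with category.name == "电影" and
# save_path == "default", the next image is stored under
# "电影/images/<new_id>/<filename>"; with save_path == "disk2" it goes to
# "电影_disk2/images/<new_id>/<filename>".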
def upload_record_image_location(instance, filename):
return "%s/images/%s" % (settings.RECORD_MEDIA_FOLDER, filename)
def default_description(instance):
default = instance.title
print(default)
return 'The %s description' % default
# Create your models here.
def default_filedir():
return settings.MEDIA_ROOT
# ---------------------------------------------------------------------
# if leave path blank,it will save it as the default file dir:settings.MEDIA_ROOT
class FileDirectory(models.Model):
path = models.CharField(max_length=512, default=default_filedir, blank=True)
class Meta:
verbose_name = '视频上传路径'
verbose_name_plural = '视频上传路径管理'
def __str__(self):
return self.path
def save(self, *args, **kwargs):
if self.path is None or self.path == "":
self.path = default_filedir()
super(FileDirectory, self).save(*args, **kwargs)
# ---------------------------------------------------------------------
# Two selections only:Common,Special purpose
TYPES = (
('common', 'Common'),
('special', 'Special purpose'),
)
VIDEO_QUALITY = [
('SD', '标清'),
('HD', '高清'),
('FHD', '超清'),
]
SAVE_PATH = (
('', settings.LOCAL_MEDIA_ROOT),
)
class VideoRegion(models.Model):
name = models.CharField(max_length=200, verbose_name='地区', unique=True)
class Meta:
verbose_name = '视频地区管理'
verbose_name_plural = '视频地区'
def __str__(self):
return self.name
class VideoCategory(models.Model):
name = models.CharField(max_length=128, verbose_name='分类名称')
type = models.CharField(max_length=128, choices=TYPES, default='common', verbose_name='类型')
isSecret = models.BooleanField(default=False, verbose_name='是否加密')
level = models.IntegerField(null=False, blank=False, default=1, choices=((1, '一级分类'), (2, '二级分类')),
verbose_name='分类等级')
subset = models.ManyToManyField('self', blank=True, verbose_name='分类关系')
class Meta:
verbose_name = '视频分类'
verbose_name_plural = '视频分类管理'
def __str__(self):
base_name = self.name + str(' (level %d)' % (self.level))
if self.subset.first() and self.level == 2:
return '--'.join([self.subset.first().name, base_name])
else:
return base_name
def save(self, *args, **kwargs):
super(VideoCategory, self).save(*args, **kwargs)
def colored_level(self):
color_code = 'red' if self.level == 1 else 'green'
return format_html(
'<span style="color:{};">{}</span>',
color_code,
self.get_level_display()
)
colored_level.short_description = '分级'
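# Usage sketch (hypothetical data): a level-1 category "电影" prints as
# "电影 (level 1)", and a level-2 category "动作" whose subset points at
# "电影" prints as "电影--动作 (level 2)", following __str__ above.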
# ---------------------------------------------------------------------
class MultipleUpload(models.Model):
files = ModelAdminResumableMultiFileField(null=True, blank=True, storage=VodStorage(), verbose_name='文件')
save_path = models.CharField(max_length=128, blank=False, null=True, verbose_name='保存路径')
category = models.ForeignKey(VideoCategory, null=True, verbose_name='分类')
class Meta:
verbose_name = '批量上传'
verbose_name_plural = '批量上传管理'
# ---------------------------------------------------------------------
# TODO(hhy): Please Leave This Model Here. It Will Be Use In The Future.
# class VideoTag(models.Model):
# name = models.CharField(max_length=200, null=False, blank=False)
#
# def __str__(self):
# return self.name
class Restore(models.Model):
txt_file = models.FileField(blank=True, null=True, verbose_name='备份配置文件')
zip_file = ModelAdminResumableRestoreFileField(null=True, blank=True, storage=VodStorage(), verbose_name='压缩包')
save_path = models.CharField(max_length=128, blank=False, null=True) # ,default=FileDirectory.objects.first())
class Meta:
verbose_name = '视频导入'
verbose_name_plural = '视频导入'
def save(self, force_insert=False, force_update=False, using=None,
update_fields=None):
result = super(Restore, self).save()
file_path = self.txt_file.path
call_command('loaddata', file_path)
return result
class Vod(models.Model):
title = models.CharField(max_length=120, verbose_name='标题')
# image = models.ImageField(upload_to=upload_image_location, null=True, blank=True)
# video = models.FileField(null=True,blank=True,storage=VodStorage())
image = ModelAdminResumableImageField(null=True, blank=True, storage=VodStorage(), max_length=1000,
verbose_name='缩略图')
video = ModelAdminResumableFileField(null=True, blank=True, storage=VodStorage(), max_length=1000,
verbose_name='视频')
duration = models.CharField(max_length=50, blank=True, null=True, verbose_name='时长')
local_video = models.FilePathField(path=settings.LOCAL_MEDIA_ROOT, blank=True, recursive=True)
definition = models.CharField(max_length=10, choices=VIDEO_QUALITY, blank=False, default='HD', verbose_name='清晰度')
category = models.ForeignKey(VideoCategory, null=True, blank=True, verbose_name='分类')
save_path = models.CharField(max_length=128, blank=False, null=True, default='default', verbose_name='保存路径') # ,default=FileDirectory.objects.first())
year = models.CharField(max_length=10, blank=False, null=True, default=datetime.datetime.now().year, verbose_name='年份')
region = models.ForeignKey(VideoRegion, to_field='name', null=True, blank=True, on_delete=models.SET_NULL, verbose_name='地区')
file_size = models.CharField(max_length=128, default='0B', editable=False, verbose_name='文件大小')
view_count = models.IntegerField(default=0, verbose_name='观看次数')
view_count_temp = 0
creator = models.ForeignKey(User, null=True, blank=False, editable=False)
description = models.TextField(blank=True, verbose_name='简介')
select_name = models.CharField(max_length=100, blank=False, verbose_name='选集名称', default='1')
updated = models.DateTimeField(auto_now=True, auto_now_add=False)
timestamp = models.DateTimeField(auto_now=False, auto_now_add=True, verbose_name='创建时间') # The first time added
slug = models.SlugField(unique=True, blank=True)
search_word = models.CharField(max_length=10000, null=True, blank=True)
# tags = models.ManyToManyField(VideoTag, blank=True)
video_list = SortedManyToManyField('self', blank=True)
# video_list = models.ManyToManyField('self', blank=True, symmetrical=False)
active = models.IntegerField(null=True, blank=False, default=0, choices=((1, 'Yes'), (0, 'No')))
progress = models.IntegerField(null=True, blank=True, default=0)
objects = VodManager()
class Meta:
verbose_name = '视频'
verbose_name_plural = '视频列表'
ordering = ["-timestamp", "-updated"]
def save(self, without_valid=False, *args, **kwargs):
logging.debug('==== 保存点播节目 %s ====' % self.title)
p = Pinyin()
full_pinyin = p.get_pinyin(smart_str(self.title), '')
first_pinyin = p.get_initials(smart_str(self.title), '').lower()
self.search_word = " ".join([full_pinyin, first_pinyin])
logging.debug("video path:", self.video)
if self.description is None or self.description == "":
self.description = default_description(self)
if self.local_video != '' and self.local_video is not None:
basename = Path(self.local_video).relative_to(Path(settings.LOCAL_MEDIA_ROOT))
self.video.name = str(Path(settings.LOCAL_MEDIA_URL) / basename)
logging.debug("save local_video to filefield done")
if without_valid:
ret = super(Vod, self).save(*args, **kwargs)
return ret
super(Vod, self).save(*args, **kwargs)
try:
if self.video != None and self.video != '':
relative_path = Path(self.video.name).relative_to(settings.MEDIA_URL) # Djan%20go.mp4
rel_name = uri_to_iri(relative_path) # Djan go.mp4
# Make sure the self.video.name is not in the LOCAL_FOLDER
if not self.video.name.startswith(settings.LOCAL_FOLDER_NAME) and \
not self.video.name.startswith(settings.RECORD_MEDIA_FOLDER):
self.video.name = str(rel_name)
logging.debug('save_path:', self.save_path)
logging.debug('video.name:', self.video.name)
logging.debug('size:', self.video.file.size)
self.file_size = humanfriendly.format_size(self.video.file.size)
# duration = VideoFileClip(self.video.path).duration
# self.duration = time_formate(duration)
else:
print("video file is None")
except:
pass
try:
if self.image:
self.image.name = str(uri_to_iri(Path(self.image.name).relative_to(settings.MEDIA_URL)))
except:
pass
return super(Vod, self).save(*args, **kwargs)
def __unicode__(self):
return self.title
def __str__(self):
return self.title
def image_tag(self):
if self.image is not None and str(self.image) != "":
if os.path.exists(self.image.path):
return mark_safe('<img src="%s" width="160" height="90" />' % (self.image.url))
else:
return mark_safe('<img src="#" width="160" height="90" />')
else:
return mark_safe('<img src="%s" width="160" height="90" />' % (settings.DEFAULT_IMAGE_SRC))
image_tag.short_description = '缩略图'
def get_absolute_url(self):
# print("get absolute url:",self.slug)
return reverse("vod:vod-detail", kwargs={"slug": self.slug})
def add_view_count(self):
self.view_count_temp += 1
def colored_active(self):
color_code = 'red' if self.active == 0 else 'green'
return format_html(
'<span style="color:{};">{}</span>',
color_code,
self.get_active_display()
)
colored_active.short_description = '是否激活'
def video_format(self):
suffix = Path(self.video.name).suffix
color_code = 'green' if suffix in ['.mp4', '.m3u8'] else 'red'
return format_html(
'<span style="color:{};">{}</span>',
color_code,
suffix
)
video_format.short_description = '视频文件格式'
def pre_save_post_receiver(sender, instance, *args, **kwargs):
if not instance.slug:
instance.slug = uuslug(instance.title, instance=instance)
def post_init_receiver(sender, instance, *args, **kwargs):
pass
pre_save.connect(pre_save_post_receiver, sender=Vod)
post_init.connect(post_init_receiver, sender=Vod)
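# A short sketch of the pinyin search keywords built in Vod.save() above
# (assumes xpinyin is installed; the title is hypothetical):
if __name__ == "__main__":
    _p = Pinyin()
    _title = "大话西游"
    _full = _p.get_pinyin(smart_str(_title), '')                 # e.g. "dahuaxiyou"
    _initials = _p.get_initials(smart_str(_title), '').lower()   # e.g. "dhxy"
    print(" ".join([_full, _initials]))  # value stored in Vod.search_word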
models.py | import logging
import os
import datetime
import six
import humanfriendly
from pathlib import Path
from django.db import models
from django.utils.html import format_html
from django.utils.encoding import uri_to_iri
from django.core.management import call_command
from django.utils.safestring import mark_safe
from django.conf import settings
from django.contrib.auth.models import User
from django.db.models.signals import pre_save, post_init, post_save
from django.dispatch import receiver
from django.core.urlresolvers import reverse
from django.core.files import File
from sortedm2m.fields import SortedManyToManyField
from uuslug import uuslug
# from moviepy.editor import VideoFileClip # get video duration
from .my_storage import VodStorage
from admin_resumable.fields import (
ModelAdminResumableFileField, ModelAdminResumableImageField,
ModelAdminResumableMultiFileField, ModelAdminResumableRestoreFileField
)
from xpinyin import Pinyin # for pinyin search
if six.PY3:
from django.utils.encoding import smart_str
else:
from django.utils.encoding import smart_unicode as smart_str
"""
Copy data in XXX model:
>>>
from vodmanagement.models import *
objs=Vod.objects.all()
for i in range(0,10):
newobj=objs[0]
newobj.pk=None
newobj.save()
>>>
This script will copy 10 objs[0] in database
"""
class UserPermission(models.Model):
user = models.OneToOneField(User)
permission = models.CharField(max_length=100, blank=True, null=True)
end_date = models.DateTimeField(blank=True, null=True)
def __str__(self):
return str(self.user)
def has_permision(self):
delta = self.end_date.date() - datetime.date.today()
print(delta.days)
if delta.days >= 0:
return True
return False
class VodManager(models.Manager):
def active(self, *args, **kwargs):
return super(VodManager, self) # .filter(draft=False).filter(publish__lte=timezone.now())
def upload_location(instance, filename):
# filebase, extension = filename.split(".")
# return "%s/%s.%s" %(instance.id, instance.id, extension)
VodModel = instance.__class__
print('save')
if VodModel.objects.count() != 0:
new_id = VodModel.objects.order_by("id").last().id - 1
else:
new_id = 0
"""
instance.__class__ gets the model Post. We must use this method because the model is defined below.
Then create a queryset ordered by the "id"s of each object,
Then we get the last object in the queryset with `.last()`
Which will give us the most recently created Model instance
We add 1 to it, so we get what should be the same id as the post we are creating.
"""
print('save image')
return "%s/%s" % (new_id, filename)
def upload_image_location(instance, filename):
VodModel = instance.__class__
if VodModel.objects.count() != 0:
new_id = VodModel.objects.order_by("id").last().id + 1
else:
new_id = 0
folder = instance.save_path
if folder == "default":
category = instance.category.name
else:
category = instance.category.name + '_' + folder
return "%s/images/%s/%s" % (category, new_id, filename)
def upload_record_image_location(instance, filename):
return "%s/images/%s" % (settings.RECORD_MEDIA_FOLDER, filename)
def default_description(instance):
default = instance.title
print(default)
return 'The %s description' % default
# Create your models here.
def default_filedir():
return settings.MEDIA_ROOT
# ---------------------------------------------------------------------
# if leave path blank,it will save it as the default file dir:settings.MEDIA_ROOT
class FileDirectory(models.Model):
path = models.CharField(max_length=512, default=default_filedir, blank=True)
class Meta:
verbose_name = '视频上传路径'
verbose_name_plural = '视频上传路径管理'
def __str__(self):
return self.path
def save(self, *args, **kwargs):
if self.path is None or self.path == "":
self.path = default_filedir()
super(FileDirectory, self).save(*args, **kwargs)
# ---------------------------------------------------------------------
# Two selections only:Common,Special purpose
TYPES = (
('common', 'Common'),
('special', 'Special purpose'),
)
VIDEO_QUALITY = [
('SD', '标清'),
('HD', '高清'),
('FHD', '超清'),
]
SAVE_PATH = (
('', settings.LOCAL_MEDIA_ROOT),
)
class VideoRegion(models.Model):
name = models.CharField(max_length=200, verbose_name='地区', unique=True)
class Meta:
verbose_name = '视频地区管理'
verbose_name_plural = '视频地区'
def __str__(self):
return self.name
class VideoCategory(models.Model):
name = models.CharField(max_length=128, verbose_name='分类名称')
type = models.CharField(max_length=128, choices=TYPES, default='common', verbose_name='类型')
isSecret = models.BooleanField(default=False, verbose_name='是否加密')
level = models.IntegerField(null=False, blank=False, default=1, choices=((1, '一级分类'), (2, '二级分类')),
verbose_name='分类等级')
subset = models.ManyToManyField('self', blank=True, verbose_name='分类关系')
class Meta:
verbose_name = '视频分类'
verbose_name_plural = '视频分类管理'
def __str__(self):
base_name = self.name + str(' (level %d)' % (self.level))
if self.subset.first() and self.level == 2:
return '--'.join([self.subset.first().name, base_name])
else:
return base_name
def save(self, *args, **kwargs):
super(VideoCategory, self).save(*args, **kwargs)
def colored_level(self):
color_code = 'red' if self.level == 1 else 'green'
return format_html(
'<span style="color:{};">{}</span>',
color_code,
self.get_level_display()
)
colored_level.short_description = '分级'
# ---------------------------------------------------------------------
class MultipleUpload(models.Model):
files = ModelAdminResumableMultiFileField(null=True, blank=True, storage=VodStorage(), verbose_name='文件')
save_path = models.CharField(max_length=128, blank=False, null=True, verbose_name='保存路径')
category = models.ForeignKey(VideoCategory, null=True, verbose_name='分类')
class Meta:
verbose_name = '批量上传'
verbose_name_plural = '批量上传管理'
# ---------------------------------------------------------------------
# TODO(hhy): Please Leave This Model Here. It Will Be Use In The Future.
# class VideoTag(models.Model):
# name = models.CharField(max_length=200, null=False, blank=False)
#
# def __str__(self):
# return self.name
class Restore(models.Model):
txt_file = models.FileField(blank=True, null=True, verbose_name='备份配置文件')
zip_file = ModelAdminResumableRestoreFileField(null=True, blank=True, storage=VodStorage(), verbose_name='压缩包')
save_path = models.CharField(max_length=128, blank=False, null=True) # ,default=FileDirectory.objects.first())
class Meta:
verbose_name = '视频导入'
verbose_name_plural = '视频导入'
def save(self, force_insert=False, force_update=False, using=None,
update_fields=None):
result = super(Restore, self).save()
file_path = self.txt_file.path
call_command('loaddata', file_path)
return result
class Vod(models.Model):
title = models.CharField(max_length=120, verbose_name='标题')
# image = models.ImageField(upload_to=upload_image_location, null=True, blank=True)
# video = models.FileField(null=True,blank=True,storage=VodStorage())
image = ModelAdminResumableImageField(null=True, blank=True, storage=VodStorage(), max_length=1000,
verbose_name='缩略图')
video = ModelAdminResumableFileField(null=True, blank=True, storage=VodStorage(), max_length=1000,
verbose_name='视频')
duration = models.CharField(max_length=50, blank=True, null=True, verbose_name='时长')
local_video = models.FilePathField(path=settings.LOCAL_MEDIA_ROOT, blank=True, recursive=True)
definition = models.CharField(max_length=10, choices=VIDEO_QUALITY, blank=False, default='HD', verbose_name='清晰度')  # default must be one of the VIDEO_QUALITY keys; 'H' was not a valid choice
category = models.ForeignKey(VideoCategory, null=True, blank=True, verbose_name='分类')
save_path = models.CharField(max_length=128, blank=False, null=True, default='default', verbose_name='保存路径') # ,default=FileDirectory.objects.first())
year = models.CharField(max_length=10, blank=False, null=True, default=datetime.datetime.now().year, verbose_name='年份')
region = models.ForeignKey(VideoRegion, to_field='name', null=True, blank=True, on_delete=models.SET_NULL, verbose_name='地区')
file_size = models.CharField(max_length=128, default='0B', editable=False, verbose_name='文件大小')
view_count = models.IntegerField(default=0, verbose_name='观看次数')
view_count_temp = 0
creator = models.ForeignKey(User, null=True, blank=False, editable=False)
description = models.TextField(blank=True, verbose_name='简介')
select_name = models.CharField(max_length=100, blank=False, verbose_name='选集名称', default='1')
updated = models.DateTimeField(auto_now=True, auto_now_add=False)
timestamp = models.DateTimeField(auto_now=False, auto_now_add=True, verbose_name='创建时间') # The first time added
slug = models.SlugField(unique=True, blank=True)
search_word = models.CharField(max_length=10000, null=True, blank=True)
# tags = models.ManyToManyField(VideoTag, blank=True)
video_list = SortedManyToManyField('self', blank=True)
# video_list = models.ManyToManyField('self', blank=True, symmetrical=False)
active = models.IntegerField(null=True, blank=False, default=0, choices=((1, 'Yes'), (0, 'No')))
progress = models.IntegerField(null=True, blank=True, default=0)
objects = VodManager()
class Meta:
verbose_name = '视频'
verbose_name_plural = '视频列表'
ordering = ["-timestamp", "-updated"]
def save(self, without_valid=False, *args, **kwargs):
logging.debug('==== 保存点播节目 %s ====' % self.title)
p = Pinyin()
full_pinyin = p.get_pinyin(smart_str(self.title), '')
first_pinyin = p.get_initials(smart_str(self.title), '').lower()
self.search_word = " ".join([full_pinyin, first_pinyin])
logging.debug("video path:", self.video)
if self.description is None or self.description == "":
self.description = default_description(self)
if self.local_video != '' and self.local_video is not None:
basename = Path(self.local_video).relative_to(Path(settings.LOCAL_MEDIA_ROOT))
self.video.name = str(Path(settings.LOCAL_MEDIA_URL) / basename)
logging.debug("save local_video to filefield done")
if without_valid:
ret = super(Vod, self).save(*args, **kwargs)
return ret
super(Vod, self).save(*args, **kwargs)
try:
if self.video is not None and self.video != '':
relative_path = Path(self.video.name).relative_to(settings.MEDIA_URL) # Djan%20go.mp4
rel_name = uri_to_iri(relative_path) # Djan go.mp4
# Make sure the self.video.name is not in the LOCAL_FOLDER
if not self.video.name.startswith(settings.LOCAL_FOLDER_NAME) and \
not self.video.name.startswith(settings.RECORD_MEDIA_FOLDER):
self.video.name = str(rel_name)
logging.debug('save_path: %s', self.save_path)
logging.debug('video.name: %s', self.video.name)
logging.debug('size: %s', self.video.file.size)
self.file_size = humanfriendly.format_size(self.video.file.size)
# duration = VideoFileClip(self.video.path).duration
# self.duration = time_formate(duration)
else:
print("video file is None")
except Exception:  # best-effort normalisation of the video path/size; errors are ignored
pass
try:
if self.image:
self.image.name = str(uri_to_iri(Path(self.image.name).relative_to(settings.MEDIA_URL)))
except Exception:
pass
return super(Vod, self).save(*args, **kwargs)
def __unicode__(self):
return self.title
def __str__(self):
return self.title
def image_tag(self):
if self.image is not None and str(self.image) != "":
if os.path.exists(self.image.path):
return mark_safe('<img src="%s" width="160" height="90" />' % (self.image.url))
else:
return mark_safe('<img src="#" width="160" height="90" />')
else:
return mark_safe('<img src="%s" width="160" height="90" />' % (settings.DEFAULT_IMAGE_SRC))
image_tag.short_description = '缩略图'
def get_absolute_url(self):
# print("get absolute url:",self.slug)
return reverse("vod:vod-detail", kwargs={"slug": self.slug})
def add_view_count(self):
self.view_count_temp += 1
def colored_active(self):
color_code = 'red' if self.active == 0 else 'green'
return format_html(
'<span style="color:{};">{}</span>',
color_code,
self.get_active_display()
)
colored_active.short_description = '是否激活'
def video_format(self):
suffix = Path(self.video.name).suffix
color_code = 'green' if suffix in ['.mp4', '.m3u8'] else 'red'
return format_html(
'<span style="color:{};">{}</span>',
color_code,
suffix
)
video_format.short_description = '视频文件格式'
def pre_save_post_receiver(sender, instance, *args, **kwargs):
if not instance.slug:
instance.slug = uuslug(instance.title, instance=instance)
def post_init_receiver(sender, instance, *args, **kwargs):
pass
pre_save.connect(pre_save_post_receiver, sender=Vod)
post_init.connect(post_init_receiver, sender=Vod)
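
# A minimal, self-contained sketch of the pinyin search-word logic used in
# Vod.save() above, so it can be exercised outside Django. The helper name
# build_search_word and the sample title are illustrative additions, not part
# of the project.
from xpinyin import Pinyin

def build_search_word(title):
    # Same recipe as Vod.save(): full pinyin plus lowercase initials,
    # space separated, as stored in Vod.search_word for search.
    p = Pinyin()
    full_pinyin = p.get_pinyin(title, '')
    first_pinyin = p.get_initials(title, '').lower()
    return " ".join([full_pinyin, first_pinyin])

# Hypothetical usage: build_search_word(u'新闻联播') is expected to give
# something like 'xinwenlianbo xwlb'.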
ais_pg_monthhistogram.py
#!/usr/bin/env python
__version__ = '$Revision: 4791 $'.split()[1]
__date__ = '$Date: 2007-01-04 $'.split()[1]
__author__ = 'Kurt Schwehr'
__doc__="""
Count the transits by ship type for each month. A transit is
associated with the first month that the vessel is observed.
@requires: U{epydoc<http://epydoc.sourceforge.net/>} > 3.0alpha3
@requires: U{psycopg2<http://initd.org/projects/psycopg2>}
@requires: U{postgreSQL<http://www.postgresql.org/>} => 8.2
@requires: U{postgis<http://postgis.org>} => 8.2
@author: """+__author__+"""
@version: """ + __version__ +"""
@var __date__: Date of last svn commit
@undocumented: __version__ __author__ __doc__ parser
@status: under development
@license: GPL v2
@since: 2007-Nov-24
@todo: catagory - yeah, it is mispelled. bugger
"""
import sys, os
categories=[
'cargo_container'
# ,'fishing' # we are leaving out fishing for 2006.
,'passenger'
,'service_research'
,'tanker'
,'tug'
]
######################################################################
if __name__=='__main__':
from optparse import OptionParser
parser = OptionParser(usage="%prog [options]",version="%prog "+__version__)
parser.add_option('-b','--basename',dest='basename'
,default='month-summaries'
,help='Base file name for output')
parser.add_option('-d','--database-name',dest='databaseName',default='ais',
help='Name of database within the postgres server [default: %default]')
parser.add_option('-D','--database-host',dest='databaseHost',default='localhost',
help='Host name of the computer serving the dbx [default: %default]')
defaultUser = os.getenv('USER') #os.getlogin()
parser.add_option('-u','--database-user',dest='databaseUser',default=defaultUser,
help='Host name on which the database resides [default: %default]')
parser.add_option('-r','--restrict-table',dest='table', default='summary2006'
,help='SQL table to use for picking the ship type [default: %default]')
parser.add_option('-R','--restrict-field',dest='field', default='catagory'
,help='SQL field to use for picking the ship type [default: %default]')
parser.add_option('--excel',dest='excel',default=False,action='store_true',
help='Write excel spreadsheet')
parser.add_option('--distance',dest='distance',default=False,action='store_true',
help='Count distance in meters rather than number of transits')
parser.add_option('--use-nm',dest='useNM',default=False,action='store_true',
help='Report distances in nautical miles rather than km')
# FIX: utm zone not hardcoded would be good
# UTM Zone 19...
#sql='SELECT AsText(Transform(track,32619)) FROM tpath'
# --- EPSG 32610 : WGS 84 / UTM zone 10N
#sql='SELECT AsText(Transform(track,32610)) FROM tpath'
parser.add_option('-v','--verbose',dest='verbose',default=False,action='store_true',
help='Make the test output verbose')
(options,args) = parser.parse_args()
verbose = options.verbose
if options.distance:
if options.useNM:
options.basename+='-dist-nm'
else:
options.basename+='-dist-km'
else:
if options.useNM:
sys.exit('ERROR: can not specify nm units when not in distance mode')
import psycopg2 as psycopg
connectStr = "dbname='"+options.databaseName+"' user='"+options.databaseUser+"' host='"+options.databaseHost+"'"
if verbose:
sys.stderr.write('CONNECT: '+connectStr+'\n')
cx = psycopg.connect(connectStr)
cu = cx.cursor()
# FIX: This should be one big database query, I am sure.
catCounts={}
for category in categories:
if verbose: sys.stderr.write('Category: '+category+'\n')
#print 'FIX: remove limit'
# Yes, I know that I mispelled the field in the database
cu.execute('SELECT DISTINCT(userid) FROM summary2006 WHERE catagory=\''+category+'\';') # LIMIT 20;')
ships = cu.fetchall()
if verbose: sys.stderr.write(' Num ships: '+str(len(ships))+'\n')
monthCounts = [0,]*13 # leave 0 well alone
for ship in ships:
if options.distance:
q = 'SELECT t.id,startpos,length(Transform(track,32619)) FROM tpath'\
',(SELECT id,startpos FROM transit WHERE userid=\''+str(ship[0])+'\') AS t '\
'WHERE tpath.id=t.id;'
#if verbose: sys.stderr.write(' q='+q+'\n')
cu.execute(q)
for row in cu.fetchall():
id,startpos,lengthMeters=row
cu.execute('SELECT cg_timestamp FROM position WHERE key=\''+str(startpos)+'\';')
ts = cu.fetchone()[0]
#print lengthMeters,ts
if options.useNM:
monthCounts[ts.month]+= (lengthMeters/1000.) * 0.539956803
else:
monthCounts[ts.month]+=lengthMeters/1000.
#sys.exit('early')
else: # do transit count
cu.execute('SELECT p.cg_timestamp FROM position AS p, (SELECT startpos FROM transit WHERE userid=\''+str(ship[0])+'\') AS t WHERE key = startpos;')
for start in cu.fetchall():
monthCounts[start[0].month]+=1
if verbose: sys.stderr.write(' '+str(monthCounts)+'\n')
catCounts[category]=monthCounts
#print catCounts
# gnuplot 4.2 compatible file for histogram plots
o = file(options.basename+'.dat','w')
o.write('# Ship transit occurance by category and month\n')
o.write('Category Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec\n')
for category in categories:
o.write(category)
for i in range(1,13):
o.write(' '+str(catCounts[category][i]))
o.write('\n')
del(o)
# the above one plots backwards in gnuplot... maybe this one is better
o = file(options.basename+'-2.dat','w')
o.write('# Ship transit occurance by category and month\n')
catList = [categories[i] for i in range(len(categories))]
print catList
o.write('- '+' '.join(catList)+'\n')
monthNames = (None,'Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec')
for i in range(1,13):
o.write(monthNames[i])
for category in categories:
o.write(' '+str(catCounts[category][i]))
o.write('\n')
del(o)
basename=options.basename
gp=file(basename+'.gp','w')
gp.write('''#!/usr/bin/env gnuplot
# Generated by ais_pg_monthhistogram.py from noaadata-py\n''')
if options.distance:
gp.write("set title 'Vessel Transit Distances'\n")
if options.useNM:
gp.write("set ylabel 'Distance (nm)'\n")
else:
gp.write("set ylabel 'Distance (km)'\n")
else:
gp.write("set title 'Vessel Transit Counts'\n")
gp.write("set ylabel '# of transits'\n")
gp.write("set xlabel 'Month'\n")
gp.write('''set terminal pdf
set key left
set xtics nomirror rotate by -45
set style data linespoints
set datafile missing "-"
set output \''''+basename+'''.pdf\'
plot \''''+basename+'''-2.dat\' using 2:xtic(1) t 2, \
'' u 3 t 3,\
'' u 4 t 4,\
'' u 5 t 5,\
'' u 6 t 6
# Bar graph...
set output \''''+basename+'''-bars.pdf\'
set style data histogram
set style histogram cluster gap 1
set style fill solid border -1
set boxwidth 0.9
replot
# Stacked ...
set output \''''+basename+'''-stacked.pdf\'
set style data histogram
set style histogram rowstacked
set style fill solid border -1
set boxwidth 0.75
replot
''')
del (gp)
os.chmod(basename+'.gp',0755)
if options.excel:
import pyExcelerator as excel
workbook = excel.Workbook()
ws = workbook.add_sheet('Type Transits by Month')
ws_row=0
ws.write(ws_row,0,'Ship transits for each month broken down by ship type. Fishing left out'); ws_row+=1
if options.distance:
if options.useNM:
ws.write(ws_row,6,'Results in nautical miles (nm)'); ws_row+=1
else:
ws.write(ws_row,6,'Results in kilometers (km)'); ws_row+=1
else:
ws.write(ws_row,6,'Results in number of transits'); ws_row+=1
col=0
ws.write(ws_row,col,'Ship type'); col+=1
ws.write(ws_row,col,'Jan'); col+=1
ws.write(ws_row,col,'Feb'); col+=1
ws.write(ws_row,col,'Mar'); col+=1
ws.write(ws_row,col,'Apr'); col+=1
ws.write(ws_row,col,'May'); col+=1
ws.write(ws_row,col,'Jun'); col+=1
ws.write(ws_row,col,'Jul'); col+=1
ws.write(ws_row,col,'Aug'); col+=1
ws.write(ws_row,col,'Sep'); col+=1
ws.write(ws_row,col,'Oct'); col+=1
ws.write(ws_row,col,'Nov'); col+=1
ws.write(ws_row,col,'Dec'); #col+=1
for category in categories:
    col=0
    ws_row+=1
    ws.write(ws_row,col,category); col+=1
    for i in range(1,13):
        # FIX: double check that we want to be using integers
        ws.write(ws_row,col,int(catCounts[category][i])); col+=1

workbook.save(options.basename+'.xls')
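
# A small sketch of the per-ship transit tally performed in the main loop
# above, pulled out as a standalone function for clarity. It assumes `cu` is
# an open psycopg cursor on the same schema (position and transit tables).
# The function name and the parameterised query are additions; the original
# script builds its SQL by string concatenation instead.
def month_transit_counts(cu, userid):
    counts = [0] * 13  # indexes 1..12 are used, matching monthCounts above
    cu.execute('SELECT p.cg_timestamp FROM position AS p,'
               ' (SELECT startpos FROM transit WHERE userid=%s) AS t'
               ' WHERE key = startpos;', (userid,))
    for (start,) in cu.fetchall():
        counts[start.month] += 1
    return counts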
storage.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
| This file is part of the web2py Web Framework
| Copyrighted by Massimo Di Pierro <[email protected]>
| License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
Provides:
- List; like list but returns None instead of IndexOutOfBounds
- Storage; like dictionary allowing also for `obj.foo` for `obj['foo']`
"""
try:
import cPickle as pickle
except:
import pickle
import copy_reg
import gluon.portalocker as portalocker
__all__ = ['List', 'Storage', 'Settings', 'Messages',
'StorageList', 'load_storage', 'save_storage']
DEFAULT = lambda:0
class Storage(dict):
"""
A Storage object is like a dictionary except `obj.foo` can be used
in addition to `obj['foo']`, and setting obj.foo = None deletes item foo.
Example::
>>> o = Storage(a=1)
>>> print o.a
1
>>> o['a']
1
>>> o.a = 2
>>> print o['a']
2
>>> del o.a
>>> print o.a
None
"""
__slots__ = ()
__setattr__ = dict.__setitem__
__delattr__ = dict.__delitem__
__getitem__ = dict.get
__getattr__ = dict.get
__getnewargs__ = lambda self: getattr(dict,self).__getnewargs__(self)
__repr__ = lambda self: '<Storage %s>' % dict.__repr__(self)
# http://stackoverflow.com/questions/5247250/why-does-pickle-getstate-accept-as-a-return-value-the-very-instance-it-requi
__getstate__ = lambda self: None
__copy__ = lambda self: Storage(self)
def getlist(self, key):
"""
Returns a Storage value as a list.
If the value is a list it will be returned as-is.
If object is None, an empty list will be returned.
Otherwise, `[value]` will be returned.
Example output for a query string of `?x=abc&y=abc&y=def`::
>>> request = Storage()
>>> request.vars = Storage()
>>> request.vars.x = 'abc'
>>> request.vars.y = ['abc', 'def']
>>> request.vars.getlist('x')
['abc']
>>> request.vars.getlist('y')
['abc', 'def']
>>> request.vars.getlist('z')
[]
"""
value = self.get(key, [])
if value is None or isinstance(value, (list, tuple)):
return value
else:
return [value]
def getfirst(self, key, default=None):
"""
Returns the first value of a list or the value itself when given a
`request.vars` style key.
If the value is a list, its first item will be returned;
otherwise, the value will be returned as-is.
Example output for a query string of `?x=abc&y=abc&y=def`::
>>> request = Storage()
>>> request.vars = Storage()
>>> request.vars.x = 'abc'
>>> request.vars.y = ['abc', 'def']
>>> request.vars.getfirst('x')
'abc'
>>> request.vars.getfirst('y')
'abc'
>>> request.vars.getfirst('z')
"""
values = self.getlist(key)
return values[0] if values else default
def getlast(self, key, default=None):
"""
Returns the last value of a list or value itself when given a
`request.vars` style key.
If the value is a list, the last item will be returned;
otherwise, the value will be returned as-is.
Simulated output with a query string of `?x=abc&y=abc&y=def`::
>>> request = Storage()
>>> request.vars = Storage()
>>> request.vars.x = 'abc'
>>> request.vars.y = ['abc', 'def']
>>> request.vars.getlast('x')
'abc'
>>> request.vars.getlast('y')
'def'
>>> request.vars.getlast('z')
"""
values = self.getlist(key)
return values[-1] if values else default
def pickle_storage(s):
return Storage, (dict(s),)
copy_reg.pickle(Storage, pickle_storage)
PICKABLE = (str, int, long, float, bool, list, dict, tuple, set)
class StorageList(Storage):
"""
Behaves like Storage but missing elements defaults to [] instead of None
"""
def __getitem__(self, key):
return self.__getattr__(key)
def __getattr__(self, key):
if key in self:
return self.get(key)
else:
r = []
self[key] = r
return r
def load_storage(filename):
fp = None
try:
fp = portalocker.LockedFile(filename, 'rb')
storage = pickle.load(fp)
finally:
if fp:
fp.close()
return Storage(storage)
def save_storage(storage, filename):
fp = None
try:
fp = portalocker.LockedFile(filename, 'wb')
pickle.dump(dict(storage), fp)
finally:
if fp:
fp.close()
class Settings(Storage):
def __setattr__(self, key, value):
if key != 'lock_keys' and self['lock_keys'] and key not in self:
raise SyntaxError('setting key \'%s\' does not exist' % key)
if key != 'lock_values' and self['lock_values']:
raise SyntaxError('setting value cannot be changed: %s' % key)
self[key] = value
class Messages(Settings):
def __init__(self, T):
Storage.__init__(self, T=T)
def __getattr__(self, key):
value = self[key]
if isinstance(value, str):
return self.T(value)
return value
class FastStorage(dict):
"""
Eventually this should replace class Storage but causes memory leak
because of http://bugs.python.org/issue1469629
>>> s = FastStorage()
>>> s.a = 1
>>> s.a
1
>>> s['a']
1
>>> s.b
>>> s['b']
>>> s['b']=2
>>> s['b']
2
>>> s.b
2
>>> isinstance(s,dict)
True
>>> dict(s)
{'a': 1, 'b': 2}
>>> dict(FastStorage(s))
{'a': 1, 'b': 2}
>>> import pickle
>>> s = pickle.loads(pickle.dumps(s))
>>> dict(s)
{'a': 1, 'b': 2}
>>> del s.b
>>> del s.a
>>> s.a
>>> s.b
>>> s['a']
>>> s['b']
"""
def __init__(self, *args, **kwargs):
dict.__init__(self, *args, **kwargs)
self.__dict__ = self
def __getattr__(self, key):
return getattr(self, key) if key in self else None
def __getitem__(self, key):
return dict.get(self, key, None)
def copy(self):
self.__dict__ = {}
s = FastStorage(self)
self.__dict__ = self
return s
def __repr__(self):
return '<Storage %s>' % dict.__repr__(self)
def __getstate__(self):
return dict(self)
def __setstate__(self, sdict):
dict.__init__(self, sdict)
self.__dict__ = self
def update(self, *args, **kwargs):
dict.__init__(self, *args, **kwargs)
self.__dict__ = self
class List(list):
"""
Like a regular python list but a[i] if i is out of bounds returns None
instead of `IndexOutOfBounds`
"""
def __call__(self, i, default=DEFAULT, cast=None, otherwise=None):
"""Allows to use a special syntax for fast-check of `request.args()`
validity
Args:
i: index
default: use this value if arg not found
cast: type cast
otherwise: can be:
- None: results in a 404
- str: redirect to this address
- callable: calls the function (nothing is passed)
Example:
You can use::
request.args(0,default=0,cast=int,otherwise='http://error_url')
request.args(0,default=0,cast=int,otherwise=lambda:...)
"""
n = len(self)
if 0 <= i < n or -n <= i < 0:
value = self[i]
elif default is DEFAULT:
value = None
else:
value, cast = default, False
if cast:
|
return value
if __name__ == '__main__':
import doctest
doctest.testmod()
| try:
value = cast(value)
except (ValueError, TypeError):
from http import HTTP, redirect
if otherwise is None:
raise HTTP(404)
elif isinstance(otherwise, str):
redirect(otherwise)
elif callable(otherwise):
return otherwise()
else:
raise RuntimeError("invalid otherwise") | conditional_block |
storage.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
| This file is part of the web2py Web Framework
| Copyrighted by Massimo Di Pierro <[email protected]>
| License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
Provides:
- List; like list but returns None instead of IndexOutOfBounds | - Storage; like dictionary allowing also for `obj.foo` for `obj['foo']`
"""
try:
import cPickle as pickle
except:
import pickle
import copy_reg
import gluon.portalocker as portalocker
__all__ = ['List', 'Storage', 'Settings', 'Messages',
'StorageList', 'load_storage', 'save_storage']
DEFAULT = lambda:0
class Storage(dict):
"""
A Storage object is like a dictionary except `obj.foo` can be used
in addition to `obj['foo']`, and setting obj.foo = None deletes item foo.
Example::
>>> o = Storage(a=1)
>>> print o.a
1
>>> o['a']
1
>>> o.a = 2
>>> print o['a']
2
>>> del o.a
>>> print o.a
None
"""
__slots__ = ()
__setattr__ = dict.__setitem__
__delattr__ = dict.__delitem__
__getitem__ = dict.get
__getattr__ = dict.get
__getnewargs__ = lambda self: getattr(dict,self).__getnewargs__(self)
__repr__ = lambda self: '<Storage %s>' % dict.__repr__(self)
# http://stackoverflow.com/questions/5247250/why-does-pickle-getstate-accept-as-a-return-value-the-very-instance-it-requi
__getstate__ = lambda self: None
__copy__ = lambda self: Storage(self)
def getlist(self, key):
"""
Returns a Storage value as a list.
If the value is a list it will be returned as-is.
If object is None, an empty list will be returned.
Otherwise, `[value]` will be returned.
Example output for a query string of `?x=abc&y=abc&y=def`::
>>> request = Storage()
>>> request.vars = Storage()
>>> request.vars.x = 'abc'
>>> request.vars.y = ['abc', 'def']
>>> request.vars.getlist('x')
['abc']
>>> request.vars.getlist('y')
['abc', 'def']
>>> request.vars.getlist('z')
[]
"""
value = self.get(key, [])
if value is None or isinstance(value, (list, tuple)):
return value
else:
return [value]
def getfirst(self, key, default=None):
"""
Returns the first value of a list or the value itself when given a
`request.vars` style key.
If the value is a list, its first item will be returned;
otherwise, the value will be returned as-is.
Example output for a query string of `?x=abc&y=abc&y=def`::
>>> request = Storage()
>>> request.vars = Storage()
>>> request.vars.x = 'abc'
>>> request.vars.y = ['abc', 'def']
>>> request.vars.getfirst('x')
'abc'
>>> request.vars.getfirst('y')
'abc'
>>> request.vars.getfirst('z')
"""
values = self.getlist(key)
return values[0] if values else default
def getlast(self, key, default=None):
"""
Returns the last value of a list or value itself when given a
`request.vars` style key.
If the value is a list, the last item will be returned;
otherwise, the value will be returned as-is.
Simulated output with a query string of `?x=abc&y=abc&y=def`::
>>> request = Storage()
>>> request.vars = Storage()
>>> request.vars.x = 'abc'
>>> request.vars.y = ['abc', 'def']
>>> request.vars.getlast('x')
'abc'
>>> request.vars.getlast('y')
'def'
>>> request.vars.getlast('z')
"""
values = self.getlist(key)
return values[-1] if values else default
def pickle_storage(s):
return Storage, (dict(s),)
copy_reg.pickle(Storage, pickle_storage)
PICKABLE = (str, int, long, float, bool, list, dict, tuple, set)
class StorageList(Storage):
"""
Behaves like Storage but missing elements defaults to [] instead of None
"""
def __getitem__(self, key):
return self.__getattr__(key)
def __getattr__(self, key):
if key in self:
return self.get(key)
else:
r = []
self[key] = r
return r
def load_storage(filename):
fp = None
try:
fp = portalocker.LockedFile(filename, 'rb')
storage = pickle.load(fp)
finally:
if fp:
fp.close()
return Storage(storage)
def save_storage(storage, filename):
fp = None
try:
fp = portalocker.LockedFile(filename, 'wb')
pickle.dump(dict(storage), fp)
finally:
if fp:
fp.close()
class Settings(Storage):
def __setattr__(self, key, value):
if key != 'lock_keys' and self['lock_keys'] and key not in self:
raise SyntaxError('setting key \'%s\' does not exist' % key)
if key != 'lock_values' and self['lock_values']:
raise SyntaxError('setting value cannot be changed: %s' % key)
self[key] = value
class Messages(Settings):
def __init__(self, T):
Storage.__init__(self, T=T)
def __getattr__(self, key):
value = self[key]
if isinstance(value, str):
return self.T(value)
return value
class FastStorage(dict):
"""
Eventually this should replace class Storage but causes memory leak
because of http://bugs.python.org/issue1469629
>>> s = FastStorage()
>>> s.a = 1
>>> s.a
1
>>> s['a']
1
>>> s.b
>>> s['b']
>>> s['b']=2
>>> s['b']
2
>>> s.b
2
>>> isinstance(s,dict)
True
>>> dict(s)
{'a': 1, 'b': 2}
>>> dict(FastStorage(s))
{'a': 1, 'b': 2}
>>> import pickle
>>> s = pickle.loads(pickle.dumps(s))
>>> dict(s)
{'a': 1, 'b': 2}
>>> del s.b
>>> del s.a
>>> s.a
>>> s.b
>>> s['a']
>>> s['b']
"""
def __init__(self, *args, **kwargs):
dict.__init__(self, *args, **kwargs)
self.__dict__ = self
def __getattr__(self, key):
return getattr(self, key) if key in self else None
def __getitem__(self, key):
return dict.get(self, key, None)
def copy(self):
self.__dict__ = {}
s = FastStorage(self)
self.__dict__ = self
return s
def __repr__(self):
return '<Storage %s>' % dict.__repr__(self)
def __getstate__(self):
return dict(self)
def __setstate__(self, sdict):
dict.__init__(self, sdict)
self.__dict__ = self
def update(self, *args, **kwargs):
dict.__init__(self, *args, **kwargs)
self.__dict__ = self
class List(list):
"""
Like a regular python list but a[i] if i is out of bounds returns None
instead of `IndexOutOfBounds`
"""
def __call__(self, i, default=DEFAULT, cast=None, otherwise=None):
"""Allows to use a special syntax for fast-check of `request.args()`
validity
Args:
i: index
default: use this value if arg not found
cast: type cast
otherwise: can be:
- None: results in a 404
- str: redirect to this address
- callable: calls the function (nothing is passed)
Example:
You can use::
request.args(0,default=0,cast=int,otherwise='http://error_url')
request.args(0,default=0,cast=int,otherwise=lambda:...)
"""
n = len(self)
if 0 <= i < n or -n <= i < 0:
value = self[i]
elif default is DEFAULT:
value = None
else:
value, cast = default, False
if cast:
try:
value = cast(value)
except (ValueError, TypeError):
from http import HTTP, redirect
if otherwise is None:
raise HTTP(404)
elif isinstance(otherwise, str):
redirect(otherwise)
elif callable(otherwise):
return otherwise()
else:
raise RuntimeError("invalid otherwise")
return value
if __name__ == '__main__':
import doctest
doctest.testmod() | random_line_split |
|
storage.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
| This file is part of the web2py Web Framework
| Copyrighted by Massimo Di Pierro <[email protected]>
| License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
Provides:
- List; like list but returns None instead of IndexOutOfBounds
- Storage; like dictionary allowing also for `obj.foo` for `obj['foo']`
"""
try:
import cPickle as pickle
except:
import pickle
import copy_reg
import gluon.portalocker as portalocker
__all__ = ['List', 'Storage', 'Settings', 'Messages',
'StorageList', 'load_storage', 'save_storage']
DEFAULT = lambda:0
class Storage(dict):
"""
A Storage object is like a dictionary except `obj.foo` can be used
in addition to `obj['foo']`, and setting obj.foo = None deletes item foo.
Example::
>>> o = Storage(a=1)
>>> print o.a
1
>>> o['a']
1
>>> o.a = 2
>>> print o['a']
2
>>> del o.a
>>> print o.a
None
"""
__slots__ = ()
__setattr__ = dict.__setitem__
__delattr__ = dict.__delitem__
__getitem__ = dict.get
__getattr__ = dict.get
__getnewargs__ = lambda self: getattr(dict,self).__getnewargs__(self)
__repr__ = lambda self: '<Storage %s>' % dict.__repr__(self)
# http://stackoverflow.com/questions/5247250/why-does-pickle-getstate-accept-as-a-return-value-the-very-instance-it-requi
__getstate__ = lambda self: None
__copy__ = lambda self: Storage(self)
def getlist(self, key):
|
def getfirst(self, key, default=None):
"""
Returns the first value of a list or the value itself when given a
`request.vars` style key.
If the value is a list, its first item will be returned;
otherwise, the value will be returned as-is.
Example output for a query string of `?x=abc&y=abc&y=def`::
>>> request = Storage()
>>> request.vars = Storage()
>>> request.vars.x = 'abc'
>>> request.vars.y = ['abc', 'def']
>>> request.vars.getfirst('x')
'abc'
>>> request.vars.getfirst('y')
'abc'
>>> request.vars.getfirst('z')
"""
values = self.getlist(key)
return values[0] if values else default
def getlast(self, key, default=None):
"""
Returns the last value of a list or value itself when given a
`request.vars` style key.
If the value is a list, the last item will be returned;
otherwise, the value will be returned as-is.
Simulated output with a query string of `?x=abc&y=abc&y=def`::
>>> request = Storage()
>>> request.vars = Storage()
>>> request.vars.x = 'abc'
>>> request.vars.y = ['abc', 'def']
>>> request.vars.getlast('x')
'abc'
>>> request.vars.getlast('y')
'def'
>>> request.vars.getlast('z')
"""
values = self.getlist(key)
return values[-1] if values else default
def pickle_storage(s):
return Storage, (dict(s),)
copy_reg.pickle(Storage, pickle_storage)
PICKABLE = (str, int, long, float, bool, list, dict, tuple, set)
class StorageList(Storage):
"""
Behaves like Storage but missing elements defaults to [] instead of None
"""
def __getitem__(self, key):
return self.__getattr__(key)
def __getattr__(self, key):
if key in self:
return self.get(key)
else:
r = []
self[key] = r
return r
def load_storage(filename):
fp = None
try:
fp = portalocker.LockedFile(filename, 'rb')
storage = pickle.load(fp)
finally:
if fp:
fp.close()
return Storage(storage)
def save_storage(storage, filename):
fp = None
try:
fp = portalocker.LockedFile(filename, 'wb')
pickle.dump(dict(storage), fp)
finally:
if fp:
fp.close()
class Settings(Storage):
def __setattr__(self, key, value):
if key != 'lock_keys' and self['lock_keys'] and key not in self:
raise SyntaxError('setting key \'%s\' does not exist' % key)
if key != 'lock_values' and self['lock_values']:
raise SyntaxError('setting value cannot be changed: %s' % key)
self[key] = value
class Messages(Settings):
def __init__(self, T):
Storage.__init__(self, T=T)
def __getattr__(self, key):
value = self[key]
if isinstance(value, str):
return self.T(value)
return value
class FastStorage(dict):
"""
Eventually this should replace class Storage but causes memory leak
because of http://bugs.python.org/issue1469629
>>> s = FastStorage()
>>> s.a = 1
>>> s.a
1
>>> s['a']
1
>>> s.b
>>> s['b']
>>> s['b']=2
>>> s['b']
2
>>> s.b
2
>>> isinstance(s,dict)
True
>>> dict(s)
{'a': 1, 'b': 2}
>>> dict(FastStorage(s))
{'a': 1, 'b': 2}
>>> import pickle
>>> s = pickle.loads(pickle.dumps(s))
>>> dict(s)
{'a': 1, 'b': 2}
>>> del s.b
>>> del s.a
>>> s.a
>>> s.b
>>> s['a']
>>> s['b']
"""
def __init__(self, *args, **kwargs):
dict.__init__(self, *args, **kwargs)
self.__dict__ = self
def __getattr__(self, key):
return getattr(self, key) if key in self else None
def __getitem__(self, key):
return dict.get(self, key, None)
def copy(self):
self.__dict__ = {}
s = FastStorage(self)
self.__dict__ = self
return s
def __repr__(self):
return '<Storage %s>' % dict.__repr__(self)
def __getstate__(self):
return dict(self)
def __setstate__(self, sdict):
dict.__init__(self, sdict)
self.__dict__ = self
def update(self, *args, **kwargs):
dict.__init__(self, *args, **kwargs)
self.__dict__ = self
class List(list):
"""
Like a regular python list but a[i] if i is out of bounds returns None
instead of `IndexOutOfBounds`
"""
def __call__(self, i, default=DEFAULT, cast=None, otherwise=None):
"""Allows to use a special syntax for fast-check of `request.args()`
validity
Args:
i: index
default: use this value if arg not found
cast: type cast
otherwise: can be:
- None: results in a 404
- str: redirect to this address
- callable: calls the function (nothing is passed)
Example:
You can use::
request.args(0,default=0,cast=int,otherwise='http://error_url')
request.args(0,default=0,cast=int,otherwise=lambda:...)
"""
n = len(self)
if 0 <= i < n or -n <= i < 0:
value = self[i]
elif default is DEFAULT:
value = None
else:
value, cast = default, False
if cast:
try:
value = cast(value)
except (ValueError, TypeError):
from http import HTTP, redirect
if otherwise is None:
raise HTTP(404)
elif isinstance(otherwise, str):
redirect(otherwise)
elif callable(otherwise):
return otherwise()
else:
raise RuntimeError("invalid otherwise")
return value
if __name__ == '__main__':
import doctest
doctest.testmod()
| """
Returns a Storage value as a list.
If the value is a list it will be returned as-is.
If object is None, an empty list will be returned.
Otherwise, `[value]` will be returned.
Example output for a query string of `?x=abc&y=abc&y=def`::
>>> request = Storage()
>>> request.vars = Storage()
>>> request.vars.x = 'abc'
>>> request.vars.y = ['abc', 'def']
>>> request.vars.getlist('x')
['abc']
>>> request.vars.getlist('y')
['abc', 'def']
>>> request.vars.getlist('z')
[]
"""
value = self.get(key, [])
if value is None or isinstance(value, (list, tuple)):
return value
else:
return [value] | identifier_body |
storage.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
| This file is part of the web2py Web Framework
| Copyrighted by Massimo Di Pierro <[email protected]>
| License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
Provides:
- List; like list but returns None instead of IndexOutOfBounds
- Storage; like dictionary allowing also for `obj.foo` for `obj['foo']`
"""
try:
import cPickle as pickle
except:
import pickle
import copy_reg
import gluon.portalocker as portalocker
__all__ = ['List', 'Storage', 'Settings', 'Messages',
'StorageList', 'load_storage', 'save_storage']
DEFAULT = lambda:0
class Storage(dict):
"""
A Storage object is like a dictionary except `obj.foo` can be used
in addition to `obj['foo']`, and setting obj.foo = None deletes item foo.
Example::
>>> o = Storage(a=1)
>>> print o.a
1
>>> o['a']
1
>>> o.a = 2
>>> print o['a']
2
>>> del o.a
>>> print o.a
None
"""
__slots__ = ()
__setattr__ = dict.__setitem__
__delattr__ = dict.__delitem__
__getitem__ = dict.get
__getattr__ = dict.get
__getnewargs__ = lambda self: getattr(dict,self).__getnewargs__(self)
__repr__ = lambda self: '<Storage %s>' % dict.__repr__(self)
# http://stackoverflow.com/questions/5247250/why-does-pickle-getstate-accept-as-a-return-value-the-very-instance-it-requi
__getstate__ = lambda self: None
__copy__ = lambda self: Storage(self)
def getlist(self, key):
"""
Returns a Storage value as a list.
If the value is a list it will be returned as-is.
If object is None, an empty list will be returned.
Otherwise, `[value]` will be returned.
Example output for a query string of `?x=abc&y=abc&y=def`::
>>> request = Storage()
>>> request.vars = Storage()
>>> request.vars.x = 'abc'
>>> request.vars.y = ['abc', 'def']
>>> request.vars.getlist('x')
['abc']
>>> request.vars.getlist('y')
['abc', 'def']
>>> request.vars.getlist('z')
[]
"""
value = self.get(key, [])
if value is None or isinstance(value, (list, tuple)):
return value
else:
return [value]
def getfirst(self, key, default=None):
"""
Returns the first value of a list or the value itself when given a
`request.vars` style key.
If the value is a list, its first item will be returned;
otherwise, the value will be returned as-is.
Example output for a query string of `?x=abc&y=abc&y=def`::
>>> request = Storage()
>>> request.vars = Storage()
>>> request.vars.x = 'abc'
>>> request.vars.y = ['abc', 'def']
>>> request.vars.getfirst('x')
'abc'
>>> request.vars.getfirst('y')
'abc'
>>> request.vars.getfirst('z')
"""
values = self.getlist(key)
return values[0] if values else default
def getlast(self, key, default=None):
"""
Returns the last value of a list or value itself when given a
`request.vars` style key.
If the value is a list, the last item will be returned;
otherwise, the value will be returned as-is.
Simulated output with a query string of `?x=abc&y=abc&y=def`::
>>> request = Storage()
>>> request.vars = Storage()
>>> request.vars.x = 'abc'
>>> request.vars.y = ['abc', 'def']
>>> request.vars.getlast('x')
'abc'
>>> request.vars.getlast('y')
'def'
>>> request.vars.getlast('z')
"""
values = self.getlist(key)
return values[-1] if values else default
def pickle_storage(s):
return Storage, (dict(s),)
copy_reg.pickle(Storage, pickle_storage)
PICKABLE = (str, int, long, float, bool, list, dict, tuple, set)
class StorageList(Storage):
"""
Behaves like Storage but missing elements defaults to [] instead of None
"""
def __getitem__(self, key):
return self.__getattr__(key)
def __getattr__(self, key):
if key in self:
return self.get(key)
else:
r = []
self[key] = r
return r
def load_storage(filename):
fp = None
try:
fp = portalocker.LockedFile(filename, 'rb')
storage = pickle.load(fp)
finally:
if fp:
fp.close()
return Storage(storage)
def save_storage(storage, filename):
fp = None
try:
fp = portalocker.LockedFile(filename, 'wb')
pickle.dump(dict(storage), fp)
finally:
if fp:
fp.close()
class Settings(Storage):
def __setattr__(self, key, value):
if key != 'lock_keys' and self['lock_keys'] and key not in self:
raise SyntaxError('setting key \'%s\' does not exist' % key)
if key != 'lock_values' and self['lock_values']:
raise SyntaxError('setting value cannot be changed: %s' % key)
self[key] = value
class Messages(Settings):
def __init__(self, T):
Storage.__init__(self, T=T)
def __getattr__(self, key):
value = self[key]
if isinstance(value, str):
return self.T(value)
return value
class FastStorage(dict):
"""
Eventually this should replace class Storage but causes memory leak
because of http://bugs.python.org/issue1469629
>>> s = FastStorage()
>>> s.a = 1
>>> s.a
1
>>> s['a']
1
>>> s.b
>>> s['b']
>>> s['b']=2
>>> s['b']
2
>>> s.b
2
>>> isinstance(s,dict)
True
>>> dict(s)
{'a': 1, 'b': 2}
>>> dict(FastStorage(s))
{'a': 1, 'b': 2}
>>> import pickle
>>> s = pickle.loads(pickle.dumps(s))
>>> dict(s)
{'a': 1, 'b': 2}
>>> del s.b
>>> del s.a
>>> s.a
>>> s.b
>>> s['a']
>>> s['b']
"""
def | (self, *args, **kwargs):
dict.__init__(self, *args, **kwargs)
self.__dict__ = self
def __getattr__(self, key):
return getattr(self, key) if key in self else None
def __getitem__(self, key):
return dict.get(self, key, None)
def copy(self):
self.__dict__ = {}
s = FastStorage(self)
self.__dict__ = self
return s
def __repr__(self):
return '<Storage %s>' % dict.__repr__(self)
def __getstate__(self):
return dict(self)
def __setstate__(self, sdict):
dict.__init__(self, sdict)
self.__dict__ = self
def update(self, *args, **kwargs):
dict.__init__(self, *args, **kwargs)
self.__dict__ = self
class List(list):
"""
Like a regular python list but a[i] if i is out of bounds returns None
instead of `IndexOutOfBounds`
"""
def __call__(self, i, default=DEFAULT, cast=None, otherwise=None):
"""Allows to use a special syntax for fast-check of `request.args()`
validity
Args:
i: index
default: use this value if arg not found
cast: type cast
otherwise: can be:
- None: results in a 404
- str: redirect to this address
- callable: calls the function (nothing is passed)
Example:
You can use::
request.args(0,default=0,cast=int,otherwise='http://error_url')
request.args(0,default=0,cast=int,otherwise=lambda:...)
"""
n = len(self)
if 0 <= i < n or -n <= i < 0:
value = self[i]
elif default is DEFAULT:
value = None
else:
value, cast = default, False
if cast:
try:
value = cast(value)
except (ValueError, TypeError):
from http import HTTP, redirect
if otherwise is None:
raise HTTP(404)
elif isinstance(otherwise, str):
redirect(otherwise)
elif callable(otherwise):
return otherwise()
else:
raise RuntimeError("invalid otherwise")
return value
if __name__ == '__main__':
import doctest
doctest.testmod()
| __init__ | identifier_name |
f10-read-timeout.rs | /// Figure 10.10: Calling read with a timeout
///
/// Takeaway: First I tried with the regular `signal` function of libc
/// only to find out that the alarm signal does not interrupt the read
/// call. Digging into the C code it got obvious that the signal function
/// gets overriden by `lib/signal.c` which is a "reliable version of signal(),
/// using POSIX sigaction()". But this function gets only introduced in
/// Figure 10.18. This was quite misleading IMO.
///
/// $ f10-read-timeout 2>&1
/// read error!
/// ERROR: return code 1
extern crate libc;
#[macro_use(as_void)]
extern crate apue;
use libc::{STDOUT_FILENO, STDIN_FILENO, SIGALRM, SIG_ERR, c_int};
use libc::{alarm, write, read, exit};
use apue::signal;
const MAXLINE: usize = 4096;
fn sig_alrm(_: c_int) {
// nothing to do, just return to interrupt the read
}
fn main() {
unsafe {
let line: [u8; MAXLINE] = std::mem::uninitialized();
if signal(SIGALRM, sig_alrm) == SIG_ERR {
panic!("signal(SIGALRM) error");
}
alarm(1);
let n = read(STDIN_FILENO, as_void!(line), MAXLINE);
if n < 0 |
alarm(0);
write(STDOUT_FILENO, as_void!(line), n as _);
}
}
| {
println!("read error!");
exit(1);
} | conditional_block |
f10-read-timeout.rs | /// Figure 10.10: Calling read with a timeout
///
/// Takeaway: First I tried with the regular `signal` function of libc
/// only to find out that the alarm signal does not interrupt the read
/// call. Digging into the C code it got obvious that the signal function | /// using POSIX sigaction()". But this function gets only introduced in
/// Figure 10.18. This was quite misleading IMO.
///
/// $ f10-read-timeout 2>&1
/// read error!
/// ERROR: return code 1
extern crate libc;
#[macro_use(as_void)]
extern crate apue;
use libc::{STDOUT_FILENO, STDIN_FILENO, SIGALRM, SIG_ERR, c_int};
use libc::{alarm, write, read, exit};
use apue::signal;
const MAXLINE: usize = 4096;
fn sig_alrm(_: c_int) {
// nothing to do, just return to interrupt the read
}
fn main() {
unsafe {
let line: [u8; MAXLINE] = std::mem::uninitialized();
if signal(SIGALRM, sig_alrm) == SIG_ERR {
panic!("signal(SIGALRM) error");
}
alarm(1);
let n = read(STDIN_FILENO, as_void!(line), MAXLINE);
if n < 0 {
println!("read error!");
exit(1);
}
alarm(0);
write(STDOUT_FILENO, as_void!(line), n as _);
}
} | /// gets overriden by `lib/signal.c` which is a "reliable version of signal(), | random_line_split |
f10-read-timeout.rs | /// Figure 10.10: Calling read with a timeout
///
/// Takeaway: First I tried with the regular `signal` function of libc
/// only to find out that the alarm signal does not interrupt the read
/// call. Digging into the C code it got obvious that the signal function
/// gets overriden by `lib/signal.c` which is a "reliable version of signal(),
/// using POSIX sigaction()". But this function gets only introduced in
/// Figure 10.18. This was quite misleading IMO.
///
/// $ f10-read-timeout 2>&1
/// read error!
/// ERROR: return code 1
extern crate libc;
#[macro_use(as_void)]
extern crate apue;
use libc::{STDOUT_FILENO, STDIN_FILENO, SIGALRM, SIG_ERR, c_int};
use libc::{alarm, write, read, exit};
use apue::signal;
const MAXLINE: usize = 4096;
fn sig_alrm(_: c_int) {
// nothing to do, just return to interrupt the read
}
fn main() | {
unsafe {
let line: [u8; MAXLINE] = std::mem::uninitialized();
if signal(SIGALRM, sig_alrm) == SIG_ERR {
panic!("signal(SIGALRM) error");
}
alarm(1);
let n = read(STDIN_FILENO, as_void!(line), MAXLINE);
if n < 0 {
println!("read error!");
exit(1);
}
alarm(0);
write(STDOUT_FILENO, as_void!(line), n as _);
}
} | identifier_body |
|
f10-read-timeout.rs | /// Figure 10.10: Calling read with a timeout
///
/// Takeaway: First I tried with the regular `signal` function of libc
/// only to find out that the alarm signal does not interrupt the read
/// call. Digging into the C code it got obvious that the signal function
/// gets overriden by `lib/signal.c` which is a "reliable version of signal(),
/// using POSIX sigaction()". But this function gets only introduced in
/// Figure 10.18. This was quite misleading IMO.
///
/// $ f10-read-timeout 2>&1
/// read error!
/// ERROR: return code 1
extern crate libc;
#[macro_use(as_void)]
extern crate apue;
use libc::{STDOUT_FILENO, STDIN_FILENO, SIGALRM, SIG_ERR, c_int};
use libc::{alarm, write, read, exit};
use apue::signal;
const MAXLINE: usize = 4096;
fn sig_alrm(_: c_int) {
// nothing to do, just return to interrupt the read
}
fn | () {
unsafe {
let line: [u8; MAXLINE] = std::mem::uninitialized();
if signal(SIGALRM, sig_alrm) == SIG_ERR {
panic!("signal(SIGALRM) error");
}
alarm(1);
let n = read(STDIN_FILENO, as_void!(line), MAXLINE);
if n < 0 {
println!("read error!");
exit(1);
}
alarm(0);
write(STDOUT_FILENO, as_void!(line), n as _);
}
}
| main | identifier_name |
lookups.rs | /// this is a table lookup for all "flush" hands (e.g. both
/// flushes and straight-flushes. entries containing a zero
/// mean that combination is not possible with a five-card
/// flush hand.
pub const FLUSHES : [u16; 7937] = include!("snip/flushes.snip");
/// this is a table lookup for all non-flush hands consisting
/// of five unique ranks (i.e. either Straights or High Card
/// hands). it's similar to the above "flushes" array.
pub const UNIQUE_5 : [u16; 7937] = include!("snip/unique5.snip");
/// those two arrays are needed for original evaluator version
pub const PRODUCTS : [u32; 4888] = include!("snip/products.snip");
pub const VALUES : [u16; 4888] = include!("snip/values.snip");
/// primes associated with card values
pub const PRIMES: [u8; 13] = [ 2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41 ];
/// permutations of 5 cards from 7, to evaluate a hand + table cards with a 5-card algorithm
pub const PERM_7 : [[u8; 5]; 21] = [
[ 0, 1, 2, 3, 4 ],
[ 0, 1, 2, 3, 5 ], | [ 0, 1, 2, 4, 5 ],
[ 0, 1, 2, 4, 6 ],
[ 0, 1, 2, 5, 6 ],
[ 0, 1, 3, 4, 5 ],
[ 0, 1, 3, 4, 6 ],
[ 0, 1, 3, 5, 6 ],
[ 0, 1, 4, 5, 6 ],
[ 0, 2, 3, 4, 5 ],
[ 0, 2, 3, 4, 6 ],
[ 0, 2, 3, 5, 6 ],
[ 0, 2, 4, 5, 6 ],
[ 0, 3, 4, 5, 6 ],
[ 1, 2, 3, 4, 5 ],
[ 1, 2, 3, 4, 6 ],
[ 1, 2, 3, 5, 6 ],
[ 1, 2, 4, 5, 6 ],
[ 1, 3, 4, 5, 6 ],
[ 2, 3, 4, 5, 6 ]
];
/// permutations to evaluate all 6 card combinations.
pub const PERM_6 : [[u8; 5]; 6] = [
[ 0, 1, 2, 3, 4 ],
[ 0, 1, 2, 3, 5 ],
[ 0, 1, 2, 4, 5 ],
[ 0, 1, 3, 4, 5 ],
[ 0, 2, 3, 4, 5 ],
[ 1, 2, 3, 4, 5 ],
];
// perfect hash specific lookups
#[allow(dead_code)]
pub const HASH_VALUES: [u16; 8192] = include!("snip/hash_values.snip");
#[allow(dead_code)]
pub const HASH_ADJUST: [u16; 512] = include!("snip/hash_adjust.snip"); | [ 0, 1, 2, 3, 6 ], | random_line_split |
test.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
pub use crate::dom::bindings::str::{ByteString, DOMString};
pub use crate::dom::headers::normalize_value;
// For compile-fail tests only.
pub use crate::dom::bindings::cell::DomRefCell;
pub use crate::dom::bindings::refcounted::TrustedPromise;
pub use crate::dom::bindings::root::Dom;
pub use crate::dom::node::Node;
pub mod area {
pub use crate::dom::htmlareaelement::{Area, Shape};
}
pub mod size_of {
use crate::dom::characterdata::CharacterData;
use crate::dom::element::Element;
use crate::dom::eventtarget::EventTarget;
use crate::dom::htmldivelement::HTMLDivElement;
use crate::dom::htmlelement::HTMLElement;
use crate::dom::htmlspanelement::HTMLSpanElement;
use crate::dom::node::Node;
use crate::dom::text::Text;
use std::mem::size_of;
pub fn CharacterData() -> usize {
size_of::<CharacterData>()
}
pub fn Element() -> usize {
size_of::<Element>()
}
pub fn EventTarget() -> usize {
size_of::<EventTarget>()
}
pub fn HTMLDivElement() -> usize {
size_of::<HTMLDivElement>()
}
pub fn HTMLElement() -> usize {
size_of::<HTMLElement>()
}
| }
pub fn Node() -> usize {
size_of::<Node>()
}
pub fn Text() -> usize {
size_of::<Text>()
}
}
pub mod srcset {
pub use crate::dom::htmlimageelement::{parse_a_srcset_attribute, Descriptor, ImageSource};
}
pub mod timeranges {
pub use crate::dom::timeranges::TimeRangesContainer;
} | pub fn HTMLSpanElement() -> usize {
size_of::<HTMLSpanElement>() | random_line_split |
test.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
pub use crate::dom::bindings::str::{ByteString, DOMString};
pub use crate::dom::headers::normalize_value;
// For compile-fail tests only.
pub use crate::dom::bindings::cell::DomRefCell;
pub use crate::dom::bindings::refcounted::TrustedPromise;
pub use crate::dom::bindings::root::Dom;
pub use crate::dom::node::Node;
pub mod area {
pub use crate::dom::htmlareaelement::{Area, Shape};
}
pub mod size_of {
use crate::dom::characterdata::CharacterData;
use crate::dom::element::Element;
use crate::dom::eventtarget::EventTarget;
use crate::dom::htmldivelement::HTMLDivElement;
use crate::dom::htmlelement::HTMLElement;
use crate::dom::htmlspanelement::HTMLSpanElement;
use crate::dom::node::Node;
use crate::dom::text::Text;
use std::mem::size_of;
pub fn CharacterData() -> usize {
size_of::<CharacterData>()
}
pub fn Element() -> usize {
size_of::<Element>()
}
pub fn EventTarget() -> usize {
size_of::<EventTarget>()
}
pub fn HTMLDivElement() -> usize {
size_of::<HTMLDivElement>()
}
pub fn HTMLElement() -> usize {
size_of::<HTMLElement>()
}
pub fn | () -> usize {
size_of::<HTMLSpanElement>()
}
pub fn Node() -> usize {
size_of::<Node>()
}
pub fn Text() -> usize {
size_of::<Text>()
}
}
pub mod srcset {
pub use crate::dom::htmlimageelement::{parse_a_srcset_attribute, Descriptor, ImageSource};
}
pub mod timeranges {
pub use crate::dom::timeranges::TimeRangesContainer;
}
| HTMLSpanElement | identifier_name |
test.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
pub use crate::dom::bindings::str::{ByteString, DOMString};
pub use crate::dom::headers::normalize_value;
// For compile-fail tests only.
pub use crate::dom::bindings::cell::DomRefCell;
pub use crate::dom::bindings::refcounted::TrustedPromise;
pub use crate::dom::bindings::root::Dom;
pub use crate::dom::node::Node;
pub mod area {
pub use crate::dom::htmlareaelement::{Area, Shape};
}
pub mod size_of {
use crate::dom::characterdata::CharacterData;
use crate::dom::element::Element;
use crate::dom::eventtarget::EventTarget;
use crate::dom::htmldivelement::HTMLDivElement;
use crate::dom::htmlelement::HTMLElement;
use crate::dom::htmlspanelement::HTMLSpanElement;
use crate::dom::node::Node;
use crate::dom::text::Text;
use std::mem::size_of;
pub fn CharacterData() -> usize {
size_of::<CharacterData>()
}
pub fn Element() -> usize {
size_of::<Element>()
}
pub fn EventTarget() -> usize {
size_of::<EventTarget>()
}
pub fn HTMLDivElement() -> usize {
size_of::<HTMLDivElement>()
}
pub fn HTMLElement() -> usize {
size_of::<HTMLElement>()
}
pub fn HTMLSpanElement() -> usize {
size_of::<HTMLSpanElement>()
}
pub fn Node() -> usize {
size_of::<Node>()
}
pub fn Text() -> usize |
}
pub mod srcset {
pub use crate::dom::htmlimageelement::{parse_a_srcset_attribute, Descriptor, ImageSource};
}
pub mod timeranges {
pub use crate::dom::timeranges::TimeRangesContainer;
}
| {
size_of::<Text>()
} | identifier_body |
k8s_io_apimachinery_pkg_apis_meta_v1_root_paths.py | # coding: utf-8
"""
KubeVirt API
This is KubeVirt API an add-on for Kubernetes.
OpenAPI spec version: 1.0.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class K8sIoApimachineryPkgApisMetaV1RootPaths(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'paths': 'list[str]'
}
attribute_map = {
'paths': 'paths'
}
def __init__(self, paths=None):
"""
K8sIoApimachineryPkgApisMetaV1RootPaths - a model defined in Swagger
"""
self._paths = None
self.paths = paths
@property
def paths(self):
"""
Gets the paths of this K8sIoApimachineryPkgApisMetaV1RootPaths.
paths are the paths available at root.
:return: The paths of this K8sIoApimachineryPkgApisMetaV1RootPaths.
:rtype: list[str]
"""
return self._paths
@paths.setter
def paths(self, paths):
"""
Sets the paths of this K8sIoApimachineryPkgApisMetaV1RootPaths.
paths are the paths available at root.
:param paths: The paths of this K8sIoApimachineryPkgApisMetaV1RootPaths.
:type: list[str]
"""
if paths is None:
raise ValueError("Invalid value for `paths`, must not be `None`")
self._paths = paths
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
|
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, K8sIoApimachineryPkgApisMetaV1RootPaths):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
)) | conditional_block |
k8s_io_apimachinery_pkg_apis_meta_v1_root_paths.py | # coding: utf-8
"""
KubeVirt API
This is KubeVirt API an add-on for Kubernetes.
OpenAPI spec version: 1.0.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class K8sIoApimachineryPkgApisMetaV1RootPaths(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type. | """
swagger_types = {
'paths': 'list[str]'
}
attribute_map = {
'paths': 'paths'
}
def __init__(self, paths=None):
"""
K8sIoApimachineryPkgApisMetaV1RootPaths - a model defined in Swagger
"""
self._paths = None
self.paths = paths
@property
def paths(self):
"""
Gets the paths of this K8sIoApimachineryPkgApisMetaV1RootPaths.
paths are the paths available at root.
:return: The paths of this K8sIoApimachineryPkgApisMetaV1RootPaths.
:rtype: list[str]
"""
return self._paths
@paths.setter
def paths(self, paths):
"""
Sets the paths of this K8sIoApimachineryPkgApisMetaV1RootPaths.
paths are the paths available at root.
:param paths: The paths of this K8sIoApimachineryPkgApisMetaV1RootPaths.
:type: list[str]
"""
if paths is None:
raise ValueError("Invalid value for `paths`, must not be `None`")
self._paths = paths
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, K8sIoApimachineryPkgApisMetaV1RootPaths):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other | attribute_map (dict): The key is attribute name
and the value is json key in definition. | random_line_split |
k8s_io_apimachinery_pkg_apis_meta_v1_root_paths.py | # coding: utf-8
"""
KubeVirt API
This is KubeVirt API an add-on for Kubernetes.
OpenAPI spec version: 1.0.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class K8sIoApimachineryPkgApisMetaV1RootPaths(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'paths': 'list[str]'
}
attribute_map = {
'paths': 'paths'
}
def __init__(self, paths=None):
|
@property
def paths(self):
"""
Gets the paths of this K8sIoApimachineryPkgApisMetaV1RootPaths.
paths are the paths available at root.
:return: The paths of this K8sIoApimachineryPkgApisMetaV1RootPaths.
:rtype: list[str]
"""
return self._paths
@paths.setter
def paths(self, paths):
"""
Sets the paths of this K8sIoApimachineryPkgApisMetaV1RootPaths.
paths are the paths available at root.
:param paths: The paths of this K8sIoApimachineryPkgApisMetaV1RootPaths.
:type: list[str]
"""
if paths is None:
raise ValueError("Invalid value for `paths`, must not be `None`")
self._paths = paths
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, K8sIoApimachineryPkgApisMetaV1RootPaths):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| """
K8sIoApimachineryPkgApisMetaV1RootPaths - a model defined in Swagger
"""
self._paths = None
self.paths = paths | identifier_body |
k8s_io_apimachinery_pkg_apis_meta_v1_root_paths.py | # coding: utf-8
"""
KubeVirt API
This is KubeVirt API an add-on for Kubernetes.
OpenAPI spec version: 1.0.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class K8sIoApimachineryPkgApisMetaV1RootPaths(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'paths': 'list[str]'
}
attribute_map = {
'paths': 'paths'
}
def __init__(self, paths=None):
"""
K8sIoApimachineryPkgApisMetaV1RootPaths - a model defined in Swagger
"""
self._paths = None
self.paths = paths
@property
def paths(self):
"""
Gets the paths of this K8sIoApimachineryPkgApisMetaV1RootPaths.
paths are the paths available at root.
:return: The paths of this K8sIoApimachineryPkgApisMetaV1RootPaths.
:rtype: list[str]
"""
return self._paths
@paths.setter
def paths(self, paths):
"""
Sets the paths of this K8sIoApimachineryPkgApisMetaV1RootPaths.
paths are the paths available at root.
:param paths: The paths of this K8sIoApimachineryPkgApisMetaV1RootPaths.
:type: list[str]
"""
if paths is None:
raise ValueError("Invalid value for `paths`, must not be `None`")
self._paths = paths
def | (self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, K8sIoApimachineryPkgApisMetaV1RootPaths):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| to_dict | identifier_name |
ReportController.js | angular.module('ReportModule', ['ngRoute']).
// config(['$locationProvider', function($locationProvider) {
// $locationProvider.html5Mode(true);
// }]).
controller('ReportController', ['$scope', '$http', '$location', function($scope, $http, $location) {
$scope.leasing = [];
$scope.service = [];
$scope.defaulter = [];
// use a range of 6 months
var date = new Date();
var year = date.getFullYear();
var firstMonth = year + '-' + (date.getMonth() - 5) + '-01';
var lastMonth = year + '-' + (date.getMonth() + 1) + '-01';
function initLeasing() {
$http.get('../res/php/reports.php?action=getLeasingReport&firstMonth='+(date.getMonth() - 5)+"&lastMonth="+(date.getMonth() + 1)).
success(function(data) {
$scope.leasing = data;
});
};
function initDefaulters() {
$http.get('../res/php/reports.php?action=getServiceReport&firstMonth='+firstMonth+"&lastMonth="+lastMonth).
success(function(data) {
$scope.service = data;
});
};
function | () {
$http.get('../res/php/reports.php?action=getRentDefaultersReport&selectedMonth='+lastMonth).
success(function(data) {
$scope.defaulter = data;
});
};
initLeasing();
initDefaulters();
initService();
}]); | initService | identifier_name |
ReportController.js | angular.module('ReportModule', ['ngRoute']).
// config(['$locationProvider', function($locationProvider) {
// $locationProvider.html5Mode(true);
// }]).
controller('ReportController', ['$scope', '$http', '$location', function($scope, $http, $location) {
$scope.leasing = [];
$scope.service = [];
$scope.defaulter = [];
// use a range of 6 months
var date = new Date();
var year = date.getFullYear();
var firstMonth = year + '-' + (date.getMonth() - 5) + '-01';
var lastMonth = year + '-' + (date.getMonth() + 1) + '-01';
function initLeasing() | ;
function initDefaulters() {
$http.get('../res/php/reports.php?action=getServiceReport&firstMonth='+firstMonth+"&lastMonth="+lastMonth).
success(function(data) {
$scope.service = data;
});
};
function initService() {
$http.get('../res/php/reports.php?action=getRentDefaultersReport&selectedMonth='+lastMonth).
success(function(data) {
$scope.defaulter = data;
});
};
initLeasing();
initDefaulters();
initService();
}]); | {
$http.get('../res/php/reports.php?action=getLeasingReport&firstMonth='+(date.getMonth() - 5)+"&lastMonth="+(date.getMonth() + 1)).
success(function(data) {
$scope.leasing = data;
});
} | identifier_body |
ReportController.js | angular.module('ReportModule', ['ngRoute']).
// config(['$locationProvider', function($locationProvider) {
// $locationProvider.html5Mode(true);
// }]).
controller('ReportController', ['$scope', '$http', '$location', function($scope, $http, $location) {
$scope.leasing = [];
$scope.service = [];
$scope.defaulter = [];
// use a range of 6 months
var date = new Date();
var year = date.getFullYear();
var firstMonth = year + '-' + (date.getMonth() - 5) + '-01';
var lastMonth = year + '-' + (date.getMonth() + 1) + '-01';
function initLeasing() {
$http.get('../res/php/reports.php?action=getLeasingReport&firstMonth='+(date.getMonth() - 5)+"&lastMonth="+(date.getMonth() + 1)).
success(function(data) {
$scope.leasing = data;
});
};
function initDefaulters() {
$http.get('../res/php/reports.php?action=getServiceReport&firstMonth='+firstMonth+"&lastMonth="+lastMonth).
success(function(data) { |
function initService() {
$http.get('../res/php/reports.php?action=getRentDefaultersReport&selectedMonth='+lastMonth).
success(function(data) {
$scope.defaulter = data;
});
};
initLeasing();
initDefaulters();
initService();
}]); | $scope.service = data;
});
}; | random_line_split |
queue_alt.rs | /*!
Heterogeneous Queue (alternative)
This version is hand-written (no macros) but has a simpler architecture
that allows implicit consumption by deconstruction on assignment.
# Example
```rust
use heterogene::queue_alt::{Q0,Q1,Q2};
let q = ();
let q = q.append(1u);
let q = q.append('c');
let (num, q) = q;
let (ch, q) = q;
println!("Queue-alt: {} {} {}", num, ch, q);
```
*/
pub trait Q0 {
fn append<T1>(self, t1: T1) -> (T1,());
}
impl Q0 for () {
fn append<T1>(self, t1: T1) -> (T1,()) {
(t1,())
}
}
pub trait Q1<T1> {
fn append<T2>(self, t2: T2) -> (T1,(T2,()));
}
impl<T1> Q1<T1> for (T1,()) { | (t1,(t2,()))
}
}
pub trait Q2<T1,T2> {
fn append<T3>(self, t3: T3) -> (T1,(T2,(T3,())));
}
impl<T1,T2> Q2<T1,T2> for (T1,(T2,())) {
fn append<T3>(self, t3: T3) -> (T1,(T2,(T3,()))) {
let(t1,(t2,_)) = self;
(t1,(t2,(t3,())))
}
} | fn append<T2>(self, t2: T2) -> (T1,(T2,())) {
let (t1,_) = self; | random_line_split |
queue_alt.rs | /*!
Heterogeneous Queue (alternative)
This version is hand-written (no macros) but has a simpler architecture
that allows implicit consumption by deconstruction on assignment.
# Example
```rust
use heterogene::queue_alt::{Q0,Q1,Q2};
let q = ();
let q = q.append(1u);
let q = q.append('c');
let (num, q) = q;
let (ch, q) = q;
println!("Queue-alt: {} {} {}", num, ch, q);
```
*/
pub trait Q0 {
fn append<T1>(self, t1: T1) -> (T1,());
}
impl Q0 for () {
fn append<T1>(self, t1: T1) -> (T1,()) {
(t1,())
}
}
pub trait Q1<T1> {
fn append<T2>(self, t2: T2) -> (T1,(T2,()));
}
impl<T1> Q1<T1> for (T1,()) {
fn | <T2>(self, t2: T2) -> (T1,(T2,())) {
let (t1,_) = self;
(t1,(t2,()))
}
}
pub trait Q2<T1,T2> {
fn append<T3>(self, t3: T3) -> (T1,(T2,(T3,())));
}
impl<T1,T2> Q2<T1,T2> for (T1,(T2,())) {
fn append<T3>(self, t3: T3) -> (T1,(T2,(T3,()))) {
let(t1,(t2,_)) = self;
(t1,(t2,(t3,())))
}
}
| append | identifier_name |
queue_alt.rs | /*!
Heterogeneous Queue (alternative)
This version is hand-written (no macros) but has a simpler architecture
that allows implicit consumption by deconstruction on assignment.
# Example
```rust
use heterogene::queue_alt::{Q0,Q1,Q2};
let q = ();
let q = q.append(1u);
let q = q.append('c');
let (num, q) = q;
let (ch, q) = q;
println!("Queue-alt: {} {} {}", num, ch, q);
```
*/
pub trait Q0 {
fn append<T1>(self, t1: T1) -> (T1,());
}
impl Q0 for () {
fn append<T1>(self, t1: T1) -> (T1,()) {
(t1,())
}
}
pub trait Q1<T1> {
fn append<T2>(self, t2: T2) -> (T1,(T2,()));
}
impl<T1> Q1<T1> for (T1,()) {
fn append<T2>(self, t2: T2) -> (T1,(T2,())) |
}
pub trait Q2<T1,T2> {
fn append<T3>(self, t3: T3) -> (T1,(T2,(T3,())));
}
impl<T1,T2> Q2<T1,T2> for (T1,(T2,())) {
fn append<T3>(self, t3: T3) -> (T1,(T2,(T3,()))) {
let(t1,(t2,_)) = self;
(t1,(t2,(t3,())))
}
}
| {
let (t1,_) = self;
(t1,(t2,()))
} | identifier_body |
CutterPI.js | /**
* OpenEyes
*
* (C) Moorfields Eye Hospital NHS Foundation Trust, 2008-2011
* (C) OpenEyes Foundation, 2011-2013
* This file is part of OpenEyes.
* OpenEyes is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
* OpenEyes is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
* You should have received a copy of the GNU General Public License along with OpenEyes in a file titled COPYING. If not, see <http://www.gnu.org/licenses/>.
*
* @package OpenEyes
* @link http://www.openeyes.org.uk
* @author OpenEyes <[email protected]>
* @copyright Copyright (c) 2008-2011, Moorfields Eye Hospital NHS Foundation Trust
* @copyright Copyright (c) 2011-2013, OpenEyes Foundation
* @license http://www.gnu.org/licenses/gpl-3.0.html The GNU General Public License V3.0
*/
/**
* Cutter Peripheral iridectomy | * @class CutterPI
* @property {String} className Name of doodle subclass
* @param {Drawing} _drawing
* @param {Object} _parameterJSON
*/
ED.CutterPI = function(_drawing, _parameterJSON) {
// Set classname
this.className = "CutterPI";
// Saved parameters
this.savedParameterArray = ['rotation'];
// Call superclass constructor
ED.Doodle.call(this, _drawing, _parameterJSON);
}
/**
* Sets superclass and constructor
*/
ED.CutterPI.prototype = new ED.Doodle;
ED.CutterPI.prototype.constructor = ED.CutterPI;
ED.CutterPI.superclass = ED.Doodle.prototype;
/**
* Sets default properties
*/
ED.CutterPI.prototype.setPropertyDefaults = function() {
this.isScaleable = false;
this.isMoveable = false;
}
/**
* Sets default parameters
*/
ED.CutterPI.prototype.setParameterDefaults = function() {
this.setRotationWithDisplacements(160, 40);
}
/**
* Draws doodle or performs a hit test if a Point parameter is passed
*
* @param {Point} _point Optional point in canvas plane, passed if performing hit test
*/
ED.CutterPI.prototype.draw = function(_point) {
// Get context
var ctx = this.drawing.context;
// Call draw method in superclass
ED.CutterPI.superclass.draw.call(this, _point);
// Boundary path
ctx.beginPath();
// Draw base
ctx.arc(0, -324, 40, 0, 2 * Math.PI, true);
// Colour of fill
ctx.fillStyle = "rgba(255,255,255,1)";
// Set line attributes
ctx.lineWidth = 4;
// Colour of outer line is dark gray
ctx.strokeStyle = "rgba(120,120,120,0.75)";;
// Draw boundary path (also hit testing)
this.drawBoundary(_point);
// Return value indicating successful hittest
return this.isClicked;
}
/**
* Returns a string containing a text description of the doodle
*
* @returns {String} Description of doodle
*/
ED.CutterPI.prototype.groupDescription = function() {
return "Cutter iridectomy/s";
} | * | random_line_split |
transaction_map.rs | use std::collections::HashMap;
use std::collections::hash_map::Entry::{Occupied,Vacant};
use primitive::{UInt256,Transaction};
#[derive(PartialEq)]
pub enum TransactionIndexStatus {
Init = 0,
Get = 1,
}
pub struct TransactionIndex {
status: TransactionIndexStatus,
hash: UInt256,
transaction: Option<Transaction>,
waiters: Vec<UInt256>,
}
impl TransactionIndex {
pub fn new(hash: &UInt256) -> TransactionIndex {
TransactionIndex {
status: TransactionIndexStatus::Init,
hash: hash.clone(),
transaction: None,
waiters: Vec::new(),
}
}
pub fn is_init(&self) -> bool { self.status == TransactionIndexStatus::Init } | pub fn get_hash(&self) -> &UInt256 { &self.hash }
pub fn get_transaction(&self) -> &Option<Transaction> { &self.transaction }
pub fn set_transaction(&mut self, transaction: Transaction) {
self.transaction = Some(transaction);
self.status = TransactionIndexStatus::Get;
}
pub fn add_waiter(&mut self, next: UInt256) {
self.waiters.push(next);
}
pub fn move_waiters(&mut self, v:&mut Vec<UInt256>) {
v.append(&mut self.waiters);
}
}
#[derive(Default)]
pub struct TransactionMap {
map: HashMap< UInt256, TransactionIndex >,
}
impl TransactionMap {
pub fn get(&self, hash: &UInt256) -> Option<&TransactionIndex> {
self.map.get(hash)
}
pub fn get_mut(&mut self, hash: &UInt256) -> Option<&mut TransactionIndex> {
self.map.get_mut(hash)
}
pub fn insert(&mut self, hash: &UInt256) -> Result<&mut TransactionIndex, &mut TransactionIndex> {
match self.map.entry(hash.clone()) {
Vacant(v) => Ok(v.insert(TransactionIndex::new(hash))),
Occupied(o) => Err(o.into_mut())
}
}
} | random_line_split |