prompt (large_string, lengths 70 to 991k) | completion (large_string, lengths 0 to 1.02k) |
---|---|
<|file_name|>timelion_vis_params_controller.js<|end_file_name|><|fim▁begin|>define(function (require) {<|fim▁hole|> require('plugins/timelion/directives/expression_directive');
const module = require('ui/modules').get('kibana/timelion_vis', ['kibana']);
module.controller('TimelionVisParamsController', function ($scope, $rootScope) {
$scope.vis.params.expression = $scope.vis.params.expression || '.es(*)';
$scope.vis.params.interval = $scope.vis.params.interval || '1m';
$scope.search = function () {
$rootScope.$broadcast('courier:searchRefresh');
};
});
});<|fim▁end|> | |
<|file_name|>hello.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | from helper import greeting
greeting("hello world...") |
<|file_name|>TextDealInterfaceService.java<|end_file_name|><|fim▁begin|>package com.jeecg.qywx.core.service;
import com.jeecg.qywx.base.entity.QywxReceivetext;
/**
* Text handling interface
* @author 付明星
*
*/
public interface TextDealInterfaceService {
/**
* Handles a text message.
* @param receiveText the text message entity
*/
void dealTextMessage(QywxReceivetext receiveText);<|fim▁hole|>}<|fim▁end|> | |
<|file_name|>BinarySearch.java<|end_file_name|><|fim▁begin|><|fim▁hole|>public class BinarySearch {
public static int binarySearch(int arr[], int num, int startIndex,
int endIndex) {
if (startIndex > endIndex) {
return -1;
}
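// Overflow-safe midpoint: the naive (startIndex + endIndex) / 2 can overflow int for very large indices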
int mid = startIndex + (endIndex - startIndex) / 2;
if (num == arr[mid]) {
return mid;
} else if (num > arr[mid]) {
return binarySearch(arr, num, mid + 1, endIndex);
} else {
return binarySearch(arr, num, startIndex, mid - 1);
}
}
public static void main(String[] args) {
Scanner s = new Scanner(System.in);
int size = s.nextInt();
int[] arr = new int[size];
for (int i = 0; i < arr.length; i++) {
arr[i] = s.nextInt();
}
int num = s.nextInt();
int position = binarySearch(arr, num, 0, size - 1);
if (position == -1) {
System.out.println("The number is not present in the array");
} else {
System.out.println("The position of number in array is : "
+ position);
}
s.close();
}
}<|fim▁end|> | import java.util.Scanner;
|
<|file_name|>network_process.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: UTF-8 -*-
'''
Created on Jan 17, 2017
@author: hegxiten
'''
import sys
import geo.haversine as haversine
from imposm.parser import OSMParser
import numpy
import time
from scipy import spatial
import csv
import codecs
import math
default_encoding='utf-8'
if sys.getdefaultencoding()!=default_encoding:
reload(sys)
sys.setdefaultencoding(default_encoding)
PI=3.14159265359
def process_network(FOLDER,FILE,CONCURRENCYVAL,GLOBALROUNDINGDIGITS):
stations={}
'''stations[station_node_osmid]=[name, lat, lon]'''
refnodes_index_dict={}
'''refnodes_index_dict[nodeid]=listindex_of_nodeid'''
refnodes=[]
'''refnodes=[nodeid1,nodeid2,nodeid3...]'''
refnodes_coord_list=[]
'''refnodes_coord_list[coord1,coord2,coord3...]'''
node_fromto_dict={}
'''node_fromto_dict[fromnode]=[(fromnode,tonode1),(fromnode,tonode2),(fromnode,tonode3)...]'''
distance_mapper={}
'''distance_mapper[(fromnode,tonode)]=distance'''
attribute_mapper={}
'''attribute_mapper[(fromnode,tonode)]=attribute_dictionary'''
midpoints_coord=[]
'''midpoints_coord=[midpoint_coord1,midpoint_coord2,...]'''
midsegment=[]
approxCoord_map={}
'''approxCoord_map[coord]=nodeid(veryfirstone)'''
refnode_mapper={}
'''refnode_mapper[nodeid2]=nodeid1(previous_nodeid1 with the same coordinate as nodeid2 after digit rounding)'''
edgeDict={}
'''edgeDict[(vertex tuple)]=(edgereflist,edgelength)'''
disconnected_stations=[]
connected_stations=[]
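# Illustrative sketch of how these structures relate (hypothetical ids/coords):
#   refnodes = [101, 102]
#   refnodes_index_dict = {101: 0, 102: 1}
#   refnodes_coord_list = [(40.115, -88.241), (40.120, -88.250)]
#   node_fromto_dict[101] = [(101, 102)]
#   attribute_mapper[(101, 102)] = {'gauge': '1435', 'distance': '0.62', ...}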
def loadstations():
'''Load stations from csv format output'''
startt=time.time()
with codecs.open(FOLDER+FILE+'_stations.csv', 'rb') as csvfile:
'''Example row: >>1234(osmid),$Illinois Terminal$($name$),40.11545(latitude),-88.24111(longitude)<<'''
spamreader = csv.reader(csvfile, delimiter=',', quotechar='$')
for row in spamreader:
stations[int(row[0])]=[row[1],float(row[2]),float(row[3])]
stopt=time.time()
print("Loading stations. Time:("+str(stopt-startt)+")")
def loadcoordinates():
'''Load coordinates of reference-nodes from csv format output'''
startt=time.time()
with codecs.open(FOLDER+FILE+'_waysegment_nodecoords.csv', 'rb',encoding='utf-8') as csvfile:
'''Example row: >>123(osmid),40.11545(latitude),-88.24111(longitude)<<'''
spamreader = csv.reader(csvfile, delimiter=',', quotechar='$')
for row in spamreader:
c1,c2=float(row[1]),float(row[2])
'''c1--lat, c2--lon'''
#c1,c2=round(float(row[1]),ROUNDINGDIGITS),round(float(row[2]),ROUNDINGDIGITS)
if (c1,c2) not in approxCoord_map:
approxCoord_map[(c1,c2)]=int(row[0])
'''row[0]--coordid'''
refnodes_index_dict[int(row[0])]=len(refnodes_coord_list)
refnodes.append(int(row[0]))
refnodes_coord_list.append((c1,c2))
refnode_mapper[int(row[0])]=int(row[0])
else:
refnode_mapper[int(row[0])]=approxCoord_map[(c1,c2)]
stopt=time.time()
print("Loading refnode coordinates. Time:("+str(stopt-startt)+")")
def loadwaysegments():
'''Load way segments from csv format output'''
startt=time.time()
with codecs.open(FOLDER+FILE+'_waysegments.csv', 'rb',encoding='utf-8') as csvfile:
'''Example row: >>1234567(osmid1),7654321(osmid2),1435(gauge),350(maxspeed in kph),yes(highspeed or not),N/A(service),main(usage)<<'''
spamreader = csv.reader(csvfile, delimiter=',', quotechar='$')
header=spamreader.next()
attr_list=header[2:]
attr_list.append('distance')
for row in spamreader:
if refnode_mapper.get(int(row[0])) is None:
print ("none")
else:
mfrom=refnode_mapper[int(row[0])]
mto=refnode_mapper[int(row[1])]
if mfrom not in node_fromto_dict:
node_fromto_dict[mfrom]=[]
if mto not in node_fromto_dict:
node_fromto_dict[mto]=[]
from_coord=refnodes_coord_list[refnodes_index_dict[mfrom]]
to_coord=refnodes_coord_list[refnodes_index_dict[mto]]
distance=haversine.hav_distance(from_coord[0],from_coord[1],to_coord[0],to_coord[1])
attr_dict={}
for i in attr_list:
if i=='distance':
attr_dict[i]=str(distance)
else:
attr_dict[i]=row[header.index(i)]
attribute_mapper[(mfrom,mto)]=attr_dict
attribute_mapper[(mto,mfrom)]=attr_dict
if (mfrom,mto) not in node_fromto_dict[mfrom] and mfrom!=mto:
node_fromto_dict[mfrom].append((mfrom,mto))
if (mto,mfrom) not in node_fromto_dict[mto] and mfrom!=mto:
node_fromto_dict[mto].append((mto,mfrom))
'''Tag each station as connected or disconnected based on its presence in node_fromto_dict'''
for s in stations:<|fim▁hole|> if s not in node_fromto_dict:
disconnected_stations.append(s)
stations[s].append('disconnected')
else:
connected_stations.append(s)
stations[s].append('connected')
stopt=time.time()
print("Loading way segments ("+str(stopt-startt)+")")
def output_nodes_csv():
target = codecs.open(FOLDER+FILE+"_nodes.csv", 'w',encoding='utf-8')
for x in node_fromto_dict:
if x in stations:
if len(node_fromto_dict[x])!=0:
target.write(str(x)+",$"+stations[x][0].decode('utf-8')+"$,"+str(stations[x][1])+","+str(stations[x][2])+"\n")
else:
target.write(str(x)+",$$,"+str(refnodes_coord_list[refnodes_index_dict[x]][0])+","+str(refnodes_coord_list[refnodes_index_dict[x]][1])+"\n")
target.close()
'''Example row: >>1234(osmid),$Illinois Terminal$($name$),40.11545(latitude),-88.24111(longitude)<<'''
def output_links_csv():
target = codecs.open(FOLDER+FILE+"_links.csv", 'w',encoding='utf-8')
headerkeys=attribute_mapper.values()[0].keys()
header='vertex_1,vertex_2'
for k in headerkeys:
header=header+','+k
target.write(header+'\n')
for x in node_fromto_dict:
for (a,b) in node_fromto_dict[x]:
if a in node_fromto_dict and b in node_fromto_dict:
row_to_write=str(a)+","+str(b)
for attr in headerkeys:
row_to_write=row_to_write+','+attribute_mapper[(a,b)].get(attr,"N/A")
target.write(row_to_write+"\n")
target.close()
'''Example row: >>1234(osmid_vertex1),5678(osmid_vertex2),0.1534285(haversine_distance)<<'''
loadstations()
loadcoordinates()
loadwaysegments()
output_nodes_csv()
output_links_csv()
return node_fromto_dict
if __name__ == '__main__':
print ("===you're in test mode of network_process.py===")
FILE='beijing_china_latest.osm.pbf'
FOLDER='/home/hegxiten/workspace/data/'+FILE+'/'
CONCURRENCYVAL=4
GLOBALROUNDINGDIGITS=5
node_fromto_dict=process_network(FOLDER, FILE, CONCURRENCYVAL, GLOBALROUNDINGDIGITS)
print ("===test mode of network_process.py terminated===")<|fim▁end|> | |
<|file_name|>de.rs<|end_file_name|><|fim▁begin|>use pest::iterators::Pair;
use pest::Parser as P;
use pest_derive::Parser;
use serde::de;
use serde::forward_to_deserialize_any;
use std::char;
use std::collections::VecDeque;
use std::f64;
use std::io::Read;
use crate::error::{self, Error, Result};
#[derive(Parser)]
#[grammar_inline = r#"
// see https://spec.json5.org/#syntactic-grammar and
// https://spec.json5.org/#lexical-grammar
COMMENT = _{ "/*" ~ (!"*/" ~ ANY)* ~ "*/" | "//" ~ (!line_terminator ~ ANY)* }
WHITESPACE = _{
"\u{0009}" |
"\u{000B}" |
"\u{000C}" |
"\u{0020}" |
"\u{00A0}" |
"\u{FEFF}" |
SPACE_SEPARATOR |
line_terminator
}
array = { "[" ~ "]" | "[" ~ value ~ ("," ~ value)* ~ ","? ~ "]" }
boolean = @{ "true" | "false" }
char_escape_sequence = @{ single_escape_char | non_escape_char }
char_literal = @{ !("\\" | line_terminator) ~ ANY }
decimal_integer_literal = _{ "0" | ASCII_NONZERO_DIGIT ~ ASCII_DIGIT* }
decimal_literal = _{
decimal_integer_literal ~ "." ~ ASCII_DIGIT* ~ exponent_part? |
"." ~ ASCII_DIGIT+~ exponent_part? |
decimal_integer_literal ~ exponent_part?
}
double_quote_char = _{
"\\" ~ escape_sequence |
line_continuation |
!"\"" ~ char_literal
}
escape_char = _{ single_escape_char | ASCII_DIGIT | "x" | "u" }
escape_sequence = _{
char_escape_sequence |
nul_escape_sequence |
"x" ~ hex_escape_sequence |
"u" ~ unicode_escape_sequence
}
exponent_part = _{ ^"e" ~ ("+" | "-")? ~ ASCII_DIGIT+ }
hex_escape_sequence = @{ ASCII_HEX_DIGIT{2} }
hex_integer_literal = _{ ^"0x" ~ ASCII_HEX_DIGIT+ }
identifier = ${ identifier_start ~ identifier_part* }
identifier_part = _{
identifier_start |
&(
NONSPACING_MARK |
DIACRITIC | // not sure about this, spec says "Combining spacing mark (Mc)"
DECIMAL_NUMBER |
CONNECTOR_PUNCTUATION |
"\u{200C}" |
"\u{200D}"
) ~ char_literal
}
identifier_start = _{
&(unicode_letter | "$" | "_") ~ char_literal |
"\\u" ~ unicode_escape_sequence
}
key = _{ identifier | string }
line_continuation = _{ "\\" ~ line_terminator_sequence }
line_terminator = _{ "\u{000A}" | "\u{000D}" | "\u{2028}" | "\u{2029}" }
line_terminator_sequence = _{ "\u{000D}" ~ "\u{000A}" | line_terminator }
non_escape_char = _{ !(escape_char | line_terminator) ~ ANY }
nul_escape_sequence = @{ "0" }
null = @{ "null" }
number = @{ ("+" | "-")? ~ numeric_literal }
numeric_literal = _{
hex_integer_literal |
decimal_literal |
"Infinity" |
"NaN"
}
object = { "{" ~ "}" | "{" ~ pair ~ ("," ~ pair)* ~ ","? ~ "}" }
pair = _{ key ~ ":" ~ value }
single_escape_char = _{ "'" | "\"" | "\\" | "b" | "f" | "n" | "r" | "t" | "v" }
single_quote_char = _{
"\\" ~ escape_sequence |
line_continuation |
!"'" ~ char_literal
}
string = ${ "\"" ~ double_quote_char* ~ "\"" | "'" ~ single_quote_char* ~ "'" }
text = _{ SOI ~ value ~ EOI }
unicode_escape_sequence = @{ ASCII_HEX_DIGIT{4} }
unicode_letter = _{
UPPERCASE_LETTER |
LOWERCASE_LETTER |
TITLECASE_LETTER |
MODIFIER_LETTER |
OTHER_LETTER |
LETTER_NUMBER
}<|fim▁hole|>value = _{ null | boolean | string | number | object | array }
"#]
struct Parser;
/// Deserialize an instance of type `T` from a string of JSON5 text. Can fail if the input is
/// invalid JSON5, or doesn’t match the structure of the target type.
pub fn from_str<'a, T>(s: &'a str) -> Result<T>
where
T: de::Deserialize<'a>,
{
let mut deserializer = Deserializer::from_str(s)?;
T::deserialize(&mut deserializer)
}
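// Illustrative usage (a sketch, not part of the original source; the `Config`
// struct and its fields are hypothetical):
//
//     #[derive(serde::Deserialize)]
//     struct Config { name: String, retries: u32 }
//
//     let cfg: Config = from_str("{ name: 'demo', retries: 3, /* JSON5! */ }").unwrap();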
/// Deserialize an instance of type `T` from a slice of JSON5 text. Can fail if the input is
/// invalid JSON5, or doesn’t match the structure of the target type.
pub fn from_slice<'a, T>(s: &'a [u8]) -> Result<T>
where
T: de::Deserialize<'a>,
{
let valid_utf8 = std::str::from_utf8(s)?;
let mut deserializer = Deserializer::from_str(valid_utf8)?;
T::deserialize(&mut deserializer)
}
/// Deserialize an instance of type `T` from any implementation of Read. Can fail if the input is
/// invalid JSON5, or doesn’t match the structure of the target type.
pub fn from_reader<T, R>(reader: &mut R) -> Result<T>
where
T: serde::de::DeserializeOwned,
R: Read
{
let mut data = String::default();
reader.read_to_string(&mut data)?;
from_str(&data)
}
struct Deserializer<'de> {
pair: Option<Pair<'de, Rule>>,
}
impl<'de> Deserializer<'de> {
/// Creates a JSON5 deserializer from a `&str`. This parses the input at construction time, so
/// can fail if the input is not valid JSON5.
fn from_str(input: &'de str) -> Result<Self> {
let pair = Parser::parse(Rule::text, input)?.next().unwrap();
Ok(Deserializer::from_pair(pair))
}
fn from_pair(pair: Pair<'de, Rule>) -> Self {
Deserializer { pair: Some(pair) }
}
}
impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
type Error = Error;
fn deserialize_any<V>(self, visitor: V) -> Result<V::Value>
where
V: de::Visitor<'de>,
{
let pair = self.pair.take().unwrap();
let span = pair.as_span();
let mut res = (move || {
match pair.as_rule() {
Rule::null => visitor.visit_unit(),
Rule::boolean => visitor.visit_bool(parse_bool(&pair)),
Rule::string | Rule::identifier => visitor.visit_string(parse_string(pair)?),
Rule::number => {
if is_int(pair.as_str()) {
visitor.visit_i64(parse_integer(&pair)?)
} else {
visitor.visit_f64(parse_number(&pair)?)
}
}
Rule::array => visitor.visit_seq(Seq::new(pair)),
Rule::object => visitor.visit_map(Map::new(pair)),
_ => unreachable!(),
}
})();
error::set_location(&mut res, &span);
res
}
fn deserialize_enum<V>(
self,
_name: &'static str,
_variants: &'static [&'static str],
visitor: V,
) -> Result<V::Value>
where
V: de::Visitor<'de>,
{
let pair = self.pair.take().unwrap();
let span = pair.as_span();
let mut res = (move || {
visitor.visit_enum(Enum { pair })
})();
error::set_location(&mut res, &span);
res
}
// The below will get us the right types, but won't necessarily give
// meaningful results if the source is out of the range of the target type.
fn deserialize_i8<V>(self, visitor: V) -> Result<V::Value>
where
V: de::Visitor<'de>,
{
let pair = self.pair.take().unwrap();
let span = pair.as_span();
let mut res = (move || {
visitor.visit_i8(parse_number(&pair)? as i8)
})();
error::set_location(&mut res, &span);
res
}
fn deserialize_i16<V>(self, visitor: V) -> Result<V::Value>
where
V: de::Visitor<'de>,
{
let pair = self.pair.take().unwrap();
let span = pair.as_span();
let mut res = (move || {
visitor.visit_i16(parse_number(&pair)? as i16)
})();
error::set_location(&mut res, &span);
res
}
fn deserialize_i32<V>(self, visitor: V) -> Result<V::Value>
where
V: de::Visitor<'de>,
{
let pair = self.pair.take().unwrap();
let span = pair.as_span();
let mut res = (move || {
visitor.visit_i32(parse_number(&pair)? as i32)
})();
error::set_location(&mut res, &span);
res
}
fn deserialize_i64<V>(self, visitor: V) -> Result<V::Value>
where
V: de::Visitor<'de>,
{
let pair = self.pair.take().unwrap();
let span = pair.as_span();
let mut res = (move || {
visitor.visit_i64(parse_number(&pair)? as i64)
})();
error::set_location(&mut res, &span);
res
}
fn deserialize_i128<V>(self, visitor: V) -> Result<V::Value>
where
V: de::Visitor<'de>,
{
let pair = self.pair.take().unwrap();
let span = pair.as_span();
let mut res = (move || {
visitor.visit_i128(parse_number(&pair)? as i128)
})();
error::set_location(&mut res, &span);
res
}
fn deserialize_u8<V>(self, visitor: V) -> Result<V::Value>
where
V: de::Visitor<'de>,
{
let pair = self.pair.take().unwrap();
let span = pair.as_span();
let mut res = (move || {
visitor.visit_u8(parse_number(&pair)? as u8)
})();
error::set_location(&mut res, &span);
res
}
fn deserialize_u16<V>(self, visitor: V) -> Result<V::Value>
where
V: de::Visitor<'de>,
{
let pair = self.pair.take().unwrap();
let span = pair.as_span();
let mut res = (move || {
visitor.visit_u16(parse_number(&pair)? as u16)
})();
error::set_location(&mut res, &span);
res
}
fn deserialize_u32<V>(self, visitor: V) -> Result<V::Value>
where
V: de::Visitor<'de>,
{
let pair = self.pair.take().unwrap();
let span = pair.as_span();
let mut res = (move || {
visitor.visit_u32(parse_number(&pair)? as u32)
})();
error::set_location(&mut res, &span);
res
}
fn deserialize_u64<V>(self, visitor: V) -> Result<V::Value>
where
V: de::Visitor<'de>,
{
let pair = self.pair.take().unwrap();
let span = pair.as_span();
let mut res = (move || {
visitor.visit_u64(parse_number(&pair)? as u64)
})();
error::set_location(&mut res, &span);
res
}
fn deserialize_u128<V>(self, visitor: V) -> Result<V::Value>
where
V: de::Visitor<'de>,
{
let pair = self.pair.take().unwrap();
let span = pair.as_span();
let mut res = (move || {
visitor.visit_u128(parse_number(&pair)? as u128)
})();
error::set_location(&mut res, &span);
res
}
fn deserialize_f32<V>(self, visitor: V) -> Result<V::Value>
where
V: de::Visitor<'de>,
{
let pair = self.pair.take().unwrap();
let span = pair.as_span();
let mut res = (move || {
visitor.visit_f32(parse_number(&pair)? as f32)
})();
error::set_location(&mut res, &span);
res
}
fn deserialize_f64<V>(self, visitor: V) -> Result<V::Value>
where
V: de::Visitor<'de>,
{
let pair = self.pair.take().unwrap();
let span = pair.as_span();
let mut res = (move || {
visitor.visit_f64(parse_number(&pair)?)
})();
error::set_location(&mut res, &span);
res
}
fn deserialize_option<V>(self, visitor: V) -> Result<V::Value>
where
V: de::Visitor<'de>,
{
let pair = self.pair.take().unwrap();
let span = pair.as_span();
let mut res = (move || {
match pair.as_rule() {
Rule::null => visitor.visit_none(),
_ => visitor.visit_some(&mut Deserializer::from_pair(pair)),
}
})();
error::set_location(&mut res, &span);
res
}
fn deserialize_newtype_struct<V>(self, _name: &str, visitor: V) -> Result<V::Value>
where
V: de::Visitor<'de>,
{
let span = self.pair.as_ref().unwrap().as_span();
let mut res = (move || {
visitor.visit_newtype_struct(self)
})();
error::set_location(&mut res, &span);
res
}
forward_to_deserialize_any! {
bool char str string bytes byte_buf unit unit_struct seq
tuple tuple_struct map struct identifier ignored_any
}
}
fn parse_bool(pair: &Pair<'_, Rule>) -> bool {
match pair.as_str() {
"true" => true,
"false" => false,
_ => unreachable!(),
}
}
fn parse_string(pair: Pair<'_, Rule>) -> Result<String> {
let span = pair.as_span();
let mut res = (move || {
pair.into_inner()
.map(|component| match component.as_rule() {
Rule::char_literal => Ok(String::from(component.as_str())),
Rule::char_escape_sequence => Ok(parse_char_escape_sequence(&component)),
Rule::nul_escape_sequence => Ok(String::from("\u{0000}")),
Rule::hex_escape_sequence | Rule::unicode_escape_sequence => {
let hex_escape = parse_hex(component.as_str())?;
match char::from_u32(hex_escape) {
Some(s) => Ok(s.to_string()),
None => Err(de::Error::custom("error parsing hex prefix")),
}
}
_ => unreachable!(),
})
.collect()
})();
error::set_location(&mut res, &span);
res
}
fn parse_char_escape_sequence(pair: &Pair<'_, Rule>) -> String {
String::from(match pair.as_str() {
"b" => "\u{0008}",
"f" => "\u{000C}",
"n" => "\n",
"r" => "\r",
"t" => "\t",
"v" => "\u{000B}",
c => c,
})
}
fn parse_number(pair: &Pair<'_, Rule>) -> Result<f64> {
match pair.as_str() {
"Infinity" => Ok(f64::INFINITY),
"-Infinity" => Ok(f64::NEG_INFINITY),
"NaN" | "-NaN" => Ok(f64::NAN),
s if is_hex_literal(s) => parse_hex(&s[2..]).map(f64::from),
s => {
if let Ok(r) = s.parse::<f64>() {
if r.is_finite() {
Ok(r)
} else {
Err(de::Error::custom("error parsing number: too large"))
}
} else {
Err(de::Error::custom("error parsing number"))
}
}
}
}
fn parse_integer(pair: &Pair<'_, Rule>) -> Result<i64> {
match pair.as_str() {
s if is_hex_literal(s) => Ok(parse_hex(&s[2..])? as i64),
s => s
.parse()
.or_else(|_| Err(de::Error::custom("error parsing integer"))),
}
}
fn is_int(s: &str) -> bool {
!s.contains('.') && (is_hex_literal(s) || (!s.contains('e') && !s.contains('E')))
}
fn parse_hex(s: &str) -> Result<u32> {
u32::from_str_radix(s, 16).or_else(|_| Err(de::Error::custom("error parsing hex")))
}
fn is_hex_literal(s: &str) -> bool {
s.len() > 2 && (&s[..2] == "0x" || &s[..2] == "0X")
}
struct Seq<'de> {
pairs: VecDeque<Pair<'de, Rule>>,
}
impl<'de> Seq<'de> {
pub fn new(pair: Pair<'de, Rule>) -> Self {
Self {
pairs: pair.into_inner().collect(),
}
}
}
impl<'de> de::SeqAccess<'de> for Seq<'de> {
type Error = Error;
fn size_hint(&self) -> Option<usize> {
Some(self.pairs.len())
}
fn next_element_seed<T>(&mut self, seed: T) -> Result<Option<T::Value>>
where
T: de::DeserializeSeed<'de>,
{
if let Some(pair) = self.pairs.pop_front() {
seed.deserialize(&mut Deserializer::from_pair(pair))
.map(Some)
} else {
Ok(None)
}
}
}
struct Map<'de> {
pairs: VecDeque<Pair<'de, Rule>>,
}
impl<'de> Map<'de> {
pub fn new(pair: Pair<'de, Rule>) -> Self {
Self {
pairs: pair.into_inner().collect(),
}
}
}
impl<'de> de::MapAccess<'de> for Map<'de> {
type Error = Error;
fn size_hint(&self) -> Option<usize> {
Some(self.pairs.len() / 2)
}
fn next_key_seed<K>(&mut self, seed: K) -> Result<Option<K::Value>>
where
K: de::DeserializeSeed<'de>,
{
if let Some(pair) = self.pairs.pop_front() {
seed.deserialize(&mut Deserializer::from_pair(pair))
.map(Some)
} else {
Ok(None)
}
}
fn next_value_seed<V>(&mut self, seed: V) -> Result<V::Value>
where
V: de::DeserializeSeed<'de>,
{
seed.deserialize(&mut Deserializer::from_pair(
self.pairs.pop_front().unwrap(),
))
}
}
struct Enum<'de> {
pair: Pair<'de, Rule>,
}
impl<'de> de::EnumAccess<'de> for Enum<'de> {
type Error = Error;
type Variant = Variant<'de>;
fn variant_seed<V>(self, seed: V) -> Result<(V::Value, Self::Variant)>
where
V: de::DeserializeSeed<'de>,
{
let span = self.pair.as_span();
let mut res = (move || {
match self.pair.as_rule() {
Rule::string => {
let tag = seed.deserialize(&mut Deserializer::from_pair(self.pair))?;
Ok((tag, Variant { pair: None }))
}
Rule::object => {
let mut pairs = self.pair.into_inner();
if let Some(tag_pair) = pairs.next() {
let tag = seed.deserialize(&mut Deserializer::from_pair(tag_pair))?;
Ok((tag, Variant { pair: pairs.next() }))
} else {
Err(de::Error::custom("expected a nonempty object"))
}
}
_ => Err(de::Error::custom("expected a string or an object")),
}
})();
error::set_location(&mut res, &span);
res
}
}
struct Variant<'de> {
pair: Option<Pair<'de, Rule>>,
}
impl<'de, 'a> de::VariantAccess<'de> for Variant<'de> {
type Error = Error;
fn unit_variant(self) -> Result<()> {
Ok(())
}
fn newtype_variant_seed<T>(self, seed: T) -> Result<T::Value>
where
T: de::DeserializeSeed<'de>,
{
seed.deserialize(&mut Deserializer::from_pair(self.pair.unwrap()))
}
fn tuple_variant<V>(self, _len: usize, visitor: V) -> Result<V::Value>
where
V: de::Visitor<'de>,
{
match self.pair {
Some(pair) => match pair.as_rule() {
Rule::array => visitor.visit_seq(Seq::new(pair)),
_ => Err(de::Error::custom("expected an array")),
},
None => Err(de::Error::custom("expected an array")),
}
}
fn struct_variant<V>(self, _fields: &'static [&'static str], visitor: V) -> Result<V::Value>
where
V: de::Visitor<'de>,
{
match self.pair {
Some(pair) => match pair.as_rule() {
Rule::object => visitor.visit_map(Map::new(pair)),
_ => Err(de::Error::custom("expected an object")),
},
None => Err(de::Error::custom("expected an object")),
}
}
}<|fim▁end|> | |
<|file_name|>itemCtrl.js<|end_file_name|><|fim▁begin|>function tabCtrl(id) {
var element = document.querySelectorAll('[data-selector="tabbar/tab"]');
for (var i = 0; i < element.length; i++) {<|fim▁hole|> }
}
}
function tabItemCtrl(id) {
var element = document.querySelectorAll('[data-selector="tabbar/item"]');
for (var i = 0; i < element.length; i++) {
if (element[i].dataset.id === id) {
element[i].classList.remove('tabbar__item--hidden');
} else {
element[i].classList.add('tabbar__item--hidden');
}
}
}
function init() {
var links = document.querySelectorAll('[data-selector="tabbar/tab"]');
for (var i = 0; i < links.length; i++) {
links[i].addEventListener('click', function (event) {
event.preventDefault();
tabCtrl(this.dataset.id);
tabItemCtrl(this.dataset.id);
}, false);
}
}
export default init;<|fim▁end|> | if (element[i].dataset.id === id) {
element[i].classList.add('tabbar__tab__active');
} else {
element[i].classList.remove('tabbar__tab__active'); |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from distutils.core import setup
from distutils.command.build import build as _build
from distutils.command.install import install as _install
from distutils.command.install_data import install_data as _install_data
from distutils.command.sdist import sdist as _sdist
from distutils.extension import Extension
import os
import subprocess<|fim▁hole|> from Cython.Distutils import build_ext
ext_module_src = "src/compizconfig.pyx"
else: # Otherwise build directly from C source
from distutils.command.build_ext import build_ext
ext_module_src = "src/compizconfig.c"
version_file = open ("VERSION", "r")
version = version_file.read ().strip ()
if "=" in version:
version = version.split ("=")[1]
def pkgconfig(*packages, **kw):
flag_map = {'-I': 'include_dirs', '-L': 'library_dirs', '-l': 'libraries', '-R': 'runtime_library_dirs'}
cmd = ['pkg-config', '--libs', '--cflags']
tokens = subprocess.Popen (cmd + list(packages), stdout=subprocess.PIPE).communicate()[0].split ()
for t in tokens:
if '-L' in t[:2]:
kw.setdefault (flag_map.get ("-L"), []).append (t[2:])
if os.getenv ("COMPIZ_DISABLE_RPATH") != "1":
kw.setdefault (flag_map.get ("-R"), []).append (t[2:])
elif '-I' in t[:2]:
kw.setdefault (flag_map.get ("-I"), []).append (t[2:])
elif '-l' in t[:2]:
kw.setdefault (flag_map.get ("-l"), []).append (t[2:])
return kw
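# Illustrative example (actual values depend on the local pkg-config setup):
#   pkgconfig("libcompizconfig")
#   -> {'include_dirs': ['/usr/include/compizconfig'],
#       'libraries': ['compizconfig'],
#       'library_dirs': ['/usr/lib'], 'runtime_library_dirs': ['/usr/lib']}
# The resulting dict is splatted into the Extension(**kwargs) call below.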
VERSION_FILE = os.path.join (os.path.dirname (__file__), "VERSION")
pkgconfig_libs = subprocess.Popen (["pkg-config", "--libs", "libcompizconfig"], stdout=subprocess.PIPE, stderr=open(os.devnull, 'w')).communicate ()[0]
if len (pkgconfig_libs) == 0:
print ("CompizConfig Python [ERROR]: No libcompizconfig.pc found in the pkg-config search path")
print ("Ensure that libcompizconfig is installed or libcompizconfig.pc is in your $PKG_CONFIG_PATH")
exit (1)
libs = pkgconfig_libs[2:].split (" ")[0]
INSTALLED_FILES = "installed_files"
class install (_install):
def run (self):
_install.run (self)
outputs = self.get_outputs ()
length = 0
if self.root:
length += len (self.root)
if self.prefix:
length += len (self.prefix)
if length:
for counter in xrange (len (outputs)):
outputs[counter] = outputs[counter][length:]
data = "\n".join (outputs)
try:
file = open (INSTALLED_FILES, "w")
except:
self.warn ("Could not write installed files list %s" % \
INSTALLED_FILES)
return
file.write (data)
file.close ()
class install_data (_install_data):
def run (self):
def chmod_data_file (file):
from stat import S_IRUSR, S_IWUSR, S_IRGRP, S_IROTH
try:
os.chmod (file, S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH)
except:
self.warn ("Could not chmod data file %s" % file)
_install_data.run (self)
map (chmod_data_file, self.get_outputs ())
class uninstall (_install):
def run (self):
try:
file = open (INSTALLED_FILES, "r")
except:
self.warn ("Could not read installed files list %s" % \
INSTALLED_FILES)
return
files = file.readlines ()
file.close ()
prepend = ""
if self.root:
prepend += self.root
if self.prefix:
prepend += self.prefix
if len (prepend):
for counter in xrange (len (files)):
files[counter] = prepend + files[counter].rstrip ()
for file in files:
print ("Uninstalling %s" % file)
try:
os.unlink (file)
except:
self.warn ("Could not remove file %s" % file)
class sdist (_sdist):
def run (self):
# Build C file
if os.path.exists ("src/compizconfig.pyx"):
from Cython.Compiler.Main import compile as cython_compile
cython_compile ("src/compizconfig.pyx")
# Run regular sdist
_sdist.run (self)
def add_defaults (self):
_sdist.add_defaults (self)
# Remove pyx source and add c source
if os.path.exists ("src/compizconfig.pyx"):
self.filelist.exclude_pattern ("src/compizconfig.pyx")
self.filelist.append ("src/compizconfig.c")
setup (
name = "compizconfig-python",
version = version,
description = "CompizConfig Python",
url = "http://www.compiz.org/",
license = "GPL",
maintainer = "Guillaume Seguin",
maintainer_email = "[email protected]",
cmdclass = {"uninstall" : uninstall,
"install" : install,
"install_data" : install_data,
"build_ext" : build_ext,
"sdist" : sdist},
ext_modules=[
Extension ("compizconfig", [ext_module_src],
**pkgconfig("libcompizconfig"))
]
)<|fim▁end|> |
# If src/compizconfig.pyx exists, build using Cython
if os.path.exists ("src/compizconfig.pyx"): |
<|file_name|>OutHandler.java<|end_file_name|><|fim▁begin|><|fim▁hole|>import java.io.*;
public class OutHandler implements Runnable {
OutHandler(Connection connection) {
this.connection = connection;
}
private Connection connection;
public void run() {}
}<|fim▁end|> | package irc.bot;
|
<|file_name|>web.py<|end_file_name|><|fim▁begin|>from kompromatron.core import app<|fim▁hole|>from kompromatron.views.base import base
# app.register_blueprint(entities)
# app.register_blueprint(relations)
#app.register_blueprint(base)<|fim▁end|> | |
<|file_name|>RedeliveryOnExceptionBlockedDelayTest.java<|end_file_name|><|fim▁begin|>/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.processor;
import org.apache.camel.ContextTestSupport;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @version
*/
public class RedeliveryOnExceptionBlockedDelayTest extends ContextTestSupport {
private static final Logger LOG = LoggerFactory.getLogger(RedeliveryOnExceptionBlockedDelayTest.class);
private static volatile int attempt;
public void testRedelivery() throws Exception {
MockEndpoint before = getMockEndpoint("mock:result");
before.expectedBodiesReceived("Hello World", "Hello Camel");
// we use blocked redelivery delay so the messages arrive in the same order
MockEndpoint result = getMockEndpoint("mock:result");
result.expectedBodiesReceived("Hello World", "Hello Camel");
<|fim▁hole|>
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
@Override
public void configure() throws Exception {
// will by default block
onException(IllegalArgumentException.class)
.maximumRedeliveries(5).redeliveryDelay(2000);
from("seda:start")
.to("log:before")
.to("mock:before")
.process(new Processor() {
public void process(Exchange exchange) throws Exception {
LOG.info("Processing at attempt " + attempt + " " + exchange);
String body = exchange.getIn().getBody(String.class);
if (body.contains("World")) {
if (++attempt <= 2) {
LOG.info("Processing failed will thrown an exception");
throw new IllegalArgumentException("Damn");
}
}
exchange.getIn().setBody("Hello " + body);
LOG.info("Processing at attempt " + attempt + " complete " + exchange);
}
})
.to("log:after")
.to("mock:result");
}
};
}
}<|fim▁end|> | template.sendBody("seda:start", "World");
template.sendBody("seda:start", "Camel"); |
<|file_name|>discon_tools.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python3
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
from loguru import logger
from pathlib import Path
def check_meta_yaml_for_noarch(fn: Path, text=None):
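"""Return True if the conda recipe at `fn` (or the given `text`) declares a
noarch python build, i.e. contains `noarch_python: True` or `noarch: python`."""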
import re
logger.debug("Checking for noarch")
if text is None:
with open(fn, "rt") as fl:
text = fl.read()
mo = re.search(r"\n\s*noarch_python:\s*True", text)
if mo:
logger.info("Detected conda noarch python")
return True
mo = re.search(r"\n\s*noarch:\s*python", text)
if mo:
logger.info("Detected conda noarch python")<|fim▁hole|><|fim▁end|> | return True
return False |
<|file_name|>hr.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
from frappe import _
def get_data():
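"""Return the HR module's desktop configuration consumed by Frappe: a list of
sections (Documents, Tools, Setup, Standard Reports), each with a label, an
icon and a list of items (doctypes or reports)."""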
return [
{
"label": _("Documents"),
"icon": "icon-star",
"items": [
{
"type": "doctype",
"name": "Employee",
"description": _("Employee records."),
},
{
"type": "doctype",
"name": "Leave Application",
"description": _("Applications for leave."),
},
{
"type": "doctype",
"name": "Expense Claim",
"description": _("Claims for company expense."),
},
{
"type": "doctype",
"name": "Attendance",
"description": _("Attendance record."),
},
{
"type": "doctype",
"name": "Salary Slip",
"description": _("Monthly salary statement."),
},
{
"type": "doctype",
"name": "Appraisal",
"description": _("Performance appraisal."),
},
{
"type": "doctype",
"name": "Job Applicant",
"description": _("Applicant for a Job."),
},
{
"type": "doctype",
"name": "Job Opening",
"description": _("Opening for a Job."),
},
{
"type": "doctype",
"name": "Offer Letter",
"description": _("Offer candidate a Job."),
},
]
},
{
"label": _("Tools"),
"icon": "icon-wrench",
"items": [
{
"type": "doctype",
"name": "Salary Manager",
"label": _("Process Payroll"),
"description":_("Generate Salary Slips"),
"hide_count": True
},
{
"type": "doctype",
"name": "Timesheet Report",
"label": _("Timesheet Report"),
"description":_("Attendance Report"),
"hide_count": True
},
{
"type": "doctype",
"name": "Upload Attendance",
"description":_("Upload attendance from a .csv file"),
"hide_count": True
},<|fim▁hole|> "description":_("Allocate leaves for the year."),
"hide_count": True
},
]
},
{
"label": _("Setup"),
"icon": "icon-cog",
"items": [
{
"type": "doctype",
"name": "HR Settings",
"description": _("Settings for HR Module")
},
{
"type": "doctype",
"name": "Employment Type",
"description": _("Types of employment (permanent, contract, intern etc.).")
},
{
"type": "doctype",
"name": "Branch",
"description": _("Organization branch master.")
},
{
"type": "doctype",
"name": "Department",
"description": _("Organization unit (department) master.")
},
{
"type": "doctype",
"name": "Designation",
"description": _("Employee designation (e.g. CEO, Director etc.).")
},
{
"type": "doctype",
"name": "Salary Structure",
"description": _("Salary template master.")
},
{
"type": "doctype",
"name": "Earning Type",
"description": _("Salary components.")
},
{
"type": "doctype",
"name": "Deduction Type",
"description": _("Tax and other salary deductions.")
},
{
"type": "doctype",
"name": "Leave Allocation",
"description": _("Allocate leaves for a period.")
},
{
"type": "doctype",
"name":"Leave Type",
"description": _("Type of leaves like casual, sick etc."),
},
{
"type": "doctype",
"name": "Holiday List",
"description": _("Holiday master.")
},
{
"type": "doctype",
"name": "Leave Block List",
"description": _("Block leave applications by department.")
},
{
"type": "doctype",
"name": "Appraisal Template",
"description": _("Template for performance appraisals.")
},
{
"type": "doctype",
"name": "Expense Claim Type",
"description": _("Types of Expense Claim.")
},
{
"type": "doctype",
"name": "Email Account",
"description": _("Setup incoming server for jobs email id. (e.g. [email protected])")
},
]
},
{
"label": _("Standard Reports"),
"icon": "icon-list",
"items": [
{
"type": "report",
"is_query_report": True,
"name": "Employee Leave Balance",
"doctype": "Leave Application"
},
{
"type": "report",
"is_query_report": True,
"name": "Employee Birthday",
"doctype": "Employee"
},
{
"type": "report",
"name": "Employee Information",
"doctype": "Employee"
},
{
"type": "report",
"is_query_report": True,
"name": "Monthly Salary Register",
"doctype": "Salary Slip"
},
{
"type": "report",
"is_query_report": True,
"name": "Monthly Attendance Sheet",
"doctype": "Attendance"
},
]
},
]<|fim▁end|> | {
"type": "doctype",
"name": "Leave Control Panel",
"label": _("Leave Allocation Tool"), |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>mod info;
mod releases;
mod releases_delete;<|fim▁hole|><|fim▁end|> | mod releases_finalize;
mod releases_info;
mod releases_list;
mod releases_new; |
<|file_name|>filterdialog.cpp<|end_file_name|><|fim▁begin|>/***************************************************************************
Copyright (C) 2003-2009 Robby Stephenson <[email protected]>
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or *
* modify it under the terms of the GNU General Public License as *
* published by the Free Software Foundation; either version 2 of *
* the License or (at your option) version 3 or any later version *
* accepted by the membership of KDE e.V. (or its successor approved *
* by the membership of KDE e.V.), which shall act as a proxy *
* defined in Section 14 of version 3 of the license. *
* *
* This program is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
* GNU General Public License for more details. *
* *
* You should have received a copy of the GNU General Public License *
* along with this program. If not, see <http://www.gnu.org/licenses/>. *
* *
***************************************************************************/
// The layout borrows heavily from kmsearchpatternedit.cpp in kmail
// which is authored by Marc Mutz <[email protected]> under the GPL
#include "filterdialog.h"
#include "tellico_kernel.h"
#include "document.h"
#include "collection.h"
#include "fieldcompletion.h"
#include "gui/filterrulewidgetlister.h"
#include "gui/filterrulewidget.h"
#include "tellico_debug.h"
#include <KLocalizedString>
#include <KHelpClient>
#include <QGroupBox>
#include <QRadioButton>
#include <QButtonGroup>
#include <QLabel>
#include <QApplication>
#include <QFrame>
#include <QVBoxLayout>
#include <QPushButton>
#include <QLineEdit>
#include <QDialogButtonBox>
using Tellico::FilterDialog;
namespace {
static const int FILTER_MIN_WIDTH = 600;
}
// modal dialog so I don't have to worry about updating stuff
// don't show apply button if not saving, i.e. just modifying existing filter
FilterDialog::FilterDialog(Mode mode_, QWidget* parent_)
: QDialog(parent_), m_filter(nullptr), m_mode(mode_), m_saveFilter(nullptr) {
setModal(true);
setWindowTitle(mode_ == CreateFilter ? i18n("Advanced Filter") : i18n("Modify Filter"));
QVBoxLayout* topLayout = new QVBoxLayout();
setLayout(topLayout);
QDialogButtonBox* buttonBox;
if(mode_ == CreateFilter) {
buttonBox = new QDialogButtonBox(QDialogButtonBox::Help|QDialogButtonBox::Ok|QDialogButtonBox::Cancel|QDialogButtonBox::Apply);
} else {
buttonBox = new QDialogButtonBox(QDialogButtonBox::Help|QDialogButtonBox::Ok|QDialogButtonBox::Cancel);
}
m_okButton = buttonBox->button(QDialogButtonBox::Ok);
m_applyButton = buttonBox->button(QDialogButtonBox::Apply);
connect(m_okButton, &QAbstractButton::clicked, this, &FilterDialog::slotOk);
if(m_applyButton) {
connect(m_applyButton, &QAbstractButton::clicked, this, &FilterDialog::slotApply);
}
connect(buttonBox, &QDialogButtonBox::accepted, this, &QDialog::accept);
connect(buttonBox, &QDialogButtonBox::rejected, this, &QDialog::reject);
connect(buttonBox, &QDialogButtonBox::helpRequested, this, &FilterDialog::slotHelp);
QGroupBox* m_matchGroup = new QGroupBox(i18n("Filter Criteria"), this);
QVBoxLayout* vlay = new QVBoxLayout(m_matchGroup);
topLayout->addWidget(m_matchGroup);
m_matchGroup->setSizePolicy(QSizePolicy::MinimumExpanding, QSizePolicy::Fixed);
m_matchAll = new QRadioButton(i18n("Match a&ll of the following"), m_matchGroup);
m_matchAny = new QRadioButton(i18n("Match an&y of the following"), m_matchGroup);
m_matchAll->setChecked(true);
vlay->addWidget(m_matchAll);
vlay->addWidget(m_matchAny);
QButtonGroup* bg = new QButtonGroup(m_matchGroup);
bg->addButton(m_matchAll);
bg->addButton(m_matchAny);
#if (QT_VERSION < QT_VERSION_CHECK(5, 15, 0))
void (QButtonGroup::* buttonClicked)(int) = &QButtonGroup::buttonClicked;
connect(bg, buttonClicked, this, &FilterDialog::slotFilterChanged);
#else
connect(bg, &QButtonGroup::idClicked, this, &FilterDialog::slotFilterChanged);
#endif
m_ruleLister = new FilterRuleWidgetLister(m_matchGroup);
connect(m_ruleLister, &KWidgetLister::widgetRemoved, this, &FilterDialog::slotShrink);
connect(m_ruleLister, &FilterRuleWidgetLister::signalModified, this, &FilterDialog::slotFilterChanged);
m_ruleLister->setFocus();
vlay->addWidget(m_ruleLister);
QHBoxLayout* blay = new QHBoxLayout();
topLayout->addLayout(blay);
QLabel* lab = new QLabel(i18n("Filter name:"), this);
blay->addWidget(lab);
m_filterName = new QLineEdit(this);
blay->addWidget(m_filterName);
connect(m_filterName, &QLineEdit::textChanged, this, &FilterDialog::slotFilterChanged);
// only when creating a new filter can it be saved
if(m_mode == CreateFilter) {
m_saveFilter = new QPushButton(QIcon::fromTheme(QStringLiteral("view-filter")), i18n("&Save Filter"), this);
blay->addWidget(m_saveFilter);<|fim▁hole|> m_applyButton->setEnabled(false);
}
m_okButton->setEnabled(false); // disable at start
buttonBox->button(QDialogButtonBox::Cancel)->setDefault(true);
setMinimumWidth(qMax(minimumWidth(), FILTER_MIN_WIDTH));
topLayout->addWidget(buttonBox);
}
Tellico::FilterPtr FilterDialog::currentFilter(bool alwaysCreateNew_) {
FilterPtr newFilter(new Filter(Filter::MatchAny));
if(m_matchAll->isChecked()) {
newFilter->setMatch(Filter::MatchAll);
} else {
newFilter->setMatch(Filter::MatchAny);
}
foreach(QWidget* widget, m_ruleLister->widgetList()) {
FilterRuleWidget* rw = static_cast<FilterRuleWidget*>(widget);
FilterRule* rule = rw->rule();
if(rule && !rule->isEmpty()) {
newFilter->append(rule);
} else {
delete rule;
}
}
newFilter->setName(m_filterName->text());
if(!m_filter || !alwaysCreateNew_) {
m_filter = newFilter;
}
return newFilter;
}
void FilterDialog::setFilter(Tellico::FilterPtr filter_) {
if(!filter_) {
slotClear();
return;
}
if(filter_->op() == Filter::MatchAll) {
m_matchAll->setChecked(true);
} else {
m_matchAny->setChecked(true);
}
m_ruleLister->setFilter(filter_);
m_filterName->setText(filter_->name());
m_filter = filter_;
}
void FilterDialog::slotOk() {
slotApply();
accept();
}
void FilterDialog::slotApply() {
emit signalUpdateFilter(currentFilter());
}
void FilterDialog::slotHelp() {
KHelpClient::invokeHelp(QStringLiteral("filter-dialog"));
}
void FilterDialog::slotClear() {
// myDebug();
m_matchAll->setChecked(true);
m_ruleLister->reset();
m_filterName->clear();
}
void FilterDialog::slotShrink() {
updateGeometry();
QApplication::sendPostedEvents();
resize(width(), sizeHint().height());
}
void FilterDialog::slotFilterChanged() {
const bool hadFilter = m_filter && !m_filter->isEmpty();
const bool emptyFilter = currentFilter(true)->isEmpty();
// an empty filter can be ok if the filter was originally not empty
const bool enableOk = !currentFilter()->isEmpty() || hadFilter;
if(m_saveFilter) {
m_saveFilter->setEnabled(!m_filterName->text().isEmpty() && !emptyFilter);
if(m_applyButton) {
m_applyButton->setEnabled(!emptyFilter);
}
}
if(m_applyButton) {
m_applyButton->setEnabled(enableOk);
}
m_okButton->setEnabled(enableOk);
m_okButton->setDefault(enableOk);
}
void FilterDialog::slotSaveFilter() {
// non-op if editing an existing filter
if(m_mode != CreateFilter) {
return;
}
// in this case, currentFilter() either creates a new filter or
// updates the current one. If creating a new one, then I want to copy it
const bool wasEmpty = !m_filter;
FilterPtr filter(new Filter(*currentFilter()));
if(wasEmpty) {
m_filter = filter;
}
// this keeps the saving completely decoupled from the filter setting in the detailed view
if(filter->isEmpty()) {
m_filter = FilterPtr();
return;
}
Kernel::self()->addFilter(filter);
}<|fim▁end|> | m_saveFilter->setEnabled(false);
connect(m_saveFilter, &QAbstractButton::clicked, this, &FilterDialog::slotSaveFilter); |
<|file_name|>play_sound.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2
# -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 2 -*-
# Author: Fábio André Damas <skkeeper at gmail dot com>
from threading import Thread
from subprocess import Popen, PIPE
<|fim▁hole|> Thread.__init__(self)
self.filename = filename
self.volume = volume
def run(self):
cmd = 'play -v ' + self.volume + ' ' + self.filename
p = Popen(cmd, shell=True, stderr=PIPE, close_fds=True)
# TODO: Test whether this limits the number of clicks
p.wait()
if p.returncode != 0:
print '\033[1;31mWe found an error with SoX, did you install it?\033[1;m'
p.stderr.read()<|fim▁end|> | class PlaySound(Thread):
def __init__(self, filename, volume): |
<|file_name|>test_cargo_compile_plugins.rs<|end_file_name|><|fim▁begin|>use std::fs;
use std::env;
use support::{project, execs};
use support::{COMPILING, RUNNING};
use hamcrest::assert_that;
fn setup() {
}
test!(plugin_to_the_max {
if !::is_nightly() { return }
let foo = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[lib]
name = "foo_lib"
[dependencies.bar]
path = "../bar"
"#)
.file("src/main.rs", r#"
#![feature(plugin)]
#![plugin(bar)]
extern crate foo_lib;
fn main() { foo_lib::foo(); }
"#)
.file("src/foo_lib.rs", r#"
#![feature(plugin)]
#![plugin(bar)]
pub fn foo() {}
"#);
let bar = project("bar")
.file("Cargo.toml", r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
[lib]
name = "bar"
plugin = true
[dependencies.baz]
path = "../baz"
"#)
.file("src/lib.rs", r#"
#![feature(plugin_registrar, rustc_private)]
extern crate rustc_plugin;
extern crate baz;
use rustc_plugin::Registry;
#[plugin_registrar]
pub fn foo(_reg: &mut Registry) {
println!("{}", baz::baz());
}
"#);
let baz = project("baz")
.file("Cargo.toml", r#"
[package]
name = "baz"
version = "0.0.1"
authors = []
[lib]
name = "baz"
crate_type = ["dylib"]
"#)
.file("src/lib.rs", "pub fn baz() -> i32 { 1 }");
bar.build();
baz.build();
assert_that(foo.cargo_process("build"),
execs().with_status(0));
assert_that(foo.cargo("doc"),
execs().with_status(0));
});
test!(plugin_with_dynamic_native_dependency {
if !::is_nightly() { return }
let build = project("builder")
.file("Cargo.toml", r#"
[package]
name = "builder"
version = "0.0.1"
authors = []
[lib]
name = "builder"
crate-type = ["dylib"]
"#)
.file("src/lib.rs", r#"
#[no_mangle]
pub extern fn foo() {}
"#);
assert_that(build.cargo_process("build"),
execs().with_status(0));
let src = build.root().join("target/debug");
let lib = fs::read_dir(&src).unwrap().map(|s| s.unwrap().path()).find(|lib| {
let lib = lib.file_name().unwrap().to_str().unwrap();
lib.starts_with(env::consts::DLL_PREFIX) &&
lib.ends_with(env::consts::DLL_SUFFIX)
}).unwrap();
let libname = lib.file_name().unwrap().to_str().unwrap();
let libname = &libname[env::consts::DLL_PREFIX.len()..
libname.len() - env::consts::DLL_SUFFIX.len()];
let foo = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies.bar]
path = "bar"
"#)
.file("src/main.rs", r#"
#![feature(plugin)]
#![plugin(bar)]
fn main() {}
"#)
.file("bar/Cargo.toml", r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
build = 'build.rs'
[lib]
name = "bar"
plugin = true<|fim▁hole|> use std::path::PathBuf;
use std::env;
fn main() {
let src = PathBuf::from(env::var("SRC").unwrap());
println!("cargo:rustc-flags=-L {}", src.parent().unwrap()
.display());
}
"#)
.file("bar/src/lib.rs", r#"
#![feature(plugin_registrar, rustc_private)]
extern crate rustc_plugin;
use rustc_plugin::Registry;
#[cfg_attr(not(target_env = "msvc"), link(name = "builder"))]
#[cfg_attr(target_env = "msvc", link(name = "builder.dll"))]
extern { fn foo(); }
#[plugin_registrar]
pub fn bar(_reg: &mut Registry) {
unsafe { foo() }
}
"#);
assert_that(foo.cargo_process("build").env("SRC", &lib).arg("-v"),
execs().with_status(0));
});
test!(plugin_integration {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
build = "build.rs"
[lib]
name = "foo"
plugin = true
doctest = false
"#)
.file("build.rs", "fn main() {}")
.file("src/lib.rs", "")
.file("tests/it_works.rs", "");
assert_that(p.cargo_process("test").arg("-v"),
execs().with_status(0));
});
test!(doctest_a_plugin {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies]
bar = { path = "bar" }
"#)
.file("src/lib.rs", r#"
#[macro_use]
extern crate bar;
"#)
.file("bar/Cargo.toml", r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
[lib]
name = "bar"
plugin = true
"#)
.file("bar/src/lib.rs", r#"
pub fn bar() {}
"#);
assert_that(p.cargo_process("test").arg("-v"),
execs().with_status(0));
});
// See #1515
test!(native_plugin_dependency_with_custom_ar_linker {
let target = ::rustc_host();
let foo = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[lib]
name = "foo"
plugin = true
"#)
.file("src/lib.rs", "");
let bar = project("bar")
.file("Cargo.toml", r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
[lib]
name = "bar"
[dependencies.foo]
path = "../foo"
"#)
.file("src/lib", "")
.file(".cargo/config", &format!(r#"
[target.{}]
ar = "nonexistent-ar"
linker = "nonexistent-linker"
"#, target));
foo.build();
assert_that(bar.cargo_process("build").arg("--verbose"),
execs().with_stdout(&format!("\
{compiling} foo v0.0.1 ({url})
{running} `rustc [..] -C ar=nonexistent-ar -C linker=nonexistent-linker [..]`
", compiling = COMPILING, running = RUNNING, url = bar.url())))
});<|fim▁end|> | "#)
.file("bar/build.rs", r#" |
<|file_name|>SetPEVersion.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# 2016-10-20T16:00+08:00
import fnmatch
import glob
import itertools
import os
import re
import subprocess
import sys
import fileutil
# Match version numbers of these formats:
# 1.2.3
# 1.2.3.4
version_number_re = r'([0-9]+(?:\.[0-9]+){2,3})'
# Match version numbers of this format:
# 1.2.3 (exactly three components)
incomplete_version_number_re = r'^[0-9]+(?:\.[0-9]+){2}$'
# Match a line, in Changelog.txt, which contains a valid version number
version_line_re = r'^### v{0}.*$'.format(version_number_re)
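# Illustrative examples (hypothetical version strings):
#   re.match(version_line_re, '### v1.2.3 2016-10-20').groups()[0]  -> '1.2.3'
#   re.match(incomplete_version_number_re, '1.2.3')                 -> matches (x.x.x)
#   re.match(incomplete_version_number_re, '1.2.3.4')               -> None (already x.x.x.x)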
def get_topmost_version_line(changelog_file):
with open(changelog_file, 'r', encoding=fileutil.get_file_encoding(changelog_file, 'utf-8')) as fp:
return next(filter(lambda line: re.match(version_line_re, line), fp.readlines()))
def get_version_number(version_line):
match_res = re.match(version_line_re, version_line)
assert match_res, 'Invalid version line'
if match_res:
return match_res.groups()[0]
# 1.2.3 -> 1.2.3.0
def canonicalize_version_number(version_number):
assert re.match(r'^{0}$'.format(version_number_re), version_number), 'Invalid version number format (neither x.x.x nor x.x.x.x)'
if re.match(incomplete_version_number_re, version_number):
version_number += '.0'
return version_number
def perror(*args, **kwargs):
sys.stderr.write(*args, **kwargs)
sys.exit(1)
def quote_path(path):
if path.startswith('"') and path.endswith('"'):
return path
return '"{0}"'.format(path)
def is_dll_or_exe(file):
assert os.path.isfile(file)
return fnmatch.fnmatch(file, '*.dll') or fnmatch.fnmatch(file, '*.exe')
def _get_full_path(candidate_path, file_name):
if candidate_path is None:
candidate_path = ''
if os.path.isfile(candidate_path):
return candidate_path
elif os.path.isdir(candidate_path):
return os.path.join(candidate_path, file_name)
else:
return os.path.join(os.path.dirname(sys.argv[0]), file_name)
def _iterate_module_files_legacy(module_path):
assert os.path.isdir(module_path)
yield from filter(is_dll_or_exe,
map(lambda item: os.path.join(module_path, item),
os.listdir(module_path)))
def _iterate_module_files_new(module_path):
assert os.path.isdir(module_path)
yield from filter(is_dll_or_exe,
filter(os.path.isfile,
map(lambda item_name: os.path.join(module_path, item_name),
map(lambda item: item.name,
os.scandir(module_path)))))
# `os.scandir` is new in Python 3.5, and Python 3.5 needs Windows Vista or higher.
if sys.version_info >= (3, 5):
iterate_module_files_v1 = _iterate_module_files_new
run_subprocess = subprocess.run
else:
iterate_module_files_v1 = _iterate_module_files_legacy
run_subprocess = subprocess.call
_module_patterns = '*.dll', '*.exe'
def iterate_module_files_v2(module_path):
assert os.path.isdir(module_path)
for pattern in _module_patterns:
pattern = os.path.join(module_path, pattern)
yield from glob.iglob(pattern)
def iterate_module_files_v3(module_path):
assert os.path.isdir(module_path)
yield from itertools.chain.from_iterable(
glob.iglob(pattern) for pattern in map(lambda pattern: os.path.join(module_path, pattern), _module_patterns))
def main():
"""
Usage:
SetPEVersion.py (--module-path=<PATH>) [--changelog=FILE] [--stampver=FILE] [--debug]
SetPEVersion.py -h | --help
SetPEVersion.py -v | --version
Options:
-c FILE --changelog=FILE Specify the full path of "Changelog.txt"
-s FILE --stampver=FILE Specify the full path of "StampVer.exe"
-m PATH --module-path=PATH Specify a single module file(DLL or EXE) or a directory that contains module files
-d --debug Show more messages for debug purpose
-h --help Show this help message
-v --version Show version message
"""
import docopt
import pprint
args = docopt.docopt(main.__doc__, version='SetPEVersion v0.1.0')
changelog = _get_full_path(args['--changelog'], 'Changelog.txt')
stampver = _get_full_path(args['--stampver'], 'StampVer.exe')
if not os.path.isfile(changelog):
perror('Changelog file not found at "{0}".'.format(changelog))
if not os.path.isfile(stampver):
perror('StampVer.exe not found at "{0}".'.format(changelog))
modules = []<|fim▁hole|> if args['--module-path']:
if os.path.isfile(args['--module-path']):
modules.append(args['--module-path'])
elif os.path.isdir(args['--module-path']):
modules.extend(iterate_module_files_v3(args['--module-path']))
else:
perror('Invalid module path "{0}": Neither an existing file nor an existing directory.'.format(args['--module-path']))
else:
perror('"--module-path" option is required.')
# Get the topmost line which contains a valid version number from Changelog.txt
topmost_version_line = get_topmost_version_line(changelog)
version_number = canonicalize_version_number(get_version_number(topmost_version_line))
if args['--debug']:
print('-' * 79)
print(args)
print(changelog)
print(stampver)
print(version_number)
pprint.pprint(modules)
print('-' * 79)
for module in modules:
# Code below doesn't work for `StampVer.exe`.
#cmd_args = (stampver, '-k', '-f"{0}"'.format(version_number), '-p"{0}"'.format(version_number), module)
#subprocess.run(cmd_args)
#
# so I have to quote those arguments all by myself
cmd_args = ' '.join((quote_path(stampver), '-k', '-f"{0}"'.format(version_number), '-p"{0}"'.format(version_number), quote_path(module)))
run_subprocess(cmd_args)
if __name__ == '__main__':
main()
# References:
# Ongoing-Study/cpp/msvc_cmdline_args/msvc_cmdline_args.cpp
# [Python glob multiple filetypes](http://stackoverflow.com/questions/4568580/python-glob-multiple-filetypes)
# https://github.com/telegramdesktop/tdesktop/blob/dev/Telegram/build/set_version.py<|fim▁end|> | |
<|file_name|>BC.py<|end_file_name|><|fim▁begin|>"""
compressible-specific boundary conditions. Here, in particular, we
implement an HSE BC in the vertical direction.
Note: the pyro BC routines operate on a single variable at a time, so
some work will necessarily be repeated.
Also note: we may come in here with the aux_data (source terms), so
we'll do a special case for them
"""
import compressible.eos as eos
from util import msg
import math
import numpy as np
def user(bc_name, bc_edge, variable, ccdata):
"""
A hydrostatic boundary. This integrates the equation of HSE into
the ghost cells to get the pressure and density under the assumption
that the specific internal energy is constant.
Upon exit, the ghost cells for the input variable will be set
Parameters
----------
bc_name : {'hse'}
The descriptive name for the boundary condition -- this allows
for pyro to have multiple types of user-supplied boundary
conditions. For this module, it needs to be 'hse'.
bc_edge : {'ylb', 'yrb'}
The boundary to update: ylb = lower y boundary; yrb = upper y
boundary.
variable : {'density', 'x-momentum', 'y-momentum', 'energy'}
The variable whose ghost cells we are filling
ccdata : CellCenterData2d object
The data object
"""
myg = ccdata.grid
if bc_name == "hse":
if bc_edge == "ylb":
# lower y boundary
# we will take the density to be constant, the velocity to
# be outflow, and the pressure to be in HSE
if variable in ["density", "x-momentum", "y-momentum", "ymom_src", "E_src", "fuel", "ash"]:
v = ccdata.get_var(variable)
j = myg.jlo-1
while j >= 0:
v[:, j] = v[:, myg.jlo]
j -= 1
elif variable == "energy":
dens = ccdata.get_var("density")
xmom = ccdata.get_var("x-momentum")
ymom = ccdata.get_var("y-momentum")
ener = ccdata.get_var("energy")
grav = ccdata.get_aux("grav")
gamma = ccdata.get_aux("gamma")
dens_base = dens[:, myg.jlo]
ke_base = 0.5*(xmom[:, myg.jlo]**2 + ymom[:, myg.jlo]**2) / \
dens[:, myg.jlo]
eint_base = (ener[:, myg.jlo] - ke_base)/dens[:, myg.jlo]
pres_base = eos.pres(gamma, dens_base, eint_base)
# we are assuming that the density is constant in this
# formulation of HSE, so the pressure comes simply from
# differencing the HSE equation
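            # dp/dy = rho*g  =>  p_{j-1} = p_j - rho*g*dy when stepping downward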
j = myg.jlo-1
while j >= 0:
pres_below = pres_base - grav*dens_base*myg.dy
rhoe = eos.rhoe(gamma, pres_below)
ener[:, j] = rhoe + ke_base
pres_base = pres_below.copy()
j -= 1
else:
raise NotImplementedError("variable not defined")
elif bc_edge == "yrb":
# upper y boundary
# we will take the density to be constant, the velocity to
# be outflow, and the pressure to be in HSE
if variable in ["density", "x-momentum", "y-momentum", "ymom_src", "E_src", "fuel", "ash"]:
v = ccdata.get_var(variable)
for j in range(myg.jhi+1, myg.jhi+myg.ng+1):
v[:, j] = v[:, myg.jhi]
elif variable == "energy":
dens = ccdata.get_var("density")
xmom = ccdata.get_var("x-momentum")
ymom = ccdata.get_var("y-momentum")
ener = ccdata.get_var("energy")
grav = ccdata.get_aux("grav")
gamma = ccdata.get_aux("gamma")
dens_base = dens[:, myg.jhi]
ke_base = 0.5*(xmom[:, myg.jhi]**2 + ymom[:, myg.jhi]**2) / \
dens[:, myg.jhi]
eint_base = (ener[:, myg.jhi] - ke_base)/dens[:, myg.jhi]
pres_base = eos.pres(gamma, dens_base, eint_base)
# we are assuming that the density is constant in this
# formulation of HSE, so the pressure comes simply from
# differencing the HSE equation
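            # dp/dy = rho*g  =>  p_{j+1} = p_j + rho*g*dy when stepping upward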
for j in range(myg.jhi+1, myg.jhi+myg.ng+1):
pres_above = pres_base + grav*dens_base*myg.dy
rhoe = eos.rhoe(gamma, pres_above)
ener[:, j] = rhoe + ke_base
pres_base = pres_above.copy()
else:
raise NotImplementedError("variable not defined")
else:
msg.fail("error: hse BC not supported for xlb or xrb")
<|fim▁hole|> # Boundary conditions for double Mach reflection problem
gamma = ccdata.get_aux("gamma")
if bc_edge == "xlb":
# lower x boundary
# inflow condition with post shock setup
v = ccdata.get_var(variable)
i = myg.ilo - 1
if variable in ["density", "x-momentum", "y-momentum", "energy"]:
val = inflow_post_bc(variable, gamma)
while i >= 0:
v[i, :] = val
i = i - 1
else:
v[:, :] = 0.0 # no source term
elif bc_edge == "ylb":
# lower y boundary
# for x > 1./6., reflective boundary
# for x < 1./6., inflow with post shock setup
if variable in ["density", "x-momentum", "y-momentum", "energy"]:
v = ccdata.get_var(variable)
j = myg.jlo - 1
jj = 0
while j >= 0:
xcen_l = myg.x < 1.0/6.0
xcen_r = myg.x >= 1.0/6.0
v[xcen_l, j] = inflow_post_bc(variable, gamma)
if variable == "y-momentum":
v[xcen_r, j] = -1.0*v[xcen_r, myg.jlo+jj]
else:
v[xcen_r, j] = v[xcen_r, myg.jlo+jj]
j = j - 1
jj = jj + 1
else:
v = ccdata.get_var(variable)
v[:, :] = 0.0 # no source term
elif bc_edge == "yrb":
# upper y boundary
            # time-dependent boundary: the shock front moves at Mach 10,
            # inclined 30 degrees clockwise from the x-axis.
            # the x coordinate of each cell determines whether it lies in the
            # pure post-shock region, the pure pre-shock region, or a mixed region.
if variable in ["density", "x-momentum", "y-momentum", "energy"]:
v = ccdata.get_var(variable)
for j in range(myg.jhi+1, myg.jhi+myg.ng+1):
shockfront_up = 1.0/6.0 + (myg.y[j] + 0.5*myg.dy*math.sqrt(3))/math.tan(math.pi/3.0) \
+ (10.0/math.sin(math.pi/3.0))*ccdata.t
shockfront_down = 1.0/6.0 + (myg.y[j] - 0.5*myg.dy*math.sqrt(3))/math.tan(math.pi/3.0) \
+ (10.0/math.sin(math.pi/3.0))*ccdata.t
shockfront = np.array([shockfront_down, shockfront_up])
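                    # sample the state at four corner points (two x offsets x
                    # two shock-front positions) and average them with weight
                    # 0.25 each, so cells straddling the shock get a blended value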
for i in range(myg.ihi+myg.ng+1):
v[i, j] = 0.0
cx_down = myg.x[i] - 0.5*myg.dx*math.sqrt(3)
cx_up = myg.x[i] + 0.5*myg.dx*math.sqrt(3)
cx = np.array([cx_down, cx_up])
for sf in shockfront:
for x in cx:
if x < sf:
v[i, j] = v[i, j] + 0.25*inflow_post_bc(variable, gamma)
else:
v[i, j] = v[i, j] + 0.25*inflow_pre_bc(variable, gamma)
else:
v = ccdata.get_var(variable)
v[:, :] = 0.0 # no source term
else:
msg.fail("error: bc type %s not supported" % (bc_name))
def inflow_post_bc(var, g):
# inflow boundary condition with post shock setup
r_l = 8.0
u_l = 7.1447096
v_l = -4.125
p_l = 116.5
if var == "density":
vl = r_l
elif var == "x-momentum":
vl = r_l*u_l
elif var == "y-momentum":
vl = r_l*v_l
elif var == "energy":
vl = p_l/(g - 1.0) + 0.5*r_l*(u_l*u_l + v_l*v_l)
else:
vl = 0.0
return vl
def inflow_pre_bc(var, g):
# pre shock setup
r_r = 1.4
u_r = 0.0
v_r = 0.0
p_r = 1.0
if var == "density":
vl = r_r
elif var == "x-momentum":
vl = r_r*u_r
elif var == "y-momentum":
vl = r_r*v_r
elif var == "energy":
vl = p_r/(g - 1.0) + 0.5*r_r*(u_r*u_r + v_r*v_r)
else:
vl = 0.0
return vl<|fim▁end|> | elif bc_name == "ramp": |
<|file_name|>collectors_test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""Test the collector flows."""
import os
from grr.client import vfs
from grr.lib import action_mocks
from grr.lib import aff4
from grr.lib import artifact
from grr.lib import artifact_lib
from grr.lib import artifact_test
from grr.lib import flags
from grr.lib import rdfvalue
from grr.lib import test_lib
from grr.lib import utils
from grr.lib.flows.general import collectors
from grr.lib.flows.general import transfer
from grr.test_data import client_fixture
# pylint: mode=test
class CollectorTest(artifact_test.ArtifactTest):
pass
class TestArtifactCollectors(CollectorTest):
"""Test the artifact collection mechanism with fake artifacts."""
def setUp(self):
"""Make sure things are initialized."""
super(TestArtifactCollectors, self).setUp()
self.original_artifact_reg = artifact_lib.ArtifactRegistry.artifacts
artifact_lib.ArtifactRegistry.ClearRegistry()
self.LoadTestArtifacts()
artifact_reg = artifact_lib.ArtifactRegistry.artifacts
self.fakeartifact = artifact_reg["FakeArtifact"]
self.fakeartifact2 = artifact_reg["FakeArtifact2"]
self.output_count = 0
with aff4.FACTORY.Open(self.client_id, token=self.token, mode="rw") as fd:
fd.Set(fd.Schema.SYSTEM("Linux"))
kb = fd.Schema.KNOWLEDGE_BASE()
artifact.SetCoreGRRKnowledgeBaseValues(kb, fd)
fd.Set(kb)
def tearDown(self):
super(TestArtifactCollectors, self).tearDown()
artifact_lib.ArtifactRegistry.artifacts = self.original_artifact_reg
self.fakeartifact.collectors = [] # Reset any Collectors
self.fakeartifact.conditions = [] # Reset any Conditions
self.fakeartifact2.collectors = [] # Reset any Collectors
self.fakeartifact2.conditions = [] # Reset any Conditions
def testInterpolateArgs(self):
collect_flow = collectors.ArtifactCollectorFlow(None, token=self.token)
collect_flow.state.Register("knowledge_base", rdfvalue.KnowledgeBase())
collect_flow.current_artifact_name = "blah"
collect_flow.state.knowledge_base.MergeOrAddUser(
rdfvalue.KnowledgeBaseUser(username="test1"))
collect_flow.state.knowledge_base.MergeOrAddUser(
rdfvalue.KnowledgeBaseUser(username="test2"))
test_rdf = rdfvalue.KnowledgeBase()
action_args = {"usernames": ["%%users.username%%", "%%users.username%%"],
"nointerp": "asdfsdf", "notastring": test_rdf}
kwargs = collect_flow.InterpolateDict(action_args)
self.assertItemsEqual(kwargs["usernames"],
["test1", "test2", "test1", "test2"])
self.assertEqual(kwargs["nointerp"], "asdfsdf")
self.assertEqual(kwargs["notastring"], test_rdf)
# We should be using an array since users.username will expand to multiple
# values.
self.assertRaises(ValueError, collect_flow.InterpolateDict,
{"bad": "%%users.username%%"})
list_args = collect_flow.InterpolateList(["%%users.username%%",
"%%users.username%%aa"])
self.assertItemsEqual(list_args, ["test1", "test2", "test1aa", "test2aa"])
list_args = collect_flow.InterpolateList(["one"])
self.assertEqual(list_args, ["one"])
def testGrepRegexCombination(self):
collect_flow = collectors.ArtifactCollectorFlow(None, token=self.token)
self.assertEqual(collect_flow._CombineRegex([r"simple"]),
"simple")
self.assertEqual(collect_flow._CombineRegex(["a", "b"]),
"(a)|(b)")
self.assertEqual(collect_flow._CombineRegex(["a", "b", "c"]),
"(a)|(b)|(c)")
self.assertEqual(collect_flow._CombineRegex(["a|b", "[^_]b", "c|d"]),
"(a|b)|([^_]b)|(c|d)")
def testGrep(self):
class MockCallFlow(object):
def CallFlow(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
mock_call_flow = MockCallFlow()
with utils.Stubber(collectors.ArtifactCollectorFlow, "CallFlow",
mock_call_flow.CallFlow):
collect_flow = collectors.ArtifactCollectorFlow(None, token=self.token)
collect_flow.state.Register("knowledge_base", rdfvalue.KnowledgeBase())
collect_flow.current_artifact_name = "blah"
collect_flow.state.knowledge_base.MergeOrAddUser(
rdfvalue.KnowledgeBaseUser(username="test1"))
collect_flow.state.knowledge_base.MergeOrAddUser(
rdfvalue.KnowledgeBaseUser(username="test2"))
collector = rdfvalue.Collector(
collector_type=rdfvalue.Collector.CollectorType.GREP,
args={"path_list": ["/etc/passwd"],
"content_regex_list": [r"^a%%users.username%%b$"]})
collect_flow.Grep(collector, rdfvalue.PathSpec.PathType.TSK)
conditions = mock_call_flow.kwargs["conditions"]
self.assertEqual(len(conditions), 1)
regexes = conditions[0].contents_regex_match.regex.SerializeToString()
self.assertItemsEqual(regexes.split("|"), ["(^atest1b$)", "(^atest2b$)"])
self.assertEqual(mock_call_flow.kwargs["paths"], ["/etc/passwd"])
def testGetArtifact1(self):
"""Test we can get a basic artifact."""
client_mock = action_mocks.ActionMock("TransferBuffer", "StatFile", "Find",
"FingerprintFile", "HashBuffer")
client = aff4.FACTORY.Open(self.client_id, token=self.token, mode="rw")
client.Set(client.Schema.SYSTEM("Linux"))
client.Flush()
# Dynamically add a Collector specifying the base path.
file_path = os.path.join(self.base_path, "test_img.dd")
coll1 = rdfvalue.Collector(
collector_type=rdfvalue.Collector.CollectorType.FILE,
args={"path_list": [file_path]})
self.fakeartifact.collectors.append(coll1)
artifact_list = ["FakeArtifact"]
for _ in test_lib.TestFlowHelper("ArtifactCollectorFlow", client_mock,
artifact_list=artifact_list, use_tsk=False,
token=self.token, client_id=self.client_id
):
pass
# Test the AFF4 file that was created.
fd1 = aff4.FACTORY.Open("%s/fs/os/%s" % (self.client_id, file_path),
token=self.token)
fd2 = open(file_path)
fd2.seek(0, 2)
self.assertEqual(fd2.tell(), int(fd1.Get(fd1.Schema.SIZE)))
def testRunGrrClientActionArtifact(self):
"""Test we can get a GRR client artifact."""
client_mock = action_mocks.ActionMock("ListProcesses")
client = aff4.FACTORY.Open(self.client_id, token=self.token, mode="rw")
client.Set(client.Schema.SYSTEM("Linux"))
client.Flush()
coll1 = rdfvalue.Collector(
collector_type=rdfvalue.Collector.CollectorType.GRR_CLIENT_ACTION,
args={"client_action": r"ListProcesses"})
self.fakeartifact.collectors.append(coll1)
artifact_list = ["FakeArtifact"]
for _ in test_lib.TestFlowHelper("ArtifactCollectorFlow", client_mock,
artifact_list=artifact_list,
token=self.token, client_id=self.client_id,
output="test_artifact"
):
pass
# Test the AFF4 file that was created.
fd = aff4.FACTORY.Open(rdfvalue.RDFURN(self.client_id).Add("test_artifact"),
token=self.token)
self.assertTrue(isinstance(list(fd)[0], rdfvalue.Process))
self.assertTrue(len(fd) > 5)
def testRunGrrClientActionArtifactSplit(self):
"""Test that artifacts get split into separate collections."""
client_mock = action_mocks.ActionMock("ListProcesses", "StatFile")
client = aff4.FACTORY.Open(self.client_id, token=self.token, mode="rw")
client.Set(client.Schema.SYSTEM("Linux"))
client.Flush()
coll1 = rdfvalue.Collector(
collector_type=rdfvalue.Collector.CollectorType.GRR_CLIENT_ACTION,
args={"client_action": r"ListProcesses"})
self.fakeartifact.collectors.append(coll1)
self.fakeartifact2.collectors.append(coll1)
artifact_list = ["FakeArtifact", "FakeArtifact2"]
for _ in test_lib.TestFlowHelper("ArtifactCollectorFlow", client_mock,
artifact_list=artifact_list,
token=self.token, client_id=self.client_id,
output="test_artifact",
split_output_by_artifact=True):
pass
# Check that we got two separate collections based on artifact name
fd = aff4.FACTORY.Open(rdfvalue.RDFURN(
self.client_id).Add("test_artifact_FakeArtifact"),
token=self.token)
self.assertTrue(isinstance(list(fd)[0], rdfvalue.Process))
self.assertTrue(len(fd) > 5)
fd = aff4.FACTORY.Open(rdfvalue.RDFURN(
self.client_id).Add("test_artifact_FakeArtifact2"),
token=self.token)
self.assertTrue(len(fd) > 5)
self.assertTrue(isinstance(list(fd)[0], rdfvalue.Process))
def testConditions(self):
"""Test we can get a GRR client artifact with conditions."""
# Run with false condition.
client_mock = action_mocks.ActionMock("ListProcesses")
coll1 = rdfvalue.Collector(
collector_type=rdfvalue.Collector.CollectorType.GRR_CLIENT_ACTION,
args={"client_action": "ListProcesses"},
conditions=["os == 'Windows'"])
self.fakeartifact.collectors.append(coll1)
fd = self._RunClientActionArtifact(client_mock, ["FakeArtifact"])<|fim▁hole|> # Now run with matching or condition.
coll1.conditions = ["os == 'Linux' or os == 'Windows'"]
self.fakeartifact.collectors = []
self.fakeartifact.collectors.append(coll1)
fd = self._RunClientActionArtifact(client_mock, ["FakeArtifact"])
self.assertEqual(fd.__class__.__name__, "RDFValueCollection")
# Now run with impossible or condition.
coll1.conditions.append("os == 'NotTrue'")
self.fakeartifact.collectors = []
self.fakeartifact.collectors.append(coll1)
fd = self._RunClientActionArtifact(client_mock, ["FakeArtifact"])
self.assertEqual(fd.__class__.__name__, "AFF4Volume")
def testSupportedOS(self):
"""Test supported_os inside the collector object."""
# Run with false condition.
client_mock = action_mocks.ActionMock("ListProcesses")
coll1 = rdfvalue.Collector(
collector_type=rdfvalue.Collector.CollectorType.GRR_CLIENT_ACTION,
args={"client_action": "ListProcesses"}, supported_os=["Windows"])
self.fakeartifact.collectors.append(coll1)
fd = self._RunClientActionArtifact(client_mock, ["FakeArtifact"])
self.assertEqual(fd.__class__.__name__, "AFF4Volume")
# Now run with matching or condition.
coll1.conditions = []
coll1.supported_os = ["Linux", "Windows"]
self.fakeartifact.collectors = []
self.fakeartifact.collectors.append(coll1)
fd = self._RunClientActionArtifact(client_mock, ["FakeArtifact"])
self.assertEqual(fd.__class__.__name__, "RDFValueCollection")
# Now run with impossible or condition.
coll1.conditions = ["os == 'Linux' or os == 'Windows'"]
coll1.supported_os = ["NotTrue"]
self.fakeartifact.collectors = []
self.fakeartifact.collectors.append(coll1)
fd = self._RunClientActionArtifact(client_mock, ["FakeArtifact"])
self.assertEqual(fd.__class__.__name__, "AFF4Volume")
def _RunClientActionArtifact(self, client_mock, artifact_list):
client = aff4.FACTORY.Open(self.client_id, token=self.token, mode="rw")
client.Set(client.Schema.SYSTEM("Linux"))
client.Flush()
self.output_count += 1
output = "test_artifact_%d" % self.output_count
for _ in test_lib.TestFlowHelper("ArtifactCollectorFlow", client_mock,
artifact_list=artifact_list,
token=self.token, client_id=self.client_id,
output=output
):
pass
    # Return the output object. If the flow did not run (e.g. its conditions
    # failed), no collection was created and the caller will see a plain
    # AFF4Volume instead of an RDFValueCollection.
fd = aff4.FACTORY.Open(rdfvalue.RDFURN(self.client_id).Add(output),
token=self.token)
return fd
class TestArtifactCollectorsInteractions(CollectorTest):
"""Test the collection of artifacts.
This class loads both real and test artifacts to test the interaction of badly
defined artifacts with real artifacts.
"""
def setUp(self):
"""Add test artifacts to existing registry."""
super(TestArtifactCollectorsInteractions, self).setUp()
self.original_artifact_reg = artifact_lib.ArtifactRegistry.artifacts
self.LoadTestArtifacts()
def tearDown(self):
super(TestArtifactCollectorsInteractions, self).tearDown()
artifact_lib.ArtifactRegistry.artifacts = self.original_artifact_reg
def testProcessCollectedArtifacts(self):
"""Test downloading files from artifacts."""
client = aff4.FACTORY.Open(self.client_id, token=self.token, mode="rw")
client.Set(client.Schema.SYSTEM("Windows"))
client.Set(client.Schema.OS_VERSION("6.2"))
client.Flush()
vfs.VFS_HANDLERS[
rdfvalue.PathSpec.PathType.REGISTRY] = test_lib.FakeRegistryVFSHandler
vfs.VFS_HANDLERS[
rdfvalue.PathSpec.PathType.OS] = test_lib.FakeFullVFSHandler
client_mock = action_mocks.ActionMock("TransferBuffer", "StatFile", "Find",
"HashBuffer", "FingerprintFile",
"ListDirectory")
# Get KB initialized
for _ in test_lib.TestFlowHelper(
"KnowledgeBaseInitializationFlow", client_mock,
client_id=self.client_id, token=self.token):
pass
artifact_list = ["WindowsPersistenceMechanismFiles"]
with test_lib.Instrument(
transfer.MultiGetFile, "Start") as getfile_instrument:
for _ in test_lib.TestFlowHelper("ArtifactCollectorFlow", client_mock,
artifact_list=artifact_list,
token=self.token,
client_id=self.client_id,
output="analysis/{p}/{u}-{t}",
split_output_by_artifact=True):
pass
# Check MultiGetFile got called for our runkey files
# TODO(user): RunKeys for S-1-5-20 are not found because users.sid only
# expands to users with profiles.
pathspecs = getfile_instrument.args[0][0].args.pathspecs
self.assertItemsEqual([x.path for x in pathspecs],
[u"C:\\Windows\\TEMP\\A.exe"])
artifact_list = ["BadPathspecArtifact"]
with test_lib.Instrument(
transfer.MultiGetFile, "Start") as getfile_instrument:
for _ in test_lib.TestFlowHelper("ArtifactCollectorFlow", client_mock,
artifact_list=artifact_list,
token=self.token,
client_id=self.client_id,
output="analysis/{p}/{u}-{t}",
split_output_by_artifact=True):
pass
self.assertFalse(getfile_instrument.args)
class TestArtifactCollectorsRealArtifacts(CollectorTest):
"""Test the collection of real artifacts."""
def _CheckDriveAndRoot(self):
client_mock = action_mocks.ActionMock("StatFile", "ListDirectory")
for _ in test_lib.TestFlowHelper("ArtifactCollectorFlow", client_mock,
artifact_list=[
"SystemDriveEnvironmentVariable"],
token=self.token, client_id=self.client_id,
output="testsystemdrive"):
pass
fd = aff4.FACTORY.Open(rdfvalue.RDFURN(
self.client_id).Add("testsystemdrive"), token=self.token)
self.assertEqual(len(fd), 1)
self.assertEqual(str(fd[0]), "C:")
for _ in test_lib.TestFlowHelper("ArtifactCollectorFlow", client_mock,
artifact_list=["SystemRoot"],
token=self.token, client_id=self.client_id,
output="testsystemroot"):
pass
fd = aff4.FACTORY.Open(
rdfvalue.RDFURN(self.client_id).Add("testsystemroot"), token=self.token)
self.assertEqual(len(fd), 1)
# Filesystem gives WINDOWS, registry gives Windows
self.assertTrue(str(fd[0]) in [r"C:\Windows", r"C:\WINDOWS"])
def testSystemDriveArtifact(self):
client = aff4.FACTORY.Open(self.client_id, token=self.token, mode="rw")
client.Set(client.Schema.SYSTEM("Windows"))
client.Set(client.Schema.OS_VERSION("6.2"))
client.Flush()
class BrokenClientMock(action_mocks.ActionMock):
def StatFile(self, _):
raise IOError
def ListDirectory(self, _):
raise IOError
# No registry, broken filesystem, this should just raise.
with self.assertRaises(RuntimeError):
for _ in test_lib.TestFlowHelper("ArtifactCollectorFlow",
BrokenClientMock(), artifact_list=[
"SystemDriveEnvironmentVariable"],
token=self.token,
client_id=self.client_id,
output="testsystemdrive"):
pass
# No registry, so this should use the fallback flow
vfs.VFS_HANDLERS[
rdfvalue.PathSpec.PathType.OS] = test_lib.ClientVFSHandlerFixture
self._CheckDriveAndRoot()
# Registry is present, so this should use the regular artifact collection
vfs.VFS_HANDLERS[
rdfvalue.PathSpec.PathType.REGISTRY] = test_lib.FakeRegistryVFSHandler
self._CheckDriveAndRoot()
def testRunWMIComputerSystemProductArtifact(self):
class WMIActionMock(action_mocks.ActionMock):
def WmiQuery(self, _):
return client_fixture.WMI_CMP_SYS_PRD
client = aff4.FACTORY.Open(self.client_id, token=self.token, mode="rw")
client.Set(client.Schema.SYSTEM("Windows"))
client.Set(client.Schema.OS_VERSION("6.2"))
client.Flush()
client_mock = WMIActionMock()
for _ in test_lib.TestFlowHelper(
"ArtifactCollectorFlow", client_mock,
artifact_list=["WMIComputerSystemProduct"], token=self.token,
client_id=self.client_id,
dependencies=rdfvalue.ArtifactCollectorFlowArgs.Dependency.IGNORE_DEPS,
store_results_in_aff4=True):
pass
    client = aff4.FACTORY.Open(self.client_id, token=self.token)
hardware = client.Get(client.Schema.HARDWARE_INFO)
self.assertTrue(isinstance(hardware, rdfvalue.HardwareInfo))
self.assertEqual(str(hardware.serial_number), "2RXYYZ1")
def testRunWMIArtifact(self):
class WMIActionMock(action_mocks.ActionMock):
def WmiQuery(self, _):
return client_fixture.WMI_SAMPLE
client = aff4.FACTORY.Open(self.client_id, token=self.token, mode="rw")
client.Set(client.Schema.SYSTEM("Windows"))
client.Set(client.Schema.OS_VERSION("6.2"))
client.Flush()
client_mock = WMIActionMock()
for _ in test_lib.TestFlowHelper(
"ArtifactCollectorFlow", client_mock, artifact_list=["WMILogicalDisks"],
token=self.token, client_id=self.client_id,
dependencies=rdfvalue.ArtifactCollectorFlowArgs.Dependency.IGNORE_DEPS,
store_results_in_aff4=True):
pass
# Test that we set the client VOLUMES attribute
client = aff4.FACTORY.Open(self.client_id, token=self.token)
volumes = client.Get(client.Schema.VOLUMES)
self.assertEqual(len(volumes), 2)
for result in volumes:
self.assertTrue(isinstance(result, rdfvalue.Volume))
self.assertTrue(result.windows.drive_letter in ["Z:", "C:"])
if result.windows.drive_letter == "C:":
self.assertAlmostEqual(result.FreeSpacePercent(), 76.142, delta=0.001)
self.assertEqual(result.Name(), "C:")
elif result.windows.drive_letter == "Z:":
self.assertEqual(result.Name(), "homefileshare$")
self.assertAlmostEqual(result.FreeSpacePercent(), 58.823, delta=0.001)
def testRetrieveDependencies(self):
"""Test getting an artifact without a KB using retrieve_depdendencies."""
client = aff4.FACTORY.Open(self.client_id, token=self.token, mode="rw")
client.Set(client.Schema.SYSTEM("Windows"))
client.Set(client.Schema.OS_VERSION("6.2"))
client.Flush()
vfs.VFS_HANDLERS[
rdfvalue.PathSpec.PathType.REGISTRY] = test_lib.FakeRegistryVFSHandler
vfs.VFS_HANDLERS[
rdfvalue.PathSpec.PathType.OS] = test_lib.FakeFullVFSHandler
client_mock = action_mocks.ActionMock("TransferBuffer", "StatFile", "Find",
"HashBuffer", "FingerprintFile",
"ListDirectory")
artifact_list = ["WinDirEnvironmentVariable"]
for _ in test_lib.TestFlowHelper(
"ArtifactCollectorFlow", client_mock, artifact_list=artifact_list,
token=self.token, client_id=self.client_id,
dependencies=rdfvalue.ArtifactCollectorFlowArgs.Dependency.FETCH_NOW,
output="testRetrieveDependencies"):
pass
output = aff4.FACTORY.Open(self.client_id.Add("testRetrieveDependencies"),
token=self.token)
self.assertEqual(len(output), 1)
self.assertEqual(output[0], r"C:\Windows")
def main(argv):
# Run the full test suite
test_lib.GrrTestProgram(argv=argv)
if __name__ == "__main__":
flags.StartMain(main)<|fim▁end|> | self.assertEqual(fd.__class__.__name__, "AFF4Volume")
|
<|file_name|>0003_auto__del_new__add_news.py<|end_file_name|><|fim▁begin|># encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration<|fim▁hole|>class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting model 'New'
db.delete_table('news_new')
# Removing M2M table for field projects_relateds on 'New'
db.delete_table('news_new_projects_relateds')
# Adding model 'News'
db.create_table('news_news', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('title', self.gf('django.db.models.fields.CharField')(max_length=200)),
('summary', self.gf('django.db.models.fields.CharField')(max_length=200, null=True, blank=True)),
('body', self.gf('django.db.models.fields.TextField')()),
('image', self.gf('django.db.models.fields.files.ImageField')(max_length=100, null=True, blank=True)),
('author', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
('datetime', self.gf('django.db.models.fields.DateTimeField')()),
))
db.send_create_signal('news', ['News'])
# Adding M2M table for field projects_relateds on 'News'
db.create_table('news_news_projects_relateds', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('news', models.ForeignKey(orm['news.news'], null=False)),
('project', models.ForeignKey(orm['projects.project'], null=False))
))
db.create_unique('news_news_projects_relateds', ['news_id', 'project_id'])
def backwards(self, orm):
# Adding model 'New'
db.create_table('news_new', (
('body', self.gf('django.db.models.fields.TextField')()),
('author', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
('title', self.gf('django.db.models.fields.CharField')(max_length=200)),
('image', self.gf('django.db.models.fields.files.ImageField')(max_length=100, null=True, blank=True)),
('summary', self.gf('django.db.models.fields.CharField')(max_length=200, null=True, blank=True)),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('datetime', self.gf('django.db.models.fields.DateTimeField')()),
))
db.send_create_signal('news', ['New'])
# Adding M2M table for field projects_relateds on 'New'
db.create_table('news_new_projects_relateds', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('new', models.ForeignKey(orm['news.new'], null=False)),
('project', models.ForeignKey(orm['projects.project'], null=False))
))
db.create_unique('news_new_projects_relateds', ['new_id', 'project_id'])
# Deleting model 'News'
db.delete_table('news_news')
# Removing M2M table for field projects_relateds on 'News'
db.delete_table('news_news_projects_relateds')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'news.news': {
'Meta': {'object_name': 'News'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'body': ('django.db.models.fields.TextField', [], {}),
'datetime': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'projects_relateds': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['projects.Project']", 'null': 'True', 'blank': 'True'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'projects.project': {
'Meta': {'object_name': 'Project'},
'description': ('django.db.models.fields.TextField', [], {}),
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'logo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'sponsor': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'start_date': ('django.db.models.fields.DateField', [], {}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['news']<|fim▁end|> | from django.db import models
|
<|file_name|>maintenance_controller.py<|end_file_name|><|fim▁begin|># Copyright (C) 2019 OpenMotics BV
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Module for handling maintenance mode
"""
from __future__ import absolute_import
import logging
import random
import socket
from gateway.daemon_thread import BaseThread
from gateway.exceptions import InMaintenanceModeException
from ioc import INJECTED, Inject, Injectable, Singleton
from platform_utils import System
if False: # MYPY
from typing import Dict, Optional, Callable, Any, ByteString
from master.maintenance_communicator import MaintenanceCommunicator
logger = logging.getLogger(__name__)
@Injectable.named('maintenance_controller')
@Singleton
class MaintenanceController(object):
SOCKET_TIMEOUT = 60
@Inject
def __init__(self, maintenance_communicator=INJECTED, ssl_private_key=INJECTED, ssl_certificate=INJECTED):
# type: (MaintenanceCommunicator, str, str) -> None
"""
:type maintenance_communicator: gateway.maintenance_communicator.MaintenanceCommunicator
"""
self._consumers = {} # type: Dict[int, Callable[[str], Any]]
self._privatekey_filename = ssl_private_key
self._certificate_filename = ssl_certificate
self._maintenance_communicator = maintenance_communicator
if self._maintenance_communicator:
self._maintenance_communicator.set_receiver(self._received_data)
self._connection = None # type: Optional[socket.socket]
self._server_thread = None # type: Optional[BaseThread]
#######################
# Internal management #
#######################
def start(self):
# type: () -> None
if self._maintenance_communicator:
self._maintenance_communicator.start()
<|fim▁hole|> self._maintenance_communicator.stop()
def _received_data(self, message):
# type: (str) -> None
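        # Fan the incoming maintenance output out to the open socket
        # connection (if any) and to every registered consumer callback.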
try:
if self._connection is not None:
self._connection.sendall(bytearray(message.rstrip().encode()) + bytearray(b'\n'))
except Exception:
logger.exception('Exception forwarding maintenance data to socket connection.')
for consumer_id in self._consumers.keys():
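            # The consumer may have been removed in the meantime, so look it
            # up defensively instead of assuming the key is still present.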
callback = self._consumers.get(consumer_id)
if callback is None:
continue
try:
callback(message.rstrip())
except Exception:
logger.exception('Exception forwarding maintenance data to consumer %s', str(consumer_id))
def _activate(self):
# type: () -> None
if not self._maintenance_communicator.is_active():
self._maintenance_communicator.activate()
def _deactivate(self):
# type: () -> None
if self._maintenance_communicator.is_active():
self._maintenance_communicator.deactivate()
#################
# Subscriptions #
#################
def add_consumer(self, consumer_id, callback):
# type: (int, Callable[[str],Any]) -> None
self._consumers[consumer_id] = callback
self._activate()
def remove_consumer(self, consumer_id):
# type: (int) -> None
self._consumers.pop(consumer_id, None)
if not self._consumers:
logger.info('Stopping maintenance mode due to no consumers.')
self._deactivate()
##########
# Socket #
##########
def open_maintenace_socket(self):
# type: () -> int
"""
Opens a TCP/SSL socket, connecting it with the maintenance service
"""
port = random.randint(6000, 7000)
self._server_thread = BaseThread(name='maintenancesock', target=self._run_socket_server, args=[port])
self._server_thread.daemon = True
self._server_thread.start()
return port
def _run_socket_server(self, port):
# type: (int) -> None
connection_timeout = MaintenanceController.SOCKET_TIMEOUT
logger.info('Starting maintenance socket on port %s', port)
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.settimeout(connection_timeout)
sock = System.get_ssl_socket(sock,
private_key_filename=self._privatekey_filename,
certificate_filename=self._certificate_filename)
sock.bind(('', port))
sock.listen(1)
try:
logger.info('Waiting for maintenance connection.')
self._connection, address = sock.accept()
logger.info('Maintenance connection from %s', str(address))
self._handle_connection()
logger.info('Maintenance session ended, closing maintenance socket')
sock.close()
except socket.timeout:
logger.info('Maintenance socket timed out, closing.')
sock.close()
except Exception:
logger.exception('Error in maintenance service')
sock.close()
def _handle_connection(self):
# type: () -> None
"""
Handles one incoming connection.
"""
assert self._connection is not None
try:
self._connection.settimeout(1)
self._connection.sendall(b'Activating maintenance mode, waiting for other actions to complete ...\n')
self._activate()
self._connection.sendall(b'Connected\n')
while self._maintenance_communicator.is_active():
try:
try:
data = self._connection.recv(1024)
if not data:
logger.info('Stopping maintenance mode due to no data.')
break
if data.startswith(b'exit'):
logger.info('Stopping maintenance mode due to exit.')
break
self._maintenance_communicator.write(data.decode())
except Exception as exception:
if System.handle_socket_exception(self._connection, exception, logger):
continue
else:
logger.exception('Unexpected exception receiving connection data')
break
except Exception:
logger.exception('Exception in maintenance mode')
break
except InMaintenanceModeException:
self._connection.sendall(b'Maintenance mode already active.\n')
finally:
self._deactivate()
logger.info('Maintenance mode deactivated')
self._connection.close()
self._connection = None
#######
# I/O #
#######
def write(self, message):
# type: (bytes) -> None
self._maintenance_communicator.write(str(message.decode()))<|fim▁end|> | def stop(self):
# type: () -> None
if self._maintenance_communicator: |
<|file_name|>manage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import sys
args = ' '.join(sys.argv[1:])<|fim▁hole|>print(f"""Deprecated as of commit 959939b771. Use flask utility script instead:
$ flask {args}
""")
raise SystemExit(1)<|fim▁end|> | |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>/**
* Dependencies
* @type {exports}
*/
var fs = require('fs')
, q = require('q')
, _ = require('underscore')
, path = require('path')
, natural = require('natural')
, nounInflector = new natural.NounInflector()
, argv = require('optimist').argv<|fim▁hole|> , generator = require('./model.generator');
/**
* Arguments
*/
var directory = path.resolve(process.argv[2])
, dest = path.resolve(process.argv[3]);
var host = ""
, version = "1.0"
, rel = "";
/**
* Functions
*/
var resourceName = function(model) {
return nounInflector.pluralize(model.modelName).toLowerCase();
};
/**
* load a model
* @param modelPath
* @returns {*}
*/
var loadModel = function(modelPath) {
return require(directory + '/' + modelPath);
};
/**
* Write the schema file
* @param modelPath
*/
var profileModel = function(modelPath) {
var model = loadModel(modelPath);
var schema = generator(host, version, model, rel);
    mkdir(dest);
    fs.writeFile(dest + '/' + resourceName(model) + '.json', JSON.stringify(schema, false, 2), function(err) {
        if (err) console.error('Failed to write schema for ' + resourceName(model) + ':', err);
    });
};
/**
* Read models from directory
*/
fs.readdir(directory, function(err, files) {
_.each(files, profileModel);
});
/**
* Make a directory
* @param path
* @param root
* @returns {boolean|*}
*/
function mkdir(path, root) {
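    // NB: the "path" parameter shadows the "path" module required above.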
var dirs = path.split('/')
, dir = dirs.shift()
, root = (root || '') + dir + '/';
try {
fs.mkdirSync(root);
}
catch(e) {
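        // The directory may already exist; only rethrow if root is not a directory.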
if (!fs.statSync(root).isDirectory()) {
throw new Error(e);
}
}
return !dirs.length || mkdir(dirs.join('/'), root);
}<|fim▁end|> | , path = require('path') |
<|file_name|>auth.js<|end_file_name|><|fim▁begin|>import {
SIGN_IN,<|fim▁hole|> SIGN_IN_SUCCESS,
SIGN_IN_ERROR,
SIGN_OUT,
SIGN_OUT_SUCCESS,
USER_LOGGED,
USER_UNLOGGED
} from '../../constants/action-types';
const INITIAL_STATE = {
isUserSignedIn: false,
isSignOutInProgress: false,
isInProgress: null,
hasError: false,
errorMessage: '',
userIsLogged: null,
user: {}
};
export default function(state = INITIAL_STATE, action) {
switch (action.type) {
case USER_LOGGED:
return {
...state,
userIsLogged: true,
user: {
...action.payload,
}
};
case USER_UNLOGGED:
return {
...state,
userIsLogged: false,
user: {}
};
case SIGN_IN_SUCCESS:
return {
...state,
isUserSignedIn: true,
isInProgress: false
};
case SIGN_IN:
return {
...state,
isInProgress: true
};
case SIGN_IN_ERROR:
return {
...state,
hasError: true,
error: action.error
};
case SIGN_OUT_SUCCESS:
return {
...state,
isUserSignedIn: false,
isSignOutInProgress: false
};
case SIGN_OUT:
return {
...state,
isSignOutInProgress: true
};
default:
return state;
}
}<|fim▁end|> | |
<|file_name|>BiQuad.cpp<|end_file_name|><|fim▁begin|>/***************************************************/
/*! \class BiQuad
\brief STK biquad (two-pole, two-zero) filter class.
This class implements a two-pole, two-zero digital filter.
Methods are provided for creating a resonance or notch in the
frequency response while maintaining a constant filter gain.
by Perry R. Cook and Gary P. Scavone, 1995-2012.
*/
/***************************************************/
#include "BiQuad.h"
#include <cmath>
namespace stk {
BiQuad :: BiQuad() : Filter()
{
b_.resize( 3, 0.0 );
a_.resize( 3, 0.0 );
b_[0] = 1.0;
a_[0] = 1.0;
inputs_.resize( 3, 1, 0.0 );
outputs_.resize( 3, 1, 0.0 );
Stk::addSampleRateAlert( this );
}
BiQuad :: ~BiQuad()
{
Stk::removeSampleRateAlert( this );
}
void BiQuad :: setCoefficients( StkFloat b0, StkFloat b1, StkFloat b2, StkFloat a1, StkFloat a2, bool clearState )
{
b_[0] = b0;
b_[1] = b1;
b_[2] = b2;
a_[1] = a1;<|fim▁hole|> a_[2] = a2;
if ( clearState ) this->clear();
}
void BiQuad :: sampleRateChanged( StkFloat newRate, StkFloat oldRate )
{
if ( !ignoreSampleRateChange_ ) {
std::cerr << "BiQuad::sampleRateChanged: you may need to recompute filter coefficients!";
handleError( StkError::WARNING );
}
}
void BiQuad :: setResonance( StkFloat frequency, StkFloat radius, bool normalize )
{
#if defined(_STK_DEBUG_)
if ( frequency < 0.0 || frequency > 0.5 * Stk::sampleRate() ) {
std::cerr << "BiQuad::setResonance: frequency argument (" << frequency << ") is out of range!";
handleError( StkError::WARNING ); return;
}
if ( radius < 0.0 || radius >= 1.0 ) {
std::cerr << "BiQuad::setResonance: radius argument (" << radius << ") is out of range!";
handleError( StkError::WARNING ); return;
}
#endif
a_[2] = radius * radius;
a_[1] = -2.0 * radius * cos( TWO_PI * frequency / Stk::sampleRate() );
if ( normalize ) {
// Use zeros at +- 1 and normalize the filter peak gain.
b_[0] = 0.5 - 0.5 * a_[2];
b_[1] = 0.0;
b_[2] = -b_[0];
}
}
void BiQuad :: setNotch( StkFloat frequency, StkFloat radius )
{
#if defined(_STK_DEBUG_)
if ( frequency < 0.0 || frequency > 0.5 * Stk::sampleRate() ) {
std::cerr << "BiQuad::setNotch: frequency argument (" << frequency << ") is out of range!";
handleError( StkError::WARNING ); return;
}
if ( radius < 0.0 ) {
std::cerr << "BiQuad::setNotch: radius argument (" << radius << ") is negative!";
handleError( StkError::WARNING ); return;
}
#endif
// This method does not attempt to normalize the filter gain.
b_[2] = radius * radius;
b_[1] = (StkFloat) -2.0 * radius * cos( TWO_PI * (double) frequency / Stk::sampleRate() );
}
void BiQuad :: setEqualGainZeroes( void )
{
b_[0] = 1.0;
b_[1] = 0.0;
b_[2] = -1.0;
}
} // stk namespace<|fim▁end|> | |
<|file_name|>basic_shape.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! CSS handling for the specified value of
//! [`basic-shape`][basic-shape]s
//!
//! [basic-shape]: https://drafts.csswg.org/css-shapes/#typedef-basic-shape
use cssparser::Parser;
use parser::{Parse, ParserContext};
use std::borrow::Cow;
use std::fmt;
use style_traits::{ToCss, ParseError, StyleParseErrorKind};
use values::computed::Percentage;
use values::generics::basic_shape::{Circle as GenericCircle};
use values::generics::basic_shape::{ClippingShape as GenericClippingShape, Ellipse as GenericEllipse};
use values::generics::basic_shape::{FillRule, BasicShape as GenericBasicShape};
use values::generics::basic_shape::{FloatAreaShape as GenericFloatAreaShape, InsetRect as GenericInsetRect};
use values::generics::basic_shape::{GeometryBox, ShapeBox, ShapeSource};
use values::generics::basic_shape::{Polygon as GenericPolygon, ShapeRadius as GenericShapeRadius};
use values::generics::rect::Rect;
use values::specified::LengthOrPercentage;
use values::specified::border::BorderRadius;
use values::specified::position::{HorizontalPosition, Position, PositionComponent, Side, VerticalPosition};
use values::specified::url::SpecifiedUrl;
/// A specified clipping shape.
pub type ClippingShape = GenericClippingShape<BasicShape, SpecifiedUrl>;
/// A specified float area shape.
pub type FloatAreaShape = GenericFloatAreaShape<BasicShape, SpecifiedUrl>;
/// A specified basic shape.
pub type BasicShape = GenericBasicShape<HorizontalPosition, VerticalPosition, LengthOrPercentage>;
/// The specified value of `inset()`
pub type InsetRect = GenericInsetRect<LengthOrPercentage>;
/// A specified circle.
pub type Circle = GenericCircle<HorizontalPosition, VerticalPosition, LengthOrPercentage>;
/// A specified ellipse.
pub type Ellipse = GenericEllipse<HorizontalPosition, VerticalPosition, LengthOrPercentage>;
/// The specified value of `ShapeRadius`
pub type ShapeRadius = GenericShapeRadius<LengthOrPercentage>;
/// The specified value of `Polygon`
pub type Polygon = GenericPolygon<LengthOrPercentage>;
impl<ReferenceBox: Parse> Parse for ShapeSource<BasicShape, ReferenceBox, SpecifiedUrl> {
fn parse<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> {
if input.try(|i| i.expect_ident_matching("none")).is_ok() {
return Ok(ShapeSource::None)
}
if let Ok(url) = input.try(|i| SpecifiedUrl::parse(context, i)) {
return Ok(ShapeSource::Url(url))
}
fn parse_component<U: Parse>(context: &ParserContext, input: &mut Parser,
component: &mut Option<U>) -> bool {
if component.is_some() {
return false // already parsed this component
}
*component = input.try(|i| U::parse(context, i)).ok();
component.is_some()
}
let mut shape = None;
let mut ref_box = None;
while parse_component(context, input, &mut shape) ||
parse_component(context, input, &mut ref_box) {
//
}
if let Some(shp) = shape {
return Ok(ShapeSource::Shape(shp, ref_box))
}
ref_box.map(|v| ShapeSource::Box(v)).ok_or(input.new_custom_error(StyleParseErrorKind::UnspecifiedError))
}
}
impl Parse for GeometryBox {
fn parse<'i, 't>(_context: &ParserContext, input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> {
if let Ok(shape_box) = input.try(|i| ShapeBox::parse(i)) {
return Ok(GeometryBox::ShapeBox(shape_box))
}
try_match_ident_ignore_ascii_case! { input,
"fill-box" => Ok(GeometryBox::FillBox),
"stroke-box" => Ok(GeometryBox::StrokeBox),
"view-box" => Ok(GeometryBox::ViewBox),
}
}
}
impl Parse for BasicShape {
fn parse<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> {
let location = input.current_source_location();
let function = input.expect_function()?.clone();
input.parse_nested_block(move |i| {
(match_ignore_ascii_case! { &function,
"inset" => return InsetRect::parse_function_arguments(context, i).map(GenericBasicShape::Inset),
"circle" => return Circle::parse_function_arguments(context, i).map(GenericBasicShape::Circle),
"ellipse" => return Ellipse::parse_function_arguments(context, i).map(GenericBasicShape::Ellipse),
"polygon" => return Polygon::parse_function_arguments(context, i).map(GenericBasicShape::Polygon),
_ => Err(())
}).map_err(|()| location.new_custom_error(StyleParseErrorKind::UnexpectedFunction(function.clone())))
})
}
}
impl Parse for InsetRect {
fn parse<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> {
input.expect_function_matching("inset")?;
input.parse_nested_block(|i| Self::parse_function_arguments(context, i))
}
}
impl InsetRect {
/// Parse the inner function arguments of `inset()`
pub fn parse_function_arguments<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>)
-> Result<Self, ParseError<'i>> {
let rect = Rect::parse_with(context, input, LengthOrPercentage::parse)?;
let round = if input.try(|i| i.expect_ident_matching("round")).is_ok() {
Some(BorderRadius::parse(context, input)?)
} else {
None
};
Ok(GenericInsetRect {
rect: rect,
round: round,
})
}
}
impl Parse for Circle {
fn parse<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>)
-> Result<Self, ParseError<'i>> {
input.expect_function_matching("circle")?;
input.parse_nested_block(|i| Self::parse_function_arguments(context, i))
}
}
impl Circle {
#[allow(missing_docs)]
pub fn parse_function_arguments<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>)
-> Result<Self, ParseError<'i>> {
let radius = input.try(|i| ShapeRadius::parse(context, i)).ok().unwrap_or_default();
let position = if input.try(|i| i.expect_ident_matching("at")).is_ok() {
Position::parse(context, input)?
} else {
Position::center()
};
Ok(GenericCircle {
radius: radius,
position: position,
})
}
}
impl ToCss for Circle {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
dest.write_str("circle(")?;
if GenericShapeRadius::ClosestSide != self.radius {
self.radius.to_css(dest)?;
dest.write_str(" ")?;
}
dest.write_str("at ")?;
serialize_basicshape_position(&self.position, dest)?;
dest.write_str(")")
}
}
impl Parse for Ellipse {
fn parse<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> {
input.expect_function_matching("ellipse")?;
input.parse_nested_block(|i| Self::parse_function_arguments(context, i))
}
}
impl Ellipse {
#[allow(missing_docs)]
pub fn parse_function_arguments<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>)
-> Result<Self, ParseError<'i>> {
let (a, b) = input.try(|i| -> Result<_, ParseError> {
Ok((ShapeRadius::parse(context, i)?, ShapeRadius::parse(context, i)?))
}).ok().unwrap_or_default();
let position = if input.try(|i| i.expect_ident_matching("at")).is_ok() {
Position::parse(context, input)?
} else {
Position::center()
};
Ok(GenericEllipse {
semiaxis_x: a,
semiaxis_y: b,
position: position,
})
}
}
impl ToCss for Ellipse {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
dest.write_str("ellipse(")?;
if self.semiaxis_x != ShapeRadius::default() || self.semiaxis_y != ShapeRadius::default() {
self.semiaxis_x.to_css(dest)?;
dest.write_str(" ")?;
self.semiaxis_y.to_css(dest)?;
dest.write_str(" ")?;
}
dest.write_str("at ")?;
serialize_basicshape_position(&self.position, dest)?;
dest.write_str(")")
}
}
impl Parse for ShapeRadius {
fn parse<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>)
-> Result<Self, ParseError<'i>> {
if let Ok(lop) = input.try(|i| LengthOrPercentage::parse_non_negative(context, i)) {
return Ok(GenericShapeRadius::Length(lop))
}
try_match_ident_ignore_ascii_case! { input,
"closest-side" => Ok(GenericShapeRadius::ClosestSide),
"farthest-side" => Ok(GenericShapeRadius::FarthestSide),
}
}
}
/// <https://drafts.csswg.org/css-shapes/#basic-shape-serialization>
///
/// Positions get serialized differently with basic shapes. Keywords
/// are converted to percentages where possible. Only the two or four
/// value forms are used. In case of two keyword-percentage pairs,
/// the keywords are folded into the percentages
fn serialize_basicshape_position<W>(position: &Position, dest: &mut W) -> fmt::Result
where W: fmt::Write
{
fn to_keyword_and_lop<S>(component: &PositionComponent<S>) -> (S, Cow<LengthOrPercentage>)
where S: Copy + Side
{
match *component {
PositionComponent::Center => {
(S::start(), Cow::Owned(LengthOrPercentage::Percentage(Percentage(0.5))))
},
PositionComponent::Side(keyword, None) => {
// left | top => 0%
// right | bottom => 100%
let p = if keyword.is_start() { 0. } else { 1. };
(S::start(), Cow::Owned(LengthOrPercentage::Percentage(Percentage(p))))
},<|fim▁hole|> (S::start(), Cow::Owned(LengthOrPercentage::Percentage(Percentage(1. - p.0))))
} else {
(keyword, Cow::Borrowed(lop))
}
},
PositionComponent::Length(ref lop) |
PositionComponent::Side(_, Some(ref lop)) => {
(S::start(), to_non_zero_length(lop))
},
}
}
fn to_non_zero_length(lop: &LengthOrPercentage) -> Cow<LengthOrPercentage> {
match *lop {
LengthOrPercentage::Length(ref l) if l.is_zero() => {
Cow::Owned(LengthOrPercentage::Percentage(Percentage(0.)))
},
_ => {
Cow::Borrowed(lop)
}
}
}
fn write_pair<A, B, W>(a: &A, b: &B, dest: &mut W) -> fmt::Result
where A: ToCss, B: ToCss, W: fmt::Write
{
a.to_css(dest)?;
dest.write_str(" ")?;
b.to_css(dest)
}
let (x_pos, x_lop) = to_keyword_and_lop(&position.horizontal);
let (y_pos, y_lop) = to_keyword_and_lop(&position.vertical);
if x_pos.is_start() && y_pos.is_start() {
return write_pair(&*x_lop, &*y_lop, dest);
}
write_pair(&x_pos, &*x_lop, dest)?;
dest.write_str(" ")?;
write_pair(&y_pos, &*y_lop, dest)
}
impl Parse for Polygon {
fn parse<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> {
input.expect_function_matching("polygon")?;
input.parse_nested_block(|i| Self::parse_function_arguments(context, i))
}
}
impl Polygon {
/// Parse the inner arguments of a `polygon` function.
pub fn parse_function_arguments<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>)
-> Result<Self, ParseError<'i>> {
let fill = input.try(|i| -> Result<_, ParseError> {
let fill = FillRule::parse(i)?;
i.expect_comma()?; // only eat the comma if there is something before it
Ok(fill)
}).ok().unwrap_or_default();
let buf = input.parse_comma_separated(|i| {
Ok((LengthOrPercentage::parse(context, i)?, LengthOrPercentage::parse(context, i)?))
})?;
Ok(Polygon {
fill: fill,
coordinates: buf,
})
}
}<|fim▁end|> | PositionComponent::Side(keyword, Some(ref lop)) if !keyword.is_start() => {
if let LengthOrPercentage::Percentage(p) = *to_non_zero_length(lop) { |
<|file_name|>inlines.js<|end_file_name|><|fim▁begin|>/**
* Django admin inlines
*<|fim▁hole|> * Based on jQuery Formset 1.1
* @author Stanislaus Madueke (stan DOT madueke AT gmail DOT com)
* @requires jQuery 1.2.6 or later
*
* Copyright (c) 2009, Stanislaus Madueke
* All rights reserved.
*
* Spiced up with Code from Zain Memon's GSoC project 2009
* and modified for Django by Jannis Leidel, Travis Swicegood and Julien Phalip.
*
* Licensed under the New BSD License
* See: http://www.opensource.org/licenses/bsd-license.php
*/
(function($) {
$.fn.formset = function(opts) {
var options = $.extend({}, $.fn.formset.defaults, opts);
var $this = $(this);
var $parent = $this.parent();
var nextIndex = get_no_forms(options.prefix);
//store the options. This is needed for nested inlines, to recreate the same form
var group = $this.closest('.inline-group');
group.data('django_formset', options);
// Add form classes for dynamic behaviour
$this.each(function(i) {
$(this).not("." + options.emptyCssClass).addClass(options.formCssClass);
});
if (isAddButtonVisible(options)) {
var addButton;
if ($this.attr("tagName") == "TR") {
// If forms are laid out as table rows, insert the
// "add" button in a new table row:
var numCols = this.eq(-1).children().length;
$parent.append('<tr class="' + options.addCssClass + '"><td colspan="' + numCols + '"><a href="javascript:void(0)">' + options.addText + "</a></tr>");
addButton = $parent.find("tr:last a");
} else {
// Otherwise, insert it immediately after the last form:
$this.filter(":last").after('<div class="' + options.addCssClass + '"><a href="javascript:void(0)">' + options.addText + "</a></div>");
addButton = $this.filter(":last").next().find("a");
}
addButton.click(function(e) {
e.preventDefault();
addRow(options);
});
}
return this;
};
/* Setup plugin defaults */
$.fn.formset.defaults = {
prefix : "form", // The form prefix for your django formset
addText : "add another", // Text for the add link
deleteText : "remove", // Text for the delete link
addCssClass : "add-row", // CSS class applied to the add link
deleteCssClass : "delete-row", // CSS class applied to the delete link
emptyCssClass : "empty-row", // CSS class applied to the empty row
formCssClass : "dynamic-form", // CSS class applied to each form in a formset
added : null, // Function called each time a new form is added
removed : null // Function called each time a form is deleted
};
// Tabular inlines ---------------------------------------------------------
$.fn.tabularFormset = function(options) {
var $rows = $(this);
var alternatingRows = function(row) {
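        // Re-stripe the visible rows; nested-inline rows inherit the row class
        // of the parent row directly above them.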
        var row_number = 0;
$($rows.selector).not(".add-row").removeClass("row1 row2").each(function() {
$(this).addClass('row' + ((row_number%2)+1));
            var next = $(this).next();
while (next.hasClass('nested-inline-row')) {
next.addClass('row' + ((row_number%2)+1));
next = next.next();
}
row_number = row_number + 1;
});
};
var reinitDateTimeShortCuts = function() {
// Reinitialize the calendar and clock widgets by force
if ( typeof DateTimeShortcuts != "undefined") {
$(".datetimeshortcuts").remove();
DateTimeShortcuts.init();
}
};
var updateSelectFilter = function() {
// If any SelectFilter widgets are a part of the new form,
// instantiate a new SelectFilter instance for it.
if ( typeof SelectFilter != 'undefined') {
$('.selectfilter').each(function(index, value) {
var namearr = value.name.split('-');
SelectFilter.init(value.id, namearr[namearr.length - 1], false, options.adminStaticPrefix);
});
$('.selectfilterstacked').each(function(index, value) {
var namearr = value.name.split('-');
SelectFilter.init(value.id, namearr[namearr.length - 1], true, options.adminStaticPrefix);
});
}
};
var initPrepopulatedFields = function(row) {
row.find('.prepopulated_field').each(function() {
var field = $(this), input = field.find('input, select, textarea'), dependency_list = input.data('dependency_list') || [], dependencies = [];
$.each(dependency_list, function(i, field_name) {
dependencies.push('#' + row.find('.field-' + field_name).find('input, select, textarea').attr('id'));
});
if (dependencies.length) {
input.prepopulate(dependencies, input.attr('maxlength'));
}
});
};
$rows.formset({
prefix : options.prefix,
addText : options.addText,
formCssClass : "dynamic-" + options.prefix,
deleteCssClass : "inline-deletelink",
deleteText : options.deleteText,
emptyCssClass : "empty-form",
removed : function(row) {
alternatingRows(row);
if(options.removed) options.removed(row);
},
added : function(row) {
initPrepopulatedFields(row);
reinitDateTimeShortCuts();
updateSelectFilter();
alternatingRows(row);
if(options.added) options.added(row);
}
});
return $rows;
};
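// Illustrative invocation (ids, prefixes and paths are hypothetical):
//
//     $("#book_set-group .tabular tr.form-row").tabularFormset({
//         prefix : "book_set",
//         adminStaticPrefix : "/static/admin/",
//         addText : "add another book",
//         deleteText : "remove"
//     });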
// Stacked inlines ---------------------------------------------------------
$.fn.stackedFormset = function(options) {
var $rows = $(this);
var update_inline_labels = function(formset_to_update) {
formset_to_update.children('.inline-related').not('.empty-form').children('h3').find('.inline_label').each(function(i) {
var count = i + 1;
$(this).html($(this).html().replace(/(#\d+)/g, "#" + count));
});
};
var reinitDateTimeShortCuts = function() {
// Reinitialize the calendar and clock widgets by force, yuck.
if ( typeof DateTimeShortcuts != "undefined") {
$(".datetimeshortcuts").remove();
DateTimeShortcuts.init();
}
};
var updateSelectFilter = function() {
// If any SelectFilter widgets were added, instantiate a new instance.
if ( typeof SelectFilter != "undefined") {
$(".selectfilter").each(function(index, value) {
var namearr = value.name.split('-');
SelectFilter.init(value.id, namearr[namearr.length - 1], false, options.adminStaticPrefix);
});
$(".selectfilterstacked").each(function(index, value) {
var namearr = value.name.split('-');
SelectFilter.init(value.id, namearr[namearr.length - 1], true, options.adminStaticPrefix);
});
}
};
var initPrepopulatedFields = function(row) {
row.find('.prepopulated_field').each(function() {
var field = $(this), input = field.find('input, select, textarea'), dependency_list = input.data('dependency_list') || [], dependencies = [];
$.each(dependency_list, function(i, field_name) {
dependencies.push('#' + row.find('.form-row .field-' + field_name).find('input, select, textarea').attr('id'));
});
if (dependencies.length) {
input.prepopulate(dependencies, input.attr('maxlength'));
}
});
};
$rows.formset({
prefix : options.prefix,
addText : options.addText,
formCssClass : "dynamic-" + options.prefix,
deleteCssClass : "inline-deletelink",
deleteText : options.deleteText,
emptyCssClass : "empty-form",
removed : function(row) {
update_inline_labels(row);
if(options.removed) options.removed(row);
},
added : (function(row) {
initPrepopulatedFields(row);
reinitDateTimeShortCuts();
updateSelectFilter();
update_inline_labels(row.parent());
if(options.added) options.added(row);
})
});
return $rows;
};
function create_nested_formsets(parentPrefix, rowId) {
// We use the first formset as a template, so replace every index with 0.
var sourceParentPrefix = parentPrefix.replace(/[-][0-9][-]/g, "-0-");
var row_prefix = parentPrefix+'-'+rowId;
var row = $('#'+row_prefix);
// Check if the form should have nested formsets
// This is horribly hackish. It tries to collect one set of nested inlines from already existing rows and clone these
var search_space = $("#"+sourceParentPrefix+'-0').nextUntil("."+sourceParentPrefix + "-not-nested");
//all nested inlines
var nested_inlines = search_space.find("." + sourceParentPrefix + "-nested-inline");
nested_inlines.each(function(index) {
// prefixes for the nested formset
var normalized_formset_prefix = $(this).attr('id').split('-group')[0];
// = "parent_formset_prefix"-0-"nested_inline_name"_set
var formset_prefix = normalized_formset_prefix.replace(sourceParentPrefix + "-0", row_prefix);
// = "parent_formset_prefix"-"next_form_id"-"nested_inline_name"_set
// Find the normalized formset and clone it
var template = $(this).clone();
//get the options that were used to create the source formset
var options = $(this).data('django_formset');
//clone, so that we don't modify the old one
options = $.extend({}, options);
options.prefix = formset_prefix;
var isTabular = template.find('#'+normalized_formset_prefix+'-empty').is('tr');
//remove all existing rows from the clone
if (isTabular) {
//tabular
template.find(".form-row").not(".empty-form").remove();
template.find(".nested-inline-row").remove();
} else {
//stacked cleanup
template.find(".inline-related").not(".empty-form").remove();
}
//remove other unnecessary things
template.find('.'+options.addCssClass).remove();
//replace the cloned prefix with the new one
update_props(template, normalized_formset_prefix, formset_prefix);
// Reset the formset management variables.
template.find('#id_' + formset_prefix + '-INITIAL_FORMS').val(0);
template.find('#id_' + formset_prefix + '-TOTAL_FORMS').val(0);
//remove the fk and id values, because these don't exist yet
template.find('.original').empty();
//postprocess stacked/tabular
if (isTabular) {
var formset = template.find('.tabular.inline-related tbody tr.' + formset_prefix + '-not-nested').tabularFormset(options);
var border_class = (index+1 < nested_inlines.length) ? ' no-bottom-border' : '';
var wrapped = $('<tr class="nested-inline-row' + border_class + '"/>').html($('<td colspan="100%"/>').html(template));
//insert the formset after the row
row.after(wrapped);
} else {
var formset = template.find(".inline-related").stackedFormset(options);
row.after(template);
}
// Add an empty row. This will in turn create the nested formsets.
addRow(options);
});
return nested_inlines.length;
};
function update_props(template, normalized_formset_prefix, formset_prefix) {
// Fix template id
template.attr('id', template.attr('id').replace(normalized_formset_prefix, formset_prefix));
template.find('*').each(function() {
if ($(this).attr("for")) {
$(this).attr("for", $(this).attr("for").replace(normalized_formset_prefix, formset_prefix));
}
if ($(this).attr("class")) {
$(this).attr("class", $(this).attr("class").replace(normalized_formset_prefix, formset_prefix));
}
if (this.id) {
this.id = this.id.replace(normalized_formset_prefix, formset_prefix);
}
if (this.name) {
this.name = this.name.replace(normalized_formset_prefix, formset_prefix);
}
});
};
// Returns the number of forms currently in the given formset.
function get_no_forms(formset_prefix) {
var formset_prop = $("#id_" + formset_prefix + "-TOTAL_FORMS");
if (!formset_prop.length) {
return 0;
}
return parseInt(formset_prop.attr("autocomplete", "off").val());
}
function change_no_forms(formset_prefix, increase) {
var no_forms = get_no_forms(formset_prefix);
if (increase) {
$("#id_" + formset_prefix + "-TOTAL_FORMS").attr("autocomplete", "off").val(parseInt(no_forms) + 1);
} else {
$("#id_" + formset_prefix + "-TOTAL_FORMS").attr("autocomplete", "off").val(parseInt(no_forms) - 1);
}
};
// Returns the maximum number of forms allowed in the given formset.
function get_max_forms(formset_prefix) {
var max_forms = $("#id_" + formset_prefix + "-MAX_NUM_FORMS").attr("autocomplete", "off").val();
if ( typeof max_forms == 'undefined' || max_forms == '') {
return '';
}
return parseInt(max_forms);
};
function addRow(options) {
var nextIndex = get_no_forms(options.prefix);
var row = insertNewRow(options.prefix, options);
updateAddButton(options);
// Add delete button handler
row.find("a." + options.deleteCssClass).click(function(e) {
e.preventDefault();
// Find the row that will be deleted by this button
var row = $(this).parents("." + options.formCssClass);
// Remove the parent form containing this button:
var formset_to_update = row.parent();
//remove nested inlines
while (row.next().hasClass('nested-inline-row')) {
row.next().remove();
}
row.remove();
change_no_forms(options.prefix, false);
// If a post-delete callback was provided, call it with the deleted form:
if (options.removed) {
options.removed(formset_to_update);
}
});
var num_formsets = create_nested_formsets(options.prefix, nextIndex);
if(row.is("tr") && num_formsets > 0) {
row.addClass("no-bottom-border");
}
// If a post-add callback was supplied, call it with the added form:
if (options.added) {
options.added(row);
}
nextIndex = nextIndex + 1;
};
function insertNewRow(prefix, options) {
var template = $("#" + prefix + "-empty");
var nextIndex = get_no_forms(prefix);
var row = prepareRowTemplate(template, prefix, nextIndex, options);
// when adding something from a cloned formset the id is the same
// Insert the new form when it has been fully edited
row.insertBefore($(template));
// Update number of total forms
change_no_forms(prefix, true);
return row;
};
function prepareRowTemplate(template, prefix, index, options) {
var row = template.clone(true);
row.removeClass(options.emptyCssClass).addClass(options.formCssClass).attr("id", prefix + "-" + index);
if (row.is("tr")) {
// If the forms are laid out in table rows, insert
// the remove button into the last table cell:
row.children(":last").append('<div><a class="' + options.deleteCssClass + '" href="javascript:void(0)">' + options.deleteText + "</a></div>");
} else if (row.is("ul") || row.is("ol")) {
// If they're laid out as an ordered/unordered list,
// insert an <li> after the last list item:
row.append('<li><a class="' + options.deleteCssClass + '" href="javascript:void(0)">' + options.deleteText + "</a></li>");
} else {
// Otherwise, just insert the remove button as the
// last child element of the form's container:
row.children(":first").append('<span><a class="' + options.deleteCssClass + '" href="javascript:void(0)">' + options.deleteText + "</a></span>");
}
row.find("*").each(function() {
updateElementIndex(this, prefix, index);
});
return row;
};
function updateElementIndex(el, prefix, ndx) {
var id_regex = new RegExp("(" + prefix + "-(\\d+|__prefix__))");
var replacement = prefix + "-" + ndx;
if ($(el).attr("for")) {
$(el).attr("for", $(el).attr("for").replace(id_regex, replacement));
}
if (el.id) {
el.id = el.id.replace(id_regex, replacement);
}
if (el.name) {
el.name = el.name.replace(id_regex, replacement);
}
};
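// For example, with prefix "friends" and ndx 2, an element named
// "friends-__prefix__-name" (cloned from the empty-form template) becomes
// "friends-2-name"; matching "for" attributes and ids are rewritten the
// same way. (The names here are illustrative only.)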
/** show or hide the addButton **/
function updateAddButton(options) {
// Show the add button while more forms may still be added; hide it
// once we've hit the max (no configured max means unlimited adding).
var btn = $("#" + options.prefix + "-empty").parent().children('.'+options.addCssClass);
if (isAddButtonVisible(options)) {
    btn.show();
} else {
    btn.hide();
}
}
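// True while more forms may still be added: either no max is configured
// (MAX_NUM_FORMS is empty) or the current form count is below the max.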
function isAddButtonVisible(options) {
return !(get_max_forms(options.prefix) !== '' && (get_max_forms(options.prefix) - get_no_forms(options.prefix)) <= 0);
}
})(django.jQuery);
// TODO:
// Remove border between tabular fieldset and nested inline
// Fix alternating rows<|fim▁end|> | |
<|file_name|>table_rowgroup.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! CSS table formatting contexts.
#![deny(unsafe_code)]
use app_units::Au;
use block::{BlockFlow, ISizeAndMarginsComputer};
use context::LayoutContext;
use euclid::Point2D;
use flow::{Flow, FlowClass, OpaqueFlow};
use fragment::{Fragment, FragmentBorderBoxIterator, Overflow};
use layout_debug;
use rustc_serialize::{Encodable, Encoder};
use std::fmt;
use std::iter::{IntoIterator, Iterator, Peekable};
use std::sync::Arc;
use style::computed_values::{border_collapse, border_spacing};
use style::logical_geometry::{LogicalSize, WritingMode};
use style::properties::ComputedValues;
use table::{ColumnComputedInlineSize, ColumnIntrinsicInlineSize, InternalTable, TableLikeFlow};
use table_row::{self, CollapsedBordersForRow};
use util::print_tree::PrintTree;
/// A table rowgroup formatting context.
pub struct TableRowGroupFlow {
/// Fields common to all block flows.
pub block_flow: BlockFlow,
/// Information about the intrinsic inline-sizes of each column.
pub column_intrinsic_inline_sizes: Vec<ColumnIntrinsicInlineSize>,
/// Information about the actual inline sizes of each column.
pub column_computed_inline_sizes: Vec<ColumnComputedInlineSize>,
/// The spacing for this rowgroup.
pub spacing: border_spacing::T,
/// The direction of the columns, propagated down from the table during the inline-size
/// assignment phase.
pub table_writing_mode: WritingMode,
/// Information about the borders for each cell that we bubble up to our parent. This is only
/// computed if `border-collapse` is `collapse`.
pub preliminary_collapsed_borders: CollapsedBordersForRow,
/// The final width of the borders in the inline direction for each cell, computed by the
/// entire table and pushed down into each row during inline size computation.
pub collapsed_inline_direction_border_widths_for_table: Vec<Au>,
/// The final width of the borders in the block direction for each cell, computed by the
/// entire table and pushed down into each row during inline size computation.
pub collapsed_block_direction_border_widths_for_table: Vec<Au>,
}
impl Encodable for TableRowGroupFlow {
fn encode<S: Encoder>(&self, e: &mut S) -> Result<(), S::Error> {
self.block_flow.encode(e)
}
}
impl TableRowGroupFlow {
pub fn from_fragment(fragment: Fragment) -> TableRowGroupFlow {
let writing_mode = fragment.style().writing_mode;
TableRowGroupFlow {
block_flow: BlockFlow::from_fragment(fragment, None),
column_intrinsic_inline_sizes: Vec::new(),
column_computed_inline_sizes: Vec::new(),
spacing: border_spacing::T {
horizontal: Au(0),
vertical: Au(0),
},
table_writing_mode: writing_mode,
preliminary_collapsed_borders: CollapsedBordersForRow::new(),
collapsed_inline_direction_border_widths_for_table: Vec::new(),
collapsed_block_direction_border_widths_for_table: Vec::new(),
}
}
pub fn populate_collapsed_border_spacing<'a, I>(
&mut self,
collapsed_inline_direction_border_widths_for_table: &[Au],
collapsed_block_direction_border_widths_for_table: &mut Peekable<I>)
where I: Iterator<Item=&'a Au> {
self.collapsed_inline_direction_border_widths_for_table.clear();
self.collapsed_inline_direction_border_widths_for_table
.extend(collapsed_inline_direction_border_widths_for_table.into_iter().map(|x| *x));
for _ in 0..self.block_flow.base.children.len() {
if let Some(collapsed_block_direction_border_width_for_table) =
collapsed_block_direction_border_widths_for_table.next() {
self.collapsed_block_direction_border_widths_for_table
.push(*collapsed_block_direction_border_width_for_table)
}
}
if let Some(collapsed_block_direction_border_width_for_table) =
collapsed_block_direction_border_widths_for_table.peek() {
self.collapsed_block_direction_border_widths_for_table
.push(**collapsed_block_direction_border_width_for_table)
}
}
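    // Sketch of the bookkeeping above, with hypothetical numbers: given three
    // child rows and block-direction widths [2, 0, 4, 1] pushed down by the
    // table, the first three values are consumed (`next()`) as the rows'
    // leading edges, while the trailing 1 is only peeked, so the edge shared
    // with whatever follows this rowgroup is recorded without consuming it.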
}
impl Flow for TableRowGroupFlow {
fn class(&self) -> FlowClass {
FlowClass::TableRowGroup
}
fn as_mut_table_rowgroup(&mut self) -> &mut TableRowGroupFlow {
self
}
fn as_table_rowgroup(&self) -> &TableRowGroupFlow {
self
}
fn as_mut_block(&mut self) -> &mut BlockFlow {
&mut self.block_flow
}
fn as_block(&self) -> &BlockFlow {
&self.block_flow
}
fn column_intrinsic_inline_sizes(&mut self) -> &mut Vec<ColumnIntrinsicInlineSize> {
&mut self.column_intrinsic_inline_sizes
}
fn column_computed_inline_sizes(&mut self) -> &mut Vec<ColumnComputedInlineSize> {
&mut self.column_computed_inline_sizes
}
fn bubble_inline_sizes(&mut self) {
let _scope = layout_debug_scope!("table_rowgroup::bubble_inline_sizes {:x}",
self.block_flow.base.debug_id());
// Proper calculation of intrinsic sizes in table layout requires access to the entire
// table, which we don't have yet. Defer to our parent.
}
/// Recursively (top-down) determines the actual inline-size of child contexts and fragments.
/// When called on this context, the context has had its inline-size set by the parent context.
fn assign_inline_sizes(&mut self, layout_context: &LayoutContext) {
let _scope = layout_debug_scope!("table_rowgroup::assign_inline_sizes {:x}",
self.block_flow.base.debug_id());<|fim▁hole|>
// The position was set to the containing block by the flow's parent.
let containing_block_inline_size = self.block_flow.base.block_container_inline_size;
let (inline_start_content_edge, inline_end_content_edge) = (Au(0), Au(0));
let content_inline_size = containing_block_inline_size;
let border_collapse = self.block_flow.fragment.style.get_inheritedtable().border_collapse;
let inline_size_computer = InternalTable {
border_collapse: border_collapse,
};
inline_size_computer.compute_used_inline_size(&mut self.block_flow,
layout_context,
containing_block_inline_size);
let column_computed_inline_sizes = &self.column_computed_inline_sizes;
let border_spacing = self.spacing;
let table_writing_mode = self.table_writing_mode;
let collapsed_inline_direction_border_widths_for_table =
&self.collapsed_inline_direction_border_widths_for_table;
let mut collapsed_block_direction_border_widths_for_table =
self.collapsed_block_direction_border_widths_for_table.iter().peekable();
self.block_flow.propagate_assigned_inline_size_to_children(layout_context,
inline_start_content_edge,
inline_end_content_edge,
content_inline_size,
|child_flow,
_child_index,
_content_inline_size,
_writing_mode,
_inline_start_margin_edge,
_inline_end_margin_edge| {
table_row::propagate_column_inline_sizes_to_child(
child_flow,
table_writing_mode,
column_computed_inline_sizes,
&border_spacing);
if border_collapse == border_collapse::T::collapse {
let child_table_row = child_flow.as_mut_table_row();
child_table_row.populate_collapsed_border_spacing(
collapsed_inline_direction_border_widths_for_table,
&mut collapsed_block_direction_border_widths_for_table);
}
});
}
fn assign_block_size<'a>(&mut self, _: &'a LayoutContext<'a>) {
debug!("assign_block_size: assigning block_size for table_rowgroup");
self.block_flow.assign_block_size_for_table_like_flow(self.spacing.vertical)
}
fn compute_absolute_position(&mut self, layout_context: &LayoutContext) {
self.block_flow.compute_absolute_position(layout_context)
}
fn update_late_computed_inline_position_if_necessary(&mut self, inline_position: Au) {
self.block_flow.update_late_computed_inline_position_if_necessary(inline_position)
}
fn update_late_computed_block_position_if_necessary(&mut self, block_position: Au) {
self.block_flow.update_late_computed_block_position_if_necessary(block_position)
}
fn build_display_list(&mut self, layout_context: &LayoutContext) {
debug!("build_display_list_table_rowgroup: same process as block flow");
self.block_flow.build_display_list(layout_context)
}
fn repair_style(&mut self, new_style: &Arc<ComputedValues>) {
self.block_flow.repair_style(new_style)
}
fn compute_overflow(&self) -> Overflow {
self.block_flow.compute_overflow()
}
fn generated_containing_block_size(&self, flow: OpaqueFlow) -> LogicalSize<Au> {
self.block_flow.generated_containing_block_size(flow)
}
fn iterate_through_fragment_border_boxes(&self,
iterator: &mut FragmentBorderBoxIterator,
level: i32,
stacking_context_position: &Point2D<Au>) {
self.block_flow.iterate_through_fragment_border_boxes(iterator, level, stacking_context_position)
}
fn mutate_fragments(&mut self, mutator: &mut FnMut(&mut Fragment)) {
self.block_flow.mutate_fragments(mutator)
}
fn print_extra_flow_children(&self, print_tree: &mut PrintTree) {
self.block_flow.print_extra_flow_children(print_tree);
}
}
impl fmt::Debug for TableRowGroupFlow {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "TableRowGroupFlow: {:?}", self.block_flow)
}
}<|fim▁end|> | debug!("assign_inline_sizes({}): assigning inline_size for flow", "table_rowgroup"); |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""
fs.tests: testcases for the fs module
"""
from __future__ import with_statement
# Send any output from the logging module to stdout, so it will
# be captured by nose and reported appropriately
import sys
import logging
logging.basicConfig(level=logging.ERROR, stream=sys.stdout)
from fs.base import *
from fs.path import *
from fs.errors import *
from fs.filelike import StringIO
import datetime
import unittest
import os
import os.path
import pickle
import random
import copy
import time
try:
import threading
except ImportError:
import dummy_threading as threading
import six
from six import PY3, b
class FSTestCases(object):
"""Base suite of testcases for filesystem implementations.
Any FS subclass should be capable of passing all of these tests.
    To apply the tests to your own FS implementation, simply use FSTestCases
as a mixin for your own unittest.TestCase subclass and have the setUp
method set self.fs to an instance of your FS implementation.
NB. The Filesystem being tested must have a capacity of at least 3MB.
This class is designed as a mixin so that it's not detected by test
loading tools such as nose.
"""
def check(self, p):
"""Check that a file exists within self.fs"""
return self.fs.exists(p)
def test_invalid_chars(self):
"""Check paths validate ok"""
# Will have to be overridden selectively for custom validatepath methods
self.assertEqual(self.fs.validatepath(''), None)
self.assertEqual(self.fs.validatepath('.foo'), None)
self.assertEqual(self.fs.validatepath('foo'), None)
self.assertEqual(self.fs.validatepath('foo/bar'), None)
self.assert_(self.fs.isvalidpath('foo/bar'))
def test_meta(self):
"""Checks getmeta / hasmeta are functioning"""
# getmeta / hasmeta are hard to test, since there is no way to validate
# the implementation's response
meta_names = ["read_only",
"network",
"unicode_paths"]
stupid_meta = 'thismetashouldnotexist!"r$$%^&&*()_+'
self.assertRaises(NoMetaError, self.fs.getmeta, stupid_meta)
self.assertFalse(self.fs.hasmeta(stupid_meta))
self.assertEquals(None, self.fs.getmeta(stupid_meta, None))
self.assertEquals(3.14, self.fs.getmeta(stupid_meta, 3.14))
for meta_name in meta_names:
try:
meta = self.fs.getmeta(meta_name)
self.assertTrue(self.fs.hasmeta(meta_name))
except NoMetaError:
self.assertFalse(self.fs.hasmeta(meta_name))
def test_root_dir(self):
self.assertTrue(self.fs.isdir(""))
self.assertTrue(self.fs.isdir("/"))
# These may be false (e.g. empty dict) but mustn't raise errors
self.fs.getinfo("")
self.assertTrue(self.fs.getinfo("/") is not None)
def test_getsyspath(self):
try:
syspath = self.fs.getsyspath("/")
except NoSysPathError:
pass
else:
self.assertTrue(isinstance(syspath, unicode))
syspath = self.fs.getsyspath("/", allow_none=True)
if syspath is not None:
self.assertTrue(isinstance(syspath, unicode))
def test_debug(self):
str(self.fs)
repr(self.fs)
self.assert_(hasattr(self.fs, 'desc'))
def test_open_on_directory(self):
self.fs.makedir("testdir")
try:
f = self.fs.open("testdir")
except ResourceInvalidError:
pass
except Exception:
raise
ecls = sys.exc_info()[0]
assert False, "%s raised instead of ResourceInvalidError" % (ecls,)
else:
f.close()
assert False, "ResourceInvalidError was not raised"
def test_writefile(self):
self.assertRaises(ResourceNotFoundError, self.fs.open, "test1.txt")
f = self.fs.open("test1.txt", "wb")
f.write(b("testing"))
f.close()
self.assertTrue(self.check("test1.txt"))
f = self.fs.open("test1.txt", "rb")
self.assertEquals(f.read(), b("testing"))
f.close()
f = self.fs.open("test1.txt", "wb")
f.write(b("test file overwrite"))
f.close()
self.assertTrue(self.check("test1.txt"))
f = self.fs.open("test1.txt", "rb")
self.assertEquals(f.read(), b("test file overwrite"))
f.close()
def test_createfile(self):
test = b('now with content')
self.fs.createfile("test.txt")
self.assert_(self.fs.exists("test.txt"))
self.assertEqual(self.fs.getcontents("test.txt", "rb"), b(''))
self.fs.setcontents("test.txt", test)
self.fs.createfile("test.txt")
self.assertEqual(self.fs.getcontents("test.txt", "rb"), test)
self.fs.createfile("test.txt", wipe=True)
self.assertEqual(self.fs.getcontents("test.txt", "rb"), b(''))
def test_setcontents(self):
# setcontents() should accept both a string...
self.fs.setcontents("hello", b("world"))
self.assertEquals(self.fs.getcontents("hello", "rb"), b("world"))
# ...and a file-like object
self.fs.setcontents("hello", StringIO(b("to you, good sir!")))
self.assertEquals(self.fs.getcontents(
"hello", "rb"), b("to you, good sir!"))
# setcontents() should accept both a string...
self.fs.setcontents("hello", b("world"), chunk_size=2)
self.assertEquals(self.fs.getcontents("hello", "rb"), b("world"))
# ...and a file-like object
self.fs.setcontents("hello", StringIO(
b("to you, good sir!")), chunk_size=2)
self.assertEquals(self.fs.getcontents(
"hello", "rb"), b("to you, good sir!"))
self.fs.setcontents("hello", b(""))
self.assertEquals(self.fs.getcontents("hello", "rb"), b(""))
def test_setcontents_async(self):
# setcontents() should accept both a string...
self.fs.setcontents_async("hello", b("world")).wait()
self.assertEquals(self.fs.getcontents("hello", "rb"), b("world"))
# ...and a file-like object
self.fs.setcontents_async("hello", StringIO(
b("to you, good sir!"))).wait()
self.assertEquals(self.fs.getcontents("hello"), b("to you, good sir!"))
self.fs.setcontents_async("hello", b("world"), chunk_size=2).wait()
self.assertEquals(self.fs.getcontents("hello", "rb"), b("world"))
# ...and a file-like object
self.fs.setcontents_async("hello", StringIO(
b("to you, good sir!")), chunk_size=2).wait()
self.assertEquals(self.fs.getcontents(
"hello", "rb"), b("to you, good sir!"))
def test_isdir_isfile(self):
self.assertFalse(self.fs.exists("dir1"))
self.assertFalse(self.fs.isdir("dir1"))
self.assertFalse(self.fs.isfile("a.txt"))
self.fs.setcontents("a.txt", b(''))
self.assertFalse(self.fs.isdir("dir1"))
self.assertTrue(self.fs.exists("a.txt"))
self.assertTrue(self.fs.isfile("a.txt"))
self.assertFalse(self.fs.exists("a.txt/thatsnotadir"))
self.fs.makedir("dir1")
self.assertTrue(self.fs.isdir("dir1"))
self.assertTrue(self.fs.exists("dir1"))
self.assertTrue(self.fs.exists("a.txt"))
self.fs.remove("a.txt")
self.assertFalse(self.fs.exists("a.txt"))
def test_listdir(self):
def check_unicode(items):
for item in items:
self.assertTrue(isinstance(item, unicode))
self.fs.setcontents(u"a", b(''))
self.fs.setcontents("b", b(''))
self.fs.setcontents("foo", b(''))
self.fs.setcontents("bar", b(''))
# Test listing of the root directory
d1 = self.fs.listdir()
self.assertEqual(len(d1), 4)
self.assertEqual(sorted(d1), [u"a", u"b", u"bar", u"foo"])
check_unicode(d1)
d1 = self.fs.listdir("")
self.assertEqual(len(d1), 4)
self.assertEqual(sorted(d1), [u"a", u"b", u"bar", u"foo"])
check_unicode(d1)
d1 = self.fs.listdir("/")
self.assertEqual(len(d1), 4)
check_unicode(d1)
# Test listing absolute paths
d2 = self.fs.listdir(absolute=True)
self.assertEqual(len(d2), 4)
self.assertEqual(sorted(d2), [u"/a", u"/b", u"/bar", u"/foo"])
check_unicode(d2)
# Create some deeper subdirectories, to make sure their
# contents are not inadvertently included
self.fs.makedir("p/1/2/3", recursive=True)
self.fs.setcontents("p/1/2/3/a", b(''))
self.fs.setcontents("p/1/2/3/b", b(''))
self.fs.setcontents("p/1/2/3/foo", b(''))
self.fs.setcontents("p/1/2/3/bar", b(''))
self.fs.makedir("q")
# Test listing just files, just dirs, and wildcards
dirs_only = self.fs.listdir(dirs_only=True)
files_only = self.fs.listdir(files_only=True)
contains_a = self.fs.listdir(wildcard="*a*")
self.assertEqual(sorted(dirs_only), [u"p", u"q"])
self.assertEqual(sorted(files_only), [u"a", u"b", u"bar", u"foo"])
self.assertEqual(sorted(contains_a), [u"a", u"bar"])
check_unicode(dirs_only)
check_unicode(files_only)
check_unicode(contains_a)
# Test listing a subdirectory
d3 = self.fs.listdir("p/1/2/3")
self.assertEqual(len(d3), 4)
self.assertEqual(sorted(d3), [u"a", u"b", u"bar", u"foo"])
check_unicode(d3)
# Test listing a subdirectory with absolute and full paths
d4 = self.fs.listdir("p/1/2/3", absolute=True)
self.assertEqual(len(d4), 4)
self.assertEqual(sorted(d4), [u"/p/1/2/3/a", u"/p/1/2/3/b", u"/p/1/2/3/bar", u"/p/1/2/3/foo"])
check_unicode(d4)
d4 = self.fs.listdir("p/1/2/3", full=True)
self.assertEqual(len(d4), 4)
self.assertEqual(sorted(d4), [u"p/1/2/3/a", u"p/1/2/3/b", u"p/1/2/3/bar", u"p/1/2/3/foo"])
check_unicode(d4)
# Test that appropriate errors are raised
self.assertRaises(ResourceNotFoundError, self.fs.listdir, "zebra")
self.assertRaises(ResourceInvalidError, self.fs.listdir, "foo")
def test_listdirinfo(self):
def check_unicode(items):
for (nm, info) in items:
self.assertTrue(isinstance(nm, unicode))
def check_equal(items, target):
names = [nm for (nm, info) in items]
self.assertEqual(sorted(names), sorted(target))
self.fs.setcontents(u"a", b(''))
self.fs.setcontents("b", b(''))
self.fs.setcontents("foo", b(''))
self.fs.setcontents("bar", b(''))
# Test listing of the root directory
d1 = self.fs.listdirinfo()
self.assertEqual(len(d1), 4)
check_equal(d1, [u"a", u"b", u"bar", u"foo"])
check_unicode(d1)
d1 = self.fs.listdirinfo("")
self.assertEqual(len(d1), 4)
check_equal(d1, [u"a", u"b", u"bar", u"foo"])
check_unicode(d1)
d1 = self.fs.listdirinfo("/")
self.assertEqual(len(d1), 4)
check_equal(d1, [u"a", u"b", u"bar", u"foo"])
check_unicode(d1)
# Test listing absolute paths
d2 = self.fs.listdirinfo(absolute=True)
self.assertEqual(len(d2), 4)
check_equal(d2, [u"/a", u"/b", u"/bar", u"/foo"])
check_unicode(d2)
# Create some deeper subdirectories, to make sure their
# contents are not inadvertently included
self.fs.makedir("p/1/2/3", recursive=True)
self.fs.setcontents("p/1/2/3/a", b(''))
self.fs.setcontents("p/1/2/3/b", b(''))
self.fs.setcontents("p/1/2/3/foo", b(''))
self.fs.setcontents("p/1/2/3/bar", b(''))
self.fs.makedir("q")
# Test listing just files, just dirs, and wildcards
dirs_only = self.fs.listdirinfo(dirs_only=True)
files_only = self.fs.listdirinfo(files_only=True)
contains_a = self.fs.listdirinfo(wildcard="*a*")
check_equal(dirs_only, [u"p", u"q"])
check_equal(files_only, [u"a", u"b", u"bar", u"foo"])
check_equal(contains_a, [u"a", u"bar"])
check_unicode(dirs_only)
check_unicode(files_only)
check_unicode(contains_a)
# Test listing a subdirectory
d3 = self.fs.listdirinfo("p/1/2/3")
self.assertEqual(len(d3), 4)
check_equal(d3, [u"a", u"b", u"bar", u"foo"])
check_unicode(d3)
# Test listing a subdirectory with absolute and full paths
d4 = self.fs.listdirinfo("p/1/2/3", absolute=True)
self.assertEqual(len(d4), 4)
check_equal(d4, [u"/p/1/2/3/a", u"/p/1/2/3/b", u"/p/1/2/3/bar", u"/p/1/2/3/foo"])
check_unicode(d4)
d4 = self.fs.listdirinfo("p/1/2/3", full=True)
self.assertEqual(len(d4), 4)
check_equal(d4, [u"p/1/2/3/a", u"p/1/2/3/b", u"p/1/2/3/bar", u"p/1/2/3/foo"])
check_unicode(d4)
# Test that appropriate errors are raised
self.assertRaises(ResourceNotFoundError, self.fs.listdirinfo, "zebra")
self.assertRaises(ResourceInvalidError, self.fs.listdirinfo, "foo")
def test_walk(self):
self.fs.setcontents('a.txt', b('hello'))
self.fs.setcontents('b.txt', b('world'))
self.fs.makeopendir('foo').setcontents('c', b('123'))
sorted_walk = sorted([(d, sorted(fs)) for (d, fs) in self.fs.walk()])
self.assertEquals(sorted_walk,
[("/", ["a.txt", "b.txt"]),
("/foo", ["c"])])
# When searching breadth-first, shallow entries come first
found_a = False
for _, files in self.fs.walk(search="breadth"):
if "a.txt" in files:
found_a = True
if "c" in files:
break
assert found_a, "breadth search order was wrong"
# When searching depth-first, deep entries come first
found_c = False
for _, files in self.fs.walk(search="depth"):
if "c" in files:
found_c = True
if "a.txt" in files:
break
assert found_c, "depth search order was wrong: " + \
str(list(self.fs.walk(search="depth")))
def test_walk_wildcard(self):
self.fs.setcontents('a.txt', b('hello'))
self.fs.setcontents('b.txt', b('world'))
self.fs.makeopendir('foo').setcontents('c', b('123'))
self.fs.makeopendir('.svn').setcontents('ignored', b(''))
for dir_path, paths in self.fs.walk(wildcard='*.txt'):
for path in paths:
self.assert_(path.endswith('.txt'))
for dir_path, paths in self.fs.walk(wildcard=lambda fn: fn.endswith('.txt')):
for path in paths:
self.assert_(path.endswith('.txt'))
def test_walk_dir_wildcard(self):
self.fs.setcontents('a.txt', b('hello'))
self.fs.setcontents('b.txt', b('world'))
self.fs.makeopendir('foo').setcontents('c', b('123'))
self.fs.makeopendir('.svn').setcontents('ignored', b(''))
for dir_path, paths in self.fs.walk(dir_wildcard=lambda fn: not fn.endswith('.svn')):
for path in paths:
self.assert_('.svn' not in path)
def test_walkfiles(self):
self.fs.makeopendir('bar').setcontents('a.txt', b('123'))
self.fs.makeopendir('foo').setcontents('b', b('123'))
self.assertEquals(sorted(
self.fs.walkfiles()), ["/bar/a.txt", "/foo/b"])
self.assertEquals(sorted(self.fs.walkfiles(
dir_wildcard="*foo*")), ["/foo/b"])
self.assertEquals(sorted(self.fs.walkfiles(
wildcard="*.txt")), ["/bar/a.txt"])
def test_walkdirs(self):
self.fs.makeopendir('bar').setcontents('a.txt', b('123'))
self.fs.makeopendir('foo').makeopendir(
"baz").setcontents('b', b('123'))
self.assertEquals(sorted(self.fs.walkdirs()), [
"/", "/bar", "/foo", "/foo/baz"])
self.assertEquals(sorted(self.fs.walkdirs(
wildcard="*foo*")), ["/", "/foo", "/foo/baz"])
def test_unicode(self):
alpha = u"\N{GREEK SMALL LETTER ALPHA}"
beta = u"\N{GREEK SMALL LETTER BETA}"
self.fs.makedir(alpha)
self.fs.setcontents(alpha + "/a", b(''))
self.fs.setcontents(alpha + "/" + beta, b(''))
self.assertTrue(self.check(alpha))
self.assertEquals(sorted(self.fs.listdir(alpha)), ["a", beta])
def test_makedir(self):
check = self.check
self.fs.makedir("a")
self.assertTrue(check("a"))
self.assertRaises(
ParentDirectoryMissingError, self.fs.makedir, "a/b/c")
self.fs.makedir("a/b/c", recursive=True)
self.assert_(check("a/b/c"))
self.fs.makedir("foo/bar/baz", recursive=True)
self.assert_(check("foo/bar/baz"))
self.fs.makedir("a/b/child")
self.assert_(check("a/b/child"))
self.assertRaises(DestinationExistsError, self.fs.makedir, "/a/b")
self.fs.makedir("/a/b", allow_recreate=True)
self.fs.setcontents("/a/file", b(''))
self.assertRaises(ResourceInvalidError, self.fs.makedir, "a/file")
def test_remove(self):
self.fs.setcontents("a.txt", b(''))
self.assertTrue(self.check("a.txt"))
self.fs.remove("a.txt")
self.assertFalse(self.check("a.txt"))
self.assertRaises(ResourceNotFoundError, self.fs.remove, "a.txt")
self.fs.makedir("dir1")
self.assertRaises(ResourceInvalidError, self.fs.remove, "dir1")
self.fs.setcontents("/dir1/a.txt", b(''))
self.assertTrue(self.check("dir1/a.txt"))
self.fs.remove("dir1/a.txt")
self.assertFalse(self.check("/dir1/a.txt"))
def test_removedir(self):
check = self.check
self.fs.makedir("a")
self.assert_(check("a"))
self.fs.removedir("a")
self.assertRaises(ResourceNotFoundError, self.fs.removedir, "a")
self.assert_(not check("a"))
self.fs.makedir("a/b/c/d", recursive=True)
self.assertRaises(DirectoryNotEmptyError, self.fs.removedir, "a/b")
self.fs.removedir("a/b/c/d")
self.assert_(not check("a/b/c/d"))
self.fs.removedir("a/b/c")
self.assert_(not check("a/b/c"))
self.fs.removedir("a/b")
self.assert_(not check("a/b"))
# Test recursive removal of empty parent dirs
self.fs.makedir("foo/bar/baz", recursive=True)
self.fs.removedir("foo/bar/baz", recursive=True)
self.assert_(not check("foo/bar/baz"))
self.assert_(not check("foo/bar"))
self.assert_(not check("foo"))
self.fs.makedir("foo/bar/baz", recursive=True)
self.fs.setcontents("foo/file.txt", b("please don't delete me"))
self.fs.removedir("foo/bar/baz", recursive=True)
self.assert_(not check("foo/bar/baz"))
self.assert_(not check("foo/bar"))
self.assert_(check("foo/file.txt"))
# Ensure that force=True works as expected
self.fs.makedir("frollic/waggle", recursive=True)
self.fs.setcontents("frollic/waddle.txt", b("waddlewaddlewaddle"))
self.assertRaises(DirectoryNotEmptyError, self.fs.removedir, "frollic")
self.assertRaises(
ResourceInvalidError, self.fs.removedir, "frollic/waddle.txt")
self.fs.removedir("frollic", force=True)
self.assert_(not check("frollic"))
# Test removing unicode dirs
kappa = u"\N{GREEK CAPITAL LETTER KAPPA}"
self.fs.makedir(kappa)
self.assert_(self.fs.isdir(kappa))
self.fs.removedir(kappa)
self.assertRaises(ResourceNotFoundError, self.fs.removedir, kappa)
self.assert_(not self.fs.isdir(kappa))
self.fs.makedir(pathjoin("test", kappa), recursive=True)
self.assert_(check(pathjoin("test", kappa)))
self.fs.removedir("test", force=True)
self.assert_(not check("test"))
def test_rename(self):
check = self.check
# test renaming a file in the same directory
self.fs.setcontents("foo.txt", b("Hello, World!"))
self.assert_(check("foo.txt"))
self.fs.rename("foo.txt", "bar.txt")
self.assert_(check("bar.txt"))
self.assert_(not check("foo.txt"))
# test renaming a directory in the same directory
self.fs.makedir("dir_a")<|fim▁hole|> self.assert_(check("dir_b"))
self.assert_(check("dir_b/test.txt"))
self.assert_(not check("dir_a/test.txt"))
self.assert_(not check("dir_a"))
# test renaming a file into a different directory
self.fs.makedir("dir_a")
self.fs.rename("dir_b/test.txt", "dir_a/test.txt")
self.assert_(not check("dir_b/test.txt"))
self.assert_(check("dir_a/test.txt"))
# test renaming a file into a non-existent directory
self.assertRaises(ParentDirectoryMissingError,
self.fs.rename, "dir_a/test.txt", "nonexistent/test.txt")
def test_info(self):
test_str = b("Hello, World!")
self.fs.setcontents("info.txt", test_str)
info = self.fs.getinfo("info.txt")
self.assertEqual(info['size'], len(test_str))
self.fs.desc("info.txt")
self.assertRaises(ResourceNotFoundError, self.fs.getinfo, "notafile")
self.assertRaises(
ResourceNotFoundError, self.fs.getinfo, "info.txt/inval")
def test_infokeys(self):
test_str = b("Hello, World!")
self.fs.setcontents("info.txt", test_str)
info = self.fs.getinfo("info.txt")
for k, v in info.iteritems():
self.assertEqual(self.fs.getinfokeys('info.txt', k), {k: v})
test_info = {}
if 'modified_time' in info:
test_info['modified_time'] = info['modified_time']
if 'size' in info:
test_info['size'] = info['size']
self.assertEqual(self.fs.getinfokeys('info.txt', 'size', 'modified_time'), test_info)
self.assertEqual(self.fs.getinfokeys('info.txt', 'thiscantpossiblyexistininfo'), {})
def test_getsize(self):
test_str = b("*") * 23
self.fs.setcontents("info.txt", test_str)
size = self.fs.getsize("info.txt")
self.assertEqual(size, len(test_str))
def test_movefile(self):
check = self.check
contents = b(
"If the implementation is hard to explain, it's a bad idea.")
def makefile(path):
self.fs.setcontents(path, contents)
def checkcontents(path):
check_contents = self.fs.getcontents(path, "rb")
self.assertEqual(check_contents, contents)
return contents == check_contents
self.fs.makedir("foo/bar", recursive=True)
makefile("foo/bar/a.txt")
self.assert_(check("foo/bar/a.txt"))
self.assert_(checkcontents("foo/bar/a.txt"))
self.fs.move("foo/bar/a.txt", "foo/b.txt")
self.assert_(not check("foo/bar/a.txt"))
self.assert_(check("foo/b.txt"))
self.assert_(checkcontents("foo/b.txt"))
self.fs.move("foo/b.txt", "c.txt")
self.assert_(not check("foo/b.txt"))
self.assert_(check("/c.txt"))
self.assert_(checkcontents("/c.txt"))
makefile("foo/bar/a.txt")
self.assertRaises(
DestinationExistsError, self.fs.move, "foo/bar/a.txt", "/c.txt")
self.assert_(check("foo/bar/a.txt"))
self.assert_(check("/c.txt"))
self.fs.move("foo/bar/a.txt", "/c.txt", overwrite=True)
self.assert_(not check("foo/bar/a.txt"))
self.assert_(check("/c.txt"))
def test_movedir(self):
check = self.check
contents = b(
"If the implementation is hard to explain, it's a bad idea.")
def makefile(path):
self.fs.setcontents(path, contents)
self.assertRaises(ResourceNotFoundError, self.fs.movedir, "a", "b")
self.fs.makedir("a")
self.fs.makedir("b")
makefile("a/1.txt")
makefile("a/2.txt")
makefile("a/3.txt")
self.fs.makedir("a/foo/bar", recursive=True)
makefile("a/foo/bar/baz.txt")
self.fs.movedir("a", "copy of a")
self.assert_(self.fs.isdir("copy of a"))
self.assert_(check("copy of a/1.txt"))
self.assert_(check("copy of a/2.txt"))
self.assert_(check("copy of a/3.txt"))
self.assert_(check("copy of a/foo/bar/baz.txt"))
self.assert_(not check("a/1.txt"))
self.assert_(not check("a/2.txt"))
self.assert_(not check("a/3.txt"))
self.assert_(not check("a/foo/bar/baz.txt"))
self.assert_(not check("a/foo/bar"))
self.assert_(not check("a/foo"))
self.assert_(not check("a"))
self.fs.makedir("a")
self.assertRaises(
DestinationExistsError, self.fs.movedir, "copy of a", "a")
self.fs.movedir("copy of a", "a", overwrite=True)
self.assert_(not check("copy of a"))
self.assert_(check("a/1.txt"))
self.assert_(check("a/2.txt"))
self.assert_(check("a/3.txt"))
self.assert_(check("a/foo/bar/baz.txt"))
def test_cant_copy_from_os(self):
sys_executable = os.path.abspath(os.path.realpath(sys.executable))
self.assertRaises(FSError, self.fs.copy, sys_executable, "py.exe")
def test_copyfile(self):
check = self.check
contents = b(
"If the implementation is hard to explain, it's a bad idea.")
def makefile(path, contents=contents):
self.fs.setcontents(path, contents)
def checkcontents(path, contents=contents):
check_contents = self.fs.getcontents(path, "rb")
self.assertEqual(check_contents, contents)
return contents == check_contents
self.fs.makedir("foo/bar", recursive=True)
makefile("foo/bar/a.txt")
self.assert_(check("foo/bar/a.txt"))
self.assert_(checkcontents("foo/bar/a.txt"))
self.fs.copy("foo/bar/a.txt", "foo/b.txt")
self.assert_(check("foo/bar/a.txt"))
self.assert_(check("foo/b.txt"))
self.assert_(checkcontents("foo/bar/a.txt"))
self.assert_(checkcontents("foo/b.txt"))
self.fs.copy("foo/b.txt", "c.txt")
self.assert_(check("foo/b.txt"))
self.assert_(check("/c.txt"))
self.assert_(checkcontents("/c.txt"))
makefile("foo/bar/a.txt", b("different contents"))
self.assert_(checkcontents("foo/bar/a.txt", b("different contents")))
self.assertRaises(
DestinationExistsError, self.fs.copy, "foo/bar/a.txt", "/c.txt")
self.assert_(checkcontents("/c.txt"))
self.fs.copy("foo/bar/a.txt", "/c.txt", overwrite=True)
self.assert_(checkcontents("foo/bar/a.txt", b("different contents")))
self.assert_(checkcontents("/c.txt", b("different contents")))
def test_copydir(self):
check = self.check
contents = b(
"If the implementation is hard to explain, it's a bad idea.")
def makefile(path):
self.fs.setcontents(path, contents)
def checkcontents(path):
check_contents = self.fs.getcontents(path)
self.assertEqual(check_contents, contents)
return contents == check_contents
self.fs.makedir("a")
self.fs.makedir("b")
makefile("a/1.txt")
makefile("a/2.txt")
makefile("a/3.txt")
self.fs.makedir("a/foo/bar", recursive=True)
makefile("a/foo/bar/baz.txt")
self.fs.copydir("a", "copy of a")
self.assert_(check("copy of a/1.txt"))
self.assert_(check("copy of a/2.txt"))
self.assert_(check("copy of a/3.txt"))
self.assert_(check("copy of a/foo/bar/baz.txt"))
checkcontents("copy of a/1.txt")
self.assert_(check("a/1.txt"))
self.assert_(check("a/2.txt"))
self.assert_(check("a/3.txt"))
self.assert_(check("a/foo/bar/baz.txt"))
checkcontents("a/1.txt")
self.assertRaises(DestinationExistsError, self.fs.copydir, "a", "b")
self.fs.copydir("a", "b", overwrite=True)
self.assert_(check("b/1.txt"))
self.assert_(check("b/2.txt"))
self.assert_(check("b/3.txt"))
self.assert_(check("b/foo/bar/baz.txt"))
checkcontents("b/1.txt")
def test_copydir_with_dotfile(self):
check = self.check
contents = b(
"If the implementation is hard to explain, it's a bad idea.")
def makefile(path):
self.fs.setcontents(path, contents)
self.fs.makedir("a")
makefile("a/1.txt")
makefile("a/2.txt")
makefile("a/.hidden.txt")
self.fs.copydir("a", "copy of a")
self.assert_(check("copy of a/1.txt"))
self.assert_(check("copy of a/2.txt"))
self.assert_(check("copy of a/.hidden.txt"))
self.assert_(check("a/1.txt"))
self.assert_(check("a/2.txt"))
self.assert_(check("a/.hidden.txt"))
def test_readwriteappendseek(self):
def checkcontents(path, check_contents):
read_contents = self.fs.getcontents(path, "rb")
self.assertEqual(read_contents, check_contents)
return read_contents == check_contents
test_strings = [b("Beautiful is better than ugly."),
b("Explicit is better than implicit."),
b("Simple is better than complex.")]
all_strings = b("").join(test_strings)
self.assertRaises(ResourceNotFoundError, self.fs.open, "a.txt", "r")
self.assert_(not self.fs.exists("a.txt"))
f1 = self.fs.open("a.txt", "wb")
pos = 0
for s in test_strings:
f1.write(s)
pos += len(s)
self.assertEqual(pos, f1.tell())
f1.close()
self.assert_(self.fs.exists("a.txt"))
self.assert_(checkcontents("a.txt", all_strings))
f2 = self.fs.open("b.txt", "wb")
f2.write(test_strings[0])
f2.close()
self.assert_(checkcontents("b.txt", test_strings[0]))
f3 = self.fs.open("b.txt", "ab")
# On win32, tell() gives zero until you actually write to the file
# self.assertEquals(f3.tell(),len(test_strings[0]))
f3.write(test_strings[1])
self.assertEquals(f3.tell(), len(test_strings[0])+len(test_strings[1]))
f3.write(test_strings[2])
self.assertEquals(f3.tell(), len(all_strings))
f3.close()
self.assert_(checkcontents("b.txt", all_strings))
f4 = self.fs.open("b.txt", "wb")
f4.write(test_strings[2])
f4.close()
self.assert_(checkcontents("b.txt", test_strings[2]))
f5 = self.fs.open("c.txt", "wb")
for s in test_strings:
f5.write(s+b("\n"))
f5.close()
f6 = self.fs.open("c.txt", "rb")
for s, t in zip(f6, test_strings):
self.assertEqual(s, t+b("\n"))
f6.close()
f7 = self.fs.open("c.txt", "rb")
f7.seek(13)
word = f7.read(6)
self.assertEqual(word, b("better"))
f7.seek(1, os.SEEK_CUR)
word = f7.read(4)
self.assertEqual(word, b("than"))
f7.seek(-9, os.SEEK_END)
word = f7.read(7)
self.assertEqual(word, b("complex"))
f7.close()
self.assertEqual(self.fs.getcontents("a.txt", "rb"), all_strings)
def test_truncate(self):
def checkcontents(path, check_contents):
read_contents = self.fs.getcontents(path, "rb")
self.assertEqual(read_contents, check_contents)
return read_contents == check_contents
self.fs.setcontents("hello", b("world"))
checkcontents("hello", b("world"))
self.fs.setcontents("hello", b("hi"))
checkcontents("hello", b("hi"))
self.fs.setcontents("hello", b("1234567890"))
checkcontents("hello", b("1234567890"))
with self.fs.open("hello", "rb+") as f:
f.truncate(7)
checkcontents("hello", b("1234567"))
with self.fs.open("hello", "rb+") as f:
f.seek(5)
f.truncate()
checkcontents("hello", b("12345"))
def test_truncate_to_larger_size(self):
with self.fs.open("hello", "wb") as f:
f.truncate(30)
self.assertEquals(self.fs.getsize("hello"), 30)
# Some file systems (FTPFS) don't support both reading and writing
if self.fs.getmeta('file.read_and_write', True):
with self.fs.open("hello", "rb+") as f:
f.seek(25)
f.write(b("123456"))
with self.fs.open("hello", "rb") as f:
f.seek(25)
self.assertEquals(f.read(), b("123456"))
def test_write_past_end_of_file(self):
if self.fs.getmeta('file.read_and_write', True):
with self.fs.open("write_at_end", "wb") as f:
f.seek(25)
f.write(b("EOF"))
with self.fs.open("write_at_end", "rb") as f:
self.assertEquals(f.read(), b("\x00")*25 + b("EOF"))
def test_with_statement(self):
# This is a little tricky since 'with' is actually new syntax.
# We use eval() to make this method safe for old python versions.
import sys
if sys.version_info >= (2, 5):
# A successful 'with' statement
contents = "testing the with statement"
code = "from __future__ import with_statement\n"
code += "with self.fs.open('f.txt','wb-') as testfile:\n"
code += " testfile.write(contents)\n"
code += "self.assertEquals(self.fs.getcontents('f.txt', 'rb'),contents)"
code = compile(code, "<string>", 'exec')
eval(code)
# A 'with' statement raising an error
contents = "testing the with statement"
code = "from __future__ import with_statement\n"
code += "with self.fs.open('f.txt','wb-') as testfile:\n"
code += " testfile.write(contents)\n"
code += " raise ValueError\n"
code = compile(code, "<string>", 'exec')
self.assertRaises(ValueError, eval, code, globals(), locals())
self.assertEquals(self.fs.getcontents('f.txt', 'rb'), contents)
def test_pickling(self):
if self.fs.getmeta('pickle_contents', True):
self.fs.setcontents("test1", b("hello world"))
fs2 = pickle.loads(pickle.dumps(self.fs))
self.assert_(fs2.isfile("test1"))
fs3 = pickle.loads(pickle.dumps(self.fs, -1))
self.assert_(fs3.isfile("test1"))
else:
# Just make sure it doesn't throw an exception
fs2 = pickle.loads(pickle.dumps(self.fs))
def test_big_file(self):
"""Test handling of a big file (1MB)"""
chunk_size = 1024 * 256
num_chunks = 4
def chunk_stream():
"""Generate predictable-but-randomy binary content."""
r = random.Random(0)
randint = r.randint
int2byte = six.int2byte
for _i in xrange(num_chunks):
c = b("").join(int2byte(randint(
0, 255)) for _j in xrange(chunk_size//8))
yield c * 8
f = self.fs.open("bigfile", "wb")
try:
for chunk in chunk_stream():
f.write(chunk)
finally:
f.close()
chunks = chunk_stream()
f = self.fs.open("bigfile", "rb")
try:
try:
while True:
if chunks.next() != f.read(chunk_size):
assert False, "bigfile was corrupted"
except StopIteration:
if f.read() != b(""):
assert False, "bigfile was corrupted"
finally:
f.close()
def test_settimes(self):
def cmp_datetimes(d1, d2):
"""Test datetime objects are the same to within the timestamp accuracy"""
dts1 = time.mktime(d1.timetuple())
dts2 = time.mktime(d2.timetuple())
return int(dts1) == int(dts2)
d1 = datetime.datetime(2010, 6, 20, 11, 0, 9, 987699)
d2 = datetime.datetime(2010, 7, 5, 11, 0, 9, 500000)
self.fs.setcontents('/dates.txt', b('check dates'))
# If the implementation supports settimes, check that the times
# can be set and then retrieved
try:
self.fs.settimes('/dates.txt', d1, d2)
except UnsupportedError:
pass
else:
info = self.fs.getinfo('/dates.txt')
self.assertTrue(cmp_datetimes(d1, info['accessed_time']))
self.assertTrue(cmp_datetimes(d2, info['modified_time']))
def test_removeroot(self):
self.assertRaises(RemoveRootError, self.fs.removedir, "/")
def test_zero_read(self):
"""Test read(0) returns empty string"""
self.fs.setcontents('foo.txt', b('Hello, World'))
with self.fs.open('foo.txt', 'rb') as f:
self.assert_(len(f.read(0)) == 0)
with self.fs.open('foo.txt', 'rt') as f:
self.assert_(len(f.read(0)) == 0)
# May be disabled - see end of file
class ThreadingTestCases(object):
"""Testcases for thread-safety of FS implementations."""
# These are either too slow to be worth repeating,
# or cannot possibly break cross-thread.
_dont_retest = ("test_pickling", "test_multiple_overwrite",)
__lock = threading.RLock()
def _yield(self):
# time.sleep(0.001)
# Yields without a delay
time.sleep(0)
def _lock(self):
self.__lock.acquire()
def _unlock(self):
self.__lock.release()
def _makeThread(self, func, errors):
def runThread():
try:
func()
except Exception:
errors.append(sys.exc_info())
thread = threading.Thread(target=runThread)
thread.daemon = True
return thread
def _runThreads(self, *funcs):
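        # Shrink the interpreter's bytecode check interval so threads are
        # switched as often as possible while the test functions run; the
        # original interval is restored in the finally block below.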
check_interval = sys.getcheckinterval()
sys.setcheckinterval(1)
try:
errors = []
threads = [self._makeThread(f, errors) for f in funcs]
for t in threads:
t.start()
for t in threads:
t.join()
for (c, e, t) in errors:
raise e, None, t
finally:
sys.setcheckinterval(check_interval)
def test_setcontents_threaded(self):
def setcontents(name, contents):
f = self.fs.open(name, "wb")
self._yield()
try:
f.write(contents)
self._yield()
finally:
f.close()
def thread1():
c = b("thread1 was 'ere")
setcontents("thread1.txt", c)
self.assertEquals(self.fs.getcontents("thread1.txt", 'rb'), c)
def thread2():
c = b("thread2 was 'ere")
setcontents("thread2.txt", c)
self.assertEquals(self.fs.getcontents("thread2.txt", 'rb'), c)
self._runThreads(thread1, thread2)
def test_setcontents_threaded_samefile(self):
def setcontents(name, contents):
f = self.fs.open(name, "wb")
self._yield()
try:
f.write(contents)
self._yield()
finally:
f.close()
def thread1():
c = b("thread1 was 'ere")
setcontents("threads.txt", c)
self._yield()
self.assertEquals(self.fs.listdir("/"), ["threads.txt"])
def thread2():
c = b("thread2 was 'ere")
setcontents("threads.txt", c)
self._yield()
self.assertEquals(self.fs.listdir("/"), ["threads.txt"])
def thread3():
c = b("thread3 was 'ere")
setcontents("threads.txt", c)
self._yield()
self.assertEquals(self.fs.listdir("/"), ["threads.txt"])
try:
self._runThreads(thread1, thread2, thread3)
except ResourceLockedError:
# that's ok, some implementations don't support concurrent writes
pass
def test_cases_in_separate_dirs(self):
class TestCases_in_subdir(self.__class__, unittest.TestCase):
"""Run all testcases against a subdir of self.fs"""
def __init__(this, subdir):
super(TestCases_in_subdir, this).__init__("test_listdir")
this.subdir = subdir
for meth in dir(this):
if not meth.startswith("test_"):
continue
if meth in self._dont_retest:
continue
if not hasattr(FSTestCases, meth):
continue
if self.fs.exists(subdir):
self.fs.removedir(subdir, force=True)
self.assertFalse(self.fs.isdir(subdir))
self.assertTrue(self.fs.isdir("/"))
self.fs.makedir(subdir)
self._yield()
getattr(this, meth)()
@property
def fs(this):
return self.fs.opendir(this.subdir)
def check(this, p):
return self.check(pathjoin(this.subdir, relpath(p)))
def thread1():
TestCases_in_subdir("thread1")
def thread2():
TestCases_in_subdir("thread2")
def thread3():
TestCases_in_subdir("thread3")
self._runThreads(thread1, thread2, thread3)
def test_makedir_winner(self):
errors = []
def makedir():
try:
self.fs.makedir("testdir")
except DestinationExistsError, e:
errors.append(e)
def makedir_noerror():
try:
self.fs.makedir("testdir", allow_recreate=True)
except DestinationExistsError, e:
errors.append(e)
def removedir():
try:
self.fs.removedir("testdir")
except (ResourceNotFoundError, ResourceLockedError), e:
errors.append(e)
# One thread should succeed, one should error
self._runThreads(makedir, makedir)
self.assertEquals(len(errors), 1)
self.fs.removedir("testdir")
# One thread should succeed, two should error
errors = []
self._runThreads(makedir, makedir, makedir)
if len(errors) != 2:
raise AssertionError(errors)
self.fs.removedir("testdir")
# All threads should succeed
errors = []
self._runThreads(makedir_noerror, makedir_noerror, makedir_noerror)
self.assertEquals(len(errors), 0)
self.assertTrue(self.fs.isdir("testdir"))
self.fs.removedir("testdir")
# makedir() can beat removedir() and vice-versa
errors = []
self._runThreads(makedir, removedir)
if self.fs.isdir("testdir"):
self.assertEquals(len(errors), 1)
self.assertFalse(isinstance(errors[0], DestinationExistsError))
self.fs.removedir("testdir")
else:
self.assertEquals(len(errors), 0)
def test_concurrent_copydir(self):
self.fs.makedir("a")
self.fs.makedir("a/b")
self.fs.setcontents("a/hello.txt", b("hello world"))
self.fs.setcontents("a/guido.txt", b("is a space alien"))
self.fs.setcontents("a/b/parrot.txt", b("pining for the fiords"))
def copydir():
self._yield()
self.fs.copydir("a", "copy of a")
def copydir_overwrite():
self._yield()
self.fs.copydir("a", "copy of a", overwrite=True)
# This should error out since we're not overwriting
self.assertRaises(
DestinationExistsError, self._runThreads, copydir, copydir)
self.assert_(self.fs.isdir('a'))
copydir_overwrite()
self.assert_(self.fs.isdir('a'))
# This should run to completion and give a valid state, unless
# files get locked when written to.
try:
self._runThreads(copydir_overwrite, copydir_overwrite)
except ResourceLockedError:
pass
self.assertTrue(self.fs.isdir("copy of a"))
self.assertTrue(self.fs.isdir("copy of a/b"))
self.assertEqual(self.fs.getcontents(
"copy of a/b/parrot.txt", 'rb'), b("pining for the fiords"))
self.assertEqual(self.fs.getcontents(
"copy of a/hello.txt", 'rb'), b("hello world"))
self.assertEqual(self.fs.getcontents(
"copy of a/guido.txt", 'rb'), b("is a space alien"))
def test_multiple_overwrite(self):
contents = [b("contents one"), b(
"contents the second"), b("number three")]
def thread1():
for i in xrange(30):
for c in contents:
self.fs.setcontents("thread1.txt", c)
self.assertEquals(self.fs.getsize("thread1.txt"), len(c))
self.assertEquals(self.fs.getcontents(
"thread1.txt", 'rb'), c)
def thread2():
for i in xrange(30):
for c in contents:
self.fs.setcontents("thread2.txt", c)
self.assertEquals(self.fs.getsize("thread2.txt"), len(c))
self.assertEquals(self.fs.getcontents(
"thread2.txt", 'rb'), c)
self._runThreads(thread1, thread2)
# Uncomment to temporarily disable threading tests
# class ThreadingTestCases(object):
# _dont_retest = ()<|fim▁end|> | self.fs.setcontents("dir_a/test.txt", b("testerific"))
self.assert_(check("dir_a"))
self.fs.rename("dir_a", "dir_b") |
<|file_name|>s3_key.py<|end_file_name|><|fim▁begin|>#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import re
from typing import Callable, List, Optional, Union
from urllib.parse import urlparse
from airflow.exceptions import AirflowException
from airflow.providers.amazon.aws.hooks.s3 import S3Hook
from airflow.sensors.base import BaseSensorOperator
class S3KeySensor(BaseSensorOperator):
"""
    Waits for a key (a file-like instance on S3) to be present in an S3 bucket.
    S3 being a key/value store, it does not support folders. The path is just
    a key to a resource.
:param bucket_key: The key being waited on. Supports full s3:// style url
or relative path from root level. When it's specified as a full s3://
url, please leave bucket_name as `None`.
:type bucket_key: str
:param bucket_name: Name of the S3 bucket. Only needed when ``bucket_key``
is not provided as a full s3:// url.
:type bucket_name: str
:param wildcard_match: whether the bucket_key should be interpreted as a
Unix wildcard pattern
:type wildcard_match: bool
:param aws_conn_id: a reference to the s3 connection
:type aws_conn_id: str
:param verify: Whether or not to verify SSL certificates for S3 connection.
By default SSL certificates are verified.
You can provide the following values:
- ``False``: do not validate SSL certificates. SSL will still be used
(unless use_ssl is False), but SSL certificates will not be
verified.
        - ``path/to/cert/bundle.pem``: A filename of the CA cert bundle to use.
You can specify this argument if you want to use a different
CA cert bundle than the one used by botocore.
:type verify: bool or str
"""
template_fields = ('bucket_key', 'bucket_name')
def __init__(
self,
*,
bucket_key: str,
bucket_name: Optional[str] = None,
wildcard_match: bool = False,
aws_conn_id: str = 'aws_default',
verify: Optional[Union[str, bool]] = None,
**kwargs,
):
super().__init__(**kwargs)
self.bucket_name = bucket_name
self.bucket_key = bucket_key
self.wildcard_match = wildcard_match
self.aws_conn_id = aws_conn_id
self.verify = verify
self.hook: Optional[S3Hook] = None
def poke(self, context):
if self.bucket_name is None:
parsed_url = urlparse(self.bucket_key)
if parsed_url.netloc == '':
raise AirflowException('If key is a relative path from root, please provide a bucket_name')
self.bucket_name = parsed_url.netloc
self.bucket_key = parsed_url.path.lstrip('/')
else:
parsed_url = urlparse(self.bucket_key)
if parsed_url.scheme != '' or parsed_url.netloc != '':
raise AirflowException(
'If bucket_name is provided, bucket_key'
' should be relative path from root'
' level, rather than a full s3:// url'
)
self.log.info('Poking for key : s3://%s/%s', self.bucket_name, self.bucket_key)
if self.wildcard_match:
return self.get_hook().check_for_wildcard_key(self.bucket_key, self.bucket_name)
return self.get_hook().check_for_key(self.bucket_key, self.bucket_name)
def get_hook(self) -> S3Hook:
"""Create and return an S3Hook"""
if self.hook:
return self.hook
self.hook = S3Hook(aws_conn_id=self.aws_conn_id, verify=self.verify)
return self.hook
class S3KeySizeSensor(S3KeySensor):
"""
    Waits for a key (a file-like instance on S3) to be present and to be
    larger than some size in an S3 bucket.
    S3 being a key/value store, it does not support folders. The path is
    just the key of a resource.
:param bucket_key: The key being waited on. Supports full s3:// style url
or relative path from root level. When it's specified as a full s3://
url, please leave bucket_name as `None`.
:type bucket_key: str
:param bucket_name: Name of the S3 bucket. Only needed when ``bucket_key``
is not provided as a full s3:// url.
:type bucket_name: str
:param wildcard_match: whether the bucket_key should be interpreted as a
Unix wildcard pattern
:type wildcard_match: bool
:param aws_conn_id: a reference to the s3 connection
:type aws_conn_id: str
:param verify: Whether or not to verify SSL certificates for S3 connection.
By default SSL certificates are verified.
You can provide the following values:
- ``False``: do not validate SSL certificates. SSL will still be used
(unless use_ssl is False), but SSL certificates will not be
verified.
        - ``path/to/cert/bundle.pem``: A filename of the CA cert bundle to use.
You can specify this argument if you want to use a different
CA cert bundle than the one used by botocore.
:type verify: bool or str
:param check_fn: Function that receives the list of the S3 objects,
and returns the boolean:
        - ``True``: the criterion is met
        - ``False``: the criterion isn't met
    **Example**: Wait for any S3 object larger than 1 megabyte ::
def check_fn(self, data: List) -> bool:
return any(f.get('Size', 0) > 1048576 for f in data if isinstance(f, dict))
:type check_fn: Optional[Callable[..., bool]]
"""<|fim▁hole|> def __init__(
self,
*,
check_fn: Optional[Callable[..., bool]] = None,
**kwargs,
):
super().__init__(**kwargs)
self.check_fn_user = check_fn
def poke(self, context):
if super().poke(context=context) is False:
return False
s3_objects = self.get_files(s3_hook=self.get_hook())
if not s3_objects:
return False
check_fn = self.check_fn if self.check_fn_user is None else self.check_fn_user
return check_fn(s3_objects)
def get_files(self, s3_hook: S3Hook, delimiter: Optional[str] = '/') -> List:
"""Gets a list of files in the bucket"""
prefix = self.bucket_key
config = {
'PageSize': None,
'MaxItems': None,
}
if self.wildcard_match:
prefix = re.split(r'[\[\*\?]', self.bucket_key, 1)[0]
paginator = s3_hook.get_conn().get_paginator('list_objects_v2')
response = paginator.paginate(
Bucket=self.bucket_name, Prefix=prefix, Delimiter=delimiter, PaginationConfig=config
)
keys = []
for page in response:
if 'Contents' in page:
_temp = [k for k in page['Contents'] if isinstance(k.get('Size', None), (int, float))]
keys = keys + _temp
return keys
def check_fn(self, data: List, object_min_size: Optional[Union[int, float]] = 0) -> bool:
"""Default function for checking that S3 Objects have size more than 0
:param data: List of the objects in S3 bucket.
:type data: list
:param object_min_size: Checks if the objects sizes are greater then this value.
:type object_min_size: int
"""
return all(f.get('Size', 0) > object_min_size for f in data if isinstance(f, dict))<|fim▁end|> | |
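# For context, a minimal sketch of wiring these sensors into a DAG; the dag
# id, bucket, key, and schedule below are illustrative assumptions only.
from datetime import datetime

from airflow import DAG
from airflow.providers.amazon.aws.sensors.s3_key import S3KeySensor

with DAG(dag_id="wait_for_s3_file", start_date=datetime(2021, 1, 1),
         schedule_interval=None) as dag:
    wait_for_file = S3KeySensor(
        task_id="wait_for_file",
        # Full s3:// url, so bucket_name stays None (see poke() above).
        bucket_key="s3://example-bucket/incoming/data-*.csv",
        wildcard_match=True,
    )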
<|file_name|>qgslegenditem.cpp<|end_file_name|><|fim▁begin|>/***************************************************************************
qgslegenditem.cpp
---------------------
begin : January 2007
copyright : (C) 2007 by Martin Dobias
email : wonder.sk at gmail.com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
#include "qgslegenditem.h"
#include <QCoreApplication>
#include "qgslegend.h"
#include "qgslogger.h"
QgsLegendItem::QgsLegendItem( QTreeWidgetItem * theItem, QString theName )
: QTreeWidgetItem( theItem )
{
setText( 0, theName );
}
QgsLegendItem::QgsLegendItem( QTreeWidget* theListView, QString theString )
: QTreeWidgetItem( theListView )
{
setText( 0, theString );
}
QgsLegendItem::QgsLegendItem(): QTreeWidgetItem()
{
}
QgsLegendItem::~QgsLegendItem()
{
}
void QgsLegendItem::print( QgsLegendItem * theItem )
{
#if 0 //todo: adapt to qt4
Q3ListViewItemIterator myIterator( theItem );
while ( myIterator.current() )
{
LEGEND_ITEM_TYPE curtype = qobject_cast<QgsLegendItem *>( myIterator.current() )->type();
QgsDebugMsg( QString( "%1 - %2" ).arg( myIterator.current()->text( 0 ) ).arg( curtype ) );
if ( myIterator.current()->childCount() > 0 )
{
//print(qobject_cast<QgsLegendItem *>(myIterator.current()));
}
++myIterator;
}
#else
Q_UNUSED( theItem );
#endif
}
QgsLegendItem* QgsLegendItem::firstChild()
{
return dynamic_cast<QgsLegendItem *>( child( 0 ) );
}
QgsLegendItem* QgsLegendItem::nextSibling()
{
return dynamic_cast<QgsLegendItem *>( dynamic_cast<QgsLegend*>( treeWidget() )->nextSibling( this ) );
}
QgsLegendItem* QgsLegendItem::findYoungerSibling()
{
return dynamic_cast<QgsLegendItem *>( dynamic_cast<QgsLegend*>( treeWidget() )->previousSibling( this ) );
}
void QgsLegendItem::moveItem( QgsLegendItem* after )
{
qobject_cast<QgsLegend *>( treeWidget() )->moveItem( this, after );
}
void QgsLegendItem::removeAllChildren()
{
while ( child( 0 ) )
{
takeChild( 0 );
}
}
void QgsLegendItem::storeAppearanceSettings()
{
mExpanded = treeWidget()->isItemExpanded( this );
mHidden = treeWidget()->isItemHidden( this );
//call recursively for all subitems
for ( int i = 0; i < childCount(); ++i )
{
static_cast<QgsLegendItem*>( child( i ) )->storeAppearanceSettings();
}
}
void QgsLegendItem::restoreAppearanceSettings()
{
treeWidget()->setItemExpanded( this, mExpanded );
treeWidget()->setItemHidden( this, mHidden );
//call recursively for all subitems
for ( int i = 0; i < childCount(); ++i )
{
static_cast<QgsLegendItem*>( child( i ) )->restoreAppearanceSettings();
}<|fim▁hole|> QTreeWidget* treeWidgetPtr = treeWidget();
QgsLegend* legendPtr = qobject_cast<QgsLegend *>( treeWidgetPtr );
return legendPtr;
}
QTreeWidgetItem* QgsLegendItem::child( int i ) const
{
return QTreeWidgetItem::child( i );
}
QTreeWidgetItem* QgsLegendItem::parent() const
{
return QTreeWidgetItem::parent();
}
void QgsLegendItem::insertChild( int index, QTreeWidgetItem *child )
{
QTreeWidgetItem::insertChild( index, child );
}<|fim▁end|> | }
QgsLegend* QgsLegendItem::legend() const
{ |
<|file_name|>slices.go<|end_file_name|><|fim▁begin|><|fim▁hole|>package main
import (
"fmt"
"sort"
)
func main() {
var colors = []string{"Red", "Green", "Blue"}
fmt.Println(colors)
colors = append(colors, "Purple")
fmt.Println(colors)
colors = append(colors[1:len(colors)])
fmt.Println(colors)
colors = append(colors[1:])
fmt.Println(colors)
colors = append(colors[:len(colors)-1])
fmt.Println(colors)
numbers := make([]int, 5, 5)
numbers[0] = 134
numbers[1] = 72
numbers[2] = 32
numbers[3] = 12
numbers[4] = 156
fmt.Println(numbers)
numbers = append(numbers, 235)
fmt.Println(numbers)
fmt.Println(cap(numbers))
sort.Ints(numbers)
fmt.Println(numbers)
}<|fim▁end|> | |
<|file_name|>client.js<|end_file_name|><|fim▁begin|>(function($){
$(document).ready(function(){
$('.rainbowcake').rainbowcake();
<|fim▁hole|><|fim▁end|> | });
}(jQuery)); |
<|file_name|>autoCommand.py<|end_file_name|><|fim▁begin|>__author__ = 'leviwright'
from mainbot.commands import Command
<|fim▁hole|>class NickServLogin(Command):
arguments = []
permissionLevel = 3
permitExtraArgs = False
manArgCheck = False
defaultArgs = []
callName = "login"
def on_call(self, event, *args):
self.bot.connection.privmsg("NickServ", "identify %s" % self.bot.nickPass)
for x in self.bot.manOplist:
self.privMsg(x, "Sent request")<|fim▁end|> | |
<|file_name|>interfaces.ts<|end_file_name|><|fim▁begin|>export interface SimpleAPIConfig {
}
export interface SimpleAPI {
count: number;
add(n?: number): void;
sub(n?: number): void;<|fim▁hole|><|fim▁end|> | } |
<|file_name|>load.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Used by `rustc` when loading a plugin.
use rustc::session::Session;
use rustc_metadata::creader::CrateLoader;
use rustc_metadata::cstore::CStore;
use registry::Registry;
use std::borrow::ToOwned;
use std::env;
use std::mem;
use std::path::PathBuf;
use syntax::ast;
use syntax_pos::{Span, DUMMY_SP};
/// Pointer to a registrar function.
pub type PluginRegistrarFun =
fn(&mut Registry);
pub struct PluginRegistrar {
pub fun: PluginRegistrarFun,
pub args: Vec<ast::NestedMetaItem>,
}
struct PluginLoader<'a> {
sess: &'a Session,
reader: CrateLoader<'a>,
plugins: Vec<PluginRegistrar>,
}
fn call_malformed_plugin_attribute(a: &Session, b: Span) {
span_err!(a, b, E0498, "malformed plugin attribute");
}
/// Read plugin metadata and dynamically load registrar functions.
pub fn load_plugins(sess: &Session,
cstore: &CStore,
krate: &ast::Crate,
crate_name: &str,
addl_plugins: Option<Vec<String>>) -> Vec<PluginRegistrar> {
let mut loader = PluginLoader::new(sess, cstore, crate_name);
// do not report any error now. since crate attributes are
// not touched by expansion, every use of plugin without
// the feature enabled will result in an error later...
if sess.features_untracked().plugin {
for attr in &krate.attrs {
if !attr.check_name("plugin") {
continue;
}
let plugins = match attr.meta_item_list() {
Some(xs) => xs,
None => {
call_malformed_plugin_attribute(sess, attr.span);
continue;
}
};
for plugin in plugins {
// plugins must have a name and can't be key = value
match plugin.name() {
Some(name) if !plugin.is_value_str() => {
let args = plugin.meta_item_list().map(ToOwned::to_owned);
loader.load_plugin(plugin.span, &name.as_str(), args.unwrap_or_default());
},
_ => call_malformed_plugin_attribute(sess, attr.span),
}
}
}
}
if let Some(plugins) = addl_plugins {
for plugin in plugins {
loader.load_plugin(DUMMY_SP, &plugin, vec![]);
}
}
loader.plugins
}
impl<'a> PluginLoader<'a> {
fn new(sess: &'a Session, cstore: &'a CStore, crate_name: &str) -> Self {
PluginLoader {
sess,
reader: CrateLoader::new(sess, cstore, crate_name),<|fim▁hole|> }
}
fn load_plugin(&mut self, span: Span, name: &str, args: Vec<ast::NestedMetaItem>) {
let registrar = self.reader.find_plugin_registrar(span, name);
if let Some((lib, disambiguator)) = registrar {
let symbol = self.sess.generate_plugin_registrar_symbol(disambiguator);
let fun = self.dylink_registrar(span, lib, symbol);
self.plugins.push(PluginRegistrar {
fun,
args,
});
}
}
// Dynamically link a registrar function into the compiler process.
fn dylink_registrar(&mut self,
span: Span,
path: PathBuf,
symbol: String) -> PluginRegistrarFun {
use rustc_metadata::dynamic_lib::DynamicLibrary;
// Make sure the path contains a / or the linker will search for it.
let path = env::current_dir().unwrap().join(&path);
let lib = match DynamicLibrary::open(Some(&path)) {
Ok(lib) => lib,
// this is fatal: there are almost certainly macros we need
// inside this crate, so continue would spew "macro undefined"
// errors
Err(err) => {
self.sess.span_fatal(span, &err)
}
};
unsafe {
let registrar =
match lib.symbol(&symbol) {
Ok(registrar) => {
mem::transmute::<*mut u8,PluginRegistrarFun>(registrar)
}
// again fatal if we can't register macros
Err(err) => {
self.sess.span_fatal(span, &err)
}
};
// Intentionally leak the dynamic library. We can't ever unload it
// since the library can make things that will live arbitrarily long
// (e.g., an @-box cycle or a thread).
mem::forget(lib);
registrar
}
}
}<|fim▁end|> | plugins: vec![], |
<|file_name|>interfaceBatcher.go<|end_file_name|><|fim▁begin|>package stream
type RunningCount struct {
cnts []int
}
func NewRunningCount(sz int) *RunningCount {
return &RunningCount{make([]int, 0, sz)}
}
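// RunningCount keeps the last sz batch sizes and averages them, which lets
// the batcher pre-size its next slice close to the recent batch volume.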
func (rc *RunningCount) GetAverage() int {
sz := len(rc.cnts)
if sz == 0 {
return 0
}
sum := 0
for _, i := range rc.cnts {
sum += i
}
return sum / sz
}
func (rc *RunningCount) GetAverageMin(min int) int {
avg := rc.GetAverage()
if avg < min {
return min
}
return avg
}
func (rc *RunningCount) Add(i int) {
if len(rc.cnts) < cap(rc.cnts) {
rc.cnts = append(rc.cnts, i)
} else {
rc.cnts = append(rc.cnts[1:], i)
}
}
type InterfaceContainer struct {
store []interface{}
runningCount *RunningCount
}
func NewInterfaceContainer() *InterfaceContainer {
return &InterfaceContainer{make([]interface{}, 0, 2), NewRunningCount(5)}
}
func (c *InterfaceContainer) Flush(out chan<- Object) bool {
if len(c.store) > 0 {
out <- c.store
cnt := len(c.store)
c.runningCount.Add(cnt)
c.store = make([]interface{}, 0, c.runningCount.GetAverageMin(2))
return true
}
return false
}
func (c *InterfaceContainer) FlushAll(out chan<- Object) bool {
return c.Flush(out)
}
func (c *InterfaceContainer) HasItems() bool {
return len(c.store) > 0
}
func (c *InterfaceContainer) Add(obj Object) {<|fim▁hole|> if cap(c.store) <= len(c.store) {
news := make([]interface{}, len(c.store), 2*cap(c.store))
copy(news, c.store)
c.store = news
}
c.store = append(c.store, obj)
}
func NewInterfaceBatchOp(pn ProcessedNotifier) *BatcherOperator {
container := NewInterfaceContainer()
op := NewBatchOperator("InterfaceBatchOp", container, pn)
return op
}<|fim▁end|> | |
<|file_name|>CpuAffinitySet.cc<|end_file_name|><|fim▁begin|>/* Web Polygraph http://www.web-polygraph.org/
* Copyright 2003-2014 The Measurement Factory
* Licensed under the Apache License, Version 2.0 */
#include "base/polygraph.h"
#include "xstd/h/iostream.h"
#include "xstd/h/string.h"
#include "xstd/CpuAffinitySet.h"
static std::ostream &operator <<(std::ostream &os, const cpu_set_t &cpuSet);
CpuAffinitySet::CpuAffinitySet() {
CPU_ZERO(&raw);
}
bool CpuAffinitySet::apply(ostream &err) {
Must(CPU_COUNT(&raw) > 0);
cpu_set_t origCpuSet;
CPU_ZERO(&origCpuSet);
if (sched_getaffinity(0, sizeof(origCpuSet), &origCpuSet) != 0) {
err << "failed to get original CPU affinity: " << Error::Last();
return false;
}
// CPU_AND to be able to distinguish EINVAL due to external restrictions
// from other EINVAL errors after calling sched_setaffinity().
cpu_set_t allowedCpuSet;
memcpy(&allowedCpuSet, &raw, sizeof(allowedCpuSet));
CPU_AND(&allowedCpuSet, &allowedCpuSet, &origCpuSet);
if (CPU_COUNT(&allowedCpuSet) <= 0) {
err << "requested CPU affinity is incompatible with preexisting restrictions" << std::endl <<
"requested affinity: " << raw << std::endl <<
"existing affinity: " << origCpuSet;
return false;
}
if (sched_setaffinity(0, sizeof(allowedCpuSet), &allowedCpuSet) != 0) {
err << "failed to set CPU affinity: " << Error::Last() << std::endl <<
"CPU affinity (after adjustments) was: " << allowedCpuSet;
return false;
}
cpu_set_t resultingCpuSet;
CPU_ZERO(&resultingCpuSet);
if (sched_getaffinity(0, sizeof(resultingCpuSet), &resultingCpuSet) != 0) {
err << "failed to get changed CPU affinity" << Error::Last();
return false;
}
if (!CPU_EQUAL(&raw, &resultingCpuSet)) {
err << "resulting/set CPU affinity: " << resultingCpuSet << std::endl <<
"differs from the configured CPU affinity: " << raw;
return true; // treat this as a warning, not error
}
return true;
}
void CpuAffinitySet::reset(const int coreId, const bool turnOn) {
if (turnOn)
CPU_SET(coreId, &raw);<|fim▁hole|>
std::ostream &CpuAffinitySet::print(std::ostream &os) const {
return os << raw;
}
static
std::ostream &operator <<(std::ostream &os, const cpu_set_t &cpuSet) {
const int count = CPU_COUNT(&cpuSet);
os << '[';
for (int printed = 0, i = 0; i < CPU_SETSIZE && printed < count; ++i) {
if (CPU_ISSET(i, &cpuSet)) {
os << i;
if (++printed < count)
os << ',';
}
}
os << ']';
return os;
}<|fim▁end|> | else
CPU_CLR(coreId, &raw);
} |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright 2014 Tesora, Inc.
# All Rights Reserved.
#<|fim▁hole|># not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from trove.guestagent.strategy import Strategy
from trove.openstack.common import log as logging
LOG = logging.getLogger(__name__)
def get_replication_strategy(replication_driver, ns=__name__):
LOG.debug("Getting replication strategy: %s.", replication_driver)
return Strategy.get_strategy(replication_driver, ns)<|fim▁end|> | # Licensed under the Apache License, Version 2.0 (the "License"); you may |
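# Illustrative call (the driver name and namespace are assumptions, not
# defined in this module):
#   get_replication_strategy('MysqlBinlogReplication',
#                            'trove.guestagent.strategies.replication')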
<|file_name|>a65114e48d6f_set_user_last_login_automatically_in_.py<|end_file_name|><|fim▁begin|># Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Set User.last_login automatically in the DB
Revision ID: a65114e48d6f
Revises: 104b4c56862b
Create Date: 2016-06-11 00:28:39.176496
"""
from alembic import op
import sqlalchemy as sa
revision = 'a65114e48d6f'
down_revision = '104b4c56862b'
def upgrade():
op.alter_column(
"accounts_user",
"last_login",
server_default=sa.func.now(),<|fim▁hole|>
def downgrade():
op.alter_column("accounts_user", "last_login", server_default=None)<|fim▁end|> | )
|
<|file_name|>0015_auto__add_field_geonamesmatchinglogmatchedplaces_remark.py<|end_file_name|><|fim▁begin|># encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'GeoNamesMatchingLogMatchedPlaces.remark'
db.add_column('united_geonames_geonamesmatchinglogmatchedplaces', 'remark', self.gf('django.db.models.fields.TextField')(null=True, blank=True), keep_default=False)
def backwards(self, orm):
# Deleting field 'GeoNamesMatchingLogMatchedPlaces.remark'
db.delete_column('united_geonames_geonamesmatchinglogmatchedplaces', 'remark')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),<|fim▁hole|> 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '80'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 7, 25, 14, 53, 19, 34425)'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 7, 25, 14, 53, 19, 34316)'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'united_geonames.geonamesmatchinglogmatch': {
'Meta': {'ordering': "['-matching_index']", 'object_name': 'GeoNamesMatchingLogMatch'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']", 'null': 'True'}),
'display_for_users': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'matching_index': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '233', 'null': 'True', 'blank': 'True'}),
'number_of_alternatives': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'start_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'})
},
'united_geonames.geonamesmatchinglogmatchedplaces': {
'Meta': {'object_name': 'GeoNamesMatchingLogMatchedPlaces'},
'best_match': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'geographical_distance': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'matchinglogmatch': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'matched'", 'null': 'True', 'to': "orm['united_geonames.GeoNamesMatchingLogMatch']"}),
'ngram_distance': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'percentage': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'remark': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'united_geoname': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['united_geonames.UnitedGeoName']", 'null': 'True', 'blank': 'True'})
},
'united_geonames.unitedgeoname': {
'Meta': {'object_name': 'UnitedGeoName'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_name': ('django.db.models.fields.CharField', [], {'max_length': '300'})
},
'united_geonames.unitedgeonamesynonim': {
'Meta': {'object_name': 'UnitedGeoNameSynonim'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']", 'null': 'True'}),
'coordinates': ('django.contrib.gis.db.models.fields.PointField', [], {'null': 'True', 'spatial_index': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'region': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'subregion': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'synonim_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'synonim_content_type_set'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'synonim_name': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'synonim_object_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'united_geoname': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'geonames'", 'null': 'True', 'to': "orm['united_geonames.UnitedGeoName']"})
},
'united_geonames.usergeoname': {
'Meta': {'object_name': 'UserGeoName'},
'coordinates': ('django.contrib.gis.db.models.fields.PointField', [], {'null': 'True', 'spatial_index': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'region': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'subregion': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'united_geonames.userproject': {
'Meta': {'object_name': 'UserProject'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']", 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['united_geonames']<|fim▁end|> | 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), |
<|file_name|>ded3fd1d7f9d_.py<|end_file_name|><|fim▁begin|>"""empty message
Revision ID: ded3fd1d7f9d
Revises: b70e85abec53
Create Date: 2020-12-30 22:46:59.418950
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = 'ded3fd1d7f9d'
down_revision = 'b70e85abec53'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('hashfiles', sa.Column('checksum', sa.String(length=256), nullable=False))
op.drop_column('hashfiles', 'hash_str')
# ### end Alembic commands ###
<|fim▁hole|> op.drop_column('hashfiles', 'checksum')
# ### end Alembic commands ###<|fim▁end|> | def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('hashfiles', sa.Column('hash_str', mysql.VARCHAR(length=256), nullable=False)) |
<|file_name|>setup_wizard.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe, copy
import os
import json
from frappe.utils import cstr, flt, getdate
from frappe import _
from frappe.utils.file_manager import save_file
from .default_website import website_maker
import install_fixtures
from .sample_data import make_sample_data
from erpnext.accounts.doctype.account.account import RootNotEditable
from frappe.core.doctype.communication.comment import add_info_comment
from erpnext.setup.setup_wizard.domainify import setup_domain
def setup_complete(args=None):
if frappe.db.sql("select name from tabCompany"):
frappe.throw(_("Setup Already Complete!!"))<|fim▁hole|>
create_price_lists(args)
create_fiscal_year_and_company(args)
create_sales_tax(args)
create_users(args)
set_defaults(args)
create_territories()
create_feed_and_todo()
create_email_digest()
create_letter_head(args)
create_taxes(args)
create_items(args)
create_customers(args)
create_suppliers(args)
if args.get('setup_website'):
website_maker(args)
create_logo(args)
frappe.local.message_log = []
setup_domain(args.get('domain'))
frappe.db.commit()
login_as_first_user(args)
frappe.db.commit()
frappe.clear_cache()
if args.get("add_sample_data"):
try:
make_sample_data(args)
frappe.clear_cache()
except:
# clear message
if frappe.message_log:
frappe.message_log.pop()
pass
def create_fiscal_year_and_company(args):
if (args.get('fy_start_date')):
curr_fiscal_year = get_fy_details(args.get('fy_start_date'), args.get('fy_end_date'))
frappe.get_doc({
"doctype":"Fiscal Year",
'year': curr_fiscal_year,
'year_start_date': args.get('fy_start_date'),
'year_end_date': args.get('fy_end_date'),
}).insert()
args["curr_fiscal_year"] = curr_fiscal_year
# Company
if (args.get('company_name')):
frappe.get_doc({
"doctype":"Company",
'company_name':args.get('company_name').strip(),
'abbr':args.get('company_abbr'),
'default_currency':args.get('currency'),
'country': args.get('country'),
'create_chart_of_accounts_based_on': 'Standard Template',
'chart_of_accounts': args.get('chart_of_accounts'),
'domain': args.get('domain')
}).insert()
#Enable shopping cart
enable_shopping_cart(args)
# Bank Account
create_bank_account(args)
def enable_shopping_cart(args):
frappe.get_doc({
"doctype": "Shopping Cart Settings",
"enabled": 1,
'company': args.get('company_name').strip(),
'price_list': frappe.db.get_value("Price List", {"selling": 1}),
'default_customer_group': _("Individual"),
'quotation_series': "QTN-",
}).insert()
def create_bank_account(args):
if args.get("bank_account"):
company_name = args.get('company_name').strip()
bank_account_group = frappe.db.get_value("Account",
{"account_type": "Bank", "is_group": 1, "root_type": "Asset",
"company": company_name})
if bank_account_group:
bank_account = frappe.get_doc({
"doctype": "Account",
'account_name': args.get("bank_account"),
'parent_account': bank_account_group,
'is_group':0,
'company': company_name,
"account_type": "Bank",
})
try:
return bank_account.insert()
except RootNotEditable:
frappe.throw(_("Bank account cannot be named as {0}").format(args.get("bank_account")))
except frappe.DuplicateEntryError:
# bank account same as a CoA entry
pass
def create_price_lists(args):
for pl_type, pl_name in (("Selling", _("Standard Selling")), ("Buying", _("Standard Buying"))):
frappe.get_doc({
"doctype": "Price List",
"price_list_name": pl_name,
"enabled": 1,
"buying": 1 if pl_type == "Buying" else 0,
"selling": 1 if pl_type == "Selling" else 0,
"currency": args["currency"]
}).insert()
def set_defaults(args):
# enable default currency
frappe.db.set_value("Currency", args.get("currency"), "enabled", 1)
global_defaults = frappe.get_doc("Global Defaults", "Global Defaults")
global_defaults.update({
'current_fiscal_year': args.curr_fiscal_year,
'default_currency': args.get('currency'),
'default_company':args.get('company_name').strip(),
"country": args.get("country"),
})
global_defaults.save()
frappe.db.set_value("System Settings", None, "email_footer_address", args.get("company"))
accounts_settings = frappe.get_doc("Accounts Settings")
accounts_settings.auto_accounting_for_stock = 1
accounts_settings.save()
stock_settings = frappe.get_doc("Stock Settings")
stock_settings.item_naming_by = "Item Code"
stock_settings.valuation_method = "FIFO"
stock_settings.default_warehouse = frappe.db.get_value('Warehouse', {'warehouse_name': _('Stores')})
stock_settings.stock_uom = _("Nos")
stock_settings.auto_indent = 1
stock_settings.auto_insert_price_list_rate_if_missing = 1
stock_settings.automatically_set_serial_nos_based_on_fifo = 1
stock_settings.save()
selling_settings = frappe.get_doc("Selling Settings")
selling_settings.cust_master_name = "Customer Name"
selling_settings.so_required = "No"
selling_settings.dn_required = "No"
selling_settings.allow_multiple_items = 1
selling_settings.save()
buying_settings = frappe.get_doc("Buying Settings")
buying_settings.supp_master_name = "Supplier Name"
buying_settings.po_required = "No"
buying_settings.pr_required = "No"
buying_settings.maintain_same_rate = 1
buying_settings.allow_multiple_items = 1
buying_settings.save()
notification_control = frappe.get_doc("Notification Control")
notification_control.quotation = 1
notification_control.sales_invoice = 1
notification_control.purchase_order = 1
notification_control.save()
hr_settings = frappe.get_doc("HR Settings")
hr_settings.emp_created_by = "Naming Series"
hr_settings.save()
def create_feed_and_todo():
"""update Activity feed and create todo for creation of item, customer, vendor"""
add_info_comment(**{
"subject": _("ERPNext Setup Complete!")
})
def create_email_digest():
from frappe.utils.user import get_system_managers
system_managers = get_system_managers(only_name=True)
if not system_managers:
return
companies = frappe.db.sql_list("select name FROM `tabCompany`")
for company in companies:
if not frappe.db.exists("Email Digest", "Default Weekly Digest - " + company):
edigest = frappe.get_doc({
"doctype": "Email Digest",
"name": "Default Weekly Digest - " + company,
"company": company,
"frequency": "Weekly",
"recipient_list": "\n".join(system_managers)
})
for df in edigest.meta.get("fields", {"fieldtype": "Check"}):
if df.fieldname != "scheduler_errors":
edigest.set(df.fieldname, 1)
edigest.insert()
# scheduler errors digest
if companies:
edigest = frappe.new_doc("Email Digest")
edigest.update({
"name": "Scheduler Errors",
"company": companies[0],
"frequency": "Daily",
"recipient_list": "\n".join(system_managers),
"scheduler_errors": 1,
"enabled": 1
})
edigest.insert()
def get_fy_details(fy_start_date, fy_end_date):
start_year = getdate(fy_start_date).year
if start_year == getdate(fy_end_date).year:
fy = cstr(start_year)
else:
fy = cstr(start_year) + '-' + cstr(start_year + 1)
return fy
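# Worked example of the naming rule above (dates are illustrative):
#   get_fy_details("2016-04-01", "2017-03-31") -> "2016-2017"
#   get_fy_details("2016-01-01", "2016-12-31") -> "2016"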
def create_sales_tax(args):
country_wise_tax = get_country_wise_tax(args.get("country"))
if country_wise_tax and len(country_wise_tax) > 0:
for sales_tax, tax_data in country_wise_tax.items():
make_tax_account_and_template(args.get("company_name").strip(),
tax_data.get('account_name'), tax_data.get('tax_rate'), sales_tax)
def get_country_wise_tax(country):
data = {}
with open (os.path.join(os.path.dirname(__file__), "data", "country_wise_tax.json")) as countrywise_tax:
data = json.load(countrywise_tax).get(country)
return data
def create_taxes(args):
for i in xrange(1,6):
if args.get("tax_" + str(i)):
# replace % in case someone also enters the % symbol
tax_rate = cstr(args.get("tax_rate_" + str(i)) or "").replace("%", "")
account_name = args.get("tax_" + str(i))
make_tax_account_and_template(args.get("company_name").strip(), account_name, tax_rate)
def make_tax_account_and_template(company, account_name, tax_rate, template_name=None):
try:
account = make_tax_account(company, account_name, tax_rate)
if account:
make_sales_and_purchase_tax_templates(account, template_name)
except frappe.NameError, e:
if e.args[2][0]==1062:
pass
else:
raise
except RootNotEditable, e:
pass
def get_tax_account_group(company):
tax_group = frappe.db.get_value("Account",
{"account_name": "Duties and Taxes", "is_group": 1, "company": company})
if not tax_group:
tax_group = frappe.db.get_value("Account", {"is_group": 1, "root_type": "Liability",
"account_type": "Tax", "company": company})
return tax_group
def make_tax_account(company, account_name, tax_rate):
tax_group = get_tax_account_group(company)
if tax_group:
return frappe.get_doc({
"doctype":"Account",
"company": company,
"parent_account": tax_group,
"account_name": account_name,
"is_group": 0,
"report_type": "Balance Sheet",
"root_type": "Liability",
"account_type": "Tax",
"tax_rate": flt(tax_rate) if tax_rate else None
}).insert(ignore_permissions=True)
def make_sales_and_purchase_tax_templates(account, template_name=None):
if not template_name:
template_name = account.name
sales_tax_template = {
"doctype": "Sales Taxes and Charges Template",
"title": template_name,
"company": account.company,
"taxes": [{
"category": "Valuation and Total",
"charge_type": "On Net Total",
"account_head": account.name,
"description": "{0} @ {1}".format(account.account_name, account.tax_rate),
"rate": account.tax_rate
}]
}
# Sales
frappe.get_doc(copy.deepcopy(sales_tax_template)).insert(ignore_permissions=True)
# Purchase
purchase_tax_template = copy.deepcopy(sales_tax_template)
purchase_tax_template["doctype"] = "Purchase Taxes and Charges Template"
frappe.get_doc(purchase_tax_template).insert(ignore_permissions=True)
def create_items(args):
for i in xrange(1,6):
item = args.get("item_" + str(i))
if item:
item_group = args.get("item_group_" + str(i))
is_sales_item = args.get("is_sales_item_" + str(i))
is_purchase_item = args.get("is_purchase_item_" + str(i))
is_stock_item = item_group!=_("Services")
default_warehouse = ""
if is_stock_item:
default_warehouse = frappe.db.get_value("Warehouse", filters={
"warehouse_name": _("Finished Goods") if is_sales_item else _("Stores"),
"company": args.get("company_name").strip()
})
try:
frappe.get_doc({
"doctype":"Item",
"item_code": item,
"item_name": item,
"description": item,
"show_in_website": 1,
"is_sales_item": is_sales_item,
"is_purchase_item": is_purchase_item,
"is_stock_item": is_stock_item and 1 or 0,
"item_group": item_group,
"stock_uom": args.get("item_uom_" + str(i)),
"default_warehouse": default_warehouse
}).insert()
if args.get("item_img_" + str(i)):
item_image = args.get("item_img_" + str(i)).split(",")
if len(item_image)==3:
filename, filetype, content = item_image
fileurl = save_file(filename, content, "Item", item, decode=True).file_url
frappe.db.set_value("Item", item, "image", fileurl)
if args.get("item_price_" + str(i)):
item_price = flt(args.get("item_price_" + str(i)))
if is_sales_item:
price_list_name = frappe.db.get_value("Price List", {"selling": 1})
make_item_price(item, price_list_name, item_price)
if is_purchase_item:
price_list_name = frappe.db.get_value("Price List", {"buying": 1})
make_item_price(item, price_list_name, item_price)
except frappe.NameError:
pass
def make_item_price(item, price_list_name, item_price):
frappe.get_doc({
"doctype": "Item Price",
"price_list": price_list_name,
"item_code": item,
"price_list_rate": item_price
}).insert()
def create_customers(args):
for i in xrange(1,6):
customer = args.get("customer_" + str(i))
if customer:
try:
doc = frappe.get_doc({
"doctype":"Customer",
"customer_name": customer,
"customer_type": "Company",
"customer_group": _("Commercial"),
"territory": args.get("country"),
"company": args.get("company_name").strip()
}).insert()
if args.get("customer_contact_" + str(i)):
create_contact(args.get("customer_contact_" + str(i)),
"Customer", doc.name)
except frappe.NameError:
pass
def create_suppliers(args):
for i in xrange(1,6):
supplier = args.get("supplier_" + str(i))
if supplier:
try:
doc = frappe.get_doc({
"doctype":"Supplier",
"supplier_name": supplier,
"supplier_type": _("Local"),
"company": args.get("company_name").strip()
}).insert()
if args.get("supplier_contact_" + str(i)):
create_contact(args.get("supplier_contact_" + str(i)),
"Supplier", doc.name)
except frappe.NameError:
pass
def create_contact(contact, party_type, party):
"""Create contact based on given contact name"""
contact = contact.strip().split(" ")
contact = frappe.get_doc({
"doctype":"Contact",
"first_name":contact[0],
"last_name": len(contact) > 1 and contact[1] or ""
})
contact.append('links', dict(link_doctype=party_type, link_name=party))
contact.insert()
def create_letter_head(args):
if args.get("attach_letterhead"):
frappe.get_doc({
"doctype":"Letter Head",
"letter_head_name": _("Standard"),
"is_default": 1
}).insert()
attach_letterhead = args.get("attach_letterhead").split(",")
if len(attach_letterhead)==3:
filename, filetype, content = attach_letterhead
fileurl = save_file(filename, content, "Letter Head", _("Standard"), decode=True).file_url
frappe.db.set_value("Letter Head", _("Standard"), "content", "<img src='%s' style='max-width: 100%%;'>" % fileurl)
def create_logo(args):
if args.get("attach_logo"):
attach_logo = args.get("attach_logo").split(",")
if len(attach_logo)==3:
filename, filetype, content = attach_logo
fileurl = save_file(filename, content, "Website Settings", "Website Settings",
decode=True).file_url
frappe.db.set_value("Website Settings", "Website Settings", "brand_html",
"<img src='{0}' style='max-width: 40px; max-height: 25px;'> {1}".format(fileurl, args.get("company_name").strip()))
def create_territories():
"""create two default territories, one for home country and one named Rest of the World"""
from frappe.utils.nestedset import get_root_of
country = frappe.db.get_default("country")
root_territory = get_root_of("Territory")
for name in (country, _("Rest Of The World")):
if name and not frappe.db.exists("Territory", name):
frappe.get_doc({
"doctype": "Territory",
"territory_name": name.replace("'", ""),
"parent_territory": root_territory,
"is_group": "No"
}).insert()
def login_as_first_user(args):
if args.get("email") and hasattr(frappe.local, "login_manager"):
frappe.local.login_manager.login_as(args.get("email"))
def create_users(args):
if frappe.session.user == 'Administrator':
return
# create employee for self
emp = frappe.get_doc({
"doctype": "Employee",
"employee_name": " ".join(filter(None, [args.get("first_name"), args.get("last_name")])),
"user_id": frappe.session.user,
"status": "Active",
"company": args.get("company_name")
})
emp.flags.ignore_mandatory = True
emp.insert(ignore_permissions = True)
for i in xrange(1,5):
email = args.get("user_email_" + str(i))
fullname = args.get("user_fullname_" + str(i))
if email:
if not fullname:
fullname = email.split("@")[0]
parts = fullname.split(" ", 1)
user = frappe.get_doc({
"doctype": "User",
"email": email,
"first_name": parts[0],
"last_name": parts[1] if len(parts) > 1 else "",
"enabled": 1,
"user_type": "System User"
})
# default roles
user.append_roles("Projects Manager", "Stock User", "Support Team")
if args.get("user_sales_" + str(i)):
user.append_roles("Sales User", "Sales Manager", "Accounts User")
if args.get("user_purchaser_" + str(i)):
user.append_roles("Purchase User", "Purchase Manager", "Accounts User")
if args.get("user_accountant_" + str(i)):
user.append_roles("Accounts Manager", "Accounts User")
user.flags.delay_emails = True
if not frappe.db.get_value("User", email):
user.insert(ignore_permissions=True)
# create employee
emp = frappe.get_doc({
"doctype": "Employee",
"employee_name": fullname,
"user_id": email,
"status": "Active",
"company": args.get("company_name")
})
emp.flags.ignore_mandatory = True
emp.insert(ignore_permissions = True)
def create_academic_term():
at = ["Semester 1", "Semester 2", "Semester 3"]
ay = ["2013-14", "2014-15", "2015-16", "2016-17", "2017-18"]
for y in ay:
for t in at:
academic_term = frappe.new_doc("Academic Term")
academic_term.academic_year = y
academic_term.term_name = t
try:
academic_term.save()
except frappe.DuplicateEntryError:
pass
def create_academic_year():
ac = ["2013-14", "2014-15", "2015-16", "2016-17", "2017-18"]
for d in ac:
academic_year = frappe.new_doc("Academic Year")
academic_year.academic_year_name = d
try:
academic_year.save()
except frappe.DuplicateEntryError:
pass
def create_program(args):
for i in xrange(1,6):
if args.get("program_" + str(i)):
program = frappe.new_doc("Program")
program.program_code = args.get("program_" + str(i))
program.program_name = args.get("program_" + str(i))
try:
program.save()
except frappe.DuplicateEntryError:
pass
def create_course(args):
for i in xrange(1,6):
if args.get("course_" + str(i)):
course = frappe.new_doc("Course")
course.course_code = args.get("course_" + str(i))
course.course_name = args.get("course_" + str(i))
try:
course.save()
except frappe.DuplicateEntryError:
pass
def create_instructor(args):
for i in xrange(1,6):
if args.get("instructor_" + str(i)):
instructor = frappe.new_doc("Instructor")
instructor.instructor_name = args.get("instructor_" + str(i))
try:
instructor.save()
except frappe.DuplicateEntryError:
pass
def create_room(args):
for i in xrange(1,6):
if args.get("room_" + str(i)):
room = frappe.new_doc("Room")
room.room_name = args.get("room_" + str(i))
room.seating_capacity = args.get("room_capacity_" + str(i))
try:
room.save()
except frappe.DuplicateEntryError:
pass<|fim▁end|> |
install_fixtures.install(args.get("country")) |
<|file_name|>rt.rs<|end_file_name|><|fim▁begin|>use super::{proto, BUCKET_MAX, ID, MAX_BUCKETS, MIN_BOOTSTRAP_BKTS, TX_TIMEOUT_SECS};
use crate::tracker;
use byteorder::{BigEndian, ByteOrder, WriteBytesExt};
use chrono::{DateTime, Utc};
use num_bigint::BigUint;
use rand::{self, Rng};
use std::collections::HashMap;
use std::net::SocketAddr;
use std::{cmp, mem};
const MAX_SEARCH_DEPTH: u8 = 5;
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct RoutingTable {
id: ID,
buckets: Vec<Bucket>,
last_resp_recvd: DateTime<Utc>,
last_req_recvd: DateTime<Utc>,
last_token_refresh: DateTime<Utc>,
last_tick: DateTime<Utc>,
transactions: HashMap<u32, Transaction>,
torrents: HashMap<[u8; 20], Torrent>,
bootstrapping: bool,
}
#[derive(Clone, Debug, Serialize, Deserialize)]
struct Transaction {
created: DateTime<Utc>,
kind: TransactionKind,
}
#[derive(Clone, Debug, Serialize, Deserialize)]
enum TransactionKind {
Initialization,
Query(ID),
TSearch {
id: ID,
torrent: usize,
hash: [u8; 20],
depth: u8,
},
}
#[derive(Clone, Debug, Serialize, Deserialize)]
struct Torrent {
peers: Vec<(ID, SocketAddr)>,
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Bucket {
start: ID,
end: ID,
last_updated: DateTime<Utc>,
nodes: Vec<Node>,
}
<|fim▁hole|> state: NodeState,
addr: SocketAddr,
last_updated: DateTime<Utc>,
token: Vec<u8>,
prev_token: Vec<u8>,
rem_token: Option<Vec<u8>>,
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum NodeState {
Good,
Questionable(usize),
Bad,
}
impl RoutingTable {
pub fn new() -> RoutingTable {
let mut id = [0u8; 20];
let mut rng = rand::thread_rng();
for i in &mut id {
*i = rng.gen::<u8>();
}
RoutingTable {
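            // Start with one bucket covering the entire 160-bit id space;
            // buckets are split on demand as they fill (see split_bucket).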
buckets: vec![Bucket::new(BigUint::from(0u8), id_from_pow(160))],
last_resp_recvd: Utc::now(),
last_req_recvd: Utc::now(),
last_token_refresh: Utc::now(),
last_tick: Utc::now(),
id: BigUint::from_bytes_be(&id),
transactions: HashMap::new(),
torrents: HashMap::new(),
bootstrapping: true,
}
}
pub fn init(&mut self) -> Vec<(proto::Request, SocketAddr)> {
self.refresh_tokens()
}
pub fn deserialize(data: &[u8]) -> Option<RoutingTable> {
bincode::deserialize(data).ok().map(|rt: RoutingTable| {
info!(
"Loaded {} dht buckets, {} nodes",
rt.buckets.len(),
rt.buckets.iter().map(|buk| buk.nodes.len()).sum::<usize>()
);
rt
})
}
pub fn add_addr(&mut self, addr: SocketAddr) -> (proto::Request, SocketAddr) {
let tx = self.new_init_tx();
(proto::Request::ping(tx, self.id.clone()), addr)
}
pub fn get_peers(
&mut self,
torrent: usize,
hash: [u8; 20],
) -> Vec<(proto::Request, SocketAddr)> {
let tid = BigUint::from_bytes_be(&hash[..]);
let idx = self.bucket_idx(&tid);
let mut nodes: Vec<proto::Node> = Vec::new();
for node in &self.buckets[idx].nodes {
nodes.push(node.into());
}
let mut reqs = Vec::new();
for node in nodes {
let tx = self.new_tsearch_tx(node.id, torrent, hash, 0);
let req = proto::Request::get_peers(tx, self.id.clone(), hash);
reqs.push((req, node.addr));
}
reqs
}
pub fn announce(&mut self, hash: [u8; 20]) -> Vec<(proto::Request, SocketAddr)> {
let mut nodes: Vec<(proto::Node, Vec<u8>)> = Vec::new();
for bucket in &self.buckets {
for node in &bucket.nodes {
if let Some(ref tok) = node.rem_token {
nodes.push((node.into(), tok.clone()))
}
}
}
let mut reqs = Vec::new();
for (node, tok) in nodes {
let tx = self.new_query_tx(node.id);
let req = proto::Request::announce(tx, self.id.clone(), hash, tok);
reqs.push((req, node.addr));
}
reqs
}
pub fn handle_req(&mut self, req: proto::Request, mut addr: SocketAddr) -> proto::Response {
self.last_req_recvd = Utc::now();
match req.kind {
// TODO: Consider adding the node if we don't have it?
proto::RequestKind::Ping(id) => {
if self.contains_id(&id) {
self.get_node_mut(&id).update();
}
proto::Response::id(req.transaction, self.id.clone())
}
proto::RequestKind::FindNode { id, target } => {
if self.contains_id(&id) {
self.get_node_mut(&id).update();
}
let mut nodes = Vec::new();
if self.contains_id(&target) {
nodes.push(self.get_node(&target).into())
} else {
let b = self.bucket_idx(&target);
for node in &self.buckets[b].nodes {
nodes.push(node.into());
}
}
proto::Response::find_node(req.transaction, self.id.clone(), nodes)
}
proto::RequestKind::AnnouncePeer {
id,
implied_port,
hash,
port,
token,
} => {
if !self.contains_id(&id) {
return proto::Response::error(
req.transaction,
proto::ErrorKind::Protocol("Unregistered peer!".to_owned()),
);
}
{
let node = self.get_node_mut(&id);
if !node.token_valid(&token) {
return proto::Response::error(
req.transaction,
proto::ErrorKind::Protocol("Bad token!".to_owned()),
);
}
node.update();
}
self.torrents
.entry(hash)
.or_insert(Torrent { peers: Vec::new() });
if !implied_port {
addr.set_port(port);
}
self.torrents.get_mut(&hash).unwrap().peers.push((id, addr));
proto::Response::id(req.transaction, self.id.clone())
}
proto::RequestKind::GetPeers { id, hash } => {
if !self.contains_id(&id) {
let n = Node::new(id.clone(), addr);
if self.add_node(n).is_err() {
// This will be processed immediately after.
}
}
let token = if !self.contains_id(&id) {
return proto::Response::error(
req.transaction,
proto::ErrorKind::Protocol("Unregistered peer!".to_owned()),
);
} else {
self.get_node(&id).token.clone()
};
if let Some(t) = self.torrents.get(&hash) {
proto::Response::peers(
req.transaction,
self.id.clone(),
token,
t.peers.iter().map(|p| p.1).collect(),
)
} else {
let mut nodes = Vec::new();
let b = self.bucket_idx(&BigUint::from_bytes_be(&hash[..]));
for node in &self.buckets[b].nodes {
nodes.push(node.into());
}
proto::Response::nodes(req.transaction, self.id.clone(), token, nodes)
}
}
}
}
pub fn handle_resp(
&mut self,
resp: proto::Response,
addr: SocketAddr,
) -> Result<tracker::Response, Vec<(proto::Request, SocketAddr)>> {
self.last_resp_recvd = Utc::now();
let mut reqs = Vec::new();
if resp.transaction.len() < 4 {
return Err(reqs);
}
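        // The first four bytes are the big-endian transaction id written by
        // the new_*_tx helpers below; shorter payloads were rejected above.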
let tid = BigEndian::read_u32(&resp.transaction[..]);
let tx = if let Some(tx) = self.transactions.remove(&tid) {
tx
} else {
return Err(reqs);
};
match (tx.kind, resp.kind) {
(TransactionKind::Initialization, proto::ResponseKind::ID(id)) => {
let mut n = Node::new(id.clone(), addr);
n.update();
if self.add_node(n).is_ok() && self.bootstrapping {
let tx = self.new_query_tx(id);
reqs.push((
proto::Request::find_node(tx, self.id.clone(), self.id.clone()),
addr,
));
}
}
(TransactionKind::Query(ref id1), proto::ResponseKind::ID(ref id2)) if id1 == id2 => {
if !self.contains_id(id1) {
return Err(reqs);
}
self.get_node_mut(id1).update();
if self.bootstrapping {
let tx = self.new_query_tx(id1.clone());
reqs.push((
proto::Request::find_node(tx, self.id.clone(), self.id.clone()),
addr,
));
}
}
(
TransactionKind::Query(ref id1),
proto::ResponseKind::FindNode {
id: ref id2,
ref mut nodes,
},
) if id1 == id2 => {
if !self.contains_id(id1) {
return Err(reqs);
}
self.get_node_mut(id1).update();
for node in nodes.drain(..) {
if !self.contains_id(&node.id) {
let id = node.id.clone();
let addr = node.addr;
if self.add_node(node.into()).is_ok() {
let tx = self.new_query_tx(id);
reqs.push((proto::Request::ping(tx, self.id.clone()), addr));
}
}
}
}
// Token refresh query
(
TransactionKind::Query(ref id1),
proto::ResponseKind::GetPeers {
id: ref id2,
ref mut token,
..
},
) if id1 == id2 => {
if !self.contains_id(id1) {
return Err(reqs);
}
let node = self.get_node_mut(id1);
node.update();
if let Some(ref mut rt) = node.rem_token {
mem::swap(rt, token);
} else {
node.rem_token = Some(token.clone());
}
}
(
TransactionKind::TSearch {
id: ref id1,
torrent,
hash,
depth,
},
proto::ResponseKind::GetPeers {
id: ref id2,
ref mut values,
ref mut nodes,
ref mut token,
},
) if id1 == id2 => {
if self.contains_id(id1) {
let node = self.get_node_mut(id1);
node.update();
if let Some(ref mut rt) = node.rem_token {
mem::swap(rt, token);
} else {
node.rem_token = Some(token.clone());
}
}
if !values.is_empty() {
return Ok(tracker::Response::DHT {
tid: torrent,
peers: mem::replace(values, vec![]),
});
}
if depth < MAX_SEARCH_DEPTH {
for node in nodes.drain(..) {
let id = node.id.clone();
let addr = node.addr;
if !self.contains_id(&node.id) {
let tx = self.new_tsearch_tx(id.clone(), torrent, hash, depth + 1);
reqs.push((proto::Request::get_peers(tx, self.id.clone(), hash), addr));
}
}
}
}
(TransactionKind::Query(id), proto::ResponseKind::Error(_)) => {
if !self.contains_id(&id) {
return Err(reqs);
}
self.get_node_mut(&id).update();
}
// Mismatched IDs
(TransactionKind::Query(id), proto::ResponseKind::ID(_))
| (TransactionKind::Query(id), proto::ResponseKind::FindNode { .. })
| (TransactionKind::Query(id), proto::ResponseKind::GetPeers { .. })
| (TransactionKind::TSearch { id, .. }, proto::ResponseKind::GetPeers { .. })
| (TransactionKind::TSearch { id, .. }, _) => {
self.remove_node(&id);
}
// Bad response to initialization ping
(TransactionKind::Initialization, _) => {
// No need to do anything here, we haven't added the node to the routing
// table yet.
}
}
Err(reqs)
}
pub fn tick(&mut self) -> Vec<(proto::Request, SocketAddr)> {
let mut reqs = Vec::new();
let dur = Utc::now().signed_duration_since(self.last_tick);
if dur.num_seconds() < 10 {
return reqs;
}
self.last_tick = Utc::now();
let mut nodes_to_ping: Vec<proto::Node> = Vec::new();
if self.is_bootstrapped() && self.bootstrapping {
self.bootstrapping = false;
}
self.transactions.retain(|_, tx| {
Utc::now().signed_duration_since(tx.created).num_seconds() < TX_TIMEOUT_SECS
});
let dur = Utc::now().signed_duration_since(self.last_token_refresh);
let tok_refresh = dur.num_minutes() > 5;
for bucket in &mut self.buckets {
for node in &mut bucket.nodes {
if tok_refresh {
node.new_token();
}
let dur = Utc::now().signed_duration_since(node.last_updated);
if dur.num_minutes() > 15 {
if node.good() {
node.state = NodeState::Questionable(1);
nodes_to_ping.push((&*node).into());
} else if let NodeState::Questionable(1) = node.state {
node.state = NodeState::Questionable(2);
nodes_to_ping.push((&*node).into());
} else {
node.state = NodeState::Bad;
}
}
}
}
for node in nodes_to_ping {
let tx = self.new_query_tx(node.id);
reqs.push((proto::Request::ping(tx, self.id.clone()), node.addr));
}
let buckets = &self.buckets;
self.torrents.retain(|_, t| {
t.peers.retain(|p| {
let idx = RoutingTable::bucket_idx_(&p.0, buckets);
buckets[idx].contains(&p.0)
});
!t.peers.is_empty()
});
reqs
}
pub fn serialize(&self) -> Vec<u8> {
bincode::serialize(self).unwrap()
}
pub fn is_bootstrapped(&self) -> bool {
self.buckets.len() >= MIN_BOOTSTRAP_BKTS
}
/// Send a bogus get_peers query and internally refresh our token.
fn refresh_tokens(&mut self) -> Vec<(proto::Request, SocketAddr)> {
let mut nodes: Vec<proto::Node> = Vec::new();
for bucket in &mut self.buckets {
for node in &mut bucket.nodes {
node.new_token();
nodes.push((&*node).into());
}
}
let mut reqs = Vec::new();
for node in nodes {
let tx = self.new_query_tx(node.id);
let req = proto::Request::get_peers(tx, self.id.clone(), [0xBEu8; 20]);
reqs.push((req, node.addr));
}
reqs
}
fn get_node_mut(&mut self, id: &ID) -> &mut Node {
let idx = self.bucket_idx(id);
let bidx = self.buckets[idx].idx_of(id).unwrap();
&mut self.buckets[idx].nodes[bidx]
}
fn get_node(&self, id: &ID) -> &Node {
let idx = self.bucket_idx(id);
let bidx = self.buckets[idx].idx_of(id).unwrap();
&self.buckets[idx].nodes[bidx]
}
fn contains_id(&self, id: &ID) -> bool {
let idx = self.bucket_idx(id);
self.buckets[idx].contains(id)
}
fn new_init_tx(&mut self) -> Vec<u8> {
let mut tb = Vec::new();
let tid = rand::random::<u32>();
tb.write_u32::<BigEndian>(tid).unwrap();
self.transactions.insert(
tid,
Transaction {
created: Utc::now(),
kind: TransactionKind::Initialization,
},
);
tb
}
fn new_query_tx(&mut self, id: ID) -> Vec<u8> {
let mut tb = Vec::new();
let tid = rand::random::<u32>();
tb.write_u32::<BigEndian>(tid).unwrap();
self.transactions.insert(
tid,
Transaction {
created: Utc::now(),
kind: TransactionKind::Query(id),
},
);
tb
}
fn new_tsearch_tx(&mut self, id: ID, torrent: usize, hash: [u8; 20], depth: u8) -> Vec<u8> {
let mut tb = Vec::new();
let tid = rand::random::<u32>();
tb.write_u32::<BigEndian>(tid).unwrap();
self.transactions.insert(
tid,
Transaction {
created: Utc::now(),
kind: TransactionKind::TSearch {
id,
torrent,
hash,
depth,
},
},
);
tb
}
fn add_node(&mut self, node: Node) -> Result<(), ()> {
let idx = self.bucket_idx(&node.id);
if self.buckets[idx].full() {
if self.buckets[idx].could_hold(&self.id) && self.buckets.len() < MAX_BUCKETS {
self.split_bucket(idx);
self.add_node(node)
} else {
Err(())
}
} else {
self.buckets[idx].add_node(node);
Ok(())
}
}
fn remove_node(&mut self, id: &ID) {
let idx = self.bucket_idx(id);
if let Some(i) = self.buckets[idx].idx_of(id) {
self.buckets[idx].nodes.remove(i);
}
}
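    /// Splits bucket `idx` at its ID-space midpoint: nodes below the midpoint
    /// stay put, the rest move into a new bucket inserted at `idx + 1`.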
fn split_bucket(&mut self, idx: usize) {
let midpoint = self.buckets[idx].midpoint();
let mut nb;
{
let pb = &mut self.buckets[idx];
nb = Bucket::new(midpoint.clone(), pb.end.clone());
pb.end = midpoint;
let nodes = mem::replace(&mut pb.nodes, Vec::with_capacity(BUCKET_MAX));
for node in nodes {
if pb.could_hold(&node.id) {
pb.nodes.push(node);
} else {
nb.nodes.push(node);
}
}
}
self.buckets.insert(idx + 1, nb);
}
fn bucket_idx(&self, id: &ID) -> usize {
RoutingTable::bucket_idx_(id, &self.buckets)
}
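    /// Buckets partition the ID space contiguously and in sorted order, so
    /// the owning bucket can be located by binary search on `could_hold`.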
fn bucket_idx_(id: &ID, buckets: &[Bucket]) -> usize {
buckets
.binary_search_by(|bucket| {
if bucket.could_hold(id) {
cmp::Ordering::Equal
} else {
bucket.start.cmp(id)
}
})
.unwrap()
}
}
impl Bucket {
fn new(start: ID, end: ID) -> Bucket {
Bucket {
start,
end,
last_updated: Utc::now(),
nodes: Vec::with_capacity(BUCKET_MAX),
}
}
fn add_node(&mut self, mut node: Node) {
if self.nodes.len() < BUCKET_MAX {
self.nodes.push(node);
} else {
for n in &mut self.nodes {
if !n.good() {
mem::swap(n, &mut node);
break;
}
}
}
}
fn could_hold(&self, id: &ID) -> bool {
&self.start <= id && id < &self.end
}
fn full(&self) -> bool {
self.nodes.len() >= BUCKET_MAX && self.nodes.iter().all(|n| n.good())
}
fn midpoint(&self) -> ID {
self.start.clone() + (&self.end - &self.start) / BigUint::from(2u8)
}
fn contains(&self, id: &ID) -> bool {
self.idx_of(id).is_some()
}
fn idx_of(&self, id: &ID) -> Option<usize> {
self.nodes.iter().position(|node| &node.id == id)
}
}
impl Node {
fn new(id: ID, addr: SocketAddr) -> Node {
let token = Node::create_token();
Node {
id,
state: NodeState::Bad,
addr,
last_updated: Utc::now(),
prev_token: token.clone(),
rem_token: None,
token,
}
}
#[cfg(test)]
fn new_test(id: ID) -> Node {
Node::new(id, "127.0.0.1:0".parse().unwrap())
}
fn good(&self) -> bool {
if let NodeState::Good = self.state {
true
} else {
false
}
}
fn new_token(&mut self) {
let new_prev = mem::replace(&mut self.token, Node::create_token());
self.prev_token = new_prev;
}
fn token_valid(&self, token: &[u8]) -> bool {
token == &self.token[..] || token == &self.prev_token[..]
}
fn create_token() -> Vec<u8> {
let mut tok = Vec::new();
let mut rng = rand::thread_rng();
for _ in 0..20 {
tok.push(rng.gen::<u8>());
}
tok
}
fn update(&mut self) {
self.state = NodeState::Good;
self.last_updated = Utc::now();
}
}
impl From<proto::Node> for Node {
fn from(node: proto::Node) -> Self {
Node::new(node.id, node.addr)
}
}
impl<'a> Into<proto::Node> for &'a Node {
fn into(self) -> proto::Node {
proto::Node {
id: self.id.clone(),
addr: self.addr,
}
}
}
/// creates an ID of value 2^(pow)
fn id_from_pow(pow: usize) -> ID {
let mut id = [0u8; 21];
let idx = 20 - pow / 8;
let offset = pow % 8;
let block = id[idx];
id[idx] = block | (1 << offset);
BigUint::from_bytes_be(&id)
}
#[cfg(test)]
mod tests {
use super::{id_from_pow, Bucket, Node, RoutingTable};
use num_bigint::BigUint;
#[test]
fn test_id_from_pow() {
assert!(id_from_pow(159) > id_from_pow(158));
assert_eq!(id_from_pow(1), BigUint::from(2u8));
assert_eq!(id_from_pow(8), BigUint::from(256u16));
}
#[test]
fn test_bucket_midpoint() {
let b = Bucket::new(BigUint::from(0u8), BigUint::from(20u8));
assert_eq!(b.midpoint(), BigUint::from(10u8));
let b = Bucket::new(BigUint::from(0u8), id_from_pow(160));
assert_eq!(b.midpoint(), id_from_pow(159));
}
#[test]
fn test_bucket_split_far() {
let mut rt = RoutingTable::new();
rt.buckets[0].nodes = vec![Node::new_test(id_from_pow(100)); 8];
rt.split_bucket(0);
assert_eq!(rt.buckets[0].nodes.len(), 8);
assert_eq!(rt.buckets[1].nodes.len(), 0);
}
#[test]
fn test_bucket_split_close() {
let mut rt = RoutingTable::new();
rt.buckets[0].nodes = vec![Node::new_test(id_from_pow(159)); 8];
rt.split_bucket(0);
assert_eq!(rt.buckets[0].nodes.len(), 0);
assert_eq!(rt.buckets[1].nodes.len(), 8);
}
}<|fim▁end|> | #[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Node {
id: ID, |
<|file_name|>serverlocal.js<|end_file_name|><|fim▁begin|>// Init express server
var express = require('express');
var app = express();
var bodyParser = require('body-parser');
var server = require('http').Server(app);
server.listen(3000);
console.log("started listenning on port 3000");
// Subscribe to lexa's router stream and update the LED accordingly<|fim▁hole|>// var led = new Gpio(18, 'out');
var sio = require('socket.io-client');
var socket = sio.connect('http://lexa.tuscale.ro');
// socket.on('message', function(msg) {
// console.log('Got a new message from the router:', msg);
// var jMsg = JSON.parse(msg);
// var newLedState = jMsg.led;
// led.writeSync(newLedState);
// });
// Init firebase
var firebase = require('firebase');
var io = require('socket.io')(server);
var firebase_app = firebase.initializeApp({
apiKey: "AIzaSyB3ZvJDuZ2HD-UppgPvY2by-GI0KnessXw",
authDomain: "rlexa-9f1ca.firebaseapp.com",
databaseURL: "https://rlexa-9f1ca.firebaseio.com",
projectId: "rlexa-9f1ca",
storageBucket: "rlexa-9f1ca.appspot.com",
messagingSenderId: "161670508523"
});
var db = firebase.database();
app.use(express.static('public'));
app.use(bodyParser.urlencoded({ extended: true }));
app.use(bodyParser.json());
// Init NFC serial link
// var SerialPort = require('serialport');
// SerialPort.list(function (err, ports) {
// ports.forEach(function (port) {
// console.log(port.comName);
// });
// });
// var port = new SerialPort('/dev/ttyACM0', {
// baudRate: 9600,
// parser: SerialPort.parsers.readline("\r\n")
// });
// port.on('open', function () {
// console.log('open');
// });
// // Monitor NFC activity
// port.on('data', function (data) {
// var tagID = data.split(' ').join('');
// console.log(data.split(' '));
// tagID = tagID.substring(0, tagID.length - 1);
// console.log(tagID + " scanned ...");
// db.ref("card/" + tagID).once("value", function(cardOwnerSnap) {
// var cardOwnerName = cardOwnerSnap.child('name').val();
// if (cardOwnerName) {
// db.ref('authed').set(cardOwnerName);
// }
// });
// // Notify our web-clients that a tag was scanned
// io.sockets.emit('idscanned', { cardid: tagID });
// });
io.on('connection', function (socket) {
console.log('Web client connected.');
});
// Define web-facing endpoints for managing the users
app.post('/add_user', function (req, res) {
var currentUser = { name: req.body.name, led: req.body.led, id: req.body.id };
var updates = {};
updates['card/' + currentUser.id] = {
name: currentUser.name,
led: currentUser.led
};
updates['users/' + currentUser.name] = currentUser;
return firebase.database().ref().update(updates);
});
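// Example request (payload shape assumed from the handler above):
//   curl -X POST http://localhost:3000/add_user \
//        -H 'Content-Type: application/json' \
//        -d '{"id":"04A224B2","name":"Mike","led":1}'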
app.get('/get_users', function (req, res) {
firebase.database().ref().once('value', function (snap) {
var dataUsers = snap.child("users");
res.send(dataUsers);
});
});
app.get('/get_authed', function (req, res) {
db.ref().once('value', function (snap) {
var isUserLogged = snap.child("authed/Mike").val();
console.log(isUserLogged);
if (isUserLogged) {
var userData = snap.child("users/Mike/led")
console.log(parseInt(userData.val()));
}
})
var name = "BLAH";
name = name.toLowerCase();
name = name.charAt(0).toUpperCase() + name.slice(1);
res.send(name);
});
// Monitor process termination and do cleanups
// process.on('SIGINT', function () {
// led.writeSync(0);
// led.unexport();
// process.exit();
// });
// db.ref('authed').once('value', function (snap) {
// var lastScannedTagOwner = snap.val();
// if (lastScannedTagOwner) {
// // Valid tag present
// request({
// url: 'http://lexa.tuscale.ro/publish',
// method: 'POST',
// json: { led: (stateName === "on" ? 1 : 0) }
// },
// function (error, response, body) {
// if (error) {
// return console.error('upload failed:', error);
// }
// // Delete scanned tag and notify user of successful op
// db.ref('authed').remove();
// that.emit(':tell', 'Hi ' + lastScannedTagOwner + '! Turning ' + stateName + ' the LED!');
// console.log('Upload successful! Server responded with:', body)
// }
// );
// } else {
// that.emit(':tell', 'Please scan your tag and try again.');
// }
// });<|fim▁end|> | // var onoff = require('onoff');
// var Gpio = onoff.Gpio; |
<|file_name|>align_faces.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Software License Agreement (BSD License)
#
# Copyright (c) 2012, Philipp Wagner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of the author nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import sys, math
from PIL import Image
import facedetect
def Distance(p1,p2):
dx = p2[0] - p1[0]
dy = p2[1] - p1[1]
return math.sqrt(dx*dx+dy*dy)
def ScaleRotateTranslate(image, angle, center = None, new_center = None, scale = None, resample=Image.BICUBIC):
if (scale is None) and (center is None):
return image.rotate(angle=angle, resample=resample)
nx,ny = x,y = center
sx=sy=1.0
if new_center:
(nx,ny) = new_center
if scale:
(sx,sy) = (scale, scale)
cosine = math.cos(angle)
sine = math.sin(angle)
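  # PIL's AFFINE transform maps *destination* coordinates back to source
  # coordinates, so the tuple below encodes the inverse of
  # rotate-by-angle, scale-by-(sx, sy), move-center-to-new_center.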
a = cosine/sx
b = sine/sx
c = x-nx*a-ny*b
d = -sine/sy
e = cosine/sy
f = y-nx*d-ny*e
return image.transform(image.size, Image.AFFINE, (a,b,c,d,e,f), resample=resample)
def CropFace(image, eye_left=(0,0), eye_right=(0,0), offset_pct=(0.2,0.2), dest_sz = (70,70)):
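  # The eye-to-eye vector fixes the roll angle, the eye distance fixes the
  # scale, and offset_pct reserves a proportional margin around the eyes in
  # the dest_sz output crop.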
# calculate offsets in original image
offset_h = math.floor(float(offset_pct[0])*dest_sz[0])
offset_v = math.floor(float(offset_pct[1])*dest_sz[1])
# get the direction
eye_direction = (eye_right[0] - eye_left[0], eye_right[1] - eye_left[1])
# calc rotation angle in radians
rotation = -math.atan2(float(eye_direction[1]),float(eye_direction[0]))
# distance between them
dist = Distance(eye_left, eye_right)
# calculate the reference eye-width
reference = dest_sz[0] - 2.0*offset_h
# scale factor
scale = float(dist)/float(reference)
# rotate original around the left eye
image = ScaleRotateTranslate(image, center=eye_left, angle=rotation)
# crop the rotated image
crop_xy = (eye_left[0] - scale*offset_h, eye_left[1] - scale*offset_v)
crop_size = (dest_sz[0]*scale, dest_sz[1]*scale)<|fim▁hole|> image = image.resize(dest_sz, Image.ANTIALIAS)
return image
if __name__ == "__main__":
f = open(sys.argv[1], 'r')
csv = open(sys.argv[2], "w")
for line in f:
lineArray = line.split(";")
fileName = lineArray[0]
label = lineArray[1]
print "aligning %s to aligned" % (fileName)
aligned_file_name = "aligned/%s" % fileName
face = facedetect.detect_faces(fileName)['face'][0]
print(face)
CropFace(Image.open(fileName), eye_left=(face[0],face[1]), eye_right=(face[2],face[1]), offset_pct=(0.08,0.08), dest_sz=(200,200)).save(aligned_file_name)
# CropFace(Image.open(fileName), eye_left=(252,364), eye_right=(420,366), offset_pct=(0.1,0.1), dest_sz=(200,200)).save(aligned_file_name)
csv.write("%s;%s" % (aligned_file_name, label))
f.close()
csv.close()<|fim▁end|> | image = image.crop((int(crop_xy[0]), int(crop_xy[1]), int(crop_xy[0]+crop_size[0]), int(crop_xy[1]+crop_size[1])))
# resize it |
<|file_name|>rawdsttran.rs<|end_file_name|><|fim▁begin|>#[doc = "Register `RAWDSTTRAN` reader"]
pub struct R(crate::R<RAWDSTTRAN_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<RAWDSTTRAN_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<RAWDSTTRAN_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<RAWDSTTRAN_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `RAWDSTTRAN` writer"]
pub struct W(crate::W<RAWDSTTRAN_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<RAWDSTTRAN_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<RAWDSTTRAN_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<RAWDSTTRAN_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Field `CH0` reader - Raw Interrupt Status for channel 0"]
pub struct CH0_R(crate::FieldReader<bool, bool>);
impl CH0_R {
pub(crate) fn new(bits: bool) -> Self {
CH0_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for CH0_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `CH0` writer - Raw Interrupt Status for channel 0"]
pub struct CH0_W<'a> {
w: &'a mut W,
}
impl<'a> CH0_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !0x01) | (value as u32 & 0x01);
self.w
}
}
#[doc = "Field `CH1` reader - Raw Interrupt Status for channel 1"]
pub struct CH1_R(crate::FieldReader<bool, bool>);
impl CH1_R {
pub(crate) fn new(bits: bool) -> Self {
CH1_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for CH1_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `CH1` writer - Raw Interrupt Status for channel 1"]
pub struct CH1_W<'a> {
w: &'a mut W,
}
impl<'a> CH1_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 1)) | ((value as u32 & 0x01) << 1);
self.w
}
}
#[doc = "Field `CH2` reader - Raw Interrupt Status for channel 2"]
pub struct CH2_R(crate::FieldReader<bool, bool>);
impl CH2_R {
pub(crate) fn new(bits: bool) -> Self {
CH2_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for CH2_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}<|fim▁hole|>impl<'a> CH2_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 2)) | ((value as u32 & 0x01) << 2);
self.w
}
}
#[doc = "Field `CH3` reader - Raw Interrupt Status for channel 3"]
pub struct CH3_R(crate::FieldReader<bool, bool>);
impl CH3_R {
pub(crate) fn new(bits: bool) -> Self {
CH3_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for CH3_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `CH3` writer - Raw Interrupt Status for channel 3"]
pub struct CH3_W<'a> {
w: &'a mut W,
}
impl<'a> CH3_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 3)) | ((value as u32 & 0x01) << 3);
self.w
}
}
#[doc = "Field `CH4` reader - Raw Interrupt Status for channel 4"]
pub struct CH4_R(crate::FieldReader<bool, bool>);
impl CH4_R {
pub(crate) fn new(bits: bool) -> Self {
CH4_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for CH4_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `CH4` writer - Raw Interrupt Status for channel 4"]
pub struct CH4_W<'a> {
w: &'a mut W,
}
impl<'a> CH4_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 4)) | ((value as u32 & 0x01) << 4);
self.w
}
}
#[doc = "Field `CH5` reader - Raw Interrupt Status for channel 5"]
pub struct CH5_R(crate::FieldReader<bool, bool>);
impl CH5_R {
pub(crate) fn new(bits: bool) -> Self {
CH5_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for CH5_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `CH5` writer - Raw Interrupt Status for channel 5"]
pub struct CH5_W<'a> {
w: &'a mut W,
}
impl<'a> CH5_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 5)) | ((value as u32 & 0x01) << 5);
self.w
}
}
#[doc = "Field `CH6` reader - Raw Interrupt Status for channel 6"]
pub struct CH6_R(crate::FieldReader<bool, bool>);
impl CH6_R {
pub(crate) fn new(bits: bool) -> Self {
CH6_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for CH6_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `CH6` writer - Raw Interrupt Status for channel 6"]
pub struct CH6_W<'a> {
w: &'a mut W,
}
impl<'a> CH6_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 6)) | ((value as u32 & 0x01) << 6);
self.w
}
}
#[doc = "Field `CH7` reader - Raw Interrupt Status for channel 7"]
pub struct CH7_R(crate::FieldReader<bool, bool>);
impl CH7_R {
pub(crate) fn new(bits: bool) -> Self {
CH7_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for CH7_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `CH7` writer - Raw Interrupt Status for channel 7"]
pub struct CH7_W<'a> {
w: &'a mut W,
}
impl<'a> CH7_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 7)) | ((value as u32 & 0x01) << 7);
self.w
}
}
impl R {
#[doc = "Bit 0 - Raw Interrupt Status for channel 0"]
#[inline(always)]
pub fn ch0(&self) -> CH0_R {
CH0_R::new((self.bits & 0x01) != 0)
}
#[doc = "Bit 1 - Raw Interrupt Status for channel 1"]
#[inline(always)]
pub fn ch1(&self) -> CH1_R {
CH1_R::new(((self.bits >> 1) & 0x01) != 0)
}
#[doc = "Bit 2 - Raw Interrupt Status for channel 2"]
#[inline(always)]
pub fn ch2(&self) -> CH2_R {
CH2_R::new(((self.bits >> 2) & 0x01) != 0)
}
#[doc = "Bit 3 - Raw Interrupt Status for channel 3"]
#[inline(always)]
pub fn ch3(&self) -> CH3_R {
CH3_R::new(((self.bits >> 3) & 0x01) != 0)
}
#[doc = "Bit 4 - Raw Interrupt Status for channel 4"]
#[inline(always)]
pub fn ch4(&self) -> CH4_R {
CH4_R::new(((self.bits >> 4) & 0x01) != 0)
}
#[doc = "Bit 5 - Raw Interrupt Status for channel 5"]
#[inline(always)]
pub fn ch5(&self) -> CH5_R {
CH5_R::new(((self.bits >> 5) & 0x01) != 0)
}
#[doc = "Bit 6 - Raw Interrupt Status for channel 6"]
#[inline(always)]
pub fn ch6(&self) -> CH6_R {
CH6_R::new(((self.bits >> 6) & 0x01) != 0)
}
#[doc = "Bit 7 - Raw Interrupt Status for channel 7"]
#[inline(always)]
pub fn ch7(&self) -> CH7_R {
CH7_R::new(((self.bits >> 7) & 0x01) != 0)
}
}
impl W {
#[doc = "Bit 0 - Raw Interrupt Status for channel 0"]
#[inline(always)]
pub fn ch0(&mut self) -> CH0_W {
CH0_W { w: self }
}
#[doc = "Bit 1 - Raw Interrupt Status for channel 1"]
#[inline(always)]
pub fn ch1(&mut self) -> CH1_W {
CH1_W { w: self }
}
#[doc = "Bit 2 - Raw Interrupt Status for channel 2"]
#[inline(always)]
pub fn ch2(&mut self) -> CH2_W {
CH2_W { w: self }
}
#[doc = "Bit 3 - Raw Interrupt Status for channel 3"]
#[inline(always)]
pub fn ch3(&mut self) -> CH3_W {
CH3_W { w: self }
}
#[doc = "Bit 4 - Raw Interrupt Status for channel 4"]
#[inline(always)]
pub fn ch4(&mut self) -> CH4_W {
CH4_W { w: self }
}
#[doc = "Bit 5 - Raw Interrupt Status for channel 5"]
#[inline(always)]
pub fn ch5(&mut self) -> CH5_W {
CH5_W { w: self }
}
#[doc = "Bit 6 - Raw Interrupt Status for channel 6"]
#[inline(always)]
pub fn ch6(&mut self) -> CH6_W {
CH6_W { w: self }
}
#[doc = "Bit 7 - Raw Interrupt Status for channel 7"]
#[inline(always)]
pub fn ch7(&mut self) -> CH7_W {
CH7_W { w: self }
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "Raw IntBlock Status\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [rawdsttran](index.html) module"]
pub struct RAWDSTTRAN_SPEC;
impl crate::RegisterSpec for RAWDSTTRAN_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [rawdsttran::R](R) reader structure"]
impl crate::Readable for RAWDSTTRAN_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [rawdsttran::W](W) writer structure"]
impl crate::Writable for RAWDSTTRAN_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets RAWDSTTRAN to value 0"]
impl crate::Resettable for RAWDSTTRAN_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}<|fim▁end|> | #[doc = "Field `CH2` writer - Raw Interrupt Status for channel 2"]
pub struct CH2_W<'a> {
w: &'a mut W,
} |
<|file_name|>watermark.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
# thumbor imaging service
# https://github.com/globocom/thumbor/wiki
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 globo.com [email protected]
from os.path import splitext
from thumbor.ext.filters import _alpha
from thumbor.filters import BaseFilter, filter_method
import tornado.gen
class Filter(BaseFilter):
regex = r'(?:watermark\((?P<url>.*?),(?P<x>-?[\d]*?),(?P<y>-?[\d]*?),(?P<alpha>[\d]*?)\))'
def on_image_ready(self, buffer):
self.watermark_engine.load(buffer, self.extension)
self.watermark_engine.enable_alpha()
mode, data = self.watermark_engine.image_data_as_rgb()
imgdata = _alpha.apply(mode,
self.alpha,
data)
self.watermark_engine.set_image_data(imgdata)
inv_x = self.x[0] == '-'
inv_y = self.y[0] == '-'
x, y = int(self.x), int(self.y)
sz = self.engine.size
watermark_sz = self.watermark_engine.size
if inv_x:
x = (sz[0] - watermark_sz[0]) + x
if inv_y:
y = (sz[1] - watermark_sz[1]) + y
self.engine.paste(self.watermark_engine, (x, y), merge=True)
self.callback()
def on_fetch_done(self, buffer):
self.watermark_engine.load(buffer, self.extension)
self.storage.put(self.url, self.watermark_engine.read())
self.storage.put_crypto(self.url)
self.on_image_ready(buffer)
<|fim▁hole|> @filter_method(BaseFilter.String, r'-?[\d]+', r'-?[\d]+', BaseFilter.PositiveNumber, async=True)
@tornado.gen.coroutine
def watermark(self, callback, url, x, y, alpha):
self.url = url
self.x = x
self.y = y
self.alpha = alpha
self.callback = callback
self.extension = splitext(self.url)[-1].lower()
self.watermark_engine = self.context.modules.engine.__class__(self.context)
self.storage = self.context.modules.storage
buffer = yield tornado.gen.maybe_future(self.storage.get(self.url))
if buffer is not None:
self.on_image_ready(buffer)
else:
self.context.modules.loader.load(self.context, self.url, self.on_fetch_done)<|fim▁end|> | |
<|file_name|>utils.py<|end_file_name|><|fim▁begin|># Copyright 2015 Cloudbase Solutions.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from eventlet.green import subprocess
from eventlet import greenthread
from neutron_lib.utils import helpers
from oslo_log import log as logging
from oslo_utils import encodeutils
from neutron._i18n import _
from neutron.common import utils
LOG = logging.getLogger(__name__)
def create_process(cmd, addl_env=None):
cmd = list(map(str, cmd))
LOG.debug("Running command: %s", cmd)
env = os.environ.copy()
if addl_env:
env.update(addl_env)
obj = utils.subprocess_popen(cmd, shell=False,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=env,
preexec_fn=None,
close_fds=False)
return obj, cmd
def execute(cmd, process_input=None, addl_env=None,
check_exit_code=True, return_stderr=False, log_fail_as_error=True,
extra_ok_codes=None, run_as_root=False, do_decode=True):
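    """Run cmd in a subprocess, optionally feeding process_input to stdin.

    Returns UTF-8-decoded stdout (plus stderr when return_stderr is True).
    Raises RuntimeError on a non-zero exit status unless check_exit_code is
    False or the status is listed in extra_ok_codes.
    """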
try:
if process_input is not None:
_process_input = encodeutils.to_utf8(process_input)
else:
_process_input = None
obj, cmd = create_process(cmd, addl_env=addl_env)
_stdout, _stderr = obj.communicate(_process_input)
obj.stdin.close()
_stdout = helpers.safe_decode_utf8(_stdout)
_stderr = helpers.safe_decode_utf8(_stderr)
m = _("\nCommand: %(cmd)s\nExit code: %(code)s\nStdin: %(stdin)s\n"
"Stdout: %(stdout)s\nStderr: %(stderr)s") % \
{'cmd': cmd,
'code': obj.returncode,
'stdin': process_input or '',
'stdout': _stdout,
'stderr': _stderr}
extra_ok_codes = extra_ok_codes or []<|fim▁hole|> log_msg = m.strip().replace('\n', '; ')
if obj.returncode and log_fail_as_error:
LOG.error(log_msg)
else:
LOG.debug(log_msg)
if obj.returncode and check_exit_code:
raise RuntimeError(m)
finally:
# NOTE(termie): this appears to be necessary to let the subprocess
# call clean something up in between calls, without
# it two execute calls in a row hangs the second one
greenthread.sleep(0)
return (_stdout, _stderr) if return_stderr else _stdout<|fim▁end|> | if obj.returncode and obj.returncode in extra_ok_codes:
obj.returncode = None
|
<|file_name|>MessageCodec.java<|end_file_name|><|fim▁begin|>package net.lightstone.net.codec;
import java.io.IOException;
import net.lightstone.msg.Message;
import org.jboss.netty.buffer.ChannelBuffer;
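/**
 * Pairs an opcode with the {@link Message} subclass it handles; concrete
 * codecs implement the wire encoding and decoding for that message type.
 */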
public abstract class MessageCodec<T extends Message> {
private final Class<T> clazz;
private final int opcode;
public MessageCodec(Class<T> clazz, int opcode) {
this.clazz = clazz;
this.opcode = opcode;
}
public final Class<T> getType() {
return clazz;
}
public final int getOpcode() {
return opcode;<|fim▁hole|>
public abstract T decode(ChannelBuffer buffer) throws IOException;
}<|fim▁end|> | }
public abstract ChannelBuffer encode(T message) throws IOException; |
<|file_name|>csv_test.go<|end_file_name|><|fim▁begin|>// Copyright 2014 The ezgliding Authors.
//<|fim▁hole|>//
// ezgliding is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// ezgliding is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with ezgliding. If not, see <http://www.gnu.org/licenses/>.
//
// Author: Ricardo Rocha <[email protected]>
package util
import (
"reflect"
"testing"
"time"
"github.com/rochaporto/ezgliding/airfield"
)
type CSV2AirfieldTest struct {
t string
in string
r []airfield.Airfield
err bool
}
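// Table-driven cases: each entry feeds one CSV document through CSV2Struct
// and is checked against the expected airfields or an expected error.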
var csv2AirfieldTests = []CSV2AirfieldTest{
{
"simple parse",
`
ID,ShortName,Name,Region,ICAO,Flags,Catalog,Length,Elevation,Runway,Frequency,Latitude,Longitude,Update
HABER,HABER,HABERE POC69,FR,,1032,0,0,1113,0119,122.5,46.270,6.463,0001-01-01 00:00:00 +0000 UTC
`,
[]airfield.Airfield{
airfield.Airfield{ID: "HABER", ShortName: "HABER", Name: "HABERE POC69",
Region: "FR", ICAO: "", Flags: 1032, Catalog: 0, Length: 0, Elevation: 1113,
Runway: "0119", Frequency: 122.5, Latitude: 46.270, Longitude: 6.463,
Update: time.Time{}},
},
false,
},
{
"multiline parse",
`
ID,ShortName,Name,Region,ICAO,Flags,Catalog,Length,Elevation,Runway,Frequency,Latitude,Longitude,Update
LSGG,GENEV,GENEVE COINTR,CH,LSGG,64,0,3880,430,0523,118.7,46.238,6.109,0001-01-01 00:00:00 +0000 UTC
LSGB,BEX,BEX,CH,LSGB,1024,0,0,399,1533,122.15,46.258,6.986,0001-01-01 00:00:00 +0000 UTC
`,
[]airfield.Airfield{
airfield.Airfield{ID: "LSGG", ShortName: "GENEV", Name: "GENEVE COINTR",
Region: "CH", ICAO: "LSGG", Flags: 64, Catalog: 0, Length: 3880, Elevation: 430,
Runway: "0523", Frequency: 118.7, Latitude: 46.238, Longitude: 6.109,
Update: time.Time{}},
airfield.Airfield{ID: "LSGB", ShortName: "BEX", Name: "BEX",
Region: "CH", ICAO: "LSGB", Flags: 1024, Catalog: 0, Length: 0, Elevation: 399,
Runway: "1533", Frequency: 122.15, Latitude: 46.258, Longitude: 6.986,
Update: time.Time{}},
},
false,
},
{
"parse with invalid csv format",
`
ID,ShortName,Name,Region,ICAO,Flags,Catalog,Length,Elevation,Runway,Frequency,Latitude,Longitude,Update
HABER,HABER,HABERE POC69,FR,,1032,0,0,1113,0119,122.5,46.270,6.463,0001-01-01 00:00:00 +0000 UTC,a
`,
[]airfield.Airfield{airfield.Airfield{}},
true,
},
{
"parse with invalid flags",
`
ID,ShortName,Name,Region,ICAO,Flags,Catalog,Length,Elevation,Runway,Frequency,Latitude,Longitude,Update
HABER,HABER,HABERE POC69,FR,,badflags,0,0,1113,0119,122.5,46.270,6.463,0001-01-01 00:00:00 +0000 UTC
`,
[]airfield.Airfield{airfield.Airfield{}},
true,
},
{
"parse with invalid catalog",
`
ID,ShortName,Name,Region,ICAO,Flags,Catalog,Length,Elevation,Runway,Frequency,Latitude,Longitude,Update
HABER,HABER,HABERE POC69,FR,,1032,badcatalog,0,1113,0119,122.5,46.270,6.463,0001-01-01 00:00:00 +0000 UTC
`,
[]airfield.Airfield{airfield.Airfield{}},
true,
},
{
"parse with invalid length",
`
ID,ShortName,Name,Region,ICAO,Flags,Catalog,Length,Elevation,Runway,Frequency,Latitude,Longitude,Update
HABER,HABER,HABERE POC69,FR,,1032,0,badlength,1113,0119,122.5,46.270,6.463,0001-01-01 00:00:00 +0000 UTC
`,
[]airfield.Airfield{airfield.Airfield{}},
true,
},
{
"parse with invalid elevation",
`
ID,ShortName,Name,Region,ICAO,Flags,Catalog,Length,Elevation,Runway,Frequency,Latitude,Longitude,Update
HABER,HABER,HABERE POC69,FR,,1032,0,0,badelevation,0119,122.5,46.270,6.463,0001-01-01 00:00:00 +0000 UTC
`,
[]airfield.Airfield{airfield.Airfield{}},
true,
},
{
"parse with invalid frequency",
`
ID,ShortName,Name,Region,ICAO,Flags,Catalog,Length,Elevation,Runway,Frequency,Latitude,Longitude,Update
HABER,HABER,HABERE POC69,FR,,1032,0,0,1113,0119,badfrequency,46.270,6.463,0001-01-01 00:00:00 +0000 UTC
`,
[]airfield.Airfield{airfield.Airfield{}},
true,
},
{
"parse with no records",
"",
[]airfield.Airfield{},
true,
},
}
func TestCSV2Airfield(t *testing.T) {
for _, test := range csv2AirfieldTests {
result, err := CSV2Struct(test.in, reflect.ValueOf([]airfield.Airfield{}).Type(),
reflect.ValueOf(airfield.Airfield{}).Type())
if err != nil && test.err {
continue
} else if err != nil {
t.Errorf("failed to parse csv in test %v :: %v", test.t, err)
continue
}
resultv := result.Interface().([]airfield.Airfield)
if len(resultv) != len(test.r) {
t.Errorf("%v :: expected %v but got %v airfields", test.t, len(test.r), len(resultv))
continue
}
for i, airfield := range resultv {
if airfield != test.r[i] {
t.Errorf("%v :: expected %v but got %v", test.t, test.r[i], airfield)
continue
}
}
}
}
type Airfield2CSVTest struct {
t string
in []airfield.Airfield
csv string
err bool
}
var airfield2CSVTests = []Airfield2CSVTest{
{
"simple conversion",
[]airfield.Airfield{
airfield.Airfield{ID: "HABER", ShortName: "HABER", Name: "HABERE POC69",
Region: "FR", ICAO: "", Flags: 1032, Catalog: 0, Length: 0, Elevation: 1113,
Runway: "0119", Frequency: 122.5, Latitude: 46.270, Longitude: 6.463,
Update: time.Time{}},
},
`ID,ShortName,Name,Region,ICAO,Flags,Catalog,Length,Elevation,Runway,Frequency,Latitude,Longitude,Update
HABER,HABER,HABERE POC69,FR,,1032,0,0,1113,0119,122.5,46.27,6.463,0001-01-01 00:00:00 +0000 UTC
`,
false,
},
{
"conversion of empty array",
[]airfield.Airfield{},
``,
false,
},
{
"conversion with all empty",
[]airfield.Airfield{
airfield.Airfield{ID: "", ShortName: "", Name: "",
Region: "", ICAO: "", Flags: 0, Catalog: 0, Length: 0, Elevation: 0,
Runway: "", Frequency: 0, Latitude: 0.0, Longitude: 0.0, Update: time.Time{}},
},
`ID,ShortName,Name,Region,ICAO,Flags,Catalog,Length,Elevation,Runway,Frequency,Latitude,Longitude,Update
,,,,,0,0,0,0,,0,0,0,0001-01-01 00:00:00 +0000 UTC
`,
false,
},
}
func TestAirfield2CSV(t *testing.T) {
for _, test := range airfield2CSVTests {
result := Struct2CSV(test.in)
if result != test.csv {
t.Errorf("expected\n%v\ngot\n%v", test.csv, result)
continue
}
}
}<|fim▁end|> | // This file is part of ezgliding. |
<|file_name|>gather_moves.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
* Computes moves.
*/
use mc = middle::mem_categorization;
use middle::borrowck::*;
use middle::borrowck::move_data::*;
use middle::moves;
use middle::ty;
use syntax::ast;
use syntax::ast_util;
use syntax::codemap::span;
use util::ppaux::{UserString};
pub fn gather_decl(bccx: @BorrowckCtxt,
move_data: &mut MoveData,
decl_id: ast::node_id,
_decl_span: span,
var_id: ast::node_id) {
let loan_path = @LpVar(var_id);
move_data.add_move(bccx.tcx, loan_path, decl_id, Declared);
}
pub fn gather_move_from_expr(bccx: @BorrowckCtxt,
move_data: &mut MoveData,
move_expr: @ast::expr,
cmt: mc::cmt) {
gather_move_from_expr_or_pat(bccx, move_data, move_expr.id,
MoveExpr(move_expr), cmt);
}
pub fn gather_move_from_pat(bccx: @BorrowckCtxt,
move_data: &mut MoveData,
move_pat: @ast::pat,
cmt: mc::cmt) {
gather_move_from_expr_or_pat(bccx, move_data, move_pat.id,
MovePat(move_pat), cmt);
}
fn gather_move_from_expr_or_pat(bccx: @BorrowckCtxt,
move_data: &mut MoveData,
move_id: ast::node_id,
move_kind: MoveKind,
cmt: mc::cmt) {
if !check_is_legal_to_move_from(bccx, cmt, cmt) {
return;
}
match opt_loan_path(cmt) {
Some(loan_path) => {
move_data.add_move(bccx.tcx, loan_path, move_id, move_kind);
}
None => {
// move from rvalue or unsafe pointer, hence ok
}
}
}
pub fn gather_captures(bccx: @BorrowckCtxt,
move_data: &mut MoveData,
closure_expr: @ast::expr) {
let captured_vars = bccx.capture_map.get(&closure_expr.id);
for captured_vars.iter().advance |captured_var| {
match captured_var.mode {
moves::CapMove => {
let fvar_id = ast_util::def_id_of_def(captured_var.def).node;
let loan_path = @LpVar(fvar_id);
move_data.add_move(bccx.tcx, loan_path, closure_expr.id,
Captured(closure_expr));
}
moves::CapCopy | moves::CapRef => {}
}
}
}
pub fn gather_assignment(bccx: @BorrowckCtxt,
move_data: &mut MoveData,
assignment_id: ast::node_id,
assignment_span: span,<|fim▁hole|> assignee_id: ast::node_id) {
move_data.add_assignment(bccx.tcx,
assignee_loan_path,
assignment_id,
assignment_span,
assignee_id);
}
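// Checks whether `cmt` may legally be moved from: rules out moves out of
// borrowed or managed pointers, captured upvars of many-shot closures, and
// interiors of types with destructors; reports an error and returns false
// when the move is illegal.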
fn check_is_legal_to_move_from(bccx: @BorrowckCtxt,
cmt0: mc::cmt,
cmt: mc::cmt) -> bool {
match cmt.cat {
mc::cat_implicit_self(*) |
mc::cat_deref(_, _, mc::region_ptr(*)) |
mc::cat_deref(_, _, mc::gc_ptr(*)) |
mc::cat_deref(_, _, mc::unsafe_ptr(*)) => {
bccx.span_err(
cmt0.span,
fmt!("cannot move out of %s",
bccx.cmt_to_str(cmt)));
false
}
// These are separate from the above cases for a better error message.
mc::cat_stack_upvar(*) |
mc::cat_copied_upvar(mc::CopiedUpvar { onceness: ast::Many, _ }) => {
let once_hint = if bccx.tcx.sess.once_fns() {
" (unless the destination closure type is `once fn')"
} else {
""
};
bccx.span_err(
cmt0.span,
fmt!("cannot move out of %s%s", bccx.cmt_to_str(cmt), once_hint));
false
}
// Can move out of captured upvars only if the destination closure
// type is 'once'. 1-shot stack closures emit the copied_upvar form
// (see mem_categorization.rs).
mc::cat_copied_upvar(mc::CopiedUpvar { onceness: ast::Once, _ }) => {
true
}
// It seems strange to allow a move out of a static item,
// but what happens in practice is that you have a
// reference to a constant with a type that should be
// moved, like `None::<~int>`. The type of this constant
// is technically `Option<~int>`, which moves, but we know
// that the content of static items will never actually
// contain allocated pointers, so we can just memcpy it.
// Since static items can never have allocated memory,
// this is ok. For now anyhow.
mc::cat_static_item => {
true
}
mc::cat_rvalue(*) |
mc::cat_local(*) |
mc::cat_arg(*) |
mc::cat_self(*) => {
true
}
mc::cat_downcast(b) |
mc::cat_interior(b, _) => {
match ty::get(b.ty).sty {
ty::ty_struct(did, _) | ty::ty_enum(did, _) => {
if ty::has_dtor(bccx.tcx, did) {
bccx.span_err(
cmt0.span,
fmt!("cannot move out of type `%s`, \
which defines the `Drop` trait",
b.ty.user_string(bccx.tcx)));
false
} else {
check_is_legal_to_move_from(bccx, cmt0, b)
}
}
_ => {
check_is_legal_to_move_from(bccx, cmt0, b)
}
}
}
mc::cat_deref(b, _, mc::uniq_ptr(*)) |
mc::cat_discr(b, _) => {
check_is_legal_to_move_from(bccx, cmt0, b)
}
}
}<|fim▁end|> | assignee_loan_path: @LoanPath, |
<|file_name|>LogicalMetricColumn.java<|end_file_name|><|fim▁begin|>// Copyright 2016 Yahoo Inc.
// Licensed under the terms of the Apache license. Please see LICENSE.md file distributed with this work for terms.
package com.yahoo.bard.webservice.data.metric;
/**
* LogicalMetricColumn.
*/
public class LogicalMetricColumn extends MetricColumn {
private final LogicalMetric metric;
/**
* Constructor.
*<|fim▁hole|> * @deprecated because LogicalMetricColumn is really only a thing for LogicalTable, so there's no reason for there
* to be an alias on the LogicalMetric inside the LogicalTableSchema.
*/
@Deprecated
public LogicalMetricColumn(String name, LogicalMetric metric) {
super(name);
this.metric = metric;
}
/**
* Constructor.
*
* @param metric The logical metric
*/
public LogicalMetricColumn(LogicalMetric metric) {
super(metric.getName());
this.metric = metric;
}
/**
* Getter for a logical metric.
*
* @return logical metric
*/
public LogicalMetric getLogicalMetric() {
return this.metric;
}
@Override
public String toString() {
return "{logicalMetric:'" + getName() + "'}";
}
}<|fim▁end|> | * @param name The column name
* @param metric The logical metric
* |
<|file_name|>output.js<|end_file_name|><|fim▁begin|>var _privateMethod = babelHelpers.classPrivateFieldLooseKey("privateMethod");
class Foo {
constructor() {
Object.defineProperty(this, _privateMethod, {<|fim▁hole|> value: _privateMethod2
});
this.publicField = babelHelpers.classPrivateFieldLooseBase(this, _privateMethod)[_privateMethod]();
}
}
var _privateMethod2 = function _privateMethod2() {
return 42;
};<|fim▁end|> | |
<|file_name|>legacy.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
#
# Easy AVR USB Keyboard Firmware Keymapper
# Copyright (C) 2018 David Howland
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
"""This module contains code to load legacy user save data."""
import pickle
import re
from .build import NUM_MACROS, NULL_SYMBOL, key_mode_map, led_modes, led_assignments
from .scancodes import scancodes
from .userdata import Map
legacy_layers = ["Default", "Layer 1", "Layer 2", "Layer 3", "Layer 4",
"Layer 5", "Layer 6", "Layer 7", "Layer 8", "Layer 9"]
class LegacySaveFileException(Exception):
"""Raised when an error is encountered while loading a legacy layout file."""
pass
def load_legacy(user_data, datfile):
"""Load the legacy .dat save file from the path given by `datfile` and populate
the UserData object given by `user_data`.
"""
legacy_data = open_legacy(datfile)
convert_legacy(user_data, legacy_data)
def open_legacy(datfile):
"""Opens and decodes the pickled data in a legacy .dat save file. `datfile`
is a path to the file. The function returns a dictionary with an item for each
component of the legacy file.
"""
with open(datfile, 'rb') as fdin:
data = pickle.load(fdin)
if len(data) < 12:
raise LegacySaveFileException("The .dat file is either broken or too old.")
unique_id = data[1]
maps = data[2]
macros = data[3]
actions = data[4]
modes = data[5]
wmods = data[6]
layout_mod = data[8]
leds = data[9]
    if len(data) > 12:  # both data[11] and data[12] must be present
advancedleds = data[11]
useadvancedleds = data[12]
else:
advancedleds = [(255, 0)] * len(led_assignments)
useadvancedleds = False
if len(data) > 13:
ledlayers = data[13]
else:<|fim▁hole|> for kmap in (maps, actions, modes, wmods):
if 'Fn' in kmap:
kmap['Layer 1'] = kmap['Fn']
del kmap['Fn']
# fixes for older versions (renamed/removed scancodes)
for layer in maps:
for row in maps[layer]:
for i, k in enumerate(row):
if k == "SCANCODE_DEBUG":
row[i] = "SCANCODE_CONFIG"
elif k == "SCANCODE_LOCKINGCAPS":
row[i] = "HID_KEYBOARD_SC_LOCKING_CAPS_LOCK"
elif k == "SCANCODE_FN":
row[i] = "SCANCODE_FN1"
elif k not in scancodes:
row[i] = NULL_SYMBOL
# fixes for older versions (renamed leds)
leds = ['Any Fn Active' if (x == 'Fn Lock') else x for x in leds]
leds = ['Fn1 Active' if (x == 'Fn Active') else x for x in leds]
# fixes for older versions (added macros)
extention = NUM_MACROS - len(macros)
if extention > 0:
macros.extend([''] * extention)
return {
'unique_id': unique_id,
'layout_mod': layout_mod,
'maps': maps,
'actions': actions,
'modes': modes,
'wmods': wmods,
'macros': macros,
'leds': leds,
'advancedleds': advancedleds,
'useadvancedleds': useadvancedleds,
'ledlayers': ledlayers,
}
def convert_legacy(user_data, legacy_data):
"""Converts the data from a legacy save file into a `user_data` object. `user_data`
should be a fresh instance of UserData and `legacy_data` is the output from a
successful call to open_legacy().
"""
# can't save to legacy file
user_data.path = None
# get good defaults to start from
user_data.new(legacy_data['unique_id'], legacy_data['layout_mod'])
# transmogrify the keymap data
for li, layer in enumerate(legacy_layers):
for ri, rowdef in enumerate(user_data.config.keyboard_definition):
if isinstance(rowdef, int):
continue
for ci, keydef in enumerate(rowdef):
keydim, matrix, _ = keydef
if user_data.layout_mod:
mod_map = user_data.config.alt_layouts[user_data.layout_mod]
keydim = mod_map.get((ri, ci), keydim)
if isinstance(keydim, tuple) and isinstance(matrix, tuple):
row, col = matrix
map = Map(legacy_data['maps'][layer][ri][ci],
key_mode_map[legacy_data['modes'][layer][ri][ci]],
legacy_data['actions'][layer][ri][ci],
legacy_data['wmods'][layer][ri][ci])
user_data.keymap[li][row][col] = map
# translate the macro data
user_data.macros = [translate_macro(macro) for macro in legacy_data['macros']]
# adapt the led data
user_data.led_modes = []
for old_assignment in legacy_data['leds']:
if old_assignment == 'Backlight':
user_data.led_modes.append(led_modes.index('Backlight'))
elif old_assignment in led_assignments:
user_data.led_modes.append(led_modes.index('Indicator'))
else:
user_data.led_modes.append(led_modes.index('Disabled'))
if legacy_data['useadvancedleds']:
for i, func in enumerate(legacy_data['advancedleds']):
led_id, _ = func
if led_id < len(user_data.led_modes):
user_data.led_modes[led_id] = led_modes.index('Indicator')
user_data.led_funcs[i] = func
# copy the rest
user_data.led_layers = legacy_data['ledlayers']
def translate_macro(input):
"""Translate the escape sequences in the original macro mini-language into
the equivalent representations in the new macro mini-language.
"""
# remove the special characters
input = input.replace("\\\\,", "\\")
input = input.replace("\\n,", "\n")
input = input.replace("\\t,", "\t")
# escape any $ symbols
input = input.replace("$", "$$")
# convert keyword format
input = re.sub(r'\\([A-Z0-9_]+\()', r'$\1', input)
# convert function/mod format
input = re.sub(r'\\([A-Z0-9_]+),', r'${\1}', input)
return input<|fim▁end|> | ledlayers = [0, 0, 0, 0, 0]
# fixes for older versions (renamed layers) |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>// Crypto challenge Set1 / Challenge 1
// Convert hex to base64<|fim▁hole|>fn main() {
let args = std::os::args();
if args.len() != 2 {
println!("USAGE: challenge1 HEX_ENCODED_STRING");
} else {
let input = args[1].as_slice();
match codec::from_hex(input) {
Err(msg) => println!("Invalid hex string: {}", msg),
Ok(binary) => println!("{}", codec::to_base64(binary.as_slice()))
}
}
}
#[test]
fn challenge1() {
let input = "49276d206b696c6c696e6720796f757220627261696e206c696b65206120706f69736f6e6f7573206d757368726f6f6d";
let output = codec::to_base64( codec::from_hex(input).ok().unwrap().as_slice() );
assert_eq!(output, String::from_str("SSdtIGtpbGxpbmcgeW91ciBicmFpbiBsaWtlIGEgcG9pc29ub3VzIG11c2hyb29t"));
}<|fim▁end|> |
extern crate codec;
#[cfg(not(test))] |
<|file_name|>l3_dvr_db.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 OpenStack Foundation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
import six
from neutron.api.v2 import attributes
from neutron.callbacks import events
from neutron.callbacks import exceptions
from neutron.callbacks import registry
from neutron.callbacks import resources
from neutron.common import constants as l3_const
from neutron.common import exceptions as n_exc
from neutron.common import utils as n_utils
from neutron.db import l3_attrs_db
from neutron.db import l3_db
from neutron.db import l3_dvrscheduler_db as l3_dvrsched_db
from neutron.db import models_v2
from neutron.extensions import l3
from neutron.extensions import portbindings
from neutron.i18n import _LI
from neutron import manager
from neutron.plugins.common import constants
from neutron.plugins.common import utils as p_utils
LOG = logging.getLogger(__name__)
router_distributed_opts = [
cfg.BoolOpt('router_distributed',
default=False,
help=_("System-wide flag to determine the type of router "
"that tenants can create. Only admin can override.")),
]
cfg.CONF.register_opts(router_distributed_opts)
class L3_NAT_with_dvr_db_mixin(l3_db.L3_NAT_db_mixin,
l3_attrs_db.ExtraAttributesMixin):
"""Mixin class to enable DVR support."""
router_device_owners = (
l3_db.L3_NAT_db_mixin.router_device_owners +
(l3_const.DEVICE_OWNER_DVR_INTERFACE,
l3_const.DEVICE_OWNER_ROUTER_SNAT,
l3_const.DEVICE_OWNER_AGENT_GW))
extra_attributes = (
l3_attrs_db.ExtraAttributesMixin.extra_attributes + [{
'name': "distributed",
'default': cfg.CONF.router_distributed
}])
def _create_router_db(self, context, router, tenant_id):
"""Create a router db object with dvr additions."""
router['distributed'] = is_distributed_router(router)
with context.session.begin(subtransactions=True):
router_db = super(
L3_NAT_with_dvr_db_mixin, self)._create_router_db(
context, router, tenant_id)
self._process_extra_attr_router_create(context, router_db, router)
return router_db
def _validate_router_migration(self, context, router_db, router_res):
"""Allow centralized -> distributed state transition only."""
if (router_db.extra_attributes.distributed and
router_res.get('distributed') is False):
LOG.info(_LI("Centralizing distributed router %s "
"is not supported"), router_db['id'])
raise n_exc.NotSupported(msg=_("Migration from distributed router "
"to centralized"))
elif (not router_db.extra_attributes.distributed and
router_res.get('distributed')):
# router should be disabled in order for upgrade
if router_db.admin_state_up:
msg = _('Cannot upgrade active router to distributed. Please '
'set router admin_state_up to False prior to upgrade.')
raise n_exc.BadRequest(resource='router', msg=msg)
# Notify advanced services of the imminent state transition
# for the router.
try:
kwargs = {'context': context, 'router': router_db}
registry.notify(
resources.ROUTER, events.BEFORE_UPDATE, self, **kwargs)
except exceptions.CallbackFailure as e:
with excutils.save_and_reraise_exception():
# NOTE(armax): preserve old check's behavior
if len(e.errors) == 1:
raise e.errors[0].error
raise l3.RouterInUse(router_id=router_db['id'],
reason=e)
def _update_distributed_attr(
self, context, router_id, router_db, data, gw_info):
"""Update the model to support the dvr case of a router."""
if data.get('distributed'):
old_owner = l3_const.DEVICE_OWNER_ROUTER_INTF
new_owner = l3_const.DEVICE_OWNER_DVR_INTERFACE
for rp in router_db.attached_ports.filter_by(port_type=old_owner):
rp.port_type = new_owner
rp.port.device_owner = new_owner
def _update_router_db(self, context, router_id, data, gw_info):
with context.session.begin(subtransactions=True):
router_db = super(
L3_NAT_with_dvr_db_mixin, self)._update_router_db(
context, router_id, data, gw_info)
migrating_to_distributed = (
not router_db.extra_attributes.distributed and
data.get('distributed') is True)
self._validate_router_migration(context, router_db, data)
router_db.extra_attributes.update(data)
self._update_distributed_attr(
context, router_id, router_db, data, gw_info)
if migrating_to_distributed:
if router_db['gw_port_id']:
# If the Legacy router is getting migrated to a DVR
# router, make sure to create corresponding
# snat interface ports that are to be consumed by
# the Service Node.
if not self._create_snat_intf_ports_if_not_exists(
context.elevated(), router_db):
LOG.debug("SNAT interface ports not created: %s",
router_db['id'])
cur_agents = self.list_l3_agents_hosting_router(
context, router_db['id'])['agents']
for agent in cur_agents:
self._unbind_router(context, router_db['id'],
agent['id'])
return router_db
def _delete_current_gw_port(self, context, router_id, router, new_network):
"""
Overriden here to handle deletion of dvr internal ports.
If there is a valid router update with gateway port to be deleted,
then go ahead and delete the csnat ports and the floatingip
agent gateway port associated with the dvr router.
"""
gw_ext_net_id = (
router.gw_port['network_id'] if router.gw_port else None)
super(L3_NAT_with_dvr_db_mixin,
self)._delete_current_gw_port(context, router_id,
router, new_network)
if (is_distributed_router(router) and
gw_ext_net_id != new_network):
self.delete_csnat_router_interface_ports(
context.elevated(), router)
# NOTE(Swami): Delete the Floatingip agent gateway port
# on all hosts when it is the last gateway port in the
# given external network.
filters = {'network_id': [gw_ext_net_id],
'device_owner': [l3_const.DEVICE_OWNER_ROUTER_GW]}
ext_net_gw_ports = self._core_plugin.get_ports(
context.elevated(), filters)
if not ext_net_gw_ports:
self.delete_floatingip_agent_gateway_port(
context.elevated(), None, gw_ext_net_id)
def _create_gw_port(self, context, router_id, router, new_network,
ext_ips):
super(L3_NAT_with_dvr_db_mixin,
self)._create_gw_port(context, router_id, router, new_network,
ext_ips)
# Make sure that the gateway port exists before creating the
# snat interface ports for distributed router.
if router.extra_attributes.distributed and router.gw_port:
snat_p_list = self._create_snat_intf_ports_if_not_exists(
context.elevated(), router)
if not snat_p_list:
LOG.debug("SNAT interface ports not created: %s", snat_p_list)
def _get_device_owner(self, context, router=None):
"""Get device_owner for the specified router."""
router_is_uuid = isinstance(router, six.string_types)
if router_is_uuid:
router = self._get_router(context, router)
if is_distributed_router(router):
return l3_const.DEVICE_OWNER_DVR_INTERFACE
return super(L3_NAT_with_dvr_db_mixin,
self)._get_device_owner(context, router)
def _get_interface_ports_for_network(self, context, network_id):
router_intf_qry = context.session.query(l3_db.RouterPort)
router_intf_qry = router_intf_qry.join(models_v2.Port)
return router_intf_qry.filter(
models_v2.Port.network_id == network_id,
l3_db.RouterPort.port_type.in_(l3_const.ROUTER_INTERFACE_OWNERS)
)
def _update_fip_assoc(self, context, fip, floatingip_db, external_port):
"""Override to create floating agent gw port for DVR.
Floating IP Agent gateway port will be created when a
floatingIP association happens.
"""
fip_port = fip.get('port_id')
super(L3_NAT_with_dvr_db_mixin, self)._update_fip_assoc(
context, fip, floatingip_db, external_port)
associate_fip = fip_port and floatingip_db['id']
if associate_fip and floatingip_db.get('router_id'):
admin_ctx = context.elevated()
router_dict = self.get_router(
admin_ctx, floatingip_db['router_id'])
# Check if distributed router and then create the
# FloatingIP agent gateway port
if router_dict.get('distributed'):
vm_hostid = self._get_vm_port_hostid(
context, fip_port)
if vm_hostid:
# FIXME (Swami): This FIP Agent Gateway port should be
# created only once and there should not be a duplicate
# for the same host. Until we find a good solution for
# augmenting multiple server requests we should use the
# existing flow.
fip_agent_port = (
self.create_fip_agent_gw_port_if_not_exists(
admin_ctx, external_port['network_id'],
vm_hostid))
LOG.debug("FIP Agent gateway port: %s", fip_agent_port)
def _get_floatingip_on_port(self, context, port_id=None):
"""Helper function to retrieve the fip associated with port."""
fip_qry = context.session.query(l3_db.FloatingIP)
floating_ip = fip_qry.filter_by(fixed_port_id=port_id)
return floating_ip.first()
def add_router_interface(self, context, router_id, interface_info):
add_by_port, add_by_sub = self._validate_interface_info(interface_info)
router = self._get_router(context, router_id)
device_owner = self._get_device_owner(context, router)
# This should be True unless adding an IPv6 prefix to an existing port
new_port = True
if add_by_port:
port, subnets = self._add_interface_by_port(
context, router, interface_info['port_id'], device_owner)
elif add_by_sub:
port, subnets, new_port = self._add_interface_by_subnet(
context, router, interface_info['subnet_id'], device_owner)
if new_port:
if router.extra_attributes.distributed and router.gw_port:
try:
admin_context = context.elevated()
self._add_csnat_router_interface_port(
admin_context, router, port['network_id'],
port['fixed_ips'][-1]['subnet_id'])
except Exception:
with excutils.save_and_reraise_exception():
# we need to preserve the original state prior
# the request by rolling back the port creation
# that led to new_port=True
self._core_plugin.delete_port(
admin_context, port['id'])
with context.session.begin(subtransactions=True):
router_port = l3_db.RouterPort(
port_id=port['id'],
router_id=router.id,
port_type=device_owner
)
context.session.add(router_port)
router_interface_info = self._make_router_interface_info(
router_id, port['tenant_id'], port['id'], subnets[-1]['id'],
[subnet['id'] for subnet in subnets])
self.notify_router_interface_action(
context, router_interface_info, 'add')
return router_interface_info
def _port_has_ipv6_address(self, port):
"""Overridden to return False if DVR SNAT port."""
if port['device_owner'] == l3_const.DEVICE_OWNER_ROUTER_SNAT:
return False
return super(L3_NAT_with_dvr_db_mixin,
self)._port_has_ipv6_address(port)
def _check_dvr_router_remove_required_and_notify_agent(
self, context, router, port, subnets):
if router.extra_attributes.distributed:
if router.gw_port and subnets[0]['id']:
self.delete_csnat_router_interface_ports(
context.elevated(), router, subnet_id=subnets[0]['id'])
plugin = manager.NeutronManager.get_service_plugins().get(
constants.L3_ROUTER_NAT)
l3_agents = plugin.get_l3_agents_hosting_routers(context,
[router['id']])
for l3_agent in l3_agents:
if not plugin.check_ports_exist_on_l3agent(context, l3_agent,
router['id']):
plugin.remove_router_from_l3_agent(
context, l3_agent['id'], router['id'])
router_interface_info = self._make_router_interface_info(
router['id'], port['tenant_id'], port['id'], subnets[0]['id'],
[subnet['id'] for subnet in subnets])
self.notify_router_interface_action(
context, router_interface_info, 'remove')
return router_interface_info
def remove_router_interface(self, context, router_id, interface_info):
remove_by_port, remove_by_subnet = (
self._validate_interface_info(interface_info, for_removal=True)
)
port_id = interface_info.get('port_id')
subnet_id = interface_info.get('subnet_id')
router = self._get_router(context, router_id)
device_owner = self._get_device_owner(context, router)
if remove_by_port:
port, subnets = self._remove_interface_by_port(
context, router_id, port_id, subnet_id, device_owner)
# remove_by_subnet is not used here, because the validation logic of
        # _validate_interface_info ensures that at least one of remove_by_*
# is True.
else:
port, subnets = self._remove_interface_by_subnet(
context, router_id, subnet_id, device_owner)
router_interface_info = (
self._check_dvr_router_remove_required_and_notify_agent(
context, router, port, subnets))
return router_interface_info
def _get_snat_sync_interfaces(self, context, router_ids):
"""Query router interfaces that relate to list of router_ids."""
if not router_ids:
return []
qry = context.session.query(l3_db.RouterPort)
qry = qry.filter(
l3_db.RouterPort.router_id.in_(router_ids),
l3_db.RouterPort.port_type == l3_const.DEVICE_OWNER_ROUTER_SNAT
)
interfaces = collections.defaultdict(list)
for rp in qry:
interfaces[rp.router_id].append(
self._core_plugin._make_port_dict(rp.port, None))
LOG.debug("Return the SNAT ports: %s", interfaces)
return interfaces
def _build_routers_list(self, context, routers, gw_ports):
# Perform a single query up front for all routers
if not routers:
return []
router_ids = [r['id'] for r in routers]
snat_binding = l3_dvrsched_db.CentralizedSnatL3AgentBinding
query = (context.session.query(snat_binding).
filter(snat_binding.router_id.in_(router_ids))).all()
bindings = dict((b.router_id, b) for b in query)
for rtr in routers:
gw_port_id = rtr['gw_port_id']
# Collect gw ports only if available
if gw_port_id and gw_ports.get(gw_port_id):
rtr['gw_port'] = gw_ports[gw_port_id]
if 'enable_snat' in rtr[l3.EXTERNAL_GW_INFO]:
rtr['enable_snat'] = (
rtr[l3.EXTERNAL_GW_INFO]['enable_snat'])
binding = bindings.get(rtr['id'])
if not binding:
rtr['gw_port_host'] = None
LOG.debug('No snat is bound to router %s', rtr['id'])
continue
rtr['gw_port_host'] = binding.l3_agent.host
return routers
def _process_routers(self, context, routers):
routers_dict = {}
snat_intfs_by_router_id = self._get_snat_sync_interfaces(
context, [r['id'] for r in routers])
for router in routers:
routers_dict[router['id']] = router
if router['gw_port_id']:
snat_router_intfs = snat_intfs_by_router_id[router['id']]
LOG.debug("SNAT ports returned: %s ", snat_router_intfs)
router[l3_const.SNAT_ROUTER_INTF_KEY] = snat_router_intfs
return routers_dict
def _process_floating_ips_dvr(self, context, routers_dict,
floating_ips, host, agent):
fip_sync_interfaces = None
LOG.debug("FIP Agent : %s ", agent.id)
for floating_ip in floating_ips:
router = routers_dict.get(floating_ip['router_id'])
if router:
router_floatingips = router.get(l3_const.FLOATINGIP_KEY, [])
if router['distributed']:
if floating_ip.get('host', None) != host:
continue
LOG.debug("Floating IP host: %s", floating_ip['host'])
router_floatingips.append(floating_ip)
router[l3_const.FLOATINGIP_KEY] = router_floatingips
if not fip_sync_interfaces:
fip_sync_interfaces = self._get_fip_sync_interfaces(
context, agent.id)
LOG.debug("FIP Agent ports: %s", fip_sync_interfaces)
router[l3_const.FLOATINGIP_AGENT_INTF_KEY] = (
fip_sync_interfaces)
def _get_fip_sync_interfaces(self, context, fip_agent_id):
"""Query router interfaces that relate to list of router_ids."""
if not fip_agent_id:
return []
filters = {'device_id': [fip_agent_id],
'device_owner': [l3_const.DEVICE_OWNER_AGENT_GW]}
interfaces = self._core_plugin.get_ports(context.elevated(), filters)
LOG.debug("Return the FIP ports: %s ", interfaces)
return interfaces
def _get_dvr_sync_data(self, context, host, agent, router_ids=None,
active=None):
routers, interfaces, floating_ips = self._get_router_info_list(
context, router_ids=router_ids, active=active,
device_owners=l3_const.ROUTER_INTERFACE_OWNERS)
port_filter = {portbindings.HOST_ID: [host]}
ports = self._core_plugin.get_ports(context, port_filter)
port_dict = dict((port['id'], port) for port in ports)
# Add the port binding host to the floatingip dictionary
for fip in floating_ips:<|fim▁hole|> if vm_port:
fip['host'] = self._get_vm_port_hostid(context, fip['port_id'],
port=vm_port)
routers_dict = self._process_routers(context, routers)
self._process_floating_ips_dvr(context, routers_dict,
floating_ips, host, agent)
ports_to_populate = []
for router in routers_dict.values():
if router.get('gw_port'):
ports_to_populate.append(router['gw_port'])
if router.get(l3_const.FLOATINGIP_AGENT_INTF_KEY):
ports_to_populate += router[l3_const.FLOATINGIP_AGENT_INTF_KEY]
if router.get(l3_const.SNAT_ROUTER_INTF_KEY):
ports_to_populate += router[l3_const.SNAT_ROUTER_INTF_KEY]
ports_to_populate += interfaces
self._populate_subnets_for_ports(context, ports_to_populate)
self._process_interfaces(routers_dict, interfaces)
return list(routers_dict.values())
def _get_vm_port_hostid(self, context, port_id, port=None):
"""Return the portbinding host_id."""
vm_port_db = port or self._core_plugin.get_port(context, port_id)
device_owner = vm_port_db['device_owner'] if vm_port_db else ""
if (n_utils.is_dvr_serviced(device_owner) or
device_owner == l3_const.DEVICE_OWNER_AGENT_GW):
return vm_port_db[portbindings.HOST_ID]
def _get_agent_gw_ports_exist_for_network(
self, context, network_id, host, agent_id):
"""Return agent gw port if exist, or None otherwise."""
if not network_id:
LOG.debug("Network not specified")
return
filters = {
'network_id': [network_id],
'device_id': [agent_id],
'device_owner': [l3_const.DEVICE_OWNER_AGENT_GW]
}
ports = self._core_plugin.get_ports(context, filters)
if ports:
return ports[0]
def _get_router_ids(self, context):
"""Function to retrieve router IDs for a context without joins"""
query = self._model_query(context, l3_db.Router.id)
return [row[0] for row in query]
def delete_floatingip_agent_gateway_port(
self, context, host_id, ext_net_id):
"""Function to delete FIP gateway port with given ext_net_id."""
# delete any fip agent gw port
device_filter = {'device_owner': [l3_const.DEVICE_OWNER_AGENT_GW],
'network_id': [ext_net_id]}
ports = self._core_plugin.get_ports(context,
filters=device_filter)
for p in ports:
if not host_id or p[portbindings.HOST_ID] == host_id:
self._core_plugin.ipam.delete_port(context, p['id'])
if host_id:
return
def create_fip_agent_gw_port_if_not_exists(
self, context, network_id, host):
"""Function to return the FIP Agent GW port.
This function will create a FIP Agent GW port
if required. If the port already exists, it
will return the existing port and will not
create a new one.
"""
l3_agent_db = self._get_agent_by_type_and_host(
context, l3_const.AGENT_TYPE_L3, host)
if l3_agent_db:
LOG.debug("Agent ID exists: %s", l3_agent_db['id'])
f_port = self._get_agent_gw_ports_exist_for_network(
context, network_id, host, l3_agent_db['id'])
if not f_port:
LOG.info(_LI('Agent Gateway port does not exist,'
' so create one: %s'), f_port)
port_data = {'tenant_id': '',
'network_id': network_id,
'device_id': l3_agent_db['id'],
'device_owner': l3_const.DEVICE_OWNER_AGENT_GW,
'binding:host_id': host,
'admin_state_up': True,
'name': ''}
agent_port = p_utils.create_port(self._core_plugin, context,
{'port': port_data})
if agent_port:
self._populate_subnets_for_ports(context, [agent_port])
return agent_port
msg = _("Unable to create the Agent Gateway Port")
raise n_exc.BadRequest(resource='router', msg=msg)
else:
self._populate_subnets_for_ports(context, [f_port])
return f_port
def _get_snat_interface_ports_for_router(self, context, router_id):
"""Return all existing snat_router_interface ports."""
qry = context.session.query(l3_db.RouterPort)
qry = qry.filter_by(
router_id=router_id,
port_type=l3_const.DEVICE_OWNER_ROUTER_SNAT
)
ports = [self._core_plugin._make_port_dict(rp.port, None)
for rp in qry]
return ports
def _add_csnat_router_interface_port(
self, context, router, network_id, subnet_id, do_pop=True):
"""Add SNAT interface to the specified router and subnet."""
port_data = {'tenant_id': '',
'network_id': network_id,
'fixed_ips': [{'subnet_id': subnet_id}],
'device_id': router.id,
'device_owner': l3_const.DEVICE_OWNER_ROUTER_SNAT,
'admin_state_up': True,
'name': ''}
snat_port = p_utils.create_port(self._core_plugin, context,
{'port': port_data})
if not snat_port:
msg = _("Unable to create the SNAT Interface Port")
raise n_exc.BadRequest(resource='router', msg=msg)
with context.session.begin(subtransactions=True):
router_port = l3_db.RouterPort(
port_id=snat_port['id'],
router_id=router.id,
port_type=l3_const.DEVICE_OWNER_ROUTER_SNAT
)
context.session.add(router_port)
if do_pop:
return self._populate_subnets_for_ports(context, [snat_port])
return snat_port
def _create_snat_intf_ports_if_not_exists(self, context, router):
"""Function to return the snat interface port list.
This function will return the snat interface port list
if it exists. If the port does not exist it will create
new ports and then return the list.
"""
port_list = self._get_snat_interface_ports_for_router(
context, router.id)
if port_list:
self._populate_subnets_for_ports(context, port_list)
return port_list
port_list = []
int_ports = (
rp.port for rp in
router.attached_ports.filter_by(
port_type=l3_const.DEVICE_OWNER_DVR_INTERFACE
)
)
LOG.info(_LI('SNAT interface port list does not exist,'
' so create one: %s'), port_list)
for intf in int_ports:
if intf.fixed_ips:
# Passing the subnet for the port to make sure the IP's
# are assigned on the right subnet if multiple subnet
# exists
snat_port = self._add_csnat_router_interface_port(
context, router, intf['network_id'],
intf['fixed_ips'][0]['subnet_id'], do_pop=False)
port_list.append(snat_port)
if port_list:
self._populate_subnets_for_ports(context, port_list)
return port_list
def dvr_vmarp_table_update(self, context, port_dict, action):
"""Notify L3 agents of VM ARP table changes.
When a VM goes up or down, look for one DVR router on the port's
subnet, and send the VM's ARP details to all L3 agents hosting the
router.
"""
# Check this is a valid VM or service port
if not (n_utils.is_dvr_serviced(port_dict['device_owner']) and
port_dict['fixed_ips']):
return
ip_address = port_dict['fixed_ips'][0]['ip_address']
subnet = port_dict['fixed_ips'][0]['subnet_id']
filters = {'fixed_ips': {'subnet_id': [subnet]}}
ports = self._core_plugin.get_ports(context, filters=filters)
for port in ports:
if port['device_owner'] == l3_const.DEVICE_OWNER_DVR_INTERFACE:
router_id = port['device_id']
router_dict = self._get_router(context, router_id)
if router_dict.extra_attributes.distributed:
arp_table = {'ip_address': ip_address,
'mac_address': port_dict['mac_address'],
'subnet_id': subnet}
if action == "add":
notify_action = self.l3_rpc_notifier.add_arp_entry
elif action == "del":
notify_action = self.l3_rpc_notifier.del_arp_entry
notify_action(context, router_id, arp_table)
return
def delete_csnat_router_interface_ports(self, context,
router, subnet_id=None):
# Each csnat router interface port is associated
# with a subnet, so we need to pass the subnet id to
# delete the right ports.
# TODO(markmcclain): This is suboptimal but was left to reduce
# changeset size since it is late in cycle
ports = (
rp.port.id for rp in
router.attached_ports.filter_by(
port_type=l3_const.DEVICE_OWNER_ROUTER_SNAT)
if rp.port
)
c_snat_ports = self._core_plugin.get_ports(
context,
filters={'id': ports}
)
for p in c_snat_ports:
if subnet_id is None:
self._core_plugin.delete_port(context,
p['id'],
l3_port_check=False)
else:
if p['fixed_ips'][0]['subnet_id'] == subnet_id:
LOG.debug("Subnet matches: %s", subnet_id)
self._core_plugin.delete_port(context,
p['id'],
l3_port_check=False)
def is_distributed_router(router):
"""Return True if router to be handled is distributed."""
try:
# See if router is a DB object first
requested_router_type = router.extra_attributes.distributed
except AttributeError:
# if not, try to see if it is a request body
requested_router_type = router.get('distributed')
if attributes.is_attr_set(requested_router_type):
return requested_router_type
return cfg.CONF.router_distributed<|fim▁end|> | vm_port = port_dict.get(fip['port_id'], None) |
<|file_name|>card.py<|end_file_name|><|fim▁begin|>class Card:<|fim▁hole|><|fim▁end|> | count = 0
url = ""
name = ""
sideboard = -1 |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from distutils.core import setup
setup(
# Application name:
name="streaker",
# Version number (initial):
version="0.0.1",
# Application author details:
author="Aldi Alimucaj",
author_email="[email protected]",
<|fim▁hole|> scripts=['bin/streaker'],
# Include additional files into the package
include_package_data=True,
# Details
url="http://pypi.python.org/pypi/Streaker_v001/",
#
license="MIT",
description="GitHub streak manipulator",
# long_description=open("README.txt").read(),
# Dependent packages (distributions)
install_requires=[
# "",
],
)<|fim▁end|> | # Packages
packages=["streaker"],
|
<|file_name|>LicensingStore-test.ts<|end_file_name|><|fim▁begin|>import PluginSDK from "PluginSDK";
import { RequestUtil } from "mesosphere-shared-reactjs";
const SDK = PluginSDK.__getSDK("licensing", { enabled: true });
require("../../SDK").setSDK(SDK);
const LicensingStore = require("../LicensingStore").default;
const LicensingSummary = require("../../structs/LicensingSummary").default;
const EventTypes = require("../../constants/EventTypes");
import * as ActionTypes from "../../constants/ActionTypes";
const licensingSummaryFixture = require("../../../../tests/_fixtures/licensing/licensing-summary.json");
let thisRequestFn, thisLicensingSummaryFixture;
describe("LicensingStore", () => {
describe("#fetchLicensingSummary", () => {
beforeEach(() => {
thisRequestFn = RequestUtil.json;
RequestUtil.json = (handlers) => {
handlers.success({
...licensingSummaryFixture,
});
};
thisLicensingSummaryFixture = {
...licensingSummaryFixture,
};
});
afterEach(() => {
RequestUtil.json = thisRequestFn;
});
it("returns an instance of LicenseSummary", () => {
LicensingStore.fetchLicensingSummary();
const license = LicensingStore.getLicensingSummary();
expect(license instanceof LicensingSummary).toBeTruthy();
});
it("returns the licenseSummary it was given", () => {
LicensingStore.fetchLicensingSummary();
const license = LicensingStore.getLicensingSummary();
expect(license.getNumberBreaches()).toEqual(
thisLicensingSummaryFixture.number_of_breaches
);
expect(license.getExpiration()).toEqual(
thisLicensingSummaryFixture.end_timestamp
);
});
describe("dispatcher", () => {
it("stores licenseSummary when event is dispatched", () => {
SDK.dispatch({
type: ActionTypes.REQUEST_LICENSING_SUMMARY_SUCCESS,
data: { gid: "foo", bar: "baz" },
});
const license = LicensingStore.getLicensingSummary();<|fim▁hole|> expect(license.get("bar")).toEqual("baz");
});
it("dispatches the correct event upon success", () => {
const mockedFn = jest.fn();
LicensingStore.addChangeListener(
EventTypes.LICENSING_SUMMARY_SUCCESS,
mockedFn
);
SDK.dispatch({
type: ActionTypes.REQUEST_LICENSING_SUMMARY_SUCCESS,
data: { gid: "foo", bar: "baz" },
});
// Called twice because adding change listener starts polling
expect(mockedFn.mock.calls.length).toEqual(2);
});
it("dispatches the correct event upon error", () => {
const mockedFn = jest.fn();
LicensingStore.addChangeListener(
EventTypes.LICENSING_SUMMARY_ERROR,
mockedFn
);
SDK.dispatch({
type: ActionTypes.REQUEST_LICENSING_SUMMARY_ERROR,
data: { message: "error" },
});
expect(mockedFn.mock.calls.length).toEqual(1);
});
});
});
});<|fim▁end|> | expect(license.get("gid")).toEqual("foo"); |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-<|fim▁hole|>from . import models<|fim▁end|> | # Copyright 2017 Matmoz d.o.o. (<http://www.matmoz.si>).
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
|
<|file_name|>index_test.js<|end_file_name|><|fim▁begin|>import { test , moduleFor } from 'appkit/tests/helpers/module_for';
import Index from 'appkit/routes/index';
moduleFor('route:index', "Unit - IndexRoute");
test("it exists", function(){<|fim▁hole|><|fim▁end|> | ok(this.subject() instanceof Index);
}); |
<|file_name|>check_static_recursion.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This compiler pass detects constants that refer to themselves
// recursively.
<|fim▁hole|>use session::Session;
use middle::def::{DefStatic, DefConst, DefAssociatedConst, DefVariant, DefMap};
use util::nodemap::NodeMap;
use syntax::{ast, ast_util};
use syntax::codemap::Span;
use syntax::feature_gate::emit_feature_err;
use syntax::visit::Visitor;
use syntax::visit;
use std::cell::RefCell;
struct CheckCrateVisitor<'a, 'ast: 'a> {
sess: &'a Session,
def_map: &'a DefMap,
ast_map: &'a ast_map::Map<'ast>,
// `discriminant_map` is a cache that associates the `NodeId`s of local
// variant definitions with the discriminant expression that applies to
// each one. If the variant uses the default values (starting from `0`),
// then `None` is stored.
discriminant_map: RefCell<NodeMap<Option<&'ast ast::Expr>>>,
}
impl<'a, 'ast: 'a> Visitor<'ast> for CheckCrateVisitor<'a, 'ast> {
fn visit_item(&mut self, it: &'ast ast::Item) {
match it.node {
ast::ItemStatic(..) |
ast::ItemConst(..) => {
let mut recursion_visitor =
CheckItemRecursionVisitor::new(self, &it.span);
recursion_visitor.visit_item(it);
},
ast::ItemEnum(ref enum_def, ref generics) => {
// We could process the whole enum, but handling the variants
// with discriminant expressions one by one gives more specific,
// less redundant output.
for variant in &enum_def.variants {
if let Some(_) = variant.node.disr_expr {
let mut recursion_visitor =
CheckItemRecursionVisitor::new(self, &variant.span);
recursion_visitor.populate_enum_discriminants(enum_def);
recursion_visitor.visit_variant(variant, generics);
}
}
}
_ => {}
}
visit::walk_item(self, it)
}
fn visit_trait_item(&mut self, ti: &'ast ast::TraitItem) {
match ti.node {
ast::ConstTraitItem(_, ref default) => {
if let Some(_) = *default {
let mut recursion_visitor =
CheckItemRecursionVisitor::new(self, &ti.span);
recursion_visitor.visit_trait_item(ti);
}
}
_ => {}
}
visit::walk_trait_item(self, ti)
}
fn visit_impl_item(&mut self, ii: &'ast ast::ImplItem) {
match ii.node {
ast::ConstImplItem(..) => {
let mut recursion_visitor =
CheckItemRecursionVisitor::new(self, &ii.span);
recursion_visitor.visit_impl_item(ii);
}
_ => {}
}
visit::walk_impl_item(self, ii)
}
}
pub fn check_crate<'ast>(sess: &Session,
krate: &'ast ast::Crate,
def_map: &DefMap,
ast_map: &ast_map::Map<'ast>) {
let mut visitor = CheckCrateVisitor {
sess: sess,
def_map: def_map,
ast_map: ast_map,
discriminant_map: RefCell::new(NodeMap()),
};
visit::walk_crate(&mut visitor, krate);
sess.abort_if_errors();
}
struct CheckItemRecursionVisitor<'a, 'ast: 'a> {
root_span: &'a Span,
sess: &'a Session,
ast_map: &'a ast_map::Map<'ast>,
def_map: &'a DefMap,
discriminant_map: &'a RefCell<NodeMap<Option<&'ast ast::Expr>>>,
idstack: Vec<ast::NodeId>,
}
impl<'a, 'ast: 'a> CheckItemRecursionVisitor<'a, 'ast> {
fn new(v: &'a CheckCrateVisitor<'a, 'ast>, span: &'a Span)
-> CheckItemRecursionVisitor<'a, 'ast> {
CheckItemRecursionVisitor {
root_span: span,
sess: v.sess,
ast_map: v.ast_map,
def_map: v.def_map,
discriminant_map: &v.discriminant_map,
idstack: Vec::new(),
}
}
fn with_item_id_pushed<F>(&mut self, id: ast::NodeId, f: F)
where F: Fn(&mut Self) {
if self.idstack.iter().any(|&x| x == id) {
let any_static = self.idstack.iter().any(|&x| {
if let ast_map::NodeItem(item) = self.ast_map.get(x) {
if let ast::ItemStatic(..) = item.node {
true
} else {
false
}
} else {
false
}
});
if any_static {
if !self.sess.features.borrow().static_recursion {
emit_feature_err(&self.sess.parse_sess.span_diagnostic,
"static_recursion",
*self.root_span, "recursive static");
}
} else {
span_err!(self.sess, *self.root_span, E0265, "recursive constant");
}
return;
}
self.idstack.push(id);
f(self);
self.idstack.pop();
}
// If a variant has an expression specifying its discriminant, then it needs
// to be checked just like a static or constant. However, if there are more
// variants with no explicitly specified discriminant, those variants will
// increment the same expression to get their values.
//
// So for every variant, we need to track whether there is an expression
// somewhere in the enum definition that controls its discriminant. We do
// this by starting from the end and searching backward.
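    //
    // For example, given a hypothetical enum (not from the checked crate):
    //
    //     enum Foo { A, B = 2, C, D = 7, E }
    //
    // scanning the variants in reverse maps `E` and `D` to the expression
    // `7`, `C` and `B` to the expression `2`, and `A`, which is left on the
    // stack when the top is reached, to `None` (the default value 0).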
fn populate_enum_discriminants(&self, enum_definition: &'ast ast::EnumDef) {
// Get the map, and return if we already processed this enum or if it
// has no variants.
let mut discriminant_map = self.discriminant_map.borrow_mut();
match enum_definition.variants.first() {
None => { return; }
Some(variant) if discriminant_map.contains_key(&variant.node.id) => {
return;
}
_ => {}
}
// Go through all the variants.
let mut variant_stack: Vec<ast::NodeId> = Vec::new();
for variant in enum_definition.variants.iter().rev() {
variant_stack.push(variant.node.id);
// When we find an expression, every variant currently on the stack
// is affected by that expression.
if let Some(ref expr) = variant.node.disr_expr {
for id in &variant_stack {
discriminant_map.insert(*id, Some(expr));
}
variant_stack.clear()
}
}
// If we are at the top, that always starts at 0, so any variant on the
// stack has a default value and does not need to be checked.
for id in &variant_stack {
discriminant_map.insert(*id, None);
}
}
}
impl<'a, 'ast: 'a> Visitor<'ast> for CheckItemRecursionVisitor<'a, 'ast> {
fn visit_item(&mut self, it: &'ast ast::Item) {
self.with_item_id_pushed(it.id, |v| visit::walk_item(v, it));
}
fn visit_enum_def(&mut self, enum_definition: &'ast ast::EnumDef,
generics: &'ast ast::Generics) {
self.populate_enum_discriminants(enum_definition);
visit::walk_enum_def(self, enum_definition, generics);
}
fn visit_variant(&mut self, variant: &'ast ast::Variant,
_: &'ast ast::Generics) {
let variant_id = variant.node.id;
let maybe_expr;
if let Some(get_expr) = self.discriminant_map.borrow().get(&variant_id) {
// This is necessary because we need to let the `discriminant_map`
// borrow fall out of scope, so that we can reborrow farther down.
maybe_expr = (*get_expr).clone();
} else {
self.sess.span_bug(variant.span,
"`check_static_recursion` attempted to visit \
variant with unknown discriminant")
}
// If `maybe_expr` is `None`, that's because no discriminant is
// specified that affects this variant. Thus, no risk of recursion.
if let Some(expr) = maybe_expr {
self.with_item_id_pushed(expr.id, |v| visit::walk_expr(v, expr));
}
}
fn visit_trait_item(&mut self, ti: &'ast ast::TraitItem) {
self.with_item_id_pushed(ti.id, |v| visit::walk_trait_item(v, ti));
}
fn visit_impl_item(&mut self, ii: &'ast ast::ImplItem) {
self.with_item_id_pushed(ii.id, |v| visit::walk_impl_item(v, ii));
}
fn visit_expr(&mut self, e: &'ast ast::Expr) {
match e.node {
ast::ExprPath(..) => {
match self.def_map.borrow().get(&e.id).map(|d| d.base_def) {
Some(DefStatic(def_id, _)) |
Some(DefAssociatedConst(def_id, _)) |
Some(DefConst(def_id))
if ast_util::is_local(def_id) => {
match self.ast_map.get(def_id.node) {
ast_map::NodeItem(item) =>
self.visit_item(item),
ast_map::NodeTraitItem(item) =>
self.visit_trait_item(item),
ast_map::NodeImplItem(item) =>
self.visit_impl_item(item),
ast_map::NodeForeignItem(_) => {},
_ => {
self.sess.span_bug(
e.span,
&format!("expected item, found {}",
self.ast_map.node_to_string(def_id.node)));
}
}
}
// For variants, we only want to check expressions that
// affect the specific variant used, but we need to check
// the whole enum definition to see what expression that
// might be (if any).
Some(DefVariant(enum_id, variant_id, false))
if ast_util::is_local(enum_id) => {
if let ast::ItemEnum(ref enum_def, ref generics) =
self.ast_map.expect_item(enum_id.local_id()).node {
self.populate_enum_discriminants(enum_def);
let variant = self.ast_map.expect_variant(variant_id.local_id());
self.visit_variant(variant, generics);
} else {
self.sess.span_bug(e.span,
"`check_static_recursion` found \
non-enum in DefVariant");
}
}
_ => ()
}
},
_ => ()
}
visit::walk_expr(self, e);
}
}<|fim▁end|> | use ast_map; |
<|file_name|>URLShortenerAPI.java<|end_file_name|><|fim▁begin|>package com.therabbitmage.android.beacon.network;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;<|fim▁hole|>import java.net.URISyntaxException;
import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.message.BasicHeader;
import android.net.Uri;
import android.util.Log;
import com.therabbitmage.android.beacon.entities.google.urlshortener.Url;
public final class URLShortenerAPI {
private static final String TAG = URLShortenerAPI.class.getSimpleName();
private static final String BASE_URL = "https://www.googleapis.com/urlshortener/v1/url";
public static NetworkResponse urlShorten(String url) throws IOException, URISyntaxException{
android.net.Uri.Builder uriBuilder = Uri.parse(BASE_URL).buildUpon();
String uri = uriBuilder.build().toString();
Header[] headers = new Header[1];
headers[0] = new BasicHeader(ApacheNetworkUtils.HEADER_CONTENT_TYPE, ApacheNetworkUtils.TYPE_JSON);
ApacheNetworkUtils.getAndroidInstance(ApacheNetworkUtils.sUserAgent, false);
HttpResponse response = ApacheNetworkUtils.post(
uri,
ApacheNetworkUtils.getDefaultApacheHeaders(),
new Url(url).toJson());
ApacheNetworkUtils.toStringResponseHeaders(response.getAllHeaders());
ApacheNetworkUtils.toStringStatusLine(response.getStatusLine());
HttpEntity entity = response.getEntity();
NetworkResponse networkResponse = new NetworkResponse();
if(response.getStatusLine().getStatusCode() == HttpStatus.SC_OK){
networkResponse.setError(0);
BufferedReader br = new BufferedReader(new InputStreamReader(entity.getContent()));
StringBuilder stringBuilder = new StringBuilder();
String output = new String();
while((output = br.readLine()) != null){
stringBuilder.append(output);
}
br.close();
Log.i(TAG, "Body: " + stringBuilder.toString());
networkResponse.setUrl(Url.fromJson(stringBuilder.toString()));
} else {
networkResponse.setError(1);
}
return networkResponse;
}
}<|fim▁end|> | |
<|file_name|>fix_sensor_config_key.py<|end_file_name|><|fim▁begin|>from __future__ import print_function
from __future__ import unicode_literals<|fim▁hole|>standard_library.install_aliases()
from builtins import *
import emission.core.get_database as edb
def fix_key(check_field, new_key):
print("First entry for "+new_key+" is %s" % list(edb.get_timeseries_db().find(
{"metadata.key": "config/sensor_config",
check_field: {"$exists": True}}).sort(
"metadata/write_ts").limit(1)))
udb = edb.get_usercache_db()
tdb = edb.get_timeseries_db()
for i, entry in enumerate(edb.get_timeseries_db().find(
{"metadata.key": "config/sensor_config",
check_field: {"$exists": True}})):
entry["metadata"]["key"] = new_key
if i % 10000 == 0:
print(udb.insert(entry))
print(tdb.remove(entry["_id"]))
else:
udb.insert(entry)
tdb.remove(entry["_id"])
fix_key("data.battery_status", "background/battery")
fix_key("data.latitude", "background/location")
fix_key("data.zzaEh", "background/motion_activity")
fix_key("data.currState", "statemachine/transition")<|fim▁end|> | from __future__ import division
from __future__ import absolute_import
from future import standard_library |
<|file_name|>ANMs.py<|end_file_name|><|fim▁begin|>'''
Created on Jan 17, 2014
@author: oliwa
'''
import sys as sys
import numpy as np
from prody.dynamics.anm import calcANM, ANM
from prody.dynamics.editing import extendModel, sliceModel
from prody.dynamics.functions import saveModel, loadModel, writeArray
from prody.proteins.pdbfile import writePDB, parsePDB
from prody.dynamics.mode import Vector
from prody.measure.measure import calcCenter, calcDistance
from prody.dynamics.compare import calcOverlap, calcCumulOverlap,\
calcSubspaceOverlap, calcCovOverlap, printOverlapTable, getOverlapTable
from prody.apps.prody_apps.prody_contacts import prody_contacts
import traceback
from prody.dynamics.nmdfile import writeNMD
import scipy as sp
class ANMs(object):
"""
This class holds all the ANMs for an encounter.
"""
def __init__(self, utils):
"""
Constructor
"""
self.utils = utils
def createSlcSelectionString(self, reference, isBoundComplex, ref_chain, referenceTitle):
""" Under the assumption that is reflected in the Benchmark 4.0 that the receptor atoms are set before the
ligand atoms (spacially in the PDB file), if the current protein under investigation is a ligand,
an offset is added to the selection string to match the atoms of the ligand from the complex. """
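        # Illustrative sketch (hypothetical atom counts; assumes utils.addOffset
        # shifts index-based terms of a ProDy selection string): if the receptor
        # segment "R." contributes 1500 atoms and the ligand selection addresses
        # atoms 0..999, the shifted string addresses atoms 1500..2499 of the
        # complex instead.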
if isBoundComplex and not self.utils.isReceptor(referenceTitle):
print "adding offset"
return self.utils.addOffset(ref_chain.getSelstr(), reference.select('segment "R."').numAtoms())
else:
print "using original selstr"
return ref_chain.getSelstr()
def calcANMs(self, reference, ref_chain, numberOfModes, encounter, selstr='calpha', whatAtomsToMatch='calpha', modified="", forceRebuild=False, isBoundComplex=False):
# if the base model does not exist, it needs to be created along with the
# extended and slicedback models
if forceRebuild or not self.doesANMExist(reference, numberOfModes, selstr, whatAtomsToMatch, modified):
# Create the anm
anm = calcANM(reference, n_modes=numberOfModes, selstr=selstr)
# First extend the anm on all atoms
anm_extend = extendModel(anm[0], anm[1], reference, norm=True)
# Then slice it back to matched
selectionAtoms = self.createSlcSelectionString(reference, isBoundComplex, ref_chain, encounter.getReference().getTitle())
anm_slc = sliceModel(anm_extend[0], anm_extend[1], selectionAtoms)
# If isBoundComplex, slice one anm back to its overall matched chains
if isBoundComplex:
selectionAtomsCounterpart = self.createSlcSelectionString(reference, isBoundComplex, encounter.getBoundCounterpartChain(), encounter.getUnboundCounterpart().getTitle())
anm_slc_counterpart= sliceModel(anm_extend[0], anm_extend[1], selectionAtomsCounterpart)
# Save the models
# saveModel(anm[0],
# filename=self.getANMPath(reference, numberOfModes, selstr, whatAtomsToMatch),
# matrices=True)
# saveModel(anm_extend[0],
# filename=self.getANMPath(reference, numberOfModes, selstr, whatAtomsToMatch, modified="extended"),
# matrices=True
# )
# saveModel(anm_slc[0],
# filename=self.getANMPath(reference, numberOfModes, selstr, whatAtomsToMatch, modified="slicedback"),
# matrices=True
# )
print "created and saved models"
# print "reference, it is the complex: ", reference.select('calpha and segment "R."').numAtoms()
# print "to slice on, it is the mob_chain: ", ref_chain.numAtoms()
print "anm hessian : " + str(anm[0].getHessian().shape)
print "number of calpha : " + str(reference.select('calpha').numAtoms())
print "anm size : " + str(anm[0].getArray().shape)
print "anm_ext size : " + str(anm_extend[0].getArray().shape)
print "anm_slice size : " + str(anm_slc[0].getArray().shape)
print "selectionAtoms : " + selectionAtoms
if isBoundComplex:
print "anm slice counterpart size: " + str(anm_slc_counterpart[0].getArray().shape)
print "selectionAtoms counterpart: " + selectionAtomsCounterpart
# Save the models"
self._anm = anm
self._anm_extend = anm_extend
self._anm_slc = anm_slc
if isBoundComplex:
self._anm_slc_counterpart = anm_slc_counterpart
else:
#raise Exception("Problem with capturing the selection of saved models, do not use load models from files now.")
try:
# load models
anmModel = loadModel(self.getANMPath(reference, numberOfModes, selstr, whatAtomsToMatch)+".anm.npz")
anm_extendModel = loadModel(self.getANMPath(reference, numberOfModes, selstr, whatAtomsToMatch, modified="extended")+".nma.npz")
anm_slcModel = loadModel(self.getANMPath(reference, numberOfModes, selstr, whatAtomsToMatch, modified="slicedback")+".nma.npz")
# store models selections
anmModelSelection = reference.select(selstr)
anm_extendModelSelection = reference
                selectionAtoms = self.createSlcSelectionString(reference, isBoundComplex, ref_chain, encounter.getReference().getTitle())
anm_slcModelSelection = reference.select(selectionAtoms)
# recombine models and selections as tuples
anm = (anmModel, anmModelSelection)
anm_extend = (anm_extendModel, anm_extendModelSelection)
anm_slc = (anm_slcModel, anm_slcModelSelection)
print "loaded models"
print "anm size : " + str(anm[0].getArray().shape)
print "anm_ext size : " + str(anm_extend[0].getArray().shape)
print "anm_slice size: " + str(anm_slc[0].getArray().shape)
print "selectionAtoms: " + selectionAtoms
self._anm = anm
self._anm_extend = anm_extend
self._anm_slc = anm_slc
except IOError as e:
print "Error loading ANM models from disc: "+str(e)
def calcANMsForPart2a2k(self, reference, counterpart, proteinComplex, ref_chain, counterpart_chain, chain_complex, numberOfModes, selstr='calpha', whatAtomsToMatch='calpha'):
# Create the anm of reference, counterpart and proteinComplex)
# print "reference, counterpart, proteinComplex, chain_complex (calphas, calphas*3-6) : ", (reference.select('calpha').numAtoms(), reference.select('calpha').numAtoms()*3 -6), (counterpart.select('calpha').numAtoms(), counterpart.select('calpha').numAtoms()*3-6), (proteinComplex.select('calpha').numAtoms(), proteinComplex.select('calpha').numAtoms()*3-6), (chain_complex.select('calpha').numAtoms(), chain_complex.select('calpha').numAtoms()*3 -6)
# print "anm_reference, anm_counterpart, anm_complex hessian shapes : ", anm_reference[0].getHessian().shape, anm_counterpart[0].getHessian().shape, anm_complex[0].getHessian().shape
# print "anm_reference, anm_counterpart, anm_complex, anm_complex_slc getArray() shapes : ", anm_reference[0].getArray().shape, anm_counterpart[0].getArray().shape, anm_complex[0].getArray().shape, anm_complex_slc[0].getArray().shape
self._anm_reference, self._anm_reference_slc = self._calcANMsUnified(reference, ref_chain, numberOfModes/2, selstr, whatAtomsToMatch)
self._anm_counterpart, self._anm_counterpart_slc = self._calcANMsUnified(counterpart, counterpart_chain, numberOfModes/2, selstr, whatAtomsToMatch)
# print "15 ang contact before moving atoms:", proteinComplex.select('same residue as exwithin 15 of segment "L." ').numAtoms()
# self._moveSegment(proteinComplex, "L", 30)
# if proteinComplex.select('same residue as exwithin 15 of segment "L." ') != None:
# print "15 ang contact after moving atoms: ", proteinComplex.select('same residue as exwithin 15 of segment "L." ').numAtoms()
# else:
# print "15 ang contact after moving atoms: 0"
self._anm_complex, self._anm_complex_slc = self._calcANMsUnified(proteinComplex, chain_complex, numberOfModes, selstr, whatAtomsToMatch)
#self.utils.testHessianSubMatrices(self._anm_reference, self._anm_counterpart, self._anm_complex)
# check blockmatrix differences and pymol output
# useRelError = True
#significantDifferences = self.utils.testBlockMatrixMembership(self._anm_reference[0].getHessian(), self._anm_counterpart[0].getHessian(), self._anm_complex[0].getHessian(), useRelativeError=useRelError)
#self.utils.whichPatternsAreAffectedbySignificantDifferences(significantDifferences)
# assert reference.getResnums()[0] == proteinComplex.getResnums()[0]
#print self.utils.significantDifferencesToPymolResiduesString(significantDifferences, reference.getResnums()[0])
print "anm_reference_slc, anm_counterpart_slc, anm_complex_slc getArray() shapes : ", self._anm_reference_slc[0].getArray().shape, self._anm_counterpart_slc[0].getArray().shape, self._anm_complex_slc[0].getArray().shape
def calcANMsUnified(self, reference, counterpart, proteinComplex, numberOfModes, encounter, ref_chain = None, counterpart_chain = None, chain_complex = None, selstr='calpha', whatAtomsToMatch='calpha',):
""" Calculate the ANMs for the NMA. If examinations on the complex, it is assumed (for now) that the reference protein is the receptor. """
if (ref_chain == None) and (counterpart_chain == None) and (chain_complex == None):
self.bound_provided = False
else:
self.bound_provided = True
if self.utils.config.investigationsOn == "Individual" or self.utils.config.investigationsOn == "Complex" :
assert self.utils.config.whichCustomHIndividual == "HC_subvector" or self.utils.config.whichCustomHIndividual == "submatrix" or self.utils.config.whichCustomHIndividual == "canonical"
numberOfModesComplex = min((proteinComplex.select('calpha').numAtoms()*3 - 6), self.utils.config.maxModesToCalculate)
if ref_chain != None:
self._anm_reference, self._anm_reference_slc = self._calcANMsUnified(reference, numberOfModes, selstr, whatAtomsToMatch, ref_chain)
else:
self._anm_reference, self._anm_reference_slc = self._calcANMsUnified(reference, numberOfModes, selstr, whatAtomsToMatch)
self._anm_counterpart = calcANM(counterpart, n_modes = numberOfModes, selstr = selstr, zeros = True)
if chain_complex != None:
self._anm_complex, self._anm_complex_slc = self._calcANMsUnified(proteinComplex, numberOfModesComplex, selstr, whatAtomsToMatch, chain_complex)
else:
self._anm_complex, self._anm_complex_slc = self._calcANMsUnified(proteinComplex, numberOfModesComplex, selstr, whatAtomsToMatch)
# elif self.utils.config.investigationsOn == "Complex":
# numberOfModesComplex = numberOfModes*2
# self._anm_reference, self._anm_reference_slc = self._calcANMsUnified(reference, numberOfModes, selstr, whatAtomsToMatch, ref_chain)
# self._anm_counterpart, self._anm_counterpart_slc = self._calcANMsUnified(counterpart, numberOfModes, selstr, whatAtomsToMatch, counterpart_chain)
# self._anm_complex, self._anm_complex_slc = self._calcANMsUnified(proteinComplex, numberOfModesComplex, selstr, whatAtomsToMatch, chain_complex)
print "anm_reference anm_counterpart, anm_complex getArray() shapes : ", self._anm_reference[0].getArray().shape, self._anm_counterpart[0].getArray().shape, self._anm_complex[0].getArray().shape
print "anm_reference_slc, anm_complex_slc getArray() shapes : ", self._anm_reference_slc[0].getArray().shape, self._anm_complex_slc[0].getArray().shape
# create custom H via U1
if self.utils.config.customH:
HC = self._anm_complex[0].getHessian()
if self.utils.isReceptor(reference.getTitle()):
HR = self._anm_reference[0].getHessian()
HL = self._anm_counterpart[0].getHessian()
else:
HR = self._anm_counterpart[0].getHessian()
HL = self._anm_reference[0].getHessian()
HRtilde = HC[:HR.shape[0], :HR.shape[1]]
HLtilde = HC[HR.shape[0]:HR.shape[0]+HL.shape[0], HR.shape[1]:HR.shape[1]+HL.shape[1]]
assert HR.shape == HRtilde.shape
assert HL.shape == HLtilde.shape
# for now assert that reference is always the receptor
if self.utils.config.investigationsOn == "Complex":
assert self.utils.isReceptor(reference.getTitle())
HCcustomBuild = np.zeros((HC.shape[0], HC.shape[1]))
if self.utils.isReceptor(reference.getTitle()):
if self.utils.config.whichCustomHC == "HC_U1" or self.utils.config.whichCustomHC == "HC_U1_1k1k":
HRtildeH_ANew, interCalphaIndicesHR = self.calcCustomH_ANew(HR.copy(), encounter.getReference(), encounter.getUnboundCounterpart(), encounter, "C_u", "r_ij", True, selstr)
HLtildeH_ANew, interCalphaIndicesHL = self.calcCustomH_ANew(HL.copy(), encounter.getUnboundCounterpart(), encounter.getReference(), encounter, "C_u", "r_ij", False, selstr)
HRL_new = self.calcCustomH_ANew_IJ(encounter.getReference(), encounter.getUnboundCounterpart(), encounter, False, "r_ij", True, selstr)
elif self.utils.config.whichCustomHC == "HC_0" or self.utils.config.whichCustomHC == "HC_06":
HRtildeH_ANew = HR.copy()
HLtildeH_ANew = HL.copy()
HRL_new = np.zeros(((reference.select('calpha').numAtoms()*3), (counterpart.select('calpha').numAtoms()*3) ))
interCalphaIndicesHR = None
interCalphaIndicesHL = None
print "reference is receptor, shapes of HRtilde, HLtilde, HRL: ", HRtildeH_ANew.shape, HLtildeH_ANew.shape, HRL_new.shape
else:
if self.utils.config.whichCustomHC == "HC_U1":
HRtildeH_ANew, interCalphaIndicesHR = self.calcCustomH_ANew(HR.copy(), encounter.getUnboundCounterpart(), encounter.getReference(), encounter, "C_u", "r_ij", False, selstr)
HLtildeH_ANew, interCalphaIndicesHL = self.calcCustomH_ANew(HL.copy(), encounter.getReference(), encounter.getUnboundCounterpart(), encounter, "C_u", "r_ij", True, selstr)
HRL_new = self.calcCustomH_ANew_IJ(encounter.getUnboundCounterpart(), encounter.getReference(), encounter, False, "r_ij", False, selstr)
print "reference is ligand, shapes of HLtilde, HRtilde, HRL: ", HLtildeH_ANew.shape, HRtildeH_ANew.shape, HRL_new.shape
# put the new HRtilde and HLtilde inside HC
HCcustomBuild[:HR.shape[0], :HR.shape[1]] = HRtildeH_ANew
HCcustomBuild[HR.shape[0]:HR.shape[0]+HL.shape[0], HR.shape[1]:HR.shape[1]+HL.shape[1]] = HLtildeH_ANew
HCcustomBuild[0:HR.shape[0], HR.shape[1]:HR.shape[1]+HL.shape[1]] = HRL_new
HCcustomBuild[HR.shape[0]:HR.shape[0]+HL.shape[0], 0:HR.shape[1]] = HRL_new.T
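            # Block layout just assembled (receptor block first, then ligand):
            #
            #     HCcustomBuild = [[ HRtilde    HRL     ],
            #                      [ HRL.T      HLtilde ]]
            #
            # where HRtilde/HLtilde are the intra-molecular sub-Hessians and
            # HRL carries the inter-molecular receptor-ligand couplings.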
# optional assertion to test if HCcustomBuild equals the original HC if k = 1 and d = 15 (default ProDy settings)
if (self.utils.config.whichCustomHC == "HC_U1" and self.utils.config.customHRdistance == 15 and self.utils.config.customForceConstant == 1.0):
# assert np.allclose(HC, HCcustomBuild) # assert this if k = 1, A = 15
print "not asserting HCcustomBuild equals original HC with k1 A15"
# Projection
# def projectHessian(self, hessian, reference, proteinComplex, referenceSegment, projectionStyle, projectOnlyReferencePartOfHC=False, interCalphaIndices=None):
if self.utils.config.projectHessian:
if self.utils.config.investigationsOn == "Individual" or self.utils.config.investigationsOn == "Complex":
if self.utils.isReceptor(reference.getTitle()):
if self.utils.config.whichCustomHC == "HC_U1":
if self.utils.config.projectionStyle == "full" or self.utils.config.projectionStyle == "intra":
if self.utils.config.whichCustomHIndividual == "HC_subvector":
HCcustomBuild = self.projectHessian(HCcustomBuild.copy(), reference, proteinComplex, "R", self.utils.config.projectionStyle, True, interCalphaIndicesHR)
#HCcustomBuild = self.projectHessian(HCcustomBuild.copy(), proteinComplex, proteinComplex, '', self.utils.config.projectionStyle, False, interCalphaIndicesHR)
elif self.utils.config.whichCustomHIndividual == "submatrix":
HRtildeH_ANew = self.projectHessian(HRtildeH_ANew.copy(), reference, proteinComplex, "R", self.utils.config.projectionStyle, False, interCalphaIndicesHR)
elif self.utils.config.projectionStyle == "fixedDomainFrame":
HCcustomBuild = self.transformHessianToFixedDomainFrame(HCcustomBuild.copy(), reference, proteinComplex, "R", self.utils.config.projectionStyle)
# else reference is the ligand
else:
if self.utils.config.whichCustomHC == "HC_U1":
if self.utils.config.projectionStyle == "full" or self.utils.config.projectionStyle == "intra":
if self.utils.config.whichCustomHIndividual == "HC_subvector":
HCcustomBuild = self.projectHessian(HCcustomBuild.copy(), reference, proteinComplex, "L", self.utils.config.projectionStyle, True, interCalphaIndicesHL)
#HCcustomBuild = self.projectHessian(HCcustomBuild.copy(), proteinComplex, proteinComplex, '', self.utils.config.projectionStyle, False, interCalphaIndicesHL)
elif self.utils.config.whichCustomHIndividual == "submatrix":
HLtildeH_ANew = self.projectHessian(HLtildeH_ANew.copy(), reference, proteinComplex, "L", self.utils.config.projectionStyle, False, interCalphaIndicesHL)
elif self.utils.config.projectionStyle == "fixedDomainFrame":
HCcustomBuild = self.transformHessianToFixedDomainFrame(HCcustomBuild.copy(), reference, proteinComplex, "L", self.utils.config.projectionStyle)
elif self.utils.config.investigationsOn == "Complex":
# project out the rigid body motions of the receptor. if the goal is to project the whole complex, do: HCcustomBuild = self.projectHessian(HCcustomBuild, proteinComplex, proteinComplex, '')
if self.utils.config.projectionStyle == "full" or self.utils.config.projectionStyle == "intra":
HCcustomBuild = self.projectHessian(HCcustomBuild.copy(), reference, proteinComplex, "R", self.utils.config.projectionStyle, True, interCalphaIndicesHR)
elif self.utils.config.projectionStyle == "fullComplex":
HCcustomBuild = self.projectHessian(HCcustomBuild.copy(), proteinComplex, proteinComplex, '', self.utils.config.projectionStyle)
elif self.utils.config.projectionStyle == "fixedDomainFrame":
HCcustomBuild = self.transformHessianToFixedDomainFrame(HCcustomBuild.copy(), reference, proteinComplex, "R", self.utils.config.projectionStyle)
else:
raise Exception('unknown projection style')
if self.utils.config.investigationsOn == "Complex" or self.utils.config.whichCustomHIndividual == "HC_subvector":
# Create the custom complex ANM
self._anm_complex_tilde = ANM(self._anm_complex[0].getTitle()+"_"+self.utils.config.whichCustomHC)
self._anm_complex_tilde.setHessian(HCcustomBuild)
if self.utils.config.calculateZeroEigvalModes:
if self.utils.config.whichCustomHC == "HC_0" or self.utils.config.whichCustomHC == "HC_06":
numberOfModesComplex += 6
self._anm_complex_tilde.calcModes(n_modes=numberOfModesComplex, zeros=True)
else:
self._anm_complex_tilde.calcModes(n_modes=numberOfModesComplex)
# Extend the self._anm_reference_tilde on all atoms
anm_complex_tilde_extend = extendModel(self._anm_complex_tilde, self._anm_complex[1], proteinComplex, norm=True)
# Then slice the anm_complex to the matched atoms
self._anm_complex_tilde_slc = sliceModel(anm_complex_tilde_extend[0], anm_complex_tilde_extend[1], selstr)
# Normalize the modes of the sliced anm
self._anm_complex_tilde_slc = self.getNormalizedANM(self._anm_complex_tilde_slc)
# Replace the complex anm and the complex_slc anm with the modified ANMs
print "Replacing ANM H with ANM Htilde for the complex"
self._anm_complex = (self._anm_complex_tilde, self._anm_complex[1])
self._anm_complex_slc = self._anm_complex_tilde_slc
# modify HR to have the sliced part of HC_tilde
if self.utils.config.investigationsOn == "Individual" or self.utils.config.investigationsOn == "Complex":
if self.utils.config.whichCustomHIndividual == "HC_subvector":
Marray = self.utils.sliceComplexModestoMatchProtein(self._anm_complex[0].getArray(), reference, encounter.getReferenceSegment())
self._anm_reference_tilde = ANM(self._anm_reference[0].getTitle()+"_"+self.utils.config.whichCustomHC)
self._anm_reference_tilde.setEigens(Marray, self._anm_complex[0].getEigvals())
self._anm_reference_tilde = (self._anm_reference_tilde, self._anm_reference[1])
self._anm_reference_tilde = self.getNormalizedANM(self._anm_reference_tilde)
# submatrix, take the new HRtilde/HLtilde, re-calculate its modes and replace the previous ANM
elif self.utils.config.whichCustomHIndividual == "submatrix":
if self.utils.isReceptor(reference.getTitle()):
submatrix = HRtildeH_ANew
else:
submatrix = HLtildeH_ANew
self._anm_reference_tilde = ANM(self._anm_reference[0].getTitle()+"_"+self.utils.config.whichCustomHC)
self._anm_reference_tilde.setHessian(submatrix)
if self.utils.config.calculateZeroEigvalModes:
self._anm_reference_tilde.calcModes(n_modes=numberOfModes, zeros=True)
else:
self._anm_reference_tilde.calcModes(n_modes=numberOfModes)
self._anm_reference_tilde = (self._anm_reference_tilde, self._anm_reference[1])
# Extend the self._anm_reference_tilde on all atoms
anm_reference_tilde_extend = extendModel(self._anm_reference_tilde[0], self._anm_reference[1], reference, norm=True)
# Then slice the anm_reference to the matched
self._anm_reference_tilde_slc = sliceModel(anm_reference_tilde_extend[0], anm_reference_tilde_extend[1], selstr)
self._anm_reference_tilde_slc = self.getNormalizedANM(self._anm_reference_tilde_slc)
# Replace reference and reference_slc with the modified ANMs
print "Replacing ANM H with ANM Htilde for the reference"
self._anm_reference = self._anm_reference_tilde
self._anm_reference_slc = self._anm_reference_tilde_slc
def calcANMsForPart2b2k(self, reference, counterpart, proteinComplex, ref_chain, counterpart_chain, chain_complex, numberOfModes, encounter, selstr='calpha', whatAtomsToMatch='calpha'):
""" Unbound complex to bound complex NMA, it is assumed that the reference is the receptor and is the first object in the complex pdb file
This method creates self.* NMA objects
Args:
reference: the receptor protein
counterpart: the ligand protein
proteinComplex: the protein complex
ref_chain: the matched part of the reference
counterpart_chain: the matched part of the counterpart
chain_complex: the matched part on the complex
numberOfModes: the 2k number of modes
encounter: object aggregating proteins
selstr: the selection string for the NMA, course grained is calpha
"""
# Create the anm of reference, counterpart and proteinComplex)
self._anm_reference, self._anm_reference_slc = self._calcANMsUnified(reference, ref_chain, numberOfModes/2, selstr, whatAtomsToMatch)
self._anm_counterpart, self._anm_counterpart_slc = self._calcANMsUnified(counterpart, counterpart_chain, numberOfModes/2, selstr, whatAtomsToMatch)
self._anm_complex, self._anm_complex_slc = self._calcANMsUnified(proteinComplex, chain_complex, numberOfModes, selstr, whatAtomsToMatch)
print "anm_reference anm_counterpart, anm_complex getArray() shapes : ", self._anm_reference[0].getArray().shape, self._anm_counterpart[0].getArray().shape, self._anm_complex[0].getArray().shape
print "anm_reference_slc, anm_counterpart_slc, anm_complex_slc getArray() shapes : ", self._anm_reference_slc[0].getArray().shape, self._anm_counterpart_slc[0].getArray().shape, self._anm_complex_slc[0].getArray().shape
# modify the hessians
if self.utils.config.customH:
HC = self._anm_complex[0].getHessian()
if self.utils.isReceptor(reference.getTitle()):
HR = self._anm_reference[0].getHessian()
HL = self._anm_counterpart[0].getHessian()
else:
HR = self._anm_counterpart[0].getHessian()
HL = self._anm_reference[0].getHessian()
HRtilde = HC[:HR.shape[0], :HR.shape[1]]
HLtilde = HC[HR.shape[0]:HR.shape[0]+HL.shape[0], HR.shape[1]:HR.shape[1]+HL.shape[1]]
assert HR.shape == HRtilde.shape
assert HL.shape == HLtilde.shape
# for now assert that reference is always the receptor, in case of complex investigation
assert self.utils.isReceptor(reference.getTitle())
HCcustomBuild = np.zeros((HC.shape[0], HC.shape[1]))
if self.utils.config.whichCustomHC == "HC_U1":
# create the complex hessian with interactions on the off diagonal using U1
print "HC_U1"
HRtildeH_ANew = self.calcCustomH_ANew(HR.copy(), encounter.getReference(), encounter.getUnboundCounterpart(), encounter, "C_u", "r_ij", True, selstr)
HLtildeH_ANew = self.calcCustomH_ANew(HL.copy(), encounter.getUnboundCounterpart(), encounter.getReference(), encounter, "C_u", "r_ij", False, selstr)
HRL_new = self.calcCustomH_ANew_IJ(encounter.getReference(), encounter.getUnboundCounterpart(), encounter, False, "r_ij", True, selstr)
elif self.utils.config.whichCustomHC == "HC_0" or self.utils.config.whichCustomHC == "HC_06":
# create the hessian by just using canonical HR and HL and offmatrices zero
print "HC_0 or HC_06"
HRtildeH_ANew = HR.copy()
HLtildeH_ANew = HL.copy()
HRL_new = np.zeros(((reference.select('calpha').numAtoms()*3), (counterpart.select('calpha').numAtoms()*3) ))
print "reference is receptor, shapes of HRtilde, HLtilde, HRL: ", HRtildeH_ANew.shape, HLtildeH_ANew.shape, HRL_new.shape
print "finished projecting H, anm_reference_tilde calc modes"
# put the new HRtilde and HLtilde inside HC
HCcustomBuild[:HR.shape[0], :HR.shape[1]] = HRtildeH_ANew
HCcustomBuild[HR.shape[0]:HR.shape[0]+HL.shape[0], HR.shape[1]:HR.shape[1]+HL.shape[1]] = HLtildeH_ANew
HCcustomBuild[0:HR.shape[0], HR.shape[1]:HR.shape[1]+HL.shape[1]] = HRL_new
HCcustomBuild[HR.shape[0]:HR.shape[0]+HL.shape[0], 0:HR.shape[1]] = HRL_new.T
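# HRL enters twice, as HRL and HRL.T, which keeps HCcustomBuild symmetric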
#if self.utils.config.whichCustomHC == "HC_U1":
# assert np.allclose(HC, HCcustomBuild) # assert this if k = 1, A = 15
# print "asserted HC with k1 A 15"
if self.utils.config.projectHessian:
HCcustomBuild = self.projectHessian(HCcustomBuild, proteinComplex, proteinComplex, '')
# make HC anm
self._anm_complex_tilde = ANM(self._anm_complex[0].getTitle()+"_"+self.utils.config.whichCustomHC)
self._anm_complex_tilde.setHessian(HCcustomBuild)
self._anm_complex_tilde.calcModes(n_modes=numberOfModes)
# Extend the self._anm_reference_tilde on all atoms
anm_complex_tilde_extend = extendModel(self._anm_complex_tilde, self._anm_complex[1], proteinComplex, norm=True)
# Then slice the anm_complex to the matched atoms
self._anm_complex_tilde_slc = sliceModel(anm_complex_tilde_extend[0], anm_complex_tilde_extend[1], chain_complex.getSelstr())
# Replace the complex anm and the complex_slc anm with the modified ANMs
print "Replacing ANM H with ANM Htilde for the complex"
self._anm_complex = (self._anm_complex_tilde, self._anm_complex[1])
self._anm_complex_slc = self._anm_complex_tilde_slc
def calcANMsForPart2b(self, reference, counterpart, proteinComplex, ref_chain, counterpart_chain, chain_complex, numberOfModes, encounter, selstr='calpha', whatAtomsToMatch='calpha'):
""" Create the ANMs of the reference, counterpart and complex objects. If set in config, project the hessian matrix of the reference
to ensure 6 zero eigenvalue modes, see formula 8.27 from the book "A practical introduction to the simulation of molecular dynamics", Field. """
self._anm_reference, self._anm_reference_slc = self._calcANMsUnified(reference, ref_chain, numberOfModes, selstr, whatAtomsToMatch)
self._anm_counterpart = calcANM(counterpart, selstr=selstr)
# self._moveSegment(proteinComplex, "L", 50)
numberOfModesComplex = min((proteinComplex.select('calpha').numAtoms()*3 - 6), self.utils.config.maxModesToCalculate)
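# a structure with N calphas has at most 3N-6 nontrivial modes (the 6 rigid-body degrees of freedom are trivial), hence the cap before applying the configured maximum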
self._anm_complex, self._anm_complex_slc = self._calcANMsUnified(proteinComplex, chain_complex, numberOfModesComplex, selstr, whatAtomsToMatch)
# project hessian matrix
if self.utils.config.projectHessian:
HC = self._anm_complex[0].getHessian()
if self.utils.isReceptor(reference.getTitle()):
HR = self._anm_reference[0].getHessian()
HL = self._anm_counterpart[0].getHessian()
else:
HR = self._anm_counterpart[0].getHessian()
HL = self._anm_reference[0].getHessian()
HRtilde = HC[:HR.shape[0], :HR.shape[1]]
HLtilde = HC[HR.shape[0]:HR.shape[0]+HL.shape[0], HR.shape[1]:HR.shape[1]+HL.shape[1]]
assert HR.shape == HRtilde.shape
assert HL.shape == HLtilde.shape
##
#writeArray("HRtildefromHC.txt", HRtilde, format='%f')
#writeArray("HLtildefromHC.txt", HLtilde, format='%f')
##
# Create the tilde ANM
self._anm_reference_tilde = ANM(self._anm_reference[0].getTitle()+"_tilde")
# Here the PH'P treatment for the hessian matrix from the normal modes book by Field
if self.utils.isReceptor(reference.getTitle()):
if self.utils.config.modifyHDelta:
print "modifying HR with deltaHR"
HRtilde = self.addscaledHdelta(HR, HRtilde, self.utils.config.deltamultiplicatorForH)
# if using terms with the true bound structure second derivative parts, with equilibrium distances r_{ij} and r_{ij}^{b}
if self.utils.config.customHR_A:
#writeArray("originalHR.txt", self._anm_reference[0].getHessian(), format='%f')
HRtilde = self.calcCustomH_A_NeighborsBound(self._anm_reference[0].getHessian(), encounter, selstr)
#writeArray("customHRtilde.txt", HRtilde, format='%f')
print "reference is receptor, shape of HRtilde: ", HRtilde.shape
HRtilde = self.projectHessian(HRtilde, reference, proteinComplex, encounter.getReferenceSegment())
self._anm_reference_tilde.setHessian(HRtilde)
else:
if self.utils.config.modifyHDelta:
print "modifying HL with deltaHL"
HLtilde = self.addscaledHdelta(HL, HLtilde, self.utils.config.deltamultiplicatorForH)
# if using terms with the true bound structure second derivative parts, with equilibrium distances r_{ij} and r_{ij}^{b}
if self.utils.config.customHR_A:
#writeArray("originalHL.txt", self._anm_reference[0].getHessian(), format='%f')
HLtilde = self.calcCustomH_A_NeighborsBound(self._anm_reference[0].getHessian(), encounter, selstr)
#writeArray("customHLtilde.txt", HLtilde, format='%f')
print "reference is ligand, shape of HLtilde: ", HLtilde.shape
HLtilde = self.projectHessian(HLtilde, reference, proteinComplex, encounter.getReferenceSegment())
self._anm_reference_tilde.setHessian(HLtilde)
print "finished projecting H, anm_reference_tilde calc modes"
# testing of projected eigenvals
self._anm_reference_tilde.calcModes(n_modes=numberOfModes)
#print "HR eigenvals: ", self._anm_reference[0].getEigvals()[0:10]
#print "HRtilde eigenvals: ", self._anm_reference_tilde.getEigvals()[0:10]
# Extend the self._anm_reference_tilde on all atoms
anm_reference_tilde_extend = extendModel(self._anm_reference_tilde, self._anm_reference[1], reference, norm=True)
# Then slice the anm_reference to the matched
self._anm_reference_tilde_slc = sliceModel(anm_reference_tilde_extend[0], anm_reference_tilde_extend[1], ref_chain.getSelstr())
# Replace reference and reference_slc with the modified ANMs
print "Replacing ANM H with ANM Htilde for the reference"
self._anm_reference = (self._anm_reference_tilde, self._anm_reference[1])
self._anm_reference_slc = self._anm_reference_tilde_slc
if self.utils.config.HR1kHRtilde1k:
self._anm_reference_original, self._anm_reference_slc_original = self._calcANMsUnified(reference, ref_chain, numberOfModes, selstr, whatAtomsToMatch)
def calcANMsForPart2bIndividualProtein_U1(self, reference, counterpart, proteinComplex, ref_chain, counterpart_chain, chain_complex, numberOfModes, encounter, selstr='calpha', whatAtomsToMatch='calpha'):
""" Create the ANMs of the reference, counterpart and complex objects. If set in config, project the hessian matrix of the reference
to ensure 6 zero eigenvalue modes, see formula 8.27 from the book "A practical introduction to the simulation of molecular dynamics", Field. """
self._anm_reference, self._anm_reference_slc = self._calcANMsUnified(reference, ref_chain, numberOfModes, selstr, whatAtomsToMatch)
self._anm_counterpart = calcANM(counterpart, selstr=selstr)
# self._moveSegment(proteinComplex, "L", 50)
numberOfModesComplex = min((proteinComplex.select('calpha').numAtoms()*3 - 6), self.utils.config.maxModesToCalculate)
self._anm_complex, self._anm_complex_slc = self._calcANMsUnified(proteinComplex, chain_complex, numberOfModesComplex, selstr, whatAtomsToMatch)
###
print "anm_reference anm_counterpart, anm_complex getArray() shapes : ", self._anm_reference[0].getArray().shape, self._anm_counterpart[0].getArray().shape, self._anm_complex[0].getArray().shape
print "anm_reference_slc, anm_complex_slc getArray() shapes : ", self._anm_reference_slc[0].getArray().shape, self._anm_complex_slc[0].getArray().shape
# create custom H via U1
if self.utils.config.customH:
HC = self._anm_complex[0].getHessian()
if self.utils.isReceptor(reference.getTitle()):
HR = self._anm_reference[0].getHessian()
HL = self._anm_counterpart[0].getHessian()
else:
HR = self._anm_counterpart[0].getHessian()
HL = self._anm_reference[0].getHessian()
HRtilde = HC[:HR.shape[0], :HR.shape[1]]
HLtilde = HC[HR.shape[0]:HR.shape[0]+HL.shape[0], HR.shape[1]:HR.shape[1]+HL.shape[1]]
assert HR.shape == HRtilde.shape
assert HL.shape == HLtilde.shape
# build the custom complex hessian; both the receptor-reference and the ligand-reference case are handled below
HCcustomBuild = np.zeros((HC.shape[0], HC.shape[1]))
if self.utils.isReceptor(reference.getTitle()):
if self.utils.config.customHR_A:
#HR, referenceStructure, neighborStructure, encounter, neighborhoodFrom, equilibriumAt, workOnReceptor=True, selstr='calpha'
# calcCustomH_ANew returns the modified hessian plus the indices of calphas with intermolecular contacts
HRtildeH_ANew, interCalphaIndicesR = self.calcCustomH_ANew(HR.copy(), encounter.getReference(), encounter.getUnboundCounterpart(), encounter, "C_u", "r_ij", True, selstr)
HLtildeH_ANew, interCalphaIndicesL = self.calcCustomH_ANew(HL.copy(), encounter.getUnboundCounterpart(), encounter.getReference(), encounter, "C_u", "r_ij", False, selstr)
HRL_new = self.calcCustomH_ANew_IJ(encounter.getReference(), encounter.getUnboundCounterpart(), encounter, False, "r_ij", True, selstr)
print "reference is receptor, shapes of HRtilde, HLtilde, HRL: ", HRtildeH_ANew.shape, HLtildeH_ANew.shape, HRL_new.shape
else:
if self.utils.config.customHR_A:
HRtildeH_ANew, interCalphaIndicesR = self.calcCustomH_ANew(HR.copy(), encounter.getUnboundCounterpart(), encounter.getReference(), encounter, "C_u", "r_ij", False, selstr)
HLtildeH_ANew, interCalphaIndicesL = self.calcCustomH_ANew(HL.copy(), encounter.getReference(), encounter.getUnboundCounterpart(), encounter, "C_u", "r_ij", True, selstr)
HRL_new = self.calcCustomH_ANew_IJ(encounter.getUnboundCounterpart(), encounter.getReference(), encounter, False, "r_ij", False, selstr)
print "reference is ligand, shapes of HLtilde, HRtilde, HRL: ", HLtildeH_ANew.shape, HRtildeH_ANew.shape, HRL_new.shape
print "finished projecting H, anm_reference_tilde calc modes"
# put the new HRtilde and HLtilde inside HC
HCcustomBuild[:HR.shape[0], :HR.shape[1]] = HRtildeH_ANew
HCcustomBuild[HR.shape[0]:HR.shape[0]+HL.shape[0], HR.shape[1]:HR.shape[1]+HL.shape[1]] = HLtildeH_ANew
HCcustomBuild[0:HR.shape[0], HR.shape[1]:HR.shape[1]+HL.shape[1]] = HRL_new
HCcustomBuild[HR.shape[0]:HR.shape[0]+HL.shape[0], 0:HR.shape[1]] = HRL_new.T
#assert np.allclose(HC, HCcustomBuild)
#sys.exit()
# Project the reference part in the HCcustomBuild matrix
if self.utils.isReceptor(reference.getTitle()):
if self.utils.config.customHR_A:
HCcustomBuildprojected = self.projectHessian(HCcustomBuild.copy(), reference, proteinComplex, "R", True)
else:
if self.utils.config.customHR_A:
HCcustomBuildprojected = self.projectHessian(HCcustomBuild.copy(), reference, proteinComplex, "L", True)
# Create the custom complex ANM
self._anm_complex_tilde = ANM(self._anm_complex[0].getTitle()+"_tilde")
self._anm_complex_tilde.setHessian(HCcustomBuildprojected)
if self.utils.config.enforceAllModesAfterProjection:
self._anm_complex_tilde.calcModes(n_modes=numberOfModes, zeros=True)
else:
self._anm_complex_tilde.calcModes(n_modes=numberOfModes)
# Extend the self._anm_reference_tilde on all atoms
anm_complex_tilde_extend = extendModel(self._anm_complex_tilde, self._anm_complex[1], proteinComplex, norm=True)
# Then slice the anm_complex to the matched atoms
self._anm_complex_tilde_slc = sliceModel(anm_complex_tilde_extend[0], anm_complex_tilde_extend[1], chain_complex.getSelstr())
# Replace the complex anm and the complex_slc anm with the modified ANMs
print "Replacing ANM H with ANM Htilde for the complex"
self._anm_complex = (self._anm_complex_tilde, self._anm_complex[1])
self._anm_complex_slc = self._anm_complex_tilde_slc
# Create custom anm for reference
if self.utils.config.enforceAllModesAfterProjection:
Marray = self.utils.sliceComplexModestoMatchProtein(self._anm_complex[0].getArray()[:,6:], reference, encounter.getReferenceSegment())
self._anm_reference_tilde = ANM(self._anm_reference[0].getTitle()+"_tilde")
self._anm_reference_tilde.setEigens(Marray, self._anm_complex[0].getEigvals()[6:])
else:
Marray = self.utils.sliceComplexModestoMatchProtein(self._anm_complex[0].getArray(), reference, encounter.getReferenceSegment())
self._anm_reference_tilde = ANM(self._anm_reference[0].getTitle()+"_tilde")
self._anm_reference_tilde.setEigens(Marray, self._anm_complex[0].getEigvals())
# Extend the self._anm_reference_tilde on all atoms
anm_reference_tilde_extend = extendModel(self._anm_reference_tilde, self._anm_reference[1], reference, norm=True)
# Then slice the anm_reference to the matched
self._anm_reference_tilde_slc = sliceModel(anm_reference_tilde_extend[0], anm_reference_tilde_extend[1], ref_chain.getSelstr())
#
# try modes comparison
# ranges = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 20, 30, 40, 50, 60, 70]
#
# try:
# subspaceOverlaps = []
# for val in ranges:
# subspaceOverlaps.append(calcSubspaceOverlap(self._anm_reference[0][0:val], self._anm_reference_tilde[0:val]))
# encounter.storeSubSpaceOverlaps(subspaceOverlaps, ranges)
# except Exception:
# sys.exc_clear()
#
# try:
# MarrayNormed = self.utils.normalized(Marray.copy(), axis=0)
# anm_reference_tilde_normed = ANM(self._anm_reference[0].getTitle()+"_tildenormed")
# anm_reference_tilde_normed.setEigens(MarrayNormed, self._anm_complex[0].getEigvals())
# covarianceOverlaps = []
# for val in ranges:
# covarianceOverlaps.append(calcCovOverlap(self._anm_reference[0][0:val], anm_reference_tilde_normed[0:val]))
# encounter.storeCovarianceOverlap(covarianceOverlaps, ranges)
# except Exception, err:
# #sys.exc_clear()
# print "Exception covarianceoverlap occurred: ", err
# print traceback.format_exc()
#
# try:
# overlapTable = getOverlapTable(self._anm_reference[0], self._anm_reference_tilde)
# encounter.storeOverlapTable(overlapTable)
# except Exception:
# sys.exc_clear()
#
# Replace reference and reference_slc with the modified ANMs
print "Replacing ANM H with ANM Htilde for the reference"
self._anm_reference = (self._anm_reference_tilde, self._anm_reference[1])
self._anm_reference_slc = self._anm_reference_tilde_slc
def _calcANMsUnified(self, reference, ref_chain, numberOfModes, selstr='calpha', whatAtomsToMatch='calpha', direct_call=None):
""" Calculate the ANM of reference, extend it to all atoms and slice it back to the matched
atoms (ref_chain if a bound structure is provided, otherwise selstr). """
# Create the anm of the reference
#writePDB(reference.getTitle()+"forANMmoved.pdb", reference)
if self.utils.config.calculateZeroEigvalModes == True:
anm_reference = calcANM(reference, n_modes=numberOfModes, selstr=selstr, zeros=True)
else:
anm_reference = calcANM(reference, n_modes=numberOfModes, selstr=selstr)
# Extend the anm_reference on all atoms
anm_reference_extend = extendModel(anm_reference[0], anm_reference[1], reference, norm=True)
# Then slice the anm_reference to the matched
if direct_call == None:
if self.bound_provided == True:
anm_reference_slc = sliceModel(anm_reference_extend[0], anm_reference_extend[1], ref_chain.getSelstr())
else:
anm_reference_slc = sliceModel(anm_reference_extend[0], anm_reference_extend[1], selstr)
else:
anm_reference_slc = sliceModel(anm_reference_extend[0], anm_reference_extend[1], selstr)
# Normalize the slices anm
anm_reference_slc = self.getNormalizedANM(anm_reference_slc)
if direct_call == True:
self._anm_reference = anm_reference
self._anm_reference_slc = anm_reference_slc
else:
return anm_reference, anm_reference_slc
def getNormalizedANM(self, anm):
""" Normalize the modes of the anm and return this anm object
Args:
anm: the anm with modes calculated
Returns: anm with normalized modes
"""
M = self.normalizeM(anm[0].getArray())
eigenvals = anm[0].getEigvals()
anm[0].setEigens(M, eigenvals)
return anm
def _moveSegment(self, reference, segment, angstrom):
""" Move all atoms x,y,z, belonging to the segment the number in angstrom """
print "15 ang contact before moving atoms:", reference.select('same residue as exwithin 15 of segment "L." ').numAtoms()
ref_select = reference.select('segment \"'+segment+'.\"')
ref_select.setCoords(ref_select.getCoords()+angstrom)
if reference.select('same residue as exwithin 15 of segment "L." ') != None:
print "15 ang contact after moving atoms: ", reference.select('same residue as exwithin 15 of segment "L." ').numAtoms()
else:
print "15 ang contact after moving atoms: 0"
def replaceReferenceANMs(self, anm_new, reference, ref_chain = None):
""" Replace the anm of reference with anm_new and normalize along the way.
Args:
anm_new: the new ANM
reference: the protein the ANM was created on
ref_chain: the matched chains of reference
Result:
replaced self._anm_reference and self._anm_reference_slc based on normalized anm_new
"""
self._anm_reference = anm_new
self._anm_reference = self.getNormalizedANM(self._anm_reference)
# Extend the self._anm_reference_tilde on all atoms
anm_reference_extend = extendModel(self._anm_reference[0], self._anm_reference[1], reference, norm=True)
# Then slice the anm_reference to the matched
if ref_chain != None:
self._anm_reference_slc = sliceModel(anm_reference_extend[0], anm_reference_extend[1], ref_chain.getSelstr())
else:
self._anm_reference_slc = sliceModel(anm_reference_extend[0], anm_reference_extend[1], 'calpha')
self._anm_reference_slc = self.getNormalizedANM(self._anm_reference_slc)
def replaceComplexANMs(self, anm_new, proteinComplex, complex_chain = None):
""" Replace the anm of the complex with anm_new and normalize along the way.
Args:
anm_new: the new ANM
proteinComplex: the complex that the ANM was created on
complex_chain: the matched chains of the complex
Result:
replaced self._anm_complex and self._anm_complex_slc based on normalized anm_new
"""
self._anm_complex = anm_new
self._anm_complex = self.getNormalizedANM(self._anm_complex)
# Extend the self.self._anm_complex_tilde on all atoms
anm_complex_extend = extendModel(self._anm_complex[0], self._anm_complex[1], proteinComplex, norm=True)
# Then slice the anm_reference to the matched
if complex_chain != None:
self._anm_complex_slc = sliceModel(anm_complex_extend[0], anm_complex_extend[1], complex_chain.getSelstr())
else:
self._anm_complex_slc = sliceModel(anm_complex_extend[0], anm_complex_extend[1], 'calpha')
self._anm_complex_slc = self.getNormalizedANM(self._anm_complex_slc)
def calcANMSlcInterface(self, ref_chain_interface, reference, titleOfReferenceSingleProtein, isBoundComplex=False):
self._anm_slc_interface = self.getSlicedInterfaceANM(self.getANMExtend(), ref_chain_interface, reference, titleOfReferenceSingleProtein, isBoundComplex)
def getSlicedInterfaceANM(self, anm_ext, ref_chain_interface, reference, titleOfReferenceSingleProtein, isBoundComplex=False):
selectionAtoms = self.createSlcSelectionString(reference, isBoundComplex, ref_chain_interface, titleOfReferenceSingleProtein)
anm_slc_interface = sliceModel(anm_ext[0], anm_ext[1], selectionAtoms)
return anm_slc_interface
def calcInterfaceANMsforPart2a2k(self, encounter):
self._anm_reference_slc_interface = self._slicedInterfaceANMs(self._anm_reference, encounter.getMobile(), encounter.getMobChainInterface())
self._anm_counterpart_slc_interface = self._slicedInterfaceANMs(self._anm_counterpart, encounter.getBoundCounterpart(), encounter.getBoundCounterpartChainInterface())
self._anm_boundcomplex_slc_interface = self._slicedInterfaceANMs(self._anm_complex, encounter.boundComplex.complex , encounter.getBoundComplexChainInterface())
assert (self._anm_reference_slc_interface[1].numAtoms()
+ self._anm_counterpart_slc_interface[1].numAtoms()
== self._anm_boundcomplex_slc_interface[1].numAtoms())
for i in range(0, self._anm_reference_slc_interface[1].numAtoms()):
assert self._anm_reference_slc_interface[1][i].getResname() == self._anm_boundcomplex_slc_interface[1][i].getResname()
assert np.alltrue(self._anm_reference_slc_interface[1][i].getCoords() == self._anm_boundcomplex_slc_interface[1][i].getCoords())
assert self._anm_reference_slc_interface[1][i].getName() == self._anm_boundcomplex_slc_interface[1][i].getName()
offsetAtoms = self._anm_reference_slc_interface[1].numAtoms()
for i in range(0, self._anm_counterpart_slc_interface[1].numAtoms()):
j = i + offsetAtoms
assert self._anm_counterpart_slc_interface[1][i].getResname() == self._anm_boundcomplex_slc_interface[1][j].getResname()
assert np.alltrue(self._anm_counterpart_slc_interface[1][i].getCoords() == self._anm_boundcomplex_slc_interface[1][j].getCoords())
assert self._anm_counterpart_slc_interface[1][i].getName() == self._anm_boundcomplex_slc_interface[1][j].getName()
def calcInterfaceANMsUnified(self, reference, counterpart, proteinComplex, ref_chain_interface, counterpart_chain_interface, complex_chain_interface):
""" Calculate (slice) the ANMs according to the interfaces on prot1, prot2 and their complex representation.
Args:
reference: prot1
counterpart: prot2
proteinComplex: prot1 and prot2 as one parsed object
ref_chain_interface: interface of prot1
counterpart_chain_interface: interface of prot2
complex_chain_interface: interface of the proteinComplex
"""
self._anm_reference_slc_interface = self._slicedInterfaceANMs(self._anm_reference, reference, ref_chain_interface)
self._anm_counterpart_slc_interface = self._slicedInterfaceANMs(self._anm_counterpart, counterpart, counterpart_chain_interface)
self._anm_boundcomplex_slc_interface = self._slicedInterfaceANMs(self._anm_complex, proteinComplex, complex_chain_interface)
# normalize modes
self._anm_reference_slc_interface = self.getNormalizedANM(self._anm_reference_slc_interface)
self._anm_counterpart_slc_interface = self.getNormalizedANM(self._anm_counterpart_slc_interface)
self._anm_boundcomplex_slc_interface = self.getNormalizedANM(self._anm_boundcomplex_slc_interface)
assert (self._anm_reference_slc_interface[1].numAtoms()
+ self._anm_counterpart_slc_interface[1].numAtoms()
== self._anm_boundcomplex_slc_interface[1].numAtoms())
assertANMAtomEquality = False
if assertANMAtomEquality:
if self.utils.isReceptor(reference.getTitle()):
for i in range(0, self._anm_reference_slc_interface[1].numAtoms()):
# print i, self._anm_reference_slc_interface[1][i].getCoords(), self._anm_boundcomplex_slc_interface[1][i].getCoords()
assert self._anm_reference_slc_interface[1][i].getResname() == self._anm_boundcomplex_slc_interface[1][i].getResname()
assert np.alltrue(self._anm_reference_slc_interface[1][i].getCoords() == self._anm_boundcomplex_slc_interface[1][i].getCoords())
# item1roundedChoords = [round(x, 3) for x in self._anm_reference_slc_interface[1][i].getCoords().tolist()]
# item2roundedChoords = [round(x, 3) for x in self._anm_boundcomplex_slc_interface[1][i].getCoords().tolist()]
# assert np.alltrue(item1roundedChoords == item2roundedChoords)
assert self._anm_reference_slc_interface[1][i].getName() == self._anm_boundcomplex_slc_interface[1][i].getName()
offsetAtoms = self._anm_reference_slc_interface[1].numAtoms()
for i in range(0, self._anm_counterpart_slc_interface[1].numAtoms()):
j = i + offsetAtoms
assert self._anm_counterpart_slc_interface[1][i].getResname() == self._anm_boundcomplex_slc_interface[1][j].getResname()
assert np.alltrue(self._anm_counterpart_slc_interface[1][i].getCoords() == self._anm_boundcomplex_slc_interface[1][j].getCoords())
# item1roundedChoords = [round(x, 3) for x in self._anm_counterpart_slc_interface[1][i].getCoords().tolist()]
# item2roundedChoords = [round(x, 3) for x in self._anm_boundcomplex_slc_interface[1][j].getCoords().tolist()]
# assert np.alltrue(item1roundedChoords == item2roundedChoords)
assert self._anm_counterpart_slc_interface[1][i].getName() == self._anm_boundcomplex_slc_interface[1][j].getName()
else:
offsetAtoms = self._anm_counterpart_slc_interface[1].numAtoms()
for i in range(0, self._anm_reference_slc_interface[1].numAtoms()):
j = i + offsetAtoms
# print i, self._anm_reference_slc_interface[1][i].getCoords(), self._anm_boundcomplex_slc_interface[1][i].getCoords()
assert self._anm_reference_slc_interface[1][i].getResname() == self._anm_boundcomplex_slc_interface[1][j].getResname()
assert np.alltrue(self._anm_reference_slc_interface[1][i].getCoords() == self._anm_boundcomplex_slc_interface[1][j].getCoords())
# item1roundedChoords = [round(x, 3) for x in self._anm_reference_slc_interface[1][i].getCoords().tolist()]
# item2roundedChoords = [round(x, 3) for x in self._anm_boundcomplex_slc_interface[1][j].getCoords().tolist()]
# assert np.alltrue(item1roundedChoords == item2roundedChoords)
assert self._anm_reference_slc_interface[1][i].getName() == self._anm_boundcomplex_slc_interface[1][j].getName()
for i in range(0, self._anm_counterpart_slc_interface[1].numAtoms()):
assert self._anm_counterpart_slc_interface[1][i].getResname() == self._anm_boundcomplex_slc_interface[1][i].getResname()
assert np.alltrue(self._anm_counterpart_slc_interface[1][i].getCoords() == self._anm_boundcomplex_slc_interface[1][i].getCoords())
# item1roundedChoords = [round(x, 3) for x in self._anm_counterpart_slc_interface[1][i].getCoords().tolist()]
# item2roundedChoords = [round(x, 3) for x in self._anm_boundcomplex_slc_interface[1][i].getCoords().tolist()]
# assert np.alltrue(item1roundedChoords == item2roundedChoords)
assert self._anm_counterpart_slc_interface[1][i].getName() == self._anm_boundcomplex_slc_interface[1][i].getName()
def _slicedInterfaceANMs(self, anm, reference, interface):
""" Slice an anm to match the provided interface.
Args:
anm: the anm to be sliced
reference: the protein that the anm is based upon, necessary for extension of the model first
interface: the interface of the protein
"""
anm_ext = extendModel(anm[0], anm[1], reference, norm=True)
anm_slc = sliceModel(anm_ext[0], anm_ext[1], interface.getSelstr())
anm_slc = self.getNormalizedANM(anm_slc)
return anm_slc
def getANM(self):
""" Get the ANM calculated on the reference (default) calpha atoms. """
if self._anm == None:
raise Exception('self._anm == None')
return self._anm
def getANMExtend(self):
""" Get the ANM extended to the whole reference (all atoms). """
if self._anm_extend == None:
raise Exception('self._anm_extend == None')
return self._anm_extend
def getANMSlc(self):
""" Get the sliced back ANM to match all atoms in the ref_chain."""
if self._anm_slc == None:
raise Exception('self._anm_slc == None')
return self._anm_slc
def getANMSlcCounterpart(self):
""" Get the sliced back ANM to match all atoms in the counterpart chain(s) """
if self._anm_slc_counterpart == None:
raise Exception('self._anm_slc_counterpart == None')
return self._anm_slc_counterpart
def getANMSlcInterface(self):
""" Get the sliced back ANM to match all atoms in the ref_chain_interface. """
if self._anm_slc_interface == None:
raise Exception('self._anm_slc_interface == None')
return self._anm_slc_interface
def getANMComplexSlc(self):
""" Get the sliced back ANM to match all atoms in the chain_complex. """
if self._anm_complex_slc == None:
raise Exception('self._anm_complex_slc == None')
return self._anm_complex_slc
def getANMReference2a2kSlc(self):
""" Get the sliced back self._anm_reference_slc ANM to match all atoms in the reference variable. """
if self._anm_reference_slc == None:
raise Exception('self._anm_reference_slc == None')
return self._anm_reference_slc
def getANMCounterpart2a2kSlc(self):
""" Get the sliced back self._anm_counterpart_slc ANM to match all atoms in the counterpart variable. """
if self._anm_counterpart_slc == None:
raise Exception('self._anm_counterpart_slc == None')
return self._anm_counterpart_slc
def getANMReference(self):
if self._anm_reference == None:
raise Exception('self._anm_reference == None')
return self._anm_reference
def getANMReferenceSlc(self):
if self._anm_reference_slc == None:
raise Exception('self._anm_reference_slc == None')
return self._anm_reference_slc
def getANMCounterpart(self):
if self._anm_counterpart == None:
raise Exception('self._anm_counterpart == None')
return self._anm_counterpart
def getANMComplex(self):
if self._anm_complex == None:
raise Exception('self._anm_complex == None')
return self._anm_complex
def getANMReferenceSlcInterface(self):
if self._anm_reference_slc_interface == None:
raise Exception('self._anm_reference_slc_interface == None')
return self._anm_reference_slc_interface
def getANMCounterpartSlcInterface(self):
if self._anm_counterpart_slc_interface == None:
raise Exception('self._anm_counterpart_slc_interface == None')
return self._anm_counterpart_slc_interface
def getANMComplexSlcInterface(self):
if self._anm_boundcomplex_slc_interface == None:
raise Exception('self._anm_boundcomplex_slc_interface == None')
return self._anm_boundcomplex_slc_interface
def getANMPath(self, reference, numberOfModes, selstr, whatAtomsToMatch, modified=""):
path = self.utils.config.anmPath
prefix = reference.getTitle()
prefix = prefix.replace(" ", "_")
if modified == "":
return path+prefix+"_modes"+str(numberOfModes)+"_buildOn"+selstr+"_matchedOn"+whatAtomsToMatch
elif modified == "extended":
return path+"extended/"+prefix+"_modes"+str(numberOfModes)+"_buildOn"+selstr+"_matchedOn"+whatAtomsToMatch+"_extended"
elif modified == "slicedback":
return path+"slicedback/"+prefix+"_modes"+str(numberOfModes)+"_buildOn"+selstr+"_matchedOn"+whatAtomsToMatch+"_slicedback"
else:
raise Exception("the variable modified is not the empty string, extended or slicedback.")
def doesANMExist(self, reference, numberOfModes, selstr, whatAtomsToMatch, modified=""):
path = self.utils.config.anmPath
try:
with open(self.getANMPath(reference, numberOfModes, selstr, whatAtomsToMatch, modified)+".anm.npz"):
return True
except IOError:
return False
def projectHessian(self, hessian, reference, proteinComplex, referenceSegment, projectionStyle="full", projectOnlyReferencePartOfHC=False, interCalphaIndices=None):
""" Return the projected hessian H' = P H P which has 6 zero eigenvalues according to the formula 8.27
from the book "A practical introduction to the simulation of molecular dynamics", Field.
However, here it is made sure that the assumed basis is orthonormal via np.linalg.qr applied
on the six vectors discussed in this book.
Args:
hessian: the hessian to be projected
reference: the protein the hessian or HRtilde/HLtilde of the hessian was created on
proteinComplex: the whole protein that reference is part of
referenceSegment: if reference is receptor, provide "R", else it needs to be ligand, provide "L"
projectionStyle: project away from "full" (intra+inter) or "intra" (intra) or "fullComplex", defaults to "full"
projectOnlyReferencePartOfHC: if true, the hessian was created on reference, if false, HRtilde or HLtilde
of the hessian were created on the reference
interCalphaIndices: list of calpha indices that have intermolecular interactions
Returns: projected hessian with 6 external degrees of freedom (rotation and translation) removed
"""
assert projectionStyle == "full" or projectionStyle == "intra" or projectionStyle == "fullComplex"
normalize = True
numAtoms = reference.select('calpha').numAtoms()
numCoords = numAtoms*3
centerOfCoords = calcCenter(reference.select('calpha'))
assert np.alltrue(centerOfCoords == calcCenter(proteinComplex.select('segment \"'+referenceSegment+'.\"').select('calpha')))
print "before projection symmetry ==, allclose: ", np.all(hessian-hessian.T==0), np.allclose(hessian, hessian.T)
if projectOnlyReferencePartOfHC:
numComplexAtoms = proteinComplex.select('calpha').numAtoms()
numComplexCoords = numComplexAtoms*3
numCounterpartCoords = numComplexCoords - numCoords
if referenceSegment == "R":
assert numCounterpartCoords == proteinComplex.select('segment \"L.\"').select('calpha').numAtoms() * 3
else:
assert numCounterpartCoords == proteinComplex.select('segment \"R.\"').select('calpha').numAtoms() * 3
# Create null vector with length of the counterpart calphas
counterPartNullVector = np.zeros(numCounterpartCoords)
# Create I
I = np.identity(numCoords)
# Create the three translation vectors Tx, Ty, Tz
Tx = np.zeros(numCoords)
Tx = self.utils.fill3DArrayWithValue(Tx, 1.0, 0)
Ty = np.zeros(numCoords)
Ty = self.utils.fill3DArrayWithValue(Ty, 1.0, 1)
Tz = np.zeros(numCoords)
Tz = self.utils.fill3DArrayWithValue(Tz, 1.0, 2)
# Create the three rotation vectors Rx, Ry, Rz
coordsCopy = reference.select('calpha').getCoords().copy()
Rx = self.utils.createRx(coordsCopy)
coordsCopy2 = reference.select('calpha').getCoords().copy()
Ry = self.utils.createRy(coordsCopy2)
coordsCopy3 = reference.select('calpha').getCoords().copy()
Rz = self.utils.createRz(coordsCopy3)
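# the R* vectors are the infinitesimal rigid rotations about each axis, e.g. rotation
# about x maps atom i's (x_i, y_i, z_i) to (0, -z_i, y_i); this is the usual convention
# and is assumed to be what utils.createRx/Ry/Rz implement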
# remove inter atoms from projection
if projectionStyle == "intra":
Tx = self.removeInterAtoms(Tx, interCalphaIndices)
Ty = self.removeInterAtoms(Ty, interCalphaIndices)
Tz = self.removeInterAtoms(Tz, interCalphaIndices)
Rx = self.removeInterAtoms(Rx, interCalphaIndices)
Ry = self.removeInterAtoms(Ry, interCalphaIndices)
Rz = self.removeInterAtoms(Rz, interCalphaIndices)
if projectOnlyReferencePartOfHC:
# overwrite previous I
I = np.identity(numComplexCoords)
# extend (with the nullvector) the rotational and translational vectors to the dimension of the complex
if referenceSegment == "R":
Tx = np.concatenate((Tx, counterPartNullVector))
Ty = np.concatenate((Ty, counterPartNullVector))
Tz = np.concatenate((Tz, counterPartNullVector))
Rx = np.concatenate((Rx, counterPartNullVector))
Ry = np.concatenate((Ry, counterPartNullVector))
Rz = np.concatenate((Rz, counterPartNullVector))
else:
Tx = np.concatenate((counterPartNullVector, Tx))
Ty = np.concatenate((counterPartNullVector, Ty))
Tz = np.concatenate((counterPartNullVector, Tz))
Rx = np.concatenate((counterPartNullVector, Rx))
Ry = np.concatenate((counterPartNullVector, Ry))
Rz = np.concatenate((counterPartNullVector, Rz))
# Normalize translation vectors and apply rotational fix
if normalize:
Tx = Vector(Tx)
#Tx = self.subtractCenterOfCoords(Tx, centerOfCoords[0], 0.0, 0.0)
Tx = Tx.getNormed().getArray()
Ty = Vector(Ty)
#Ty = self.subtractCenterOfCoords(Ty, 0.0, centerOfCoords[1], 0.0)
Ty = Ty.getNormed().getArray()
Tz = Vector(Tz)
#Tz = self.subtractCenterOfCoords(Tz, 0.0, 0.0, centerOfCoords[2])
Tz = Tz.getNormed().getArray()
Rx = Vector(Rx)
#Rx = self.subtractCenterOfCoords(Rx, 0.0, centerOfCoords[2], centerOfCoords[1])
Rx = Rx.getNormed().getArray()
Ry = Vector(Ry)
#Ry = self.subtractCenterOfCoords(Ry, centerOfCoords[2], 0.0, centerOfCoords[0])
Ry = Ry.getNormed().getArray()
Rz = Vector(Rz)
#Rz = self.subtractCenterOfCoords(Rz, centerOfCoords[1], centerOfCoords[0], 0.0)
Rz = Rz.getNormed().getArray()
# Create P
#P = I - np.outer(Rx, Rx) - np.outer(Ry, Ry) - np.outer(Rz, Rz) - np.outer(Tx, Tx) - np.outer(Ty, Ty) - np.outer(Tz, Tz)
### corres P = I - P
#print "independent columns P: ", self.utils.independent_columns(P).shape
#print "matrix rank P: ", self.utils.matrixrank(P)
#print "independent columns I-P: ", self.utils.independent_columns(I-P).shape
#print "matrix rank I-P: ", self.utils.matrixrank(I-P)
#print "np matrix rank I-P : ", np.linalg.matrix_rank(I-P)
#print "np matrix as matrix rank I-P : ", np.linalg.matrix_rank(np.matrix(I-P))
assumedBasis = np.array([Tx, Ty, Tz, Rx, Ry, Rz]).T
MyQ, MyR = np.linalg.qr(assumedBasis)
#print "MyQ.shape: ", MyQ.shape
Rx = MyQ.T[0]
Ry = MyQ.T[1]
Rz = MyQ.T[2]
Tx = MyQ.T[3]
Ty = MyQ.T[4]
Tz = MyQ.T[5]
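# np.linalg.qr returns Q with orthonormal columns spanning the assumed basis, which
# makes the projector below idempotent (P.dot(P) == P); note that after QR the T*/R*
# labels no longer match the original column order, but only the span of all six
# vectors matters for P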
###
print "before full projection"
###
P = I - np.outer(Rx, Rx) - np.outer(Ry, Ry) - np.outer(Rz, Rz) - np.outer(Tx, Tx) - np.outer(Ty, Ty) - np.outer(Tz, Tz)
#print "assumedBasis : \n", assumedBasis.round(4)
#print "basis after QR: \n", np.array([Tx, Ty, Tz, Rx, Ry, Rz]).T.round(4)
#writeArray("assumedBasis.txt", assumedBasis.round(4), format="%f")
#writeArray("basis_after_QR.txt", np.array([Tx, Ty, Tz, Rx, Ry, Rz]).T.round(4), format="%f")
###
#print "P", P
# print "P.shape", P.shape
# print "symmetric P: ", np.allclose(P, P.T)
# print "complex calphas * 3: ", proteinComplex.select('calpha').numAtoms() * 3
# print "rank of P projection", projectionStyle, ": ", np.linalg.matrix_rank(np.matrix(P))
# P_eigenvals, P_eigenvecs = np.linalg.eigh(P)
# print "number of P_eigenvals: ", len(P_eigenvals)
# #print "P_eigenvals: ", P_eigenvals
# print "number of P_eigenvecs: ", len(P_eigenvecs)
# #print "P_eigenvecs: ", P_eigenvecs
# #writeArray("helperScripts/"+proteinComplex.getTitle()+"_P_"+projectionStyle+".txt", P, format='%10.7f')
# #writeArray("P_eigenvals"+projectionStyle+".txt", P_eigenvals, format='%10.7f')
# #writeArray("P_eigenvecs"+projectionStyle+".txt", P_eigenvecs, format='%10.7f')
#
# P_times_Peigenvecs = P.dot(P_eigenvecs)
# P_times_Peigenvecs_T = P.dot(P_eigenvecs).T
# P_orthonormalityTest = P_times_Peigenvecs_T.dot(P_times_Peigenvecs)
# #writeArray("P_orthonormalityTest"+projectionStyle+".txt", P_orthonormalityTest, format='%10.7f')
# # does this P_orthonormalityTest equal the identity matrix or part of it?
# print "P_orthonormalityTest: ", np.allclose(P_orthonormalityTest, np.identity(len(P_eigenvecs)))
# print "P_orthonormalityTest w/o upper 6x6: ", np.allclose(P_orthonormalityTest[6:,6:], np.identity(len(P_eigenvecs)-6))
# zeroM = np.zeros((len(P_eigenvecs), len(P_eigenvecs)))
# zeroM[6:,6:] = P_orthonormalityTest[6:,6:]
# print "P_orthonormalityTest except lower n-6,n-6 zero: ", np.allclose(P_orthonormalityTest, zeroM)
# proteinComplex_ca = proteinComplex.select('calpha')
# writePDB("complex_allatoms.pdb", proteinComplex)
# writePDB("complex_before_Ptimes.pdb", proteinComplex_ca)
# coord_shape = proteinComplex_ca.getCoords().shape
# coords_P = P.dot(proteinComplex_ca.getCoords().flatten())
# coords_P = coords_P.reshape(coord_shape)
# proteinComplex_ca.setCoords(coords_P)
# writePDB("complex_after_Ptimes"+projectionStyle+".pdb", proteinComplex_ca)
#raw_input()
###
# Q, R = np.linalg.qr(P, mode="complete")
# print "independent columns Q: ", self.utils.independent_columns(Q).shape
# print "matrix rank Q: ", self.utils.matrixrank(Q)
# print "matrix np rank Q: ", np.linalg.matrix_rank(Q)," ", np.linalg.matrix_rank(np.matrix(Q))
# print "log of determinant of Q: ", np.linalg.slogdet(Q)
### corres Q = I - Q
#P = I-Q
# Apply PH'H, np.dot is matrix multiplication for 2D arrays
#print "count orthogonal columns: ", self.utils.countOrthogonalColumns(I-P)
Hprime = np.dot(P.T, hessian)
Hprime = np.dot(Hprime, P)
# Return the projected hessian
#print "after projection symmetry ==, allclose: ", np.all(Hprime-Hprime.T==0), np.allclose(Hprime, Hprime.T)
#print "H: ", hessian
#print "Hprime: ", Hprime
return Hprime
def projectHessian_test2timesQR(self, hessian, reference, proteinComplex, referenceSegment, projectionStyle, projectOnlyReferencePartOfHC=False, interCalphaIndices=None):
""" Return the PH'P hessian which has 6 zero eigenvalues according to the formula 8.27
from the book "A practical introduction to the simulation of molecular dynamics", Field.
However, here it is made sure that the assumed basis is orthonormal via np.linalg.qr applied
on the six vectors discussed in this book.
Args:
hessian: the hessian to be projected
reference: the protein the hessian or HRtilde/HLtilde of the hessian was created on
proteinComplex: the whole protein that reference is part of
referenceSegment: if reference is receptor, provide "R", else it needs to be ligand, provide "L"
projectionStyle: project away from "full" (intra+inter) or "intra" (intra) or "fullComplex"
projectOnlyReferencePartOfHC: if true, the hessian was created on reference, if false, HRtilde or HLtilde
of the hessian were created on the reference
interCalphaIndices: list of calphas indices that have intermolecular interactions
Returns: projected hessian with 6 external degrees of freedom (rotation and translation) removed
"""
assert projectionStyle == "full"
normalize = True
numAtoms = reference.select('calpha').numAtoms()
numCoords = numAtoms*3
centerOfCoords = calcCenter(reference.select('calpha'))
assert np.alltrue(centerOfCoords == calcCenter(proteinComplex.select('segment \"'+referenceSegment+'.\"').select('calpha')))
print "before projection symmetry ==, allclose: ", np.all(hessian-hessian.T==0), np.allclose(hessian, hessian.T)
numComplexAtoms = proteinComplex.select('calpha').numAtoms()
numComplexCoords = numComplexAtoms*3
numCounterpartCoords = numComplexCoords - numCoords
if referenceSegment == "R":
assert numCounterpartCoords == proteinComplex.select('segment \"L.\"').select('calpha').numAtoms() * 3
else:
assert numCounterpartCoords == proteinComplex.select('segment \"R.\"').select('calpha').numAtoms() * 3
# Create null vector with length of the counterpart calphas
counterPartNullVector = np.zeros(numCounterpartCoords)
# Create I
I = np.identity(numComplexCoords)
# Create the three translation vectors Tx, Ty, Tz
Tx = np.zeros(numComplexCoords)
Tx = self.utils.fill3DArrayWithValue(Tx, 1.0, 0)
Ty = np.zeros(numComplexCoords)
Ty = self.utils.fill3DArrayWithValue(Ty, 1.0, 1)
Tz = np.zeros(numComplexCoords)
Tz = self.utils.fill3DArrayWithValue(Tz, 1.0, 2)
# Create the three rotation vectors Rx, Ry, Rz
coordsCopy = proteinComplex.select('calpha').getCoords().copy()
Rx = self.utils.createRx(coordsCopy)
coordsCopy2 = proteinComplex.select('calpha').getCoords().copy()
Ry = self.utils.createRy(coordsCopy2)
coordsCopy3 = proteinComplex.select('calpha').getCoords().copy()
Rz = self.utils.createRz(coordsCopy3)
# if projectOnlyReferencePartOfHC:
# # overwrite previous I
# I = np.identity(numComplexCoords)
# # extend (with the nullvector) the rotational and translational vectors to the dimension of the complex
# if referenceSegment == "R":
# Tx = np.concatenate((Tx, counterPartNullVector))
# Ty = np.concatenate((Ty, counterPartNullVector))
# Tz = np.concatenate((Tz, counterPartNullVector))
# Rx = np.concatenate((Rx, counterPartNullVector))
# Ry = np.concatenate((Ry, counterPartNullVector))
# Rz = np.concatenate((Rz, counterPartNullVector))
# else:
# Tx = np.concatenate((counterPartNullVector, Tx))
# Ty = np.concatenate((counterPartNullVector, Tz))
# Tz = np.concatenate((counterPartNullVector, Tz))
# Rx = np.concatenate((counterPartNullVector, Rx))
# Ry = np.concatenate((counterPartNullVector, Ry))
# Rz = np.concatenate((counterPartNullVector, Rz))
# Normalize translation vectors and apply rotational fix
if normalize:
Tx = Vector(Tx)
#Tx = self.subtractCenterOfCoords(Tx, centerOfCoords[0], 0.0, 0.0)
Tx = Tx.getNormed().getArray()
Ty = Vector(Ty)
#Ty = self.subtractCenterOfCoords(Ty, 0.0, centerOfCoords[1], 0.0)
Ty = Ty.getNormed().getArray()
Tz = Vector(Tz)
#Tz = self.subtractCenterOfCoords(Tz, 0.0, 0.0, centerOfCoords[2])
Tz = Tz.getNormed().getArray()
Rx = Vector(Rx)
#Rx = self.subtractCenterOfCoords(Rx, 0.0, centerOfCoords[2], centerOfCoords[1])
Rx = Rx.getNormed().getArray()
Ry = Vector(Ry)
#Ry = self.subtractCenterOfCoords(Ry, centerOfCoords[2], 0.0, centerOfCoords[0])
Ry = Ry.getNormed().getArray()
Rz = Vector(Rz)
#Rz = self.subtractCenterOfCoords(Rz, centerOfCoords[1], centerOfCoords[0], 0.0)
Rz = Rz.getNormed().getArray()
assumedBasis = np.array([Tx, Ty, Tz, Rx, Ry, Rz]).T
MyQ, MyR = np.linalg.qr(assumedBasis, mode='full')
Rx = MyQ.T[0]
Ry = MyQ.T[1]
Rz = MyQ.T[2]
Tx = MyQ.T[3]
Ty = MyQ.T[4]
Tz = MyQ.T[5]
Rx = Rx[:numCoords]
Ry = Ry[:numCoords]
Rz = Rz[:numCoords]
Tx = Tx[:numCoords]
Ty = Ty[:numCoords]
Tz = Tz[:numCoords]
assumedBasis = np.array([Tx, Ty, Tz, Rx, Ry, Rz]).T
MyQ, MyR = np.linalg.qr(assumedBasis, mode='full')
Rx = MyQ.T[0]
Ry = MyQ.T[1]
Rz = MyQ.T[2]
Tx = MyQ.T[3]
Ty = MyQ.T[4]
Tz = MyQ.T[5]
print "len(Rx): ", len(Rx)
Tx = np.concatenate((Tx, counterPartNullVector))
Ty = np.concatenate((Ty, counterPartNullVector))
Tz = np.concatenate((Tz, counterPartNullVector))
Rx = np.concatenate((Rx, counterPartNullVector))
Ry = np.concatenate((Ry, counterPartNullVector))
Rz = np.concatenate((Rz, counterPartNullVector))
print "Pr test"
raw_input()
P = I - np.outer(Rx, Rx) - np.outer(Ry, Ry) - np.outer(Rz, Rz) - np.outer(Tx, Tx) - np.outer(Ty, Ty) - np.outer(Tz, Tz)
#print "assumedBasis : \n", assumedBasis.round(4)
#print "basis after QR: \n", np.array([Tx, Ty, Tz, Rx, Ry, Rz]).T.round(4)
#writeArray("assumedBasis.txt", assumedBasis.round(4), format="%f")
#writeArray("basis_after_QR.txt", np.array([Tx, Ty, Tz, Rx, Ry, Rz]).T.round(4), format="%f")
###
print "P", P
print "P.shape", P.shape
print "symmetric P: ", np.allclose(P, P.T)
print "complex calphas * 3: ", proteinComplex.select('calpha').numAtoms() * 3
print "rank of P projection", projectionStyle, ": ", np.linalg.matrix_rank(np.matrix(P))
P_eigenvals, P_eigenvecs = np.linalg.eigh(P)
print "number of P_eigenvals: ", len(P_eigenvals)
#print "P_eigenvals: ", P_eigenvals
print "number of P_eigenvecs: ", len(P_eigenvecs)
#print "P_eigenvecs: ", P_eigenvecs
writeArray("helperScripts/"+proteinComplex.getTitle()+"_P_"+projectionStyle+".txt", P, format='%10.7f')
#writeArray("P_eigenvals"+projectionStyle+".txt", P_eigenvals, format='%10.7f')
#writeArray("P_eigenvecs"+projectionStyle+".txt", P_eigenvecs, format='%10.7f')
P_times_Peigenvecs = P.dot(P_eigenvecs)
P_times_Peigenvecs_T = P.dot(P_eigenvecs).T
P_orthonormalityTest = P_times_Peigenvecs_T.dot(P_times_Peigenvecs)
#writeArray("P_orthonormalityTest"+projectionStyle+".txt", P_orthonormalityTest, format='%10.7f')
# does this P_orthonormalityTest equal the identity matrix or part of it?
print "P_orthonormalityTest: ", np.allclose(P_orthonormalityTest, np.identity(len(P_eigenvecs)))
print "P_orthonormalityTest w/o upper 6x6: ", np.allclose(P_orthonormalityTest[6:,6:], np.identity(len(P_eigenvecs)-6))
zeroM = np.zeros((len(P_eigenvecs), len(P_eigenvecs)))
zeroM[6:,6:] = P_orthonormalityTest[6:,6:]
print "P_orthonormalityTest except lower n-6,n-6 zero: ", np.allclose(P_orthonormalityTest, zeroM)
# proteinComplex_ca = proteinComplex.select('calpha')
# writePDB("complex_allatoms.pdb", proteinComplex)
# writePDB("complex_before_Ptimes.pdb", proteinComplex_ca)
# coord_shape = proteinComplex_ca.getCoords().shape
# coords_P = P.dot(proteinComplex_ca.getCoords().flatten())
# coords_P = coords_P.reshape(coord_shape)
# proteinComplex_ca.setCoords(coords_P)
# writePDB("complex_after_Ptimes"+projectionStyle+".pdb", proteinComplex_ca)
raw_input()
###
# Q, R = np.linalg.qr(P, mode="complete")
# print "independent columns Q: ", self.utils.independent_columns(Q).shape
# print "matrix rank Q: ", self.utils.matrixrank(Q)
# print "matrix np rank Q: ", np.linalg.matrix_rank(Q)," ", np.linalg.matrix_rank(np.matrix(Q))
# print "log of determinant of Q: ", np.linalg.slogdet(Q)
### corres Q = I - Q
#P = I-Q
# Apply PH'H, np.dot is matrix multiplication for 2D arrays
#print "count orthogonal columns: ", self.utils.countOrthogonalColumns(I-P)
Hprime = np.dot(P.T, hessian)
Hprime = np.dot(Hprime, P)
# Return the projected hessian
#print "after projection symmetry ==, allclose: ", np.all(Hprime-Hprime.T==0), np.allclose(Hprime, Hprime.T)
#print "H: ", hessian
#print "Hprime: ", Hprime
return Hprime
def transformHessianToFixedDomainFrame(self, hessian, reference, proteinComplex, referenceSegment, projectionStyle):
""" Application of formula 20 from:
Fuchigami, Sotaro, Satoshi Omori, Mitsunori Ikeguchi, and Akinori Kidera.
"Normal Mode Analysis of Protein Dynamics in a Non-Eckart Frame."
The Journal of Chemical Physics 132, no. 10 (March 11, 2010): 104109. doi:10.1063/1.3352566.
"""
numAtoms = reference.select('calpha').numAtoms()
numCoords = numAtoms*3
centerOfCoords = calcCenter(reference.select('calpha'))
#assert np.alltrue(centerOfCoords == calcCenter(proteinComplex.select('segment \"'+referenceSegment+'.\"').select('calpha')))
numComplexAtoms = proteinComplex.select('calpha').numAtoms()
numComplexCoords = numComplexAtoms*3
numCounterpartCoords = numComplexCoords - numCoords
if referenceSegment == "R":
# create the P matrix, receptor is fixed domain
P = np.zeros((numComplexCoords, numComplexCoords))
P[:numCoords, :numCoords] = np.identity(numCoords)
assert numCounterpartCoords == proteinComplex.select('segment \"L.\"').select('calpha').numAtoms() * 3
else:
# create the P matrix, ligand is fixed domain
P = np.zeros((numComplexCoords, numComplexCoords))
numCoords_receptor = proteinComplex.select('segment \"R.\"').select('calpha').numAtoms() * 3
P[numCoords_receptor:, numCoords_receptor:] = np.identity(proteinComplex.select('segment \"L.\"').select('calpha').numAtoms() * 3)
assert numCounterpartCoords == proteinComplex.select('segment \"R.\"').select('calpha').numAtoms() * 3
# create rigid body motion eigenvectors out_values
out_vals, out_vectors = sp.linalg.eigh(hessian)
# sort the eigenvalues and eigenvectors ascendingly, this is not asserted by the eigh return, see
# http://stackoverflow.com/questions/8092920/sort-eigenvalues-and-associated-eigenvectors-after-using-numpy-linalg-eig-in-pyt
idx = out_vals.argsort()
out_vals = out_vals[idx]
out_vectors = out_vectors[:,idx]
# take the first six eigenvalues and eigenvectors
out_vals = out_vals[:6]
out_vectors = out_vectors.T[:6].T
#print "P.shape: ", P.shape
#print "out_vectors.shape: ", out_vectors.shape
# create the transformation matrix
inv = (out_vectors.T.dot(P)).dot(out_vectors)
inv = np.linalg.inv(inv)
secondTerm = ((out_vectors.dot(inv)).dot(out_vectors.T)).dot(P)
U = np.identity(numComplexCoords) - secondTerm
print "calculated transformation matrix U"
#writeArray("hessianbeforeU.txt", hessian, format='%10.7f')
Hprime = np.dot(U, hessian)
Hprime = np.dot(Hprime, U.T)
#writeArray(proteinComplex.getTitle()+"U.txt", U, format='%10.7f')
#writeArray("hessianafterU.txt", Hprime, format='%10.7f')
print "obtained Hprime with a fixed domain frame"
return Hprime
def subtractCenterOfCoords(self, vector, xElement, yElement, zElement):
""" Subtract from a vector having a [i][3] dim array elementwise the center of coords and return the result. """
coordsNx3 = vector.getArrayNx3()
subtractArray = np.array([xElement, yElement, zElement])
coordsNx3 = coordsNx3 - subtractArray
resultVector = Vector(coordsNx3.flatten())
return resultVector
def addscaledHdelta(self, HR, HRtilde, deltaHRmultiplicator):
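""" Return HR + deltaHRmultiplicator*(HRtilde - HR), interpolating between the unbound
hessian (multiplicator 0) and the complex-derived diagonal block (multiplicator 1). """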
assert HR.shape == HRtilde.shape
deltaHR = HRtilde - HR
deltaHR = deltaHR * deltaHRmultiplicator
return (HR + deltaHR)
def calcCustomH_ANew(self, HR, referenceStructure, neighborStructure, encounter, neighborhoodFrom, equilibriumAt, workOnReceptor=True, selstr='calpha'):
""" Modifies the hessian HR or HL by adding additonal terms for intramolecular contacts.
Args:
HR: The original HR as calculated by prody
referenceStructure: structure to take calphas from, the hessian HR belongs to it or to its superset if I is a chain
neighborStructure: structure to apply the neighborhood calculations on
encounter: object with all encounter information
neighborhoodFrom: is the neighborhood calculated from the unbound complex C_u or the bound complex C_b
equilibriumAt: is the equilibrium set to r_ij or r_ij_b
workOnReceptor: is the Hessian and the referenceStructure receptor or ligand
selstr: atomType of the coarse grained ANM (by default calpha)
"""
assert equilibriumAt == "r_ij" or equilibriumAt == "r_ij_b"
assert neighborhoodFrom == "C_u" or neighborhoodFrom == "C_b"
if workOnReceptor:
reference = encounter.getReference()
if self.bound_provided == True:
refchain = encounter.getRefChain()
mobile = encounter.getMobile()
mobChain = encounter.getMobChain()
boundCounterpart = encounter.getBoundCounterpart()
boundCounterpartChain = encounter.getBoundCounterpartChain()
unboundCounterpartChain = encounter.getUnboundCounterpartChain()
else:
reference = encounter.getUnboundCounterpart()
if self.bound_provided == True:
refchain = encounter.getUnboundCounterpartChain()
mobile = encounter.getBoundCounterpart()
mobChain = encounter.getBoundCounterpartChain()
boundCounterpart = encounter.getMobile()
boundCounterpartChain = encounter.getMobChain()
unboundCounterpartChain = encounter.getRefChain()
neighborStructureCalpha = neighborStructure.select('calpha')
contactsCounter = 0
interCalphaIndices = []
for idx, element in enumerate(referenceStructure.select('calpha')):
contactsOfI = encounter.getIntermolecularNeighborsOfAtom(element, neighborStructure, selstr, str(self.utils.config.customHRdistance))
# if element has contacts in the neighborStructure, the hessian needs an update in the 3*3 matrix on the diagonal of this element atom
if contactsOfI:
contactsCounter += contactsOfI.numAtoms()
interCalphaIndices.append(idx)
print "intermolecular contacts: ", contactsOfI.numAtoms()
contacts_counterpartChainIndices = self.utils.getMatchingStructureSelections(neighborStructureCalpha, contactsOfI, neighborStructureCalpha)
assert len(contactsOfI) == len(contacts_counterpartChainIndices)
# access each element contact to create the deltaTerm
overallTerm = np.zeros((3,3))
for elementcontact, contacts_counterpartChainIndex in zip(contactsOfI, contacts_counterpartChainIndices):
if neighborhoodFrom == "C_u":
r_ij = calcDistance(element, elementcontact)
if equilibriumAt == "r_ij":
r_ij_b = r_ij
#if element is not in matched reference or contact is not in matched counterpart: r_ij_b = r_ij
elif not(element in refchain.select('calpha')) or not(elementcontact in unboundCounterpartChain.select('calpha')):
r_ij_b = r_ij
else:
elementPositionInChain = encounter.accessANMs().getCalphaPosition(element, refchain.select('calpha'))
contactPositionInChain = encounter.accessANMs().getCalphaPosition(elementcontact, unboundCounterpartChain.select('calpha'))
r_ij_b = calcDistance(mobChain.select('calpha')[elementPositionInChain], boundCounterpartChain.select('calpha')[contactPositionInChain])
self.utils.assertTwoAtomsAreEqual(mobChain.select('calpha')[elementPositionInChain], element, useCoords=False, useResname=True)
self.utils.assertTwoAtomsAreEqual(refchain.select('calpha')[elementPositionInChain], element, useCoords=False, useResname=True)
self.utils.assertTwoAtomsAreEqual(boundCounterpartChain.select('calpha')[contactPositionInChain], elementcontact, useCoords=False, useResname=True)
self.utils.assertTwoAtomsAreEqual(unboundCounterpartChain.select('calpha')[contactPositionInChain], elementcontact, useCoords=False, useResname=True)
# make the 3*3 hessian term for this contact (excluding gamma, gamma is multiplied at the end to the sum)
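# for a pairwise spring the 3x3 super-element is built from the outer product of the
# difference vector (x_i - x_j); make3By3HessianTerm (defined elsewhere in this class)
# is assumed to combine it with the r_ij / r_ij_b equilibrium factors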
deltaTerm = self.make3By3HessianTerm(element, elementcontact, r_ij, r_ij_b)
#print element, elementcontact, " r_ij, rij_b: ", r_ij, r_ij_b
overallTerm += deltaTerm
else:
if equilibriumAt == "r_ij_b":
r_ij_b = calcDistance(element, elementcontact)
elementPositionInChain = encounter.accessANMs().getCalphaPosition(element, mobChain.select('calpha'))
contactPositionInChain = encounter.accessANMs().getCalphaPosition(elementcontact, boundCounterpartChain.select('calpha'))
r_ij = calcDistance(refchain.select('calpha')[elementPositionInChain], unboundCounterpartChain.select('calpha')[contactPositionInChain])
self.utils.assertTwoAtomsAreEqual(mobChain.select('calpha')[elementPositionInChain], element, useCoords=False, useResname=True)
self.utils.assertTwoAtomsAreEqual(refchain.select('calpha')[elementPositionInChain], element, useCoords=False, useResname=True)
self.utils.assertTwoAtomsAreEqual(boundCounterpartChain.select('calpha')[contactPositionInChain], elementcontact, useCoords=False, useResname=True)
self.utils.assertTwoAtomsAreEqual(unboundCounterpartChain.select('calpha')[contactPositionInChain], elementcontact, useCoords=False, useResname=True)
else:
elementPositionInChain = encounter.accessANMs().getCalphaPosition(element, mobChain.select('calpha'))
contactPositionInChain = encounter.accessANMs().getCalphaPosition(elementcontact, boundCounterpartChain.select('calpha'))
r_ij = calcDistance(refchain.select('calpha')[elementPositionInChain], unboundCounterpartChain.select('calpha')[contactPositionInChain])
r_ij_b = r_ij
self.utils.assertTwoAtomsAreEqual(mobChain.select('calpha')[elementPositionInChain], element, useCoords=False, useResname=True)
self.utils.assertTwoAtomsAreEqual(refchain.select('calpha')[elementPositionInChain], element, useCoords=False, useResname=True)
self.utils.assertTwoAtomsAreEqual(boundCounterpartChain.select('calpha')[contactPositionInChain], elementcontact, useCoords=False, useResname=True)
self.utils.assertTwoAtomsAreEqual(unboundCounterpartChain.select('calpha')[contactPositionInChain], elementcontact, useCoords=False, useResname=True)
# make the 3*3 hessian term for this contact (excluding gamma, gamma is multiplied at the end to the sum)
deltaTerm = self.make3By3HessianTerm(refchain.select('calpha')[elementPositionInChain], unboundCounterpartChain.select('calpha')[contactPositionInChain], r_ij, r_ij_b)
#print refchain.select('calpha')[elementPositionInChain], unboundCounterpartChain.select('calpha')[contactPositionInChain], " r_ij, rij_b: ", r_ij, r_ij_b
overallTerm += deltaTerm
# multiply the overallTerm with the spring constant gamma
overallTerm = overallTerm * self.utils.config.customForceConstant
# add the overallterm to the hessian matrix
if neighborhoodFrom == "C_b":
elementPosition = encounter.accessANMs().getCalphaPosition(refchain.select('calpha')[elementPositionInChain], reference.select('calpha'))
else:
elementPosition = encounter.accessANMs().getCalphaPosition(element, reference.select('calpha'))
HR = self.add3By3MatrixtoHessian(overallTerm, HR, elementPosition*3)
print "added custom terms to hessian"
print "total intermolecular contacts: ", contactsCounter
return HR, interCalphaIndices
def calcCustomH_ANew_IJ(self, referenceStructure, neighborStructure, encounter, areStructuresChains, equilibriumAt, workOnReceptor=True, selstr='calpha'):
""" Creates the HRL matrix made through intramolecular contacts.
Args:
referenceStructure: structure to take calphas from, the hessian HR belongs to it or to its superset if I is a chain
neighborStructure: structure to apply the neighborhood calculations on
encounter: object with all encounter information
areStructuresChains: boolean to describe if the structures are chains (subsets)
equilibriumAt: is the equilibrium set to r_ij or r_ij_b
workOnReceptor: is the Hessian and the referenceStructure receptor or ligand
selstr: atomType of the coarse grained ANM (by default calpha)
"""
assert equilibriumAt == "r_ij" or equilibriumAt == "r_ij_b"
if workOnReceptor:
if areStructuresChains:
if self.bound_provided == True:
mobile = encounter.getMobChain()
boundCounterpart = encounter.getBoundCounterpartChain()
else:
pass
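                    # Note (added): with bound_provided False, mobile and
                    # boundCounterpart stay unset on this branch; any later
                    # use of them here would raise a NameError.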
else:
reference = encounter.getReference()
unboundCounterpart = encounter.getUnboundCounterpart()
if self.bound_provided == True:
refchain = encounter.getRefChain()
mobile = encounter.getMobile()
mobChain = encounter.getMobChain()
boundCounterpart = encounter.getBoundCounterpart()
boundCounterpartChain = encounter.getBoundCounterpartChain()
unboundCounterpartChain = encounter.getUnboundCounterpartChain()
else:
if areStructuresChains:
if self.bound_provided == True:
mobile = encounter.getBoundCounterpartChain()
boundCounterpart = encounter.getMobChain()
else:
pass
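                    # Note (added): with bound_provided False, mobile and
                    # boundCounterpart stay unset on this branch; any later
                    # use of them here would raise a NameError.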
else:
reference = encounter.getUnboundCounterpart()
unboundCounterpart = encounter.getReference()
if self.bound_provided == True:
refchain = encounter.getUnboundCounterpartChain()
mobile = encounter.getBoundCounterpart()
mobChain = encounter.getBoundCounterpartChain()
boundCounterpart = encounter.getMobile()
boundCounterpartChain = encounter.getMobChain()
unboundCounterpartChain = encounter.getRefChain()
neighborStructureCalpha = neighborStructure.select('calpha')
offDiagonalHessianMatrix = np.zeros(((reference.select('calpha').numAtoms()*3), (unboundCounterpart.select('calpha').numAtoms()*3) ))
contactsCounter = 0
for idx, element in enumerate(referenceStructure.select('calpha')):
contactsOfI = encounter.getIntermolecularNeighborsOfAtom(element, neighborStructure, selstr, str(self.utils.config.customHRdistance))
# if element has contacts in the neighborStructure, the hessian needs an update in the 3*3 matrix on the diagonal of this element atom
if contactsOfI:
print "intermolecular contacts: ", contactsOfI.numAtoms()
contactsCounter += contactsOfI.numAtoms()
# print "contact at i, refChainCalphas[i]: ", i, refChainCalphas[i]
contacts_counterpartChainIndices = self.utils.getMatchingStructureSelections(neighborStructureCalpha, contactsOfI, neighborStructureCalpha)
assert len(contactsOfI) == len(contacts_counterpartChainIndices)
# access each element contact to create the deltaTerm
for elementcontact, contacts_counterpartChainIndex in zip(contactsOfI, contacts_counterpartChainIndices):
overallTerm = np.zeros((3,3))
#self.utils.assertTwoAtomsAreEqual(refChainCalphas[i], mobChainCalphas[i], useCoords=False, useResname=True)
#self.utils.assertTwoAtomsAreEqual(elementcontact, boundCounterpartChainCalphas[contacts_counterpartChainIndex], useCoords=False, useResname=True)
r_ij = calcDistance(element, elementcontact)
if equilibriumAt == "r_ij":
r_ij_b = r_ij
#if element is not in matched reference or contact is not in matched counterpart: r_ij_b = r_ij
elif not(element in refchain.select('calpha')) or not(elementcontact in unboundCounterpartChain.select('calpha')):
r_ij_b = r_ij
else:
elementPositionInChain = encounter.accessANMs().getCalphaPosition(element, refchain.select('calpha'))
contactPositionInChain = encounter.accessANMs().getCalphaPosition(elementcontact, unboundCounterpartChain.select('calpha'))
r_ij_b = calcDistance(mobChain.select('calpha')[elementPositionInChain], boundCounterpartChain.select('calpha')[contactPositionInChain])
self.utils.assertTwoAtomsAreEqual(mobChain.select('calpha')[elementPositionInChain], element, useCoords=False, useResname=True)
self.utils.assertTwoAtomsAreEqual(refchain.select('calpha')[elementPositionInChain], element, useCoords=False, useResname=True)
self.utils.assertTwoAtomsAreEqual(boundCounterpartChain.select('calpha')[contactPositionInChain], elementcontact, useCoords=False, useResname=True)
self.utils.assertTwoAtomsAreEqual(unboundCounterpartChain.select('calpha')[contactPositionInChain], elementcontact, useCoords=False, useResname=True)
#
# make the 3*3 hessian term for this contact (excluding gamma, gamma is multiplied at the end to the sum)
deltaTerm = self.make3By3OffDiagonalHessianTermIJ(element, elementcontact, r_ij, r_ij_b)
overallTerm += deltaTerm
#print "r_ij, r_ij_b: ", r_ij, r_ij_b
# multiply the overallTerm with the spring constant gamma
overallTerm = overallTerm * self.utils.config.customForceConstant
# print overallTerm
offDiagonalHessianMatrix = self.add3By3MatrixtoOffDiagonalHessianMatrixIJ(overallTerm, offDiagonalHessianMatrix, idx*3, contacts_counterpartChainIndex*3)
#print contactsOfI.numAtoms(), "neighbors, modifying at hessian (loopcounter*3)+1: ", str((loopCounter*3)+1)
#print str(i)+"'th refchain calpha, hessian line number ", (loopCounter*3)+1, "contacts with ", unboundCounterpartChainCalphas[contacts_counterpartChainIndex], " unboundcounterpartchainindex: ", contacts_counterpartChainIndices
#print ""
# add the overallterm to the hessian matrix
###elementPosition = encounter.accessANMs().getCalphaPosition(element, encounter.getReference().select('calpha'))
print "added custom terms to offDiagonalHessianMatrix"
print "total intermolecular contacts: ", contactsCounter
return offDiagonalHessianMatrix
def calcCustomH_ANew_U1(self, HR, referenceStructure, neighborStructure, encounter, areStructuresChains, equilibriumAt, workOnReceptor=True, selstr='calpha'):
""" Modifies the hessian HR or HL by adding additonal terms for intramolecular contacts.
Args:
HR: The original HR as calculated by prody
referenceStructure: structure to take calphas from, the hessian HR belongs to it or to its superset if I is a chain
neighborStructure: structure to apply the neighborhood calculations on
encounter: object with all encounter information
areStructuresChains: boolean to describe if the structures are chains (subsets)
equilibriumAt: is the equilibrium set to r_ij or r_ij_b
            workOnReceptor: whether the Hessian and the referenceStructure belong to the receptor (otherwise the ligand)
            selstr: atomType of the coarse-grained ANM (by default calpha)
"""
assert equilibriumAt == "r_ij" or equilibriumAt == "r_ij_b"
if workOnReceptor:
refchain = encounter.getRefChain()
mobile = encounter.getMobile()
mobChain = encounter.getMobChain()
boundCounterpart = encounter.getBoundCounterpart()
boundCounterpartChain = encounter.getBoundCounterpartChain()
unboundCounterpartChain = encounter.getUnboundCounterpartChain()
else:
refchain = encounter.getUnboundCounterpartChain()
mobile = encounter.getBoundCounterpart()
mobChain = encounter.getBoundCounterpartChain()
boundCounterpart = encounter.getMobile()
boundCounterpartChain = encounter.getMobChain()
unboundCounterpartChain = encounter.getRefChain()
neighborStructureCalpha = neighborStructure.select('calpha')
for idx, element in enumerate(referenceStructure.select('calpha')):
contactsOfI = encounter.getIntermolecularNeighborsOfAtom(element, neighborStructure, selstr, str(self.utils.config.customHRdistance))
# if element has contacts in the neighborStructure, the hessian needs an update in the 3*3 matrix on the diagonal of this element atom
if contactsOfI:
# print "contact at i, refChainCalphas[i]: ", i, refChainCalphas[i]
contacts_counterpartChainIndices = self.utils.getMatchingStructureSelections(neighborStructureCalpha, contactsOfI, neighborStructureCalpha)
assert len(contactsOfI) == len(contacts_counterpartChainIndices)
# access each element contact to create the deltaTerm
overallTerm = np.zeros((3,3))
for elementcontact, contacts_counterpartChainIndex in zip(contactsOfI, contacts_counterpartChainIndices):
#self.utils.assertTwoAtomsAreEqual(refChainCalphas[i], mobChainCalphas[i], useCoords=False, useResname=True)
#self.utils.assertTwoAtomsAreEqual(elementcontact, boundCounterpartChainCalphas[contacts_counterpartChainIndex], useCoords=False, useResname=True)
if equilibriumAt == "r_ij_b":
r_ij_b = calcDistance(element, elementcontact)
elementPositionInChain = encounter.accessANMs().getCalphaPosition(element, mobChain.select('calpha'))
contactPositionInChain = encounter.accessANMs().getCalphaPosition(elementcontact, boundCounterpartChain.select('calpha'))
r_ij = calcDistance(refchain.select('calpha')[elementPositionInChain], unboundCounterpartChain.select('calpha')[contactPositionInChain])
self.utils.assertTwoAtomsAreEqual(mobChain.select('calpha')[elementPositionInChain], element, useCoords=False, useResname=True)
self.utils.assertTwoAtomsAreEqual(refchain.select('calpha')[elementPositionInChain], element, useCoords=False, useResname=True)
self.utils.assertTwoAtomsAreEqual(boundCounterpartChain.select('calpha')[contactPositionInChain], elementcontact, useCoords=False, useResname=True)
self.utils.assertTwoAtomsAreEqual(unboundCounterpartChain.select('calpha')[contactPositionInChain], elementcontact, useCoords=False, useResname=True)
else:
elementPositionInChain = encounter.accessANMs().getCalphaPosition(element, mobChain.select('calpha'))
contactPositionInChain = encounter.accessANMs().getCalphaPosition(elementcontact, boundCounterpartChain.select('calpha'))
r_ij = calcDistance(refchain.select('calpha')[elementPositionInChain], unboundCounterpartChain.select('calpha')[contactPositionInChain])
r_ij_b = r_ij
self.utils.assertTwoAtomsAreEqual(mobChain.select('calpha')[elementPositionInChain], element, useCoords=False, useResname=True)
self.utils.assertTwoAtomsAreEqual(refchain.select('calpha')[elementPositionInChain], element, useCoords=False, useResname=True)
self.utils.assertTwoAtomsAreEqual(boundCounterpartChain.select('calpha')[contactPositionInChain], elementcontact, useCoords=False, useResname=True)
self.utils.assertTwoAtomsAreEqual(unboundCounterpartChain.select('calpha')[contactPositionInChain], elementcontact, useCoords=False, useResname=True)
#r_ij_b = calcDistance(zip(mobile.select('calpha'))[idx][0], zip(boundCounterpart.select('calpha'))[contacts_counterpartChainIndex][0])
# make the 3*3 hessian term for this contact (excluding gamma, gamma is multiplied at the end to the sum)
deltaTerm = self.make3By3HessianTerm(refchain.select('calpha')[elementPositionInChain], unboundCounterpartChain.select('calpha')[contactPositionInChain], r_ij, r_ij_b)
print refchain.select('calpha')[elementPositionInChain], unboundCounterpartChain.select('calpha')[contactPositionInChain], " r_ij, rij_b: ", r_ij, r_ij_b
overallTerm += deltaTerm
#print "r_ij, r_ij_b: ", r_ij, r_ij_b
# multiply the overallTerm with the spring constant gamma
overallTerm = overallTerm * self.utils.config.customForceConstant
# print overallTerm
#print contactsOfI.numAtoms(), "neighbors, modifying at hessian (loopcounter*3)+1: ", str((loopCounter*3)+1)
#print str(i)+"'th refchain calpha, hessian line number ", (loopCounter*3)+1, "contacts with ", unboundCounterpartChainCalphas[contacts_counterpartChainIndex], " unboundcounterpartchainindex: ", contacts_counterpartChainIndices
#print ""
# add the overallterm to the hessian matrix
elementPosition = encounter.accessANMs().getCalphaPosition(refchain.select('calpha')[elementPositionInChain], encounter.getReference().select('calpha'))
HR = self.add3By3MatrixtoHessian(overallTerm, HR, elementPosition*3)
print "adding to hessian at: ", (elementPosition*3+1)
print "added custom terms to hessian"
return HR
def calcCustomH_A(self, HR, encounter, workOnReceptor=True, selstr='calpha'):
""" Modifies the hessian of anm_reference according to calcCustomH_A and returns it. """
if workOnReceptor:
refChainCalphas = encounter.getRefChain().select('calpha')
mobChainCalphas = encounter.getMobChain().select('calpha')
mobChain = encounter.getMobChain()
refChain = encounter.getRefChain()
boundCounterpartChainCalphas = encounter.getBoundCounterpartChain().select('calpha')
boundCounterpartChain = encounter.getBoundCounterpartChain()
unboundCounterpartChain = encounter.getUnboundCounterpartChain()
unboundCounterpartChainCalphas = encounter.getUnboundCounterpartChain().select('calpha')
referenceCalphas = encounter.getReference().select('calpha')
else:
refChainCalphas = encounter.getUnboundCounterpartChain().select('calpha')
mobChainCalphas = encounter.getBoundCounterpartChain().select('calpha')
mobChain = encounter.getBoundCounterpartChain()
refChain = encounter.getUnboundCounterpartChain()
boundCounterpartChainCalphas = encounter.getMobChain().select('calpha')
boundCounterpartChain = encounter.getMobChain()
unboundCounterpartChain = encounter.getRefChain()
unboundCounterpartChainCalphas = encounter.getRefChain().select('calpha')
referenceCalphas = encounter.getUnboundCounterpart().select('calpha')
#encounter.printIntermolecularNeighbors(encounter.getReference(), encounter.getUnboundCounterpart(), selstr, str(self.utils.config.customHRdistance))
# Loop over all calphas in the reference structure (using matched chains)
counterUnmatchedCalphas = 0
loopCounter = 0
for element in referenceCalphas:
i = loopCounter - counterUnmatchedCalphas
if self.utils.doesAtomExistInY(element, refChainCalphas) is None:
counterUnmatchedCalphas += 1
loopCounter += 1
continue
else:
contactsOfI = encounter.getIntermolecularNeighbors(refChain, unboundCounterpartChain, i, selstr, str(self.utils.config.customHRdistance))
# if there are contacts in the unbound counterpart, the hessian needs an update in the 3*3 matrix of the diagonal of this atom
if contactsOfI:
# print "contact at i, refChainCalphas[i]: ", i, refChainCalphas[i]
contacts_counterpartChainIndices = self.utils.getMatchingStructure(unboundCounterpartChainCalphas, contactsOfI, boundCounterpartChainCalphas)
assert len(contactsOfI) == len(contacts_counterpartChainIndices)
# access each element contact to create the deltaTerm
overallTerm = np.zeros((3,3))
for elementcontact, contacts_counterpartChainIndex in zip(contactsOfI, contacts_counterpartChainIndices):
self.utils.assertTwoAtomsAreEqual(refChainCalphas[i], mobChainCalphas[i], useCoords=False, useResname=True)
self.utils.assertTwoAtomsAreEqual(elementcontact, boundCounterpartChainCalphas[contacts_counterpartChainIndex], useCoords=False, useResname=True)
r_ij = calcDistance(refChainCalphas[i], elementcontact)
r_ij_b = calcDistance(mobChainCalphas[i], boundCounterpartChainCalphas[contacts_counterpartChainIndex])
# make the 3*3 hessian term for this contact (excluding gamma, gamma is multiplied at the end to the sum)
deltaTerm = self.make3By3HessianTerm(refChainCalphas[i], elementcontact, r_ij, r_ij_b)
overallTerm += deltaTerm
#print "r_ij, r_ij_b: ", r_ij, r_ij_b
# multiply the overallTerm with the spring constant gamma
overallTerm = overallTerm * self.utils.config.customForceConstant
# print overallTerm
#print contactsOfI.numAtoms(), "neighbors, modifying at hessian (loopcounter*3)+1: ", str((loopCounter*3)+1)
print str(i)+"'th refchain calpha, hessian line number ", (loopCounter*3)+1, "contacts with ", unboundCounterpartChainCalphas[contacts_counterpartChainIndex], " unboundcounterpartchainindex: ", contacts_counterpartChainIndices
print ""
# add the overallterm to the hessian matrix
HR = self.add3By3MatrixtoHessian(overallTerm, HR, loopCounter*3)
loopCounter += 1
assert(loopCounter-counterUnmatchedCalphas) == refChainCalphas.numAtoms()
print "added custom terms to hessian"
return HR
def calcCustomH_A_IJ(self, encounter, workOnReceptor=True, selstr='calpha'):
""" Modifies the hessian of anm_reference according to calcCustomH_A and returns it. """
if workOnReceptor:
refChainCalphas = encounter.getRefChain().select('calpha')
mobChainCalphas = encounter.getMobChain().select('calpha')
mobChain = encounter.getMobChain()
refChain = encounter.getRefChain()
boundCounterpartChainCalphas = encounter.getBoundCounterpartChain().select('calpha')
boundCounterpartChain = encounter.getBoundCounterpartChain()
unboundCounterpartChain = encounter.getUnboundCounterpartChain()
unboundCounterpartChainCalphas = encounter.getUnboundCounterpartChain().select('calpha')
referenceCalphas = encounter.getReference().select('calpha')
mobileCalphas = encounter.getMobile().select('calpha')
unboundCounterpart = encounter.getUnboundCounterpart()
unboundCounterpartCalphas = encounter.getUnboundCounterpart().select('calpha')
else:
refChainCalphas = encounter.getUnboundCounterpartChain().select('calpha')
mobChainCalphas = encounter.getBoundCounterpartChain().select('calpha')
mobChain = encounter.getBoundCounterpartChain()
refChain = encounter.getUnboundCounterpartChain()
boundCounterpartChainCalphas = encounter.getMobChain().select('calpha')
boundCounterpartChain = encounter.getMobChain()
unboundCounterpartChain = encounter.getRefChain()
unboundCounterpartChainCalphas = encounter.getRefChain().select('calpha')
referenceCalphas = encounter.getUnboundCounterpart().select('calpha')
mobileCalphas = encounter.getBoundCounterpart().select('calpha')
unboundCounterpart = encounter.getReference()
unboundCounterpartCalphas = encounter.getReference().select('calpha')
offDiagonalHessianMatrix = np.zeros(((referenceCalphas.numAtoms()*3), (unboundCounterpartCalphas.numAtoms()*3) ))
#encounter.printIntermolecularNeighbors(encounter.getReference(), encounter.getUnboundCounterpart(), selstr, str(self.utils.config.customHRdistance))
# Loop over all calphas in the reference structure (using matched chains)
counterUnmatchedCalphas = 0
loopCounter = 0
for element in referenceCalphas:
i = loopCounter - counterUnmatchedCalphas
if self.utils.doesAtomExistInY(element, refChainCalphas) is None:
counterUnmatchedCalphas += 1
loopCounter += 1
continue
else:
contactsOfI = encounter.getIntermolecularNeighbors(refChain, unboundCounterpartChain, i, selstr, str(self.utils.config.customHRdistance))
# if there are contacts in the unbound counterpart, the hessian needs an update in the 3*3 matrix of the diagonal of this atom
if contactsOfI:
# print "contact at i, refChainCalphas[i]: ", i, refChainCalphas[i]
contacts_counterpartChainIndices = self.utils.getMatchingStructure(unboundCounterpartChainCalphas, contactsOfI, boundCounterpartChainCalphas)
assert len(contactsOfI) == len(contacts_counterpartChainIndices)
# access each element contact to create the deltaTerm
for elementcontact, contacts_counterpartChainIndex in zip(contactsOfI, contacts_counterpartChainIndices):
overallTerm = np.zeros((3,3))
self.utils.assertTwoAtomsAreEqual(refChainCalphas[i], mobChainCalphas[i], useCoords=False, useResname=True)
self.utils.assertTwoAtomsAreEqual(elementcontact, boundCounterpartChainCalphas[contacts_counterpartChainIndex], useCoords=False, useResname=True)
r_ij = calcDistance(refChainCalphas[i], elementcontact)
r_ij_b = calcDistance(mobChainCalphas[i], boundCounterpartChainCalphas[contacts_counterpartChainIndex])
# make the 3*3 hessian term for this contact (excluding gamma, gamma is multiplied at the end to the sum)
deltaTerm = self.make3By3OffDiagonalHessianTermIJ(refChainCalphas[i], unboundCounterpartChainCalphas[contacts_counterpartChainIndex], r_ij, r_ij)
overallTerm += deltaTerm
#print "r_ij, r_ij_b: ", r_ij, r_ij_b
# multiply the overallTerm with the spring constant gamma
overallTerm = overallTerm * self.utils.config.customForceConstant
counterPartCalphaPosition = encounter.accessANMs().getCalphaPosition(unboundCounterpartChainCalphas[contacts_counterpartChainIndex], unboundCounterpart)
print "off diagonal i,j "+str(loopCounter*3)+" "+str(counterPartCalphaPosition*3)+ " term: ", overallTerm
offDiagonalHessianMatrix = self.add3By3MatrixtoOffDiagonalHessianMatrixIJ(overallTerm, offDiagonalHessianMatrix, loopCounter*3, counterPartCalphaPosition*3)
#print contactsOfI.numAtoms(), "neighbors, modifying at hessian (loopcounter*3)+1: ", str((loopCounter*3)+1)
print str(i)+"'th refchain calpha, hessian line number ", (loopCounter*3)+1, "contacts with ", unboundCounterpartChainCalphas[contacts_counterpartChainIndex], " unboundcounterpartchainindex: ", contacts_counterpartChainIndices
print ""
loopCounter += 1
assert(loopCounter-counterUnmatchedCalphas) == refChainCalphas.numAtoms()
print "added custom terms to hessian"
return offDiagonalHessianMatrix
def calcCustomH_A_NeighborsBound(self, HR, encounter, selstr='calpha'):
""" Modifies the hessian of anm_reference according to calcCustomH_A and returns it. """
refChainCalphas = encounter.getRefChain().select('calpha')
mobChainCalphas = encounter.getMobChain().select('calpha')
boundCounterpartChainCalphas = encounter.getBoundCounterpartChain().select('calpha')
unboundCounterpartChainCalphas = encounter.getUnboundCounterpartChain().select('calpha')
referenceCalphas = encounter.getReference().select('calpha')
mobileCalphas = encounter.getMobile().select('calpha')
#encounter.printIntermolecularNeighbors(encounter.getMobile(), encounter.getBoundCounterpart(), selstr, str(self.utils.config.customHRdistance))
# Loop over all calphas in the reference structure (using matched chains)
counterUnmatchedCalphas = 0
loopCounter = 0
for element in referenceCalphas:
i = loopCounter - counterUnmatchedCalphas
if self.utils.doesAtomExistInY(element, refChainCalphas) is None:
counterUnmatchedCalphas += 1
loopCounter += 1
continue
else:
contactsOfI = encounter.getIntermolecularNeighbors(encounter.getMobChain(), encounter.getBoundCounterpartChain(), i, selstr, str(self.utils.config.customHRdistance))
# if there are contacts in the unbound counterpart, the hessian needs an update in the 3*3 matrix of the diagonal of this atom
if contactsOfI:
# print "contact at i, refChainCalphas[i]: ", i, refChainCalphas[i]
contacts_counterpartChainIndices = self.utils.getMatchingStructure(boundCounterpartChainCalphas, contactsOfI, unboundCounterpartChainCalphas)
assert len(contactsOfI) == len(contacts_counterpartChainIndices)
# access each element contact to create the deltaTerm
overallTerm = np.zeros((3,3))
for elementcontact, contacts_counterpartChainIndex in zip(contactsOfI, contacts_counterpartChainIndices):
self.utils.assertTwoAtomsAreEqual(refChainCalphas[i], mobChainCalphas[i], useCoords=False, useResname=False)
self.utils.assertTwoAtomsAreEqual(elementcontact, unboundCounterpartChainCalphas[contacts_counterpartChainIndex], useCoords=False, useResname=False)
self.utils.assertTwoAtomsAreEqual(boundCounterpartChainCalphas[contacts_counterpartChainIndex], elementcontact, useCoords=False, useResname=False)
self.utils.assertTwoAtomsAreEqual(boundCounterpartChainCalphas[contacts_counterpartChainIndex], unboundCounterpartChainCalphas[contacts_counterpartChainIndex], useCoords=False, useResname=False)
r_ij = calcDistance(refChainCalphas[i], unboundCounterpartChainCalphas[contacts_counterpartChainIndex])
r_ij_b = calcDistance(mobChainCalphas[i], elementcontact)
# make the 3*3 hessian term for this contact (excluding gamma, gamma is multiplied at the end to the sum)
# if customHR_B, just use the distance d_0, else use the true distance in the bound pairs for the second derivatives
if self.utils.config.customHR_B:
if r_ij >= self.utils.config.customHRdistance:
deltaTerm = self.make3By3HessianTerm(refChainCalphas[i], unboundCounterpartChainCalphas[contacts_counterpartChainIndex], r_ij, self.utils.config.customHRdistance)
overallTerm += deltaTerm
else:
deltaTerm = self.make3By3HessianTerm(refChainCalphas[i], unboundCounterpartChainCalphas[contacts_counterpartChainIndex], r_ij, r_ij_b)
overallTerm += deltaTerm
#print "r_ij, r_ij_b: ", r_ij, r_ij_b
# multiply the overallTerm with the spring constant gamma
overallTerm = overallTerm * self.utils.config.customForceConstant
#print overallTerm
print contactsOfI.numAtoms(), "neighbors, modifying at hessian (loopcounter*3)+1: ", str((loopCounter*3)+1)
#print contactsOfI.getSelstr()
print str(i)+"'th refchain calpha, hessian line number ", (loopCounter*3)+1, "contacts with ", unboundCounterpartChainCalphas[contacts_counterpartChainIndex], " unboundcounterpartchainindex: ", contacts_counterpartChainIndices
print ""
# add the overallterm to the hessian matrix
HR = self.add3By3MatrixtoHessian(overallTerm, HR, loopCounter*3)
loopCounter += 1
assert(loopCounter-counterUnmatchedCalphas) == refChainCalphas.numAtoms()
print "added custom terms to hessian"
return HR
def calcCustomH_A_NeighborsBoundGeneral(self, HR, encounter, workOnReceptor=True, selstr='calpha'):
""" Modifies the hessian of anm_reference according to calcCustomH_A and returns it. """
if workOnReceptor:
refChainCalphas = encounter.getRefChain().select('calpha')
mobChainCalphas = encounter.getMobChain().select('calpha')
mobChain = encounter.getMobChain()
boundCounterpartChainCalphas = encounter.getBoundCounterpartChain().select('calpha')
boundCounterpartChain = encounter.getBoundCounterpartChain()
unboundCounterpartChainCalphas = encounter.getUnboundCounterpartChain().select('calpha')
referenceCalphas = encounter.getReference().select('calpha')
else:
refChainCalphas = encounter.getUnboundCounterpartChain().select('calpha')
mobChainCalphas = encounter.getBoundCounterpartChain().select('calpha')
mobChain = encounter.getBoundCounterpartChain()
boundCounterpartChainCalphas = encounter.getMobChain().select('calpha')
boundCounterpartChain = encounter.getMobChain()
unboundCounterpartChainCalphas = encounter.getRefChain().select('calpha')
referenceCalphas = encounter.getUnboundCounterpart().select('calpha')
#encounter.printIntermolecularNeighbors(encounter.getMobile(), encounter.getBoundCounterpart(), selstr, str(self.utils.config.customHRdistance))
# Loop over all calphas in the reference structure (using matched chains)
counterUnmatchedCalphas = 0
loopCounter = 0
for element in referenceCalphas:
i = loopCounter - counterUnmatchedCalphas
if self.utils.doesAtomExistInY(element, refChainCalphas) is None:
counterUnmatchedCalphas += 1
loopCounter += 1
continue
else:
contactsOfI = encounter.getIntermolecularNeighbors(mobChain, boundCounterpartChain, i, selstr, str(self.utils.config.customHRdistance))
# if there are contacts in the unbound counterpart, the hessian needs an update in the 3*3 matrix of the diagonal of this atom
if contactsOfI:
# print "contact at i, refChainCalphas[i]: ", i, refChainCalphas[i]
contacts_counterpartChainIndices = self.utils.getMatchingStructure(boundCounterpartChainCalphas, contactsOfI, unboundCounterpartChainCalphas)
assert len(contactsOfI) == len(contacts_counterpartChainIndices)
# access each element contact to create the deltaTerm
overallTerm = np.zeros((3,3))
for elementcontact, contacts_counterpartChainIndex in zip(contactsOfI, contacts_counterpartChainIndices):
self.utils.assertTwoAtomsAreEqual(refChainCalphas[i], mobChainCalphas[i], useCoords=False, useResname=False)
self.utils.assertTwoAtomsAreEqual(elementcontact, unboundCounterpartChainCalphas[contacts_counterpartChainIndex], useCoords=False, useResname=False)
self.utils.assertTwoAtomsAreEqual(boundCounterpartChainCalphas[contacts_counterpartChainIndex], elementcontact, useCoords=False, useResname=False)
self.utils.assertTwoAtomsAreEqual(boundCounterpartChainCalphas[contacts_counterpartChainIndex], unboundCounterpartChainCalphas[contacts_counterpartChainIndex], useCoords=False, useResname=False)
r_ij = calcDistance(refChainCalphas[i], unboundCounterpartChainCalphas[contacts_counterpartChainIndex])
r_ij_b = calcDistance(mobChainCalphas[i], elementcontact)
# make the 3*3 hessian term for this contact (excluding gamma, gamma is multiplied at the end to the sum)
# if customHR_B, just use the distance d_0, else use the true distance in the bound pairs for the second derivatives
if self.utils.config.customHR_B:
if r_ij >= self.utils.config.customHRdistance:
deltaTerm = self.make3By3HessianTerm(refChainCalphas[i], unboundCounterpartChainCalphas[contacts_counterpartChainIndex], r_ij, self.utils.config.customHRdistance)
overallTerm += deltaTerm
else:
deltaTerm = self.make3By3HessianTerm(refChainCalphas[i], unboundCounterpartChainCalphas[contacts_counterpartChainIndex], r_ij, r_ij_b)
overallTerm += deltaTerm
#print "r_ij, r_ij_b: ", r_ij, r_ij_b
# multiply the overallTerm with the spring constant gamma
overallTerm = overallTerm * self.utils.config.customForceConstant
#print overallTerm
print contactsOfI.numAtoms(), "neighbors, modifying at hessian (loopcounter*3)+1: ", str((loopCounter*3)+1)
#print contactsOfI.getSelstr()
print str(i)+"'th refchain calpha, hessian line number ", (loopCounter*3)+1, "contacts with ", unboundCounterpartChainCalphas[contacts_counterpartChainIndex], " unboundcounterpartchainindex: ", contacts_counterpartChainIndices
print ""
# add the overallterm to the hessian matrix
HR = self.add3By3MatrixtoHessian(overallTerm, HR, loopCounter*3)
loopCounter += 1
assert(loopCounter-counterUnmatchedCalphas) == refChainCalphas.numAtoms()
print "added custom terms to hessian"
return HR
def calcOffDiagonalHessianBlockMatrixGeneral_IJ(self, encounter, workOnReceptor=True, selstr='calpha'):
""" Creates the off diagonal hessian block matrix and returns it. """
if workOnReceptor:
refChainCalphas = encounter.getRefChain().select('calpha')
mobChainCalphas = encounter.getMobChain().select('calpha')
mobChain = encounter.getMobChain()
boundCounterpartChainCalphas = encounter.getBoundCounterpartChain().select('calpha')
boundCounterpartChain = encounter.getBoundCounterpartChain()
unboundCounterpartChainCalphas = encounter.getUnboundCounterpartChain().select('calpha')
referenceCalphas = encounter.getReference().select('calpha')
mobileCalphas = encounter.getMobile().select('calpha')
unboundCounterpart = encounter.getUnboundCounterpart()
unboundCounterpartCalphas = encounter.getUnboundCounterpart().select('calpha')
else:
refChainCalphas = encounter.getUnboundCounterpartChain().select('calpha')
mobChainCalphas = encounter.getBoundCounterpartChain().select('calpha')
mobChain = encounter.getBoundCounterpartChain()
boundCounterpartChainCalphas = encounter.getMobChain().select('calpha')
boundCounterpartChain = encounter.getMobChain()
unboundCounterpartChainCalphas = encounter.getRefChain().select('calpha')
referenceCalphas = encounter.getUnboundCounterpart().select('calpha')
mobileCalphas = encounter.getBoundCounterpart().select('calpha')
unboundCounterpart = encounter.getReference()
unboundCounterpartCalphas = encounter.getReference().select('calpha')
offDiagonalHessianMatrix = np.zeros(((referenceCalphas.numAtoms()*3), (unboundCounterpartCalphas.numAtoms()*3) ))
# Loop over all calphas in the reference structure (using matched chains)
counterUnmatchedCalphas = 0
loopCounter = 0
for element in referenceCalphas:
i = loopCounter - counterUnmatchedCalphas
if self.utils.doesAtomExistInY(element, refChainCalphas) is None:
counterUnmatchedCalphas += 1
loopCounter += 1
continue
else:
contactsOfI = encounter.getIntermolecularNeighbors(mobChain, boundCounterpartChain, i, selstr, str(self.utils.config.customHRdistance))
# if there are contacts in the bound counterpart, the off diagonal part of the hessian needs an update in the 3*3 matrix of this atom and its neighbor
if contactsOfI:
# print "contact at i, refChainCalphas[i]: ", i, refChainCalphas[i]
contacts_counterpartChainIndices = self.utils.getMatchingStructure(boundCounterpartChainCalphas, contactsOfI, unboundCounterpartChainCalphas)
assert len(contactsOfI) == len(contacts_counterpartChainIndices)
# access each element contact to create the deltaTerm
for elementcontact, contacts_counterpartChainIndex in zip(contactsOfI, contacts_counterpartChainIndices):
overallTerm = np.zeros((3,3))
self.utils.assertTwoAtomsAreEqual(refChainCalphas[i], mobChainCalphas[i], useCoords=False, useResname=False)
self.utils.assertTwoAtomsAreEqual(elementcontact, unboundCounterpartChainCalphas[contacts_counterpartChainIndex], useCoords=False, useResname=False)
self.utils.assertTwoAtomsAreEqual(boundCounterpartChainCalphas[contacts_counterpartChainIndex], elementcontact, useCoords=False, useResname=False)
self.utils.assertTwoAtomsAreEqual(boundCounterpartChainCalphas[contacts_counterpartChainIndex], unboundCounterpartChainCalphas[contacts_counterpartChainIndex], useCoords=False, useResname=False)
r_ij = calcDistance(refChainCalphas[i], unboundCounterpartChainCalphas[contacts_counterpartChainIndex])
r_ij_b = calcDistance(mobChainCalphas[i], elementcontact)
# make the 3*3 hessian term for this contact
# if customHR_B, just use the distance d_0, else use the true distance in the bound pairs for the second derivatives
if self.utils.config.customHR_B:
if r_ij >= self.utils.config.customHRdistance:
deltaTerm = self.make3By3OffDiagonalHessianTermIJ(refChainCalphas[i], unboundCounterpartChainCalphas[contacts_counterpartChainIndex], r_ij, self.utils.config.customHRdistance)
overallTerm += deltaTerm
else:
deltaTerm = self.make3By3OffDiagonalHessianTermIJ(refChainCalphas[i], unboundCounterpartChainCalphas[contacts_counterpartChainIndex], r_ij, r_ij_b)
overallTerm += deltaTerm
# multiply the overallTerm with the spring constant gamma
overallTerm = overallTerm * self.utils.config.customForceConstant
# add the overall Term to the correct off diagonal super element in the hessian
counterPartCalphaPosition = encounter.accessANMs().getCalphaPosition(unboundCounterpartChainCalphas[contacts_counterpartChainIndex], unboundCounterpart)
offDiagonalHessianMatrix = self.add3By3MatrixtoOffDiagonalHessianMatrixIJ(overallTerm, offDiagonalHessianMatrix, loopCounter*3, counterPartCalphaPosition*3)
#print "r_ij, r_ij_b: ", r_ij, r_ij_b
#print overallTerm
print contactsOfI.numAtoms(), "neighbors, modifying at hessian (loopcounter*3)+1: ", str((loopCounter*3)+1)
#print contactsOfI.getSelstr()
#print str(i)+"'th refchain calpha, hessian line number ", (loopCounter*3)+1, "contacts with ", unboundCounterpartChainCalphas[contacts_counterpartChainIndex], " unboundcounterpartchainindex: ", contacts_counterpartChainIndices
print ""
loopCounter += 1
assert(loopCounter-counterUnmatchedCalphas) == refChainCalphas.numAtoms()
print "added custom terms to hessian"
return offDiagonalHessianMatrix
# origs
def secondDerivativeTermOnDiagonal(self, x_i, x_j, r_ij, r_ij_b):
""" @V / @x_i@x_i (excluding gamma)"""
result = 1 + (r_ij_b * np.power(x_j - x_i, 2) ) / np.power(r_ij, 3) - r_ij_b/r_ij
return result
def secondDerivateTermOffDiagonal(self, x_i, x_j, y_i, y_j, r_ij, r_ij_b):
""" @V / @x_i@y_j (excluding gamma) """
result = r_ij_b * (x_j - x_i) * ((y_j - y_i)/np.power(r_ij, 3))
return result
def secondDerivateTermOffDiagonalAtomsIJ(self, x_i, x_j, y_i, y_j, r_ij, r_ij_b):
""" Equation 21 before reducing, Atilgan paper, @V / @x_i@y_j (excluding gamma) """
result = -1.0 * r_ij_b * (x_j - x_i) * ((y_j - y_i)/np.power(r_ij, 3))
return result
#
# using r_ij_b
# def secondDerivativeTermOnDiagonal(self, x_i, x_j, r_ij, r_ij_b):
# """ @V / @x_i@x_i (excluding gamma) from paper, assume r_ij is at equilibrium r_ij_b. """
# result = np.power(x_j - x_i, 2) / np.power(r_ij_b, 2)
# return result
#
# def secondDerivateTermOffDiagonal(self, x_i, x_j, y_i, y_j, r_ij, r_ij_b):
# """ @V / @x_i@y_j (excluding gamma) from paper, assume r_ij is at equilibrium r_ij_b. """
# result = ((x_j - x_i)*(y_j - y_i))/ np.power(r_ij_b, 2)
# return result
# using r_ij
# def secondDerivativeTermOnDiagonal(self, x_i, x_j, r_ij, r_ij_b):
# """ @V / @x_i@x_i (excluding gamma) from paper, assume r_ij is at equilibrium r_ij_b. """
# result = np.power(x_j - x_i, 2) / np.power(r_ij, 2)
# return result
#
# def secondDerivateTermOffDiagonal(self, x_i, x_j, y_i, y_j, r_ij, r_ij_b):
# """ @V / @x_i@y_j (excluding gamma) from paper, assume r_ij is at equilibrium r_ij_b. """
# result = ((x_j - x_i)*(y_j - y_i))/ np.power(r_ij, 2)
# return result
def make3By3HessianTerm(self, refChainCalpha, elementcontact, r_ij, r_ij_b):
""" Create a 3 by 3 matrix with the added terms for the hessian diagnonal (excluding multiplication with gamma)"""
x_i = refChainCalpha.getCoords()[0]
y_i = refChainCalpha.getCoords()[1]
z_i = refChainCalpha.getCoords()[2]
x_j = elementcontact.getCoords()[0]
y_j = elementcontact.getCoords()[1]
z_j = elementcontact.getCoords()[2]
deltaTerm = np.zeros((3,3))
deltaTerm[0][0] = self.secondDerivativeTermOnDiagonal(x_i, x_j, r_ij, r_ij_b)
deltaTerm[0][1] = self.secondDerivateTermOffDiagonal(x_i, x_j, y_i, y_j, r_ij, r_ij_b)
deltaTerm[0][2] = self.secondDerivateTermOffDiagonal(x_i, x_j, z_i, z_j, r_ij, r_ij_b)
deltaTerm[1][0] = deltaTerm[0][1]
deltaTerm[1][1] = self.secondDerivativeTermOnDiagonal(y_i, y_j, r_ij, r_ij_b)
deltaTerm[1][2] = self.secondDerivateTermOffDiagonal(y_i, y_j, z_i, z_j, r_ij, r_ij_b)
deltaTerm[2][0] = deltaTerm[0][2]
deltaTerm[2][1] = deltaTerm[1][2]
deltaTerm[2][2] = self.secondDerivativeTermOnDiagonal(z_i, z_j, r_ij, r_ij_b)
return deltaTerm
def add3By3MatrixtoHessian(self, delta3by3, HR, topleftIndex):
""" Add the delta3by3 matrix to its corresponding position of HR, located by
the topleftIndex. """
HR[topleftIndex][topleftIndex] += delta3by3[0][0]
HR[topleftIndex][topleftIndex+1] += delta3by3[0][1]
HR[topleftIndex][topleftIndex+2] += delta3by3[0][2]
HR[topleftIndex+1][topleftIndex] += delta3by3[1][0]
HR[topleftIndex+1][topleftIndex+1] += delta3by3[1][1]
HR[topleftIndex+1][topleftIndex+2] += delta3by3[1][2]
HR[topleftIndex+2][topleftIndex] += delta3by3[2][0]
HR[topleftIndex+2][topleftIndex+1] += delta3by3[2][1]
HR[topleftIndex+2][topleftIndex+2] += delta3by3[2][2]
return HR
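    # Layout note (added): calpha i owns rows/columns 3*i .. 3*i+2 of the
    # 3N x 3N hessian, so callers pass topleftIndex = i*3 and this updates
    # exactly the 3x3 diagonal super-element of atom i (e.g. i == 3 touches
    # HR[9:12, 9:12]).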
def add3By3MatrixtoOffDiagonalHessianMatrixIJ(self, delta3by3, offDiagonalHessianMatrix, topleftIndex, counterpartTopleftIndex):
""" Add the delta3by3 matrix to its corresponding position of HR, located by
the topleftIndex. """
offDiagonalHessianMatrix[topleftIndex][counterpartTopleftIndex] += delta3by3[0][0]
offDiagonalHessianMatrix[topleftIndex][counterpartTopleftIndex+1] += delta3by3[0][1]
offDiagonalHessianMatrix[topleftIndex][counterpartTopleftIndex+2] += delta3by3[0][2]
offDiagonalHessianMatrix[topleftIndex+1][counterpartTopleftIndex] += delta3by3[1][0]
offDiagonalHessianMatrix[topleftIndex+1][counterpartTopleftIndex+1] += delta3by3[1][1]
offDiagonalHessianMatrix[topleftIndex+1][counterpartTopleftIndex+2] += delta3by3[1][2]
offDiagonalHessianMatrix[topleftIndex+2][counterpartTopleftIndex] += delta3by3[2][0]
offDiagonalHessianMatrix[topleftIndex+2][counterpartTopleftIndex+1] += delta3by3[2][1]
offDiagonalHessianMatrix[topleftIndex+2][counterpartTopleftIndex+2] += delta3by3[2][2]
return offDiagonalHessianMatrix
def make3By3OffDiagonalHessianTermIJ(self, refChainCalpha, elementcontact, r_ij, r_ij_b):
""" Create a 3 by 3 matrix with the added terms for the hessian super element off the diagnonal (excluding multiplication with gamma). """
x_i = refChainCalpha.getCoords()[0]
y_i = refChainCalpha.getCoords()[1]
z_i = refChainCalpha.getCoords()[2]
x_j = elementcontact.getCoords()[0]
y_j = elementcontact.getCoords()[1]
z_j = elementcontact.getCoords()[2]
deltaTerm = np.zeros((3,3))
deltaTerm[0][0] = self.secondDerivateTermOffDiagonalAtomsIJ(x_i, x_j, x_i, x_j, r_ij, r_ij_b)
deltaTerm[0][1] = self.secondDerivateTermOffDiagonalAtomsIJ(x_i, x_j, y_i, y_j, r_ij, r_ij_b)
deltaTerm[0][2] = self.secondDerivateTermOffDiagonalAtomsIJ(x_i, x_j, z_i, z_j, r_ij, r_ij_b)
deltaTerm[1][0] = deltaTerm[0][1]
deltaTerm[1][1] = self.secondDerivateTermOffDiagonalAtomsIJ(y_i, y_j, y_i, y_j, r_ij, r_ij_b)
deltaTerm[1][2] = self.secondDerivateTermOffDiagonalAtomsIJ(y_i, y_j, z_i, z_j, r_ij, r_ij_b)
deltaTerm[2][0] = deltaTerm[0][2]
deltaTerm[2][1] = deltaTerm[1][2]
deltaTerm[2][2] = self.secondDerivateTermOffDiagonalAtomsIJ(z_i, z_j, z_i, z_j, r_ij, r_ij_b)
return deltaTerm
def getCalphaPosition(self, atom1, reference):
""" Returns the position of atom1 among the calphas of reference. Useful if one
desires to know the index of an calpha atom in the ANM hessian made from reference calphas.
Args:
atom1: the calpha atom that the position is desired to know
reference: the reference structure where the calpha position is obtained from
        Returns: Non-negative integer denoting the calpha position
"""
assert atom1.getName() == 'CA'
referenceCalphas = reference.select('calpha')
# try:
# idx = zip(referenceCalphas).index((atom1, ))
# return idx
# except ValueError:
# print "Exception in getCalphaPosition. This calpha cannot be located in the structure provided. "
for idx, referenceCalpha in enumerate(referenceCalphas):
if atom1 == referenceCalpha:
return idx
        raise ValueError("Exception in getCalphaPosition. This calpha cannot be located in the structure provided. ")
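    # Example usage (added, illustrative only; atom1 must be one of the CA
    # atoms of reference):
    #   pos = self.getCalphaPosition(atom1, reference)
    #   row = pos * 3  # first of the three hessian rows owned by this atom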
def normalizeM(self, M):
""" Normalize a set of modes, which are the columnvectors in M.
Args:
M: set of modes as columnvectors
Returns: normalized (magnitude of each mode is 1) set of modes as columnvectors in M
"""
Mnormed = None
if M.ndim == 1:
modeVector = Vector(M)
return modeVector.getNormed().getArray()
else:
for element in M.T:
modeVector = Vector(element)
modeNormalized = modeVector.getNormed()
if Mnormed is None:
Mnormed = modeNormalized.getArray()
else:
Mnormed = np.column_stack((Mnormed, modeNormalized.getArray()))
return Mnormed
def getNoOfZeroEigvals(self, anm):
""" Return the number of zero eigenvalues, the treshold is defined in the constant ZERO.
Args:
anm: the anm
Returns: number of zero eigenvalues
"""
ZERO = 1e-10
return sum(anm.getEigvals() < ZERO)
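    # Background (added): a single unconstrained molecule yields exactly six
    # zero modes (three rigid translations, three rigid rotations); more than
    # six usually signals a disconnected elastic network or too small a
    # cutoff. The 1e-10 threshold is a heuristic, not an exact zero test.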
def removeInterAtoms(self, arr, interCalphaIndices):
""" Set x,y,z coordinations of atoms indicated by calphasInterIndices to 0,0,0 in arr.
Args:
arr: the array with x,y,z coordinates
interCalphaIndices: calphas with intermolecular contacts
Returns: arr with x,y,z positions of atoms from interCalphaIndices set to 0,0,0
"""
for calphaIndex in interCalphaIndices:
arr[(calphaIndex*3)] = 0.0
arr[(calphaIndex*3+1)] = 0.0
arr[(calphaIndex*3+2)] = 0.0
return arr<|fim▁end|> | Ty = self.utils.fill3DArrayWithValue(Ty, 1.0, 1) |
<|file_name|>inverse_op.cc<|end_file_name|><|fim▁begin|>/* Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#include "paddle/fluid/operators/inverse_op.h"
#include <string>
#include <unordered_map>
namespace paddle {
namespace operators {
class InverseOp : public framework::OperatorWithKernel {
public:
using framework::OperatorWithKernel::OperatorWithKernel;
void InferShape(framework::InferShapeContext* ctx) const override {
OP_INOUT_CHECK(ctx->HasInput("Input"), "Input", "Input", "Inverse");
OP_INOUT_CHECK(ctx->HasOutput("Output"), "Output", "Output", "Inverse");
auto input_dims = ctx->GetInputDim("Input");
int64_t input_rank = input_dims.size();
PADDLE_ENFORCE_GE(
input_rank, 2,
platform::errors::InvalidArgument(
"The dimension of Input(Input) is expected to be no less than 2. "
"But recieved: Input(Input)'s dimension = %d, shape = [%s].",
input_rank, input_dims));
for (int64_t i = 0; i < input_rank; ++i) {
PADDLE_ENFORCE_EQ(
(input_dims[i] == -1) || (input_dims[i] > 0), true,
platform::errors::InvalidArgument(
"Each dimension of input tensor is expected to be -1 or a "
"positive number, but recieved %d. Input's shape is [%s].",
input_dims[i], input_dims));
}
if (input_dims[input_rank - 2] > 0 && input_dims[input_rank - 1] > 0) {
PADDLE_ENFORCE_EQ(input_dims[input_rank - 2], input_dims[input_rank - 1],
platform::errors::InvalidArgument(
"The last two dimensions are expected to be equal. "
"But recieved: %d and %d; "
"Input(Input)'s shape = [%s].",
input_dims[input_rank - 2],
input_dims[input_rank - 1], input_dims));
}
ctx->SetOutputDim("Output", input_dims);
ctx->ShareLoD("Input", /*->*/ "Output");
}
};
class InverseOpInferVarType : public framework::PassInDtypeAndVarTypeToOutput {
protected:
std::unordered_map<std::string, std::string>& GetInputOutputWithSameType()
const override {
static std::unordered_map<std::string, std::string> m{
{"Input", /*->*/ "Output"}};
return m;
}
};
class InverseGradOp : public framework::OperatorWithKernel {
public:
using framework::OperatorWithKernel::OperatorWithKernel;
void InferShape(framework::InferShapeContext* ctx) const override {
auto input_grad = framework::GradVarName("Input");
auto output_grad = framework::GradVarName("Output");
OP_INOUT_CHECK(ctx->HasInput("Output"), "Input", "Output", "InverseGrad");
OP_INOUT_CHECK(ctx->HasInput(output_grad), "Input", output_grad,
"InverseGrad");
if (ctx->HasOutput(input_grad)) {
ctx->SetOutputDim(input_grad, ctx->GetInputDim(output_grad));
}
}
};
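// Math note (added for clarity, using the standard matrix-inverse
// derivative): with Y = Inverse(X), dY = -Y * dX * Y, hence the backward
// pass is expected to compute dL/dX = -Y^T * (dL/dY) * Y^T -- the rule the
// registered InverseGradKernel should implement.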
class InverseOpMaker : public framework::OpProtoAndCheckerMaker {
public:
void Make() override {
AddInput(
"Input",
"(Tensor) A square matrix (2-D Tensor) or batches of square matrices"
" to inverse.");
AddOutput("Output", "(Tensor) The inverse of input matrix.");
AddComment(R"DOC(
Inverse Operator
Takes the inverse of the square matrix.
)DOC");
}
};
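// Illustrative front-end usage of this operator (hypothetical snippet in
// the style of the paddle 2.x Python API; exact names are assumptions):
//   x = paddle.to_tensor([[2., 0.], [0., 4.]])
//   y = paddle.inverse(x)  # [[0.5, 0.], [0., 0.25]]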
template <typename T>
class InverseGradOpMaker : public framework::SingleGradOpMaker<T> {
public:
using framework::SingleGradOpMaker<T>::SingleGradOpMaker;
<|fim▁hole|> grad->SetInput(framework::GradVarName("Output"),
this->OutputGrad("Output"));
grad->SetOutput(framework::GradVarName("Input"), this->InputGrad("Input"));
}
};
} // namespace operators
} // namespace paddle
namespace ops = paddle::operators;
REGISTER_OPERATOR(inverse, ops::InverseOp, ops::InverseOpMaker,
ops::InverseOpInferVarType,
ops::InverseGradOpMaker<paddle::framework::OpDesc>,
ops::InverseGradOpMaker<paddle::imperative::OpBase>);
REGISTER_OPERATOR(inverse_grad, ops::InverseGradOp);
REGISTER_OP_CPU_KERNEL(
inverse, ops::InverseKernel<paddle::platform::CPUDeviceContext, float>,
ops::InverseKernel<paddle::platform::CPUDeviceContext, double>);
REGISTER_OP_CPU_KERNEL(
inverse_grad,
ops::InverseGradKernel<paddle::platform::CPUDeviceContext, float>,
ops::InverseGradKernel<paddle::platform::CPUDeviceContext, double>);<|fim▁end|> | protected:
void Apply(GradOpPtr<T> grad) const override {
grad->SetType(this->ForwardOpType() + "_grad");
grad->SetInput("Output", this->Output("Output")); |
<|file_name|>scopes.rs<|end_file_name|><|fim▁begin|>use {ast, typeinf, util};
use core::{Src, CompletionType, Session};
#[cfg(test)] use core;
use std::iter::Iterator;
use std::path::Path;
use std::str::from_utf8;
use util::char_at;
fn find_close<'a, A>(iter: A, open: u8, close: u8, level_end: u32) -> Option<usize> where A: Iterator<Item=&'a u8> {
let mut levels = 0u32;
for (count, &b) in iter.enumerate() {
if b == close {
if levels == level_end { return Some(count); }
levels -= 1;
} else if b == open { levels += 1; }
}
None
}
pub fn find_closing_paren(src: &str, pos: usize) -> usize {
find_close(src.as_bytes()[pos..].iter(), b'(', b')', 0)
.map_or(src.len(), |count| pos + count)
}
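// Hedged example (added) of the matching above; offsets worked out by hand
// for these illustrative inputs only.
#[test]
fn finds_closing_paren_of_nested_expr() {
    // byte 1 sits just inside the outer '('; byte 6 is its matching ')'
    assert_eq!(6, find_closing_paren("(a(b)c)", 1));
    // without a closing paren the function falls back to src.len()
    assert_eq!(9, find_closing_paren("no parens", 3));
}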
pub fn scope_start(src: Src, point: usize) -> usize {
let masked_src = mask_comments(src.to(point));
find_close(masked_src.as_bytes().iter().rev(), b'}', b'{', 0)
.map_or(0, |count| point - count)
}
pub fn find_stmt_start(msrc: Src, point: usize) -> Option<usize> {
// iterate the scope to find the start of the statement
let scopestart = scope_start(msrc, point);
msrc.from(scopestart).iter_stmts()
.find(|&(_, end)| scopestart + end > point)
.map(|(start, _)| scopestart + start)
}
pub fn get_local_module_path(msrc: Src, point: usize) -> Vec<String> {
let mut v = Vec::new();
get_local_module_path_(msrc, point, &mut v);
v
}
fn get_local_module_path_(msrc: Src, point: usize, out: &mut Vec<String>) {
for (start, end) in msrc.iter_stmts() {
if start < point && end > point {
let blob = msrc.from_to(start, end);
if blob.starts_with("pub mod ") || blob.starts_with("mod ") {
let p = typeinf::generate_skeleton_for_parsing(&blob);
ast::parse_mod(p).name.map(|name| {
out.push(name);
let newstart = blob.find("{").unwrap() + 1;
get_local_module_path_(blob.from(newstart),
point - start - newstart, out);
});
}
}
}
}
pub fn find_impl_start(msrc: Src, point: usize, scopestart: usize) -> Option<usize> {
let len = point-scopestart;
match msrc.from(scopestart).iter_stmts().find(|&(_, end)| end > len) {
Some((start, _)) => {
let blob = msrc.from(scopestart + start);
            // TODO: the following is a bit weak at matching traits. make this better
if blob.starts_with("impl") || blob.starts_with("trait") || blob.starts_with("pub trait") {
Some(scopestart + start)
} else {
let newstart = blob.find("{").unwrap() + 1;
find_impl_start(msrc, point, scopestart+start+newstart)
}
},
None => None
}
}
#[test]
fn finds_subnested_module() {
use core;
let src = "
pub mod foo {
pub mod bar {
here
}
}";
let point = coords_to_point(&src, 4, 12);
let src = core::new_source(String::from(src));
let v = get_local_module_path(src.as_ref(), point);
assert_eq!("foo", &v[0][..]);
assert_eq!("bar", &v[1][..]);
let point = coords_to_point(&src, 3, 8);
let v = get_local_module_path(src.as_ref(), point);
assert_eq!("foo", &v[0][..]);
}
pub fn split_into_context_and_completion(s: &str) -> (&str, &str, CompletionType) {
match s.char_indices().rev().find(|&(_, c)| !util::is_ident_char(c)) {
Some((i,c)) => {
//println!("PHIL s '{}' i {} c '{}'",s,i,c);
match c {
'.' => (&s[..i], &s[(i+1)..], CompletionType::CompleteField),
':' if s.len() > 1 => (&s[..(i-1)], &s[(i+1)..], CompletionType::CompletePath),
_ => (&s[..(i+1)], &s[(i+1)..], CompletionType::CompletePath)
}
},
None => ("", s, CompletionType::CompletePath)
}
}
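// Hedged example (added) of the split above; the CompletionType variant is
// deliberately ignored so the test needs no extra trait bounds.
#[test]
fn splits_field_and_path_expressions() {
    let (ctx, comp, _) = split_into_context_and_completion("foo.bar");
    assert_eq!(("foo", "bar"), (ctx, comp));
    let (ctx, comp, _) = split_into_context_and_completion("std::io");
    assert_eq!(("std", "io"), (ctx, comp));
}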
pub fn get_start_of_search_expr(src: &str, point: usize) -> usize {
let mut i = point;
let mut levels = 0u32;
for &b in src.as_bytes()[..point].iter().rev() {
i -= 1;
match b {
b'(' => {
if levels == 0 { return i+1; }
levels -= 1;
},
b')' => { levels += 1; },
_ => {
                if levels == 0 &&
                    (!util::is_search_expr_char(char_at(src, i)) ||
                    util::is_double_dot(src,i)) {
return i+1;
}
}
}
}
0
}
pub fn get_start_of_pattern(src: &str, point: usize) -> usize {
let mut i = point-1;
let mut levels = 0u32;
for &b in src.as_bytes()[..point].iter().rev() {
match b {
b'(' => {
if levels == 0 { return i+1; }
levels -= 1;
},
b')' => { levels += 1; },
_ => {
if levels == 0 &&
!util::is_pattern_char(char_at(src, i)) {
return i+1;
}
}
}
i -= 1;
}
0
}
#[test]
fn get_start_of_pattern_handles_variant() {
assert_eq!(4, get_start_of_pattern("foo, Some(a) =>",13));
}
#[test]
fn get_start_of_pattern_handles_variant2() {
assert_eq!(4, get_start_of_pattern("bla, ast::PatTup(ref tuple_elements) => {",36));
}
pub fn expand_search_expr(msrc: &str, point: usize) -> (usize, usize) {
let start = get_start_of_search_expr(msrc, point);
(start, util::find_ident_end(msrc, point))
}
#[test]
fn expand_search_expr_finds_ident() {
assert_eq!((0, 7), expand_search_expr("foo.bar", 5))
}
#[test]
fn expand_search_expr_handles_chained_calls() {
assert_eq!((0, 20), expand_search_expr("yeah::blah.foo().bar", 18))
}
#[test]
fn expand_search_expr_handles_inline_closures() {
assert_eq!((0, 24), expand_search_expr("yeah::blah.foo(||{}).bar", 22))
}
#[test]
fn expand_search_expr_handles_a_function_arg() {
assert_eq!((5, 25), expand_search_expr("myfn(foo::new().baz().com)", 23))
}
#[test]
fn expand_search_expr_handles_macros() {
assert_eq!((0, 9), expand_search_expr("my_macro!()", 9))
}
#[test]
fn expand_search_expr_handles_pos_at_end_of_search_str() {
assert_eq!((0, 7), expand_search_expr("foo.bar", 7))
}
pub fn mask_comments(src: Src) -> String {
let mut result = String::with_capacity(src.len());
let buf_byte = &[b' '; 128];
let buffer = from_utf8(buf_byte).unwrap();
let mut prev: usize = 0;
for (start, end) in src.chunk_indices() {
for _ in 0..((start-prev)/128) { result.push_str(buffer); }
result.push_str(&buffer[..((start-prev)%128)]);
result.push_str(&src[start..end]);
prev = end;
}
result
}
pub fn mask_sub_scopes(src: &str) -> String {
let mut result = String::with_capacity(src.len());
let buf_byte = [b' '; 128];
let buffer = from_utf8(&buf_byte).unwrap();
let mut levels = 0i32;
let mut start = 0usize;
let mut pos = 0usize;
for &b in src.as_bytes() {
pos += 1;
match b {
b'{' => {
if levels == 0 {
result.push_str(&src[start..(pos)]);
start = pos+1;
}
levels += 1;
},
b'}' => {
if levels == 1 {
let num_spaces = pos-start;
for _ in 0..(num_spaces/128) { result.push_str(buffer); }
result.push_str(&buffer[..((num_spaces)%128)]);
result.push_str("}");
start = pos;
}
levels -= 1;
},
b'\n' if levels > 0 => {
for _ in 0..((pos-start)/128) { result.push_str(buffer); }
result.push_str(&buffer[..((pos-start)%128)]);
result.push('\n');
start = pos+1;
},
_ => {}
}
}
if start > pos {
start = pos;
}
if levels > 0 {
for _ in 0..((pos - start)/128) { result.push_str(buffer); }
result.push_str(&buffer[..((pos-start)%128)]);
} else {
result.push_str(&src[start..pos]);
}
result
}
pub fn end_of_next_scope(src: &str) -> &str {
match find_close(src.as_bytes().iter(), b'{', b'}', 1) {
Some(count) => &src[..count+1],
None => ""
}
}
pub fn coords_to_point(src: &str, mut linenum: usize, col: usize) -> usize {
let mut point = 0;
for line in src.split('\n') {
linenum -= 1;
if linenum == 0 { break }
point += line.len() + 1; // +1 for the \n
}
point + col
}
pub fn point_to_coords(src: &str, point: usize) -> (usize, usize) {
let mut linestart = 0;
let mut nlines = 1; // lines start at 1
for (i, &b) in src[..point].as_bytes().iter().enumerate() {
if b == b'\n' {
nlines += 1;
linestart = i+1;
}
}
(nlines, point - linestart)
}
pub fn point_to_coords_from_file(path: &Path, point: usize, session: &Session) -> Option<(usize, usize)> {
let mut p = 0;
for (lineno, line) in session.load_file(path).split('\n').enumerate() {
if point < (p + line.len()) {
return Some((lineno+1, point - p));
}
p += line.len() + 1; // +1 for the newline char
}
None
}
#[test]
fn coords_to_point_works() {
let src = "
fn myfn() {
let a = 3;
print(a);
}";
assert!(coords_to_point(src, 3, 5) == 18);
}
<|fim▁hole|> let a = 3;
print(a);
}
");
let src = core::new_source(src);
let point = coords_to_point(&src, 4, 10);
let start = scope_start(src.as_ref(), point);
assert!(start == 12);
}
#[test]
fn test_scope_start_handles_sub_scopes() {
let src = String::from("
fn myfn() {
let a = 3;
{
let b = 4;
}
print(a);
}
");
let src = core::new_source(src);
let point = coords_to_point(&src, 7, 10);
let start = scope_start(src.as_ref(), point);
assert!(start == 12);
}
#[test]
fn masks_out_comments() {
let src = String::from("
this is some code
this is a line // with a comment
some more
");
let src = core::new_source(src);
let r = mask_comments(src.as_ref());
assert!(src.len() == r.len());
// characters at the start are the same
assert!(src.as_bytes()[5] == r.as_bytes()[5]);
// characters in the comments are masked
let commentoffset = coords_to_point(&src,3,23);
assert!(char_at(&r, commentoffset) == ' ');
assert!(src.as_bytes()[commentoffset] != r.as_bytes()[commentoffset]);
// characters afterwards are the same
assert!(src.as_bytes()[src.len()-3] == r.as_bytes()[src.len()-3]);
}
#[test]
fn test_point_to_coords() {
let src = "
fn myfn(b:usize) {
let a = 3;
if b == 12 {
let a = 24;
do_something_with(a);
}
do_something_with(a);
}
";
round_trip_point_and_coords(src, 4, 5);
}
pub fn round_trip_point_and_coords(src: &str, lineno: usize, charno: usize) {
let (a,b) = point_to_coords(src, coords_to_point(src, lineno, charno));
assert_eq!((a,b), (lineno,charno));
}
#[test]
fn finds_end_of_struct_scope() {
let src="
struct foo {
a: usize,
blah: ~str
}
Some other junk";
let expected="
struct foo {
a: usize,
blah: ~str
}";
let s = end_of_next_scope(src);
assert_eq!(expected, s);
}<|fim▁end|> | #[test]
fn test_scope_start() {
let src = String::from("
fn myfn() { |
<|file_name|>visualizer.js<|end_file_name|><|fim▁begin|>function playAudioVisualize(track) {
var bars = 50;
var waveResolution = 128;
var style = "bars"; //set default style upon loading here
var audio = new Audio();
var canvas, source, context, analyser, fFrequencyData, barX, barWidth, barHeight, red, green, blue, ctx;
audio.controls = true;
audio.src = track;
audio.loop = false;
audio.autoplay = false;
window.addEventListener("load", initPlayer, false);
function initPlayer() {
document.getElementById('audio-container').appendChild(audio);
context = new AudioContext();
analyser = context.createAnalyser();
canvas = document.getElementById('audio-display');
canvas.addEventListener("click", toggleStyle);
ctx = canvas.getContext('2d');
source = context.createMediaElementSource(audio);
source.connect(analyser);
analyser.connect(context.destination);
drawFrames();
function toggleStyle() {
style = (style === "wave" ? "bars" : "wave");
}
}
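	// Signal chain built above (comment added for clarity):
	//   <audio> element -> MediaElementSource -> AnalyserNode -> destination
	// The analyser only taps the stream for frequency data; removing the
	// analyser.connect(context.destination) hop would silence playback.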
var k = 0; //keep track of total number of frames drawn
function drawFrames() {
window.requestAnimationFrame(drawFrames);
		analyser.fftSize = 128;
fFrequencyData = new Uint8Array(analyser.frequencyBinCount);
analyser.getByteFrequencyData(fFrequencyData);
ctx.clearRect(0,0,canvas.width,canvas.height);
//calculate average frequency for color
var total = 0;
for(var j = 0; j < fFrequencyData.length; j++) {
total += fFrequencyData[j];
}
var avg = total / fFrequencyData.length;
avg = avg / 1.2;
//bar style visual representation
function drawBars(numBars) {
for(var i = 0; i < numBars; i++) {
barX = i * (canvas.width / numBars);
barWidth = (canvas.width / numBars - 1);
barHeight = -(fFrequencyData[i] / 2);
//reduce frequency of color changing to avoid flickering
if(k % 15 === 0) {
getColors();
k = 0;
}
ctx.fillStyle = 'rgb('+red+','+green+','+blue+')';
ctx.fillRect(barX, canvas.height, barWidth, barHeight);
}
}
//waveform visualization
function drawWave(resolution, lineWidth) {
ctx.beginPath();
ctx.lineWidth = lineWidth;
var barX, barY;
for(var i = 0; i < resolution; i++) {
barX = i * (Math.ceil(canvas.width / resolution));
barY = -(fFrequencyData[i] / 2);
getColors();
k = 0;
ctx.strokeStyle = 'rgb('+red+','+green+','+blue+')';
ctx.lineTo(barX, barY + canvas.height );
ctx.stroke();
}
}
function getColors() {
//can edit these values to get overall different coloration!!
<|fim▁hole|> red = Math.round(Math.sin(avg/29.0 + 6.1) * 127 + 128);
green = Math.round(Math.sin(avg/42.0 - 7.4) * 127 + 128);
blue = Math.round(Math.sin(avg/34.0 - 3.8) * 127 + 128);
}
if(style === "wave") {
drawWave(waveResolution, 2);
}
if(style === "bars") {
drawBars(bars);
}
k++;
}
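// Usage sketch (illustrative, not part of the original file): the code
// above assumes a page providing the two elements it looks up by id,
// e.g. <div id="audio-container"></div> and
// <canvas id="audio-display" width="600" height="200"></canvas>.
// A call such as playAudioVisualize('audio/track.mp3') (placeholder
// path) then appends the <audio> element and starts drawing; clicking
// the canvas toggles between the "bars" and "wave" styles.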
}<|fim▁end|> | |
<|file_name|>file_mime_process.cc<|end_file_name|><|fim▁begin|>//--------------------------------------------------------------------------
// Copyright (C) 2014-2015 Cisco and/or its affiliates. All rights reserved.
// Copyright (C) 2012-2013 Sourcefire, Inc.
//
// This program is free software; you can redistribute it and/or modify it
// under the terms of the GNU General Public License Version 2 as published
// by the Free Software Foundation. You may not use, modify or distribute
// this program under any other version of the GNU General Public License.
//
// This program is distributed in the hope that it will be useful, but
// WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// General Public License for more details.
//
// You should have received a copy of the GNU General Public License along
// with this program; if not, write to the Free Software Foundation, Inc.,
// 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
//--------------------------------------------------------------------------
/*
** Author(s): Hui Cao <[email protected]>
**
** NOTES
** 9.25.2012 - Initial Source Code. Hui Cao
*/
#include "file_mime_process.h"
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "file_mime_config.h"
#include "file_mime_decode.h"
#include "file_api/file_api.h"
#include "file_api/file_flows.h"
#include "main/snort_types.h"
#include "search_engines/search_tool.h"
#include "protocols/packet.h"
#include "detection/detection_util.h"
#include "framework/data_bus.h"
#include "utils/util.h"
#include "utils/snort_bounds.h"
struct MimeToken
{
const char* name;
int name_len;
int search_id;
};
enum MimeHdrEnum
{
HDR_CONTENT_TYPE = 0,
HDR_CONT_TRANS_ENC,
HDR_CONT_DISP,
HDR_LAST
};
const MimeToken mime_hdrs[] =
{
{ "Content-type:", 13, HDR_CONTENT_TYPE },
{ "Content-Transfer-Encoding:", 26, HDR_CONT_TRANS_ENC },
{ "Content-Disposition:", 20, HDR_CONT_DISP },
{ NULL, 0, 0 }
};
struct MIMESearch
{
const char* name;
int name_len;
};
struct MIMESearchInfo
{
int id;
int index;
int length;
};
MIMESearchInfo mime_search_info;
SearchTool* mime_hdr_search_mpse = nullptr;
MIMESearch mime_hdr_search[HDR_LAST];
MIMESearch* mime_current_search = NULL;
void get_mime_eol(const uint8_t* ptr, const uint8_t* end,
const uint8_t** eol, const uint8_t** eolm)
{
const uint8_t* tmp_eol;
const uint8_t* tmp_eolm;
assert(eol and eolm);
if ( !ptr or !end )
{
*eol = *eolm = end;
return;
}
tmp_eol = (uint8_t*)memchr(ptr, '\n', end - ptr);
if (tmp_eol == NULL)
{
tmp_eol = end;
tmp_eolm = end;
}
else
{
/* end of line marker (eolm) should point to marker and
* end of line (eol) should point to end of marker */
if ((tmp_eol > ptr) && (*(tmp_eol - 1) == '\r'))
{
tmp_eolm = tmp_eol - 1;
}
else
{
tmp_eolm = tmp_eol;
}
/* move past newline */
tmp_eol++;
}
*eol = tmp_eol;
*eolm = tmp_eolm;
}
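/* Illustration (example data, not from the original source): for the
 * buffer "abc\r\nxyz", *eolm ends up pointing at the '\r' (offset 3)
 * and *eol at the 'x' just past the '\n' (offset 5); when the buffer
 * holds no newline at all, both are set to `end`. */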
/*
* Callback function for string search
*
* @param id id in array of search strings from mime_config.cmds
* @param index index in array of search strings from mime_config.cmds
* @param data buffer passed in to search function
*
* @return response
* @retval 1 commands caller to stop searching
*/
static int search_str_found(void* id, void*, int index, void*, void*)
{
int search_id = (int)(uintptr_t)id;
mime_search_info.id = search_id;
mime_search_info.index = index;
mime_search_info.length = mime_current_search[search_id].name_len;
/* Returning non-zero stops search, which is okay since we only look for one at a time */
return 1;
}
void MimeSession::setup_decode(const char* data, int size, bool cnt_xf)
{
/* Check for Encoding Type */
if ( decode_conf && decode_conf->is_decoding_enabled())
{
if (decode_state == NULL)
{
decode_state = new MimeDecode(decode_conf);
}
if (decode_state != NULL)
{
decode_state->process_decode_type(data, size, cnt_xf);
state_flags |= MIME_FLAG_EMAIL_ATTACH;
}
}
}
/*
* Handle Headers - Data or Mime
*
* @param packet standard Packet structure
*
* @param i index into p->payload buffer to start looking at data
*
* @return i index into p->payload where we stopped looking at data
*/
const uint8_t* MimeSession::process_mime_header(const uint8_t* ptr,
const uint8_t* data_end_marker)
{
const uint8_t* eol = data_end_marker;
const uint8_t* eolm = eol;
const uint8_t* colon;
const uint8_t* content_type_ptr = NULL;
const uint8_t* cont_trans_enc = NULL;
const uint8_t* cont_disp = NULL;
int header_found;
const uint8_t* start_hdr;
start_hdr = ptr;
/* if we got a content-type in a previous packet and are
* folding, the boundary still needs to be checked for */
if (state_flags & MIME_FLAG_IN_CONTENT_TYPE)
content_type_ptr = ptr;
if (state_flags & MIME_FLAG_IN_CONT_TRANS_ENC)
cont_trans_enc = ptr;
if (state_flags & MIME_FLAG_IN_CONT_DISP)
cont_disp = ptr;
while (ptr < data_end_marker)
{
int header_name_len;
int max_header_name_len = 0;
get_mime_eol(ptr, data_end_marker, &eol, &eolm);
/* a line with only an end of line marker signifies the end of the header */
if (eolm == ptr)
{
/* reset global header state values */
state_flags &=
~(MIME_FLAG_FOLDING | MIME_FLAG_IN_CONTENT_TYPE | MIME_FLAG_DATA_HEADER_CONT
| MIME_FLAG_IN_CONT_TRANS_ENC );
data_state = STATE_DATA_BODY;
/* if no headers, treat as data */
if (ptr == start_hdr)
return eolm;
else
return eol;
}
/* if we're not folding, see if we should interpret line as a data line
* instead of a header line */
if (!(state_flags & (MIME_FLAG_FOLDING | MIME_FLAG_DATA_HEADER_CONT)))
{
char got_non_printable_in_header_name = 0;
/* if we're not folding and the first char is a space or
* colon, it's not a header */
if (isspace((int)*ptr) || *ptr == ':')
{
data_state = STATE_DATA_BODY;
return ptr;
}
/* look for header field colon - if we're not folding then we need
* to find a header which will be all printables (except colon)
* followed by a colon */
colon = ptr;
while ((colon < eolm) && (*colon != ':'))
{
if (((int)*colon < 33) || ((int)*colon > 126))
got_non_printable_in_header_name = 1;
colon++;
}
/* Check for Exim 4.32 exploit where number of chars before colon is greater than 64 */
header_name_len = colon - ptr;
if ((colon < eolm) && (header_name_len > MAX_HEADER_NAME_LEN))
{
max_header_name_len = header_name_len;
}
/* If the end of line marker and end of line are the same, assume
* header was truncated, so stay in data header state */
if ((eolm != eol) &&
((colon == eolm) || got_non_printable_in_header_name))
{
/* no colon or got spaces in header name (won't be interpreted as a header)
* assume we're in the body */
state_flags &=
~(MIME_FLAG_FOLDING | MIME_FLAG_IN_CONTENT_TYPE | MIME_FLAG_DATA_HEADER_CONT
|MIME_FLAG_IN_CONT_TRANS_ENC);
data_state = STATE_DATA_BODY;
return ptr;
}
if (tolower((int)*ptr) == 'c')
{
mime_current_search = &mime_hdr_search[0];
header_found = mime_hdr_search_mpse->find(
(const char*)ptr, eolm - ptr, search_str_found, true);
/* Headers must start at beginning of line */
if ((header_found > 0) && (mime_search_info.index == 0))
{
switch (mime_search_info.id)
{
case HDR_CONTENT_TYPE:
content_type_ptr = ptr + mime_search_info.length;
state_flags |= MIME_FLAG_IN_CONTENT_TYPE;
break;
case HDR_CONT_TRANS_ENC:
cont_trans_enc = ptr + mime_search_info.length;
state_flags |= MIME_FLAG_IN_CONT_TRANS_ENC;
break;
case HDR_CONT_DISP:
cont_disp = ptr + mime_search_info.length;
state_flags |= MIME_FLAG_IN_CONT_DISP;
break;
default:
break;
}
}
}
else if (tolower((int)*ptr) == 'e')
{
if ((eolm - ptr) >= 9)
{
if (strncasecmp((const char*)ptr, "Encoding:", 9) == 0)
{
cont_trans_enc = ptr + 9;
state_flags |= MIME_FLAG_IN_CONT_TRANS_ENC;
}
}
}
}
else
{
state_flags &= ~MIME_FLAG_DATA_HEADER_CONT;
}
int ret = handle_header_line(ptr, eol, max_header_name_len);
if (ret < 0)
return NULL;
else if (ret > 0)
{
/* assume we guessed wrong and are in the body */
data_state = STATE_DATA_BODY;
state_flags &=
~(MIME_FLAG_FOLDING | MIME_FLAG_IN_CONTENT_TYPE | MIME_FLAG_DATA_HEADER_CONT
| MIME_FLAG_IN_CONT_TRANS_ENC | MIME_FLAG_IN_CONT_DISP);
return ptr;
}
/* check for folding
* if char on next line is a space and not \n or \r\n, we are folding */
if ((eol < data_end_marker) && isspace((int)eol[0]) && (eol[0] != '\n'))
{
if ((eol < (data_end_marker - 1)) && (eol[0] != '\r') && (eol[1] != '\n'))
{
state_flags |= MIME_FLAG_FOLDING;
}
else
{
state_flags &= ~MIME_FLAG_FOLDING;
}
}
else if (eol != eolm)
{
state_flags &= ~MIME_FLAG_FOLDING;
}
/* check if we're in a content-type header and not folding. if so we have the whole
* header line/lines for content-type - see if we got a multipart with boundary
* we don't check each folded line, but wait until we have the complete header
* because boundary=BOUNDARY can be split across multiple folded lines before
* or after the '=' */
if ((state_flags &
(MIME_FLAG_IN_CONTENT_TYPE | MIME_FLAG_FOLDING)) == MIME_FLAG_IN_CONTENT_TYPE)
{
if ((data_state == STATE_MIME_HEADER) && !(state_flags &
MIME_FLAG_EMAIL_ATTACH))
{
setup_decode((const char*)content_type_ptr, (eolm - content_type_ptr), false);
}
state_flags &= ~MIME_FLAG_IN_CONTENT_TYPE;
content_type_ptr = NULL;
}
else if ((state_flags &
(MIME_FLAG_IN_CONT_TRANS_ENC | MIME_FLAG_FOLDING)) == MIME_FLAG_IN_CONT_TRANS_ENC)
{
setup_decode((const char*)cont_trans_enc, (eolm - cont_trans_enc), true);
state_flags &= ~MIME_FLAG_IN_CONT_TRANS_ENC;
cont_trans_enc = NULL;
}
else if (((state_flags &
(MIME_FLAG_IN_CONT_DISP | MIME_FLAG_FOLDING)) == MIME_FLAG_IN_CONT_DISP) && cont_disp)
{
bool disp_cont = (state_flags & MIME_FLAG_IN_CONT_DISP_CONT) ? true : false;
if (log_config->log_filename && log_state )
{
log_state->log_file_name(cont_disp, eolm - cont_disp, &disp_cont);
}
if (disp_cont)
{
state_flags |= MIME_FLAG_IN_CONT_DISP_CONT;
}
else
{
state_flags &= ~MIME_FLAG_IN_CONT_DISP;
state_flags &= ~MIME_FLAG_IN_CONT_DISP_CONT;
}
cont_disp = NULL;
}
data_state = STATE_DATA_HEADER;
ptr = eol;
if (ptr == data_end_marker)
state_flags |= MIME_FLAG_DATA_HEADER_CONT;
}
return ptr;
}
/* Get the end of data body (excluding boundary)*/
static const uint8_t* GetDataEnd(const uint8_t* data_start,
const uint8_t* data_end_marker)
{
/* '\r\n' + '--' + MIME boundary string */
const int Max_Search = 4 + MAX_MIME_BOUNDARY_LEN;
uint8_t* start;
/* Exclude 2 bytes because either "\r\n" or "--" sits at the end */
uint8_t* end = (uint8_t*)data_end_marker - 2;
/*Search for the start of boundary, should be less than boundary length*/
if (end > data_start + Max_Search)
start = end - Max_Search;
else
start = (uint8_t*)data_start;
while (end > start)
{
if (*(--end) != '\n')
continue;
if ((*(end+1) == '-') && (*(end+2) == '-'))
{
if ((end > start) && (*(end-1) == '\r'))
return (end - 1);
else
return end;
}
break;
}
return data_end_marker;
}
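/* Behaviour sketch (example data, not from the original source): for a
 * body ending in "attachment data\r\n--boundary", the backward scan
 * finds the '\n' followed by "--" and returns a pointer to the
 * preceding '\r', so the CRLF before the boundary is excluded from the
 * attachment data. */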
/*
* Handle DATA_BODY state
* @param packet standard Packet structure
* @param i index into p->payload buffer to start looking at data
* @return i index into p->payload where we stopped looking at data
*/
const uint8_t* MimeSession::process_mime_body(const uint8_t* ptr,
const uint8_t* data_end, bool is_data_end)
{
if (state_flags & MIME_FLAG_EMAIL_ATTACH)
{
const uint8_t* attach_start = ptr;
const uint8_t* attach_end;
if (is_data_end )
{
attach_end = GetDataEnd(ptr, data_end);
}
else
{
attach_end = data_end;
}
if (( attach_start < attach_end ) && decode_state)
{
if (decode_state->decode_data(attach_start, attach_end) == DECODE_FAIL )
{
decode_alert();
}
}
}
if (is_data_end)
{
data_state = STATE_MIME_HEADER;
state_flags &= ~MIME_FLAG_EMAIL_ATTACH;
}
return data_end;
}
/*
* Reset MIME session state
*/
void MimeSession::reset_mime_state()
{
data_state = STATE_DATA_INIT;
state_flags = 0;
if (decode_state)
decode_state->clear_decode_state();
}
const uint8_t* MimeSession::process_mime_data_paf(
Flow* flow, const uint8_t* start, const uint8_t* end, bool upload, FilePosition position)
{
bool done_data = is_end_of_data(flow);
/* if we've just entered the data state, check for a dot + end of line
* if found, no data */
if ( data_state == STATE_DATA_INIT )
{
if ((start < end) && (*start == '.'))
{
const uint8_t* eol = NULL;
const uint8_t* eolm = NULL;
get_mime_eol(start, end, &eol, &eolm);
/* this means we got a real end of line and not just end of payload
* and that the dot is only char on line */
if ((eolm != end) && (eolm == (start + 1)))
{
/* if we're normalizing and not ignoring data copy data end marker
* and dot to alt buffer */
if (normalize_data(start, end) < 0)
return NULL;
reset_mime_state();
return eol;
}
}
if (data_state == STATE_DATA_INIT)
data_state = STATE_DATA_HEADER;
/* XXX A line starting with a '.' that isn't followed by a '.' is
* deleted (RFC 821 - 4.5.2. TRANSPARENCY). If data starts with
* '. text', i.e a dot followed by white space then text, some
* servers consider it data header and some data body.
* Postfix and Qmail will consider the start of data:
* . text\r\n
* . text\r\n
* to be part of the header and the effect will be that of a
* folded line with the '.' deleted. Exchange will put the same
* in the body which seems more reasonable. */
}
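/* Illustration of the dot handling above (example data): a body line
 * consisting of ".\r\n" alone terminates the DATA section, while a
 * dot-stuffed line such as "..text\r\n" has its leading '.' removed by
 * a transparent receiver, yielding ".text" (RFC 821, section 4.5.2). */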
// FIXIT-L why is this being set? we don't search file data until
// we set it again below after decoding. can it be deleted?
if ( decode_conf && (!decode_conf->is_ignore_data()))
set_file_data((uint8_t*)start, (end - start));
if (data_state == STATE_DATA_HEADER)
{
#ifdef DEBUG_MSGS
/* the enclosing check already guarantees STATE_DATA_HEADER here */
DEBUG_WRAP(DebugMessage(DEBUG_FILE, "DATA HEADER STATE ~~~~~~~~~~~~~~~~~~~~~~\n"); );
#endif
start = process_mime_header(start, end);
if (start == NULL)
return NULL;
}
if (normalize_data(start, end) < 0)
return NULL;
/* now we shouldn't have to worry about copying any data to the alt buffer
* only mime headers if we find them and only if we're ignoring data */
while ((start != NULL) && (start < end))
{
switch (data_state)
{
case STATE_MIME_HEADER:
DEBUG_WRAP(DebugMessage(DEBUG_FILE, "MIME HEADER STATE ~~~~~~~~~~~~~~~~~~~~~~\n"); );
start = process_mime_header(start, end);
break;
case STATE_DATA_BODY:
DEBUG_WRAP(DebugMessage(DEBUG_FILE, "DATA BODY STATE ~~~~~~~~~~~~~~~~~~~~~~~~\n"); );
start = process_mime_body(start, end, isFileEnd(position) );
break;
}
}
/* We have either reached the end of MIME header or end of MIME encoded data*/
if ((decode_state) != NULL)
{
DecodeConfig* conf = decode_conf;
uint8_t* buffer = NULL;
uint32_t buf_size = 0;
decode_state->get_decoded_data(&buffer, &buf_size);
if (conf)
{
int detection_size = decode_state->get_detection_depth();
set_file_data(buffer, (uint16_t)detection_size);
}
/*Process file type/file signature*/
FileFlows* file_flows = FileFlows::get_file_flows(flow);
if (file_flows && file_flows->file_process(buffer, buf_size, position, upload)
&& (isFileStart(position)) && log_state)
{
log_state->set_file_name_from_log(flow);
}
decode_state->reset_decoded_bytes();
}
<|fim▁hole|> reset_mime_state();
reset_state(flow);
}
return end;
}
// Main function for mime processing
// This should be called when mime data is available
const uint8_t* MimeSession::process_mime_data(Flow* flow, const uint8_t* start,
int data_size, bool upload, FilePosition position)
{
const uint8_t* attach_start = start;
const uint8_t* attach_end;
const uint8_t* data_end_marker = start + data_size;
if (position != SNORT_FILE_POSITION_UNKNOWN)
{
process_mime_data_paf(flow, attach_start, data_end_marker,
upload, position);
return data_end_marker;
}
initFilePosition(&position, get_file_processed_size(flow));
/* look for boundary */
while (start < data_end_marker)
{
/*Found the boundary, start processing data*/
if (process_mime_paf_data(&(mime_boundary), *start))
{
attach_end = start;
finalFilePosition(&position);
process_mime_data_paf(flow, attach_start, attach_end,
upload, position);
position = SNORT_FILE_START;
attach_start = start + 1;
}
start++;
}
if ((start == data_end_marker) && (attach_start < data_end_marker))
{
updateFilePosition(&position, get_file_processed_size(flow));
process_mime_data_paf(flow, attach_start, data_end_marker,
upload, position);
}
return data_end_marker;
}
int MimeSession::get_data_state()
{
return data_state;
}
void MimeSession::set_data_state(int state)
{
data_state = state;
}
MailLogState* MimeSession::get_log_state()
{
return log_state;
}
/*
* This is the initialization function for mime processing.
* This should be called when snort initializes
*/
void MimeSession::init(void)
{
const MimeToken* tmp;
/* Header search */
mime_hdr_search_mpse = new SearchTool();
if (mime_hdr_search_mpse == NULL)
{
// FIXIT-M make configurable or at least fall back to any
// available search engine
FatalError("Could not instantiate ac_bnfa search engine.\n");
}
for (tmp = &mime_hdrs[0]; tmp->name != NULL; tmp++)
{
mime_hdr_search[tmp->search_id].name = tmp->name;
mime_hdr_search[tmp->search_id].name_len = tmp->name_len;
mime_hdr_search_mpse->add(tmp->name, tmp->name_len, tmp->search_id);
}
mime_hdr_search_mpse->prep();
}
// Free anything that needs it before shutting down preprocessor
void MimeSession::exit(void)
{
if (mime_hdr_search_mpse != NULL)
delete mime_hdr_search_mpse;
}
MimeSession::MimeSession(DecodeConfig* dconf, MailLogConfig* lconf)
{
decode_conf = dconf;
log_config = lconf;
log_state = new MailLogState(log_config);
reset_mime_paf_state(&mime_boundary);
}
MimeSession::~MimeSession()
{
if ( decode_state )
delete(decode_state);
if ( log_state )
delete(log_state);
}<|fim▁end|> | /* if we got the data end reset state, otherwise we're probably still in the data
* to expect more data in the next packet */
if (done_data)
{ |
<|file_name|>models.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import collections
import itertools
import json
import os
import posixpath
import re
import time
from operator import attrgetter
from datetime import datetime
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.core.files.storage import default_storage as storage
from django.db import models, transaction
from django.dispatch import receiver
from django.db.models import Max, Q, signals as dbsignals
from django.utils.translation import trans_real, ugettext_lazy as _
import caching.base as caching
import commonware.log
from django_extensions.db.fields.json import JSONField
from django_statsd.clients import statsd
from jinja2.filters import do_dictsort
from olympia import amo
from olympia.amo.models import (
SlugField, OnChangeMixin, ModelBase, ManagerBase, manual_order)
from olympia.access import acl
from olympia.addons.utils import (
get_creatured_ids, get_featured_ids, generate_addon_guid)
from olympia.amo import helpers
from olympia.amo.decorators import use_master, write
from olympia.amo.utils import (
attach_trans_dict, cache_ns_key, chunked,
no_translation, send_mail, slugify, sorted_groupby, timer, to_language,
urlparams, find_language, AMOJSONEncoder)
from olympia.amo.urlresolvers import get_outgoing_url, reverse
from olympia.constants.categories import CATEGORIES, CATEGORIES_BY_ID
from olympia.files.models import File
from olympia.files.utils import (
extract_translations, resolve_i18n_message, parse_addon)
from olympia.reviews.models import Review
from olympia.tags.models import Tag
from olympia.translations.fields import (
LinkifiedField, PurifiedField, save_signal, TranslatedField, Translation)
from olympia.users.models import UserForeignKey, UserProfile
from olympia.versions.compare import version_int
from olympia.versions.models import inherit_nomination, Version
from . import signals
log = commonware.log.getLogger('z.addons')
def clean_slug(instance, slug_field='slug'):
"""Cleans a model instance slug.
This strives to be as generic as possible as it's used by Addons
and Collections, and maybe more in the future.
"""
slug = getattr(instance, slug_field, None) or instance.name
if not slug:
# Initialize the slug with what we have available: a name translation,
# or the id of the instance, or as a last resort the model name.
translations = Translation.objects.filter(id=instance.name_id)
if translations.exists():
slug = translations[0]
elif instance.id:
slug = str(instance.id)
else:
slug = instance.__class__.__name__
max_length = instance._meta.get_field_by_name(slug_field)[0].max_length
slug = slugify(slug)[:max_length]
if DeniedSlug.blocked(slug):
slug = slug[:max_length - 1] + '~'
# The following trick makes sure we are using a manager that returns
# all the objects, as otherwise we could have a slug clash on our hands.
# Eg with the "Addon.objects" manager, which doesn't list deleted addons,
# we could have a "clean" slug which is in fact already assigned to an
# already existing (deleted) addon. Also, make sure we use the base class.
manager = models.Manager()
manager.model = instance._meta.proxy_for_model or instance.__class__
qs = manager.values_list(slug_field, flat=True) # Get list of all slugs.
if instance.id:
qs = qs.exclude(pk=instance.id) # Can't clash with itself.
# We first need to make sure there's a clash, before trying to find a
# suffix that is available. Eg, if there's a "foo-bar" slug, "foo" is still
# available.
clash = qs.filter(**{slug_field: slug})
if clash.exists():
# Leave space for 99 clashes.
slug = slugify(slug)[:max_length - 2]
# There is a clash, so find a suffix that will make this slug unique.
lookup = {'%s__startswith' % slug_field: slug}
clashes = qs.filter(**lookup)
# Try numbers between 1 and the number of clashes + 1 (+ 1 because we
# start the range at 1, not 0):
# if we have two clashes "foo1" and "foo2", we need to try "foox"
# for x between 1 and 3 to be absolutely sure to find an available one.
for idx in range(1, len(clashes) + 2):
new = ('%s%s' % (slug, idx))[:max_length]
if new not in clashes:
slug = new
break
else:
# This could happen. The current implementation (using
# ``[:max_length - 2]``) only works for the first 100 clashes in the
# worst case (if the slug is equal to or longer than
# ``max_length - 2`` chars).
# After that, {verylongslug}-100 will be trimmed down to
# {verylongslug}-10, which is already assigned, but it's the last
# solution tested.
raise RuntimeError
setattr(instance, slug_field, slug)
return instance
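# Illustration of the clash handling above (hypothetical data): if the
# table already holds slugs "foo" and "foo1", an instance named "foo"
# ends up with the slug "foo2"; a denied slug such as "submit" first
# gets a trailing "~" ("submit~") before any clash check runs.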
class AddonQuerySet(caching.CachingQuerySet):
def id_or_slug(self, val):
"""Get add-ons by id or slug."""
if isinstance(val, basestring) and not val.isdigit():
return self.filter(slug=val)
return self.filter(id=val)
def enabled(self):
"""Get add-ons that haven't been disabled by their developer(s)."""
return self.filter(disabled_by_user=False)
def public(self):
"""Get public add-ons only"""
return self.filter(self.valid_q([amo.STATUS_PUBLIC]))
def valid(self):
"""Get valid, enabled add-ons only"""
return self.filter(self.valid_q(amo.VALID_ADDON_STATUSES))
def valid_and_disabled_and_pending(self):
"""
Get valid, pending, enabled and disabled add-ons.
Used to allow pending theme pages to still be viewed.
"""
statuses = (list(amo.VALID_ADDON_STATUSES) +
[amo.STATUS_DISABLED, amo.STATUS_PENDING])
return (self.filter(Q(status__in=statuses) | Q(disabled_by_user=True))
.exclude(type=amo.ADDON_EXTENSION,
_current_version__isnull=True))
def featured(self, app, lang=None, type=None):
"""
Filter for all featured add-ons for an application in all locales.
"""
ids = get_featured_ids(app, lang, type)
return manual_order(self.listed(app), ids, 'addons.id')
def listed(self, app, *status):
"""
Return add-ons that support a given ``app``, have a version with a file
matching ``status`` and are not disabled.
"""
if len(status) == 0:
status = [amo.STATUS_PUBLIC]
return self.filter(self.valid_q(status), appsupport__app=app.id)
def valid_q(self, status=None, prefix=''):
"""
Return a Q object that selects a valid Addon with the given statuses.
An add-on is valid if not disabled and has a current version.
``prefix`` can be used if you're not working with Addon directly and
need to hop across a join, e.g. ``prefix='addon__'`` in
CollectionAddon.
"""
if not status:
status = [amo.STATUS_PUBLIC]
def q(*args, **kw):
if prefix:
kw = dict((prefix + k, v) for k, v in kw.items())
return Q(*args, **kw)
return q(q(_current_version__isnull=False),
disabled_by_user=False, status__in=status)
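# Usage sketch (hypothetical call site) for the join-hopping described
# in the docstring above:
#   q = Addon.objects.get_queryset().valid_q(
#       [amo.STATUS_PUBLIC], prefix='addon__')
#   CollectionAddon.objects.filter(q)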
class AddonManager(ManagerBase):
def __init__(self, include_deleted=False):
# DO NOT change the default value of include_deleted unless you've read
# through the comment just above the Addon managers
# declaration/instantiation and understand the consequences.
ManagerBase.__init__(self)
self.include_deleted = include_deleted
def get_queryset(self):
qs = super(AddonManager, self).get_queryset()
qs = qs._clone(klass=AddonQuerySet)
if not self.include_deleted:
qs = qs.exclude(status=amo.STATUS_DELETED)
return qs.transform(Addon.transformer)
def id_or_slug(self, val):
"""Get add-ons by id or slug."""
return self.get_queryset().id_or_slug(val)
def enabled(self):
"""Get add-ons that haven't been disabled by their developer(s)."""
return self.get_queryset().enabled()
def public(self):
"""Get public add-ons only"""
return self.get_queryset().public()
def valid(self):
"""Get valid, enabled add-ons only"""
return self.get_queryset().valid()
def valid_and_disabled_and_pending(self):
"""
Get valid, pending, enabled and disabled add-ons.
Used to allow pending theme pages to still be viewed.
"""
return self.get_queryset().valid_and_disabled_and_pending()
def featured(self, app, lang=None, type=None):
"""
Filter for all featured add-ons for an application in all locales.
"""
return self.get_queryset().featured(app, lang=lang, type=type)
def listed(self, app, *status):
"""
Return add-ons that support a given ``app``, have a version with a file
matching ``status`` and are not disabled.
"""
return self.get_queryset().listed(app, *status)
class Addon(OnChangeMixin, ModelBase):
STATUS_CHOICES = amo.STATUS_CHOICES_ADDON
guid = models.CharField(max_length=255, unique=True, null=True)
slug = models.CharField(max_length=30, unique=True, null=True)
name = TranslatedField(default=None)
default_locale = models.CharField(max_length=10,
default=settings.LANGUAGE_CODE,
db_column='defaultlocale')
type = models.PositiveIntegerField(
choices=amo.ADDON_TYPE.items(), db_column='addontype_id', default=0)
status = models.PositiveIntegerField(
choices=STATUS_CHOICES.items(), db_index=True, default=0)
icon_type = models.CharField(max_length=25, blank=True,
db_column='icontype')
homepage = TranslatedField()
support_email = TranslatedField(db_column='supportemail')
support_url = TranslatedField(db_column='supporturl')
description = PurifiedField(short=False)
summary = LinkifiedField()
developer_comments = PurifiedField(db_column='developercomments')
eula = PurifiedField()
privacy_policy = PurifiedField(db_column='privacypolicy')
the_reason = PurifiedField()
the_future = PurifiedField()
average_rating = models.FloatField(max_length=255, default=0, null=True,
db_column='averagerating')
bayesian_rating = models.FloatField(default=0, db_index=True,
db_column='bayesianrating')
total_reviews = models.PositiveIntegerField(default=0,
db_column='totalreviews')
weekly_downloads = models.PositiveIntegerField(
default=0, db_column='weeklydownloads', db_index=True)
total_downloads = models.PositiveIntegerField(
default=0, db_column='totaldownloads')
hotness = models.FloatField(default=0, db_index=True)
average_daily_downloads = models.PositiveIntegerField(default=0)
average_daily_users = models.PositiveIntegerField(default=0)
last_updated = models.DateTimeField(
db_index=True, null=True,
help_text='Last time this add-on had a file/version update')
disabled_by_user = models.BooleanField(default=False, db_index=True,
db_column='inactive')
view_source = models.BooleanField(default=True, db_column='viewsource')
public_stats = models.BooleanField(default=False, db_column='publicstats')
prerelease = models.BooleanField(default=False)
admin_review = models.BooleanField(default=False, db_column='adminreview')
external_software = models.BooleanField(default=False,
db_column='externalsoftware')
dev_agreement = models.BooleanField(
default=False, help_text="Has the dev agreement been signed?")
auto_repackage = models.BooleanField(
default=True, help_text='Automatically upgrade jetpack add-on to a '
'new sdk version?')
target_locale = models.CharField(
max_length=255, db_index=True, blank=True, null=True,
help_text="For dictionaries and language packs")
locale_disambiguation = models.CharField(
max_length=255, blank=True, null=True,
help_text="For dictionaries and language packs")
wants_contributions = models.BooleanField(default=False)
paypal_id = models.CharField(max_length=255, blank=True)
charity = models.ForeignKey('Charity', null=True)
suggested_amount = models.DecimalField(
max_digits=9, decimal_places=2, blank=True,
null=True, help_text=_(u'Users have the option of contributing more '
'or less than this amount.'))
total_contributions = models.DecimalField(max_digits=9, decimal_places=2,
blank=True, null=True)
annoying = models.PositiveIntegerField(
choices=amo.CONTRIB_CHOICES, default=0,
help_text=_(u'Users will always be asked in the Add-ons'
u' Manager (Firefox 4 and above).'
u' Only applies to desktop.'))
enable_thankyou = models.BooleanField(
default=False, help_text='Should the thank you note be sent to '
'contributors?')
thankyou_note = TranslatedField()
authors = models.ManyToManyField('users.UserProfile', through='AddonUser',
related_name='addons')
categories = models.ManyToManyField('Category', through='AddonCategory')
dependencies = models.ManyToManyField('self', symmetrical=False,
through='AddonDependency',
related_name='addons')
_current_version = models.ForeignKey(Version, db_column='current_version',
related_name='+', null=True,
on_delete=models.SET_NULL)
whiteboard = models.TextField(blank=True)
is_experimental = models.BooleanField(default=False,
db_column='experimental')
# The order of those managers is very important:
# The first one discovered, if it has "use_for_related_fields = True"
# (which it has if it's inheriting from caching.base.CachingManager), will
# be used for relations like `version.addon`. We thus want one that is NOT
# filtered in any case, we don't want a 500 if the addon is not found
# (because it has the status amo.STATUS_DELETED for example).
# The CLASS of the first one discovered will also be used for "many to many
# relations" like `collection.addons`. In that case, we do want the
# filtered version by default, to make sure we're not displaying stuff by
# mistake. You thus want the CLASS of the first one to be filtered by
# default.
# We don't control the instantiation, but AddonManager sets include_deleted
# to False by default, so filtering is enabled by default. This is also why
# it's not repeated for 'objects' below.
unfiltered = AddonManager(include_deleted=True)
objects = AddonManager()
class Meta:
db_table = 'addons'
@staticmethod
def __new__(cls, *args, **kw):
try:
type_idx = Addon._meta._type_idx
except AttributeError:
type_idx = (idx for idx, f in enumerate(Addon._meta.fields)
if f.attname == 'type').next()
Addon._meta._type_idx = type_idx
return object.__new__(cls)
def __unicode__(self):
return u'%s: %s' % (self.id, self.name)
def __init__(self, *args, **kw):
super(Addon, self).__init__(*args, **kw)
if self.type == amo.ADDON_PERSONA:
self.STATUS_CHOICES = Persona.STATUS_CHOICES
def save(self, **kw):
self.clean_slug()
super(Addon, self).save(**kw)
@classmethod
def search_public(cls):
"""Legacy search method for public add-ons.
Note that typically, code using this method does a search in ES but then
will fetch the relevant objects from the database using Addon.objects,
so deleted addons won't be returned no matter what ES returns. See
amo.search.ES and amo.search.ObjectSearchResults for more details.
In new code, use elasticsearch-dsl instead.
"""
return cls.search().filter(
is_disabled=False,
status__in=amo.REVIEWED_STATUSES,
current_version__exists=True)
@use_master
def clean_slug(self, slug_field='slug'):
if self.status == amo.STATUS_DELETED:
return
clean_slug(self, slug_field)
def is_soft_deleteable(self):
return self.status or Version.unfiltered.filter(addon=self).exists()
@transaction.atomic
def delete(self, msg='', reason=''):
# To avoid a circular import.
from . import tasks
# Check for the soft deletion path. An add-on is hard-deleted only if
# its status is 0 (STATUS_INCOMPLETE) and it has no versions.
soft_deletion = self.is_soft_deleteable()
if soft_deletion and self.status == amo.STATUS_DELETED:
# We're already done.
return
id = self.id
# Fetch previews before deleting the addon instance, so that we can
# pass the list of files to delete to the delete_preview_files task
# after the addon is deleted.
previews = list(Preview.objects.filter(addon__id=id)
.values_list('id', flat=True))
if soft_deletion:
# /!\ If we ever stop using soft deletion, and remove this code, we
# need to make sure that the logs created below aren't cascade
# deleted!
log.debug('Deleting add-on: %s' % self.id)
to = [settings.FLIGTAR]
user = amo.get_user()
# Don't localize email to admins, use 'en-US' always.
with no_translation():
# The types are lazy translated in apps/constants/base.py.
atype = amo.ADDON_TYPE.get(self.type).upper()
context = {
'atype': atype,
'authors': [u.email for u in self.authors.all()],
'adu': self.average_daily_users,
'guid': self.guid,
'id': self.id,
'msg': msg,
'reason': reason,
'name': self.name,
'slug': self.slug,
'total_downloads': self.total_downloads,
'url': helpers.absolutify(self.get_url_path()),
'user_str': ("%s, %s (%s)" % (user.display_name or
user.username, user.email,
user.id) if user else "Unknown"),
}
email_msg = u"""
The following %(atype)s was deleted.
%(atype)s: %(name)s
URL: %(url)s
DELETED BY: %(user_str)s
ID: %(id)s
GUID: %(guid)s
AUTHORS: %(authors)s
TOTAL DOWNLOADS: %(total_downloads)s
AVERAGE DAILY USERS: %(adu)s
NOTES: %(msg)s
REASON GIVEN BY USER FOR DELETION: %(reason)s
""" % context
log.debug('Sending delete email for %(atype)s %(id)s' % context)
subject = 'Deleting %(atype)s %(slug)s (%(id)d)' % context
# Update or NULL out various fields.
models.signals.pre_delete.send(sender=Addon, instance=self)
self._reviews.all().delete()
# The last parameter is needed to automagically create an AddonLog.
amo.log(amo.LOG.DELETE_ADDON, self.pk, unicode(self.guid), self)
self.update(status=amo.STATUS_DELETED, slug=None,
_current_version=None, modified=datetime.now())
models.signals.post_delete.send(sender=Addon, instance=self)
send_mail(subject, email_msg, recipient_list=to)
else:
# Real deletion path.
super(Addon, self).delete()
for preview in previews:
tasks.delete_preview_files.delay(preview)
return True
@classmethod
def initialize_addon_from_upload(cls, data, upload, channel):
fields = cls._meta.get_all_field_names()
guid = data.get('guid')
old_guid_addon = None
if guid: # It's an extension.
# Reclaim GUID from deleted add-on.
try:
old_guid_addon = Addon.unfiltered.get(guid=guid)
old_guid_addon.update(guid=None)
except ObjectDoesNotExist:
pass
generate_guid = (
not data.get('guid', None) and
data.get('is_webextension', False)
)
if generate_guid:
data['guid'] = guid = generate_addon_guid()
data = cls.resolve_webext_translations(data, upload)
addon = Addon(**dict((k, v) for k, v in data.items() if k in fields))
addon.status = amo.STATUS_NULL
locale_is_set = (addon.default_locale and
addon.default_locale in (
settings.AMO_LANGUAGES +
settings.HIDDEN_LANGUAGES) and
data.get('default_locale') == addon.default_locale)
if not locale_is_set:
addon.default_locale = to_language(trans_real.get_language())
addon.save()
if old_guid_addon:
old_guid_addon.update(guid='guid-reused-by-pk-{}'.format(addon.pk))
old_guid_addon.save()
return addon
@classmethod
def create_addon_from_upload_data(cls, data, upload, channel, user=None,
**kwargs):
addon = cls.initialize_addon_from_upload(data, upload, channel,
**kwargs)
AddonUser(addon=addon, user=user).save()
return addon
@classmethod
def from_upload(cls, upload, platforms, source=None,
channel=amo.RELEASE_CHANNEL_LISTED, parsed_data=None):
if not parsed_data:
parsed_data = parse_addon(upload)
addon = cls.initialize_addon_from_upload(parsed_data, upload, channel)
if upload.validation_timeout:
addon.update(admin_review=True)
Version.from_upload(upload, addon, platforms, source=source,
channel=channel)
amo.log(amo.LOG.CREATE_ADDON, addon)
log.debug('New addon %r from %r' % (addon, upload))
return addon
@classmethod
def resolve_webext_translations(cls, data, upload):
"""Resolve all possible translations from an add-on.
This returns a modified `data` dictionary with the proper
translations filled in.
"""
default_locale = find_language(data.get('default_locale'))
if not data.get('is_webextension') or not default_locale:
# Don't change anything if we don't meet the requirements
return data
fields = ('name', 'homepage', 'summary')
messages = extract_translations(upload)
for field in fields:
data[field] = {
locale: resolve_i18n_message(
data[field],
locale=locale,
default_locale=default_locale,
messages=messages)
for locale in messages
}
return data
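# Sketch of the resolution above (hypothetical manifest data): with
# default_locale 'en-US', a name of '__MSG_extName__' and extracted
# messages for 'en-US' and 'de', data['name'] would become a per-locale
# dict such as {'en-US': 'My Add-on', 'de': 'Mein Add-on'}.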
def get_url_path(self, more=False, add_prefix=True):
if not self.current_version:
return ''
# If more=True you get the link to the ajax'd middle chunk of the
# detail page.
view = 'addons.detail_more' if more else 'addons.detail'
return reverse(view, args=[self.slug], add_prefix=add_prefix)
def get_dev_url(self, action='edit', args=None, prefix_only=False):
args = args or []
prefix = 'devhub'
type_ = 'themes' if self.type == amo.ADDON_PERSONA else 'addons'
if not prefix_only:
prefix += '.%s' % type_
view_name = '{prefix}.{action}'.format(prefix=prefix,
action=action)
return reverse(view_name, args=[self.slug] + args)
def get_detail_url(self, action='detail', args=None):
if args is None:
args = []
return reverse('addons.%s' % action, args=[self.slug] + args)
def meet_the_dev_url(self):
return reverse('addons.meet', args=[self.slug])
@property
def reviews_url(self):
return helpers.url('addons.reviews.list', self.slug)
def get_ratings_url(self, action='list', args=None, add_prefix=True):
return reverse('ratings.themes.%s' % action,
args=[self.slug] + (args or []),
add_prefix=add_prefix)
@classmethod
def get_type_url(cls, type):
try:
type = amo.ADDON_SLUGS[type]
except KeyError:
return None
return reverse('browse.%s' % type)
def type_url(self):
"""The url for this add-on's type."""
return Addon.get_type_url(self.type)
def share_url(self):
return reverse('addons.share', args=[self.slug])
@amo.cached_property(writable=True)
def listed_authors(self):
return UserProfile.objects.filter(
addons=self,
addonuser__listed=True).order_by('addonuser__position')
@classmethod
def get_fallback(cls):
return cls._meta.get_field('default_locale')
@property
def reviews(self):
return Review.objects.filter(addon=self, reply_to=None)
def get_category(self, app_id):
categories = self.app_categories.get(amo.APP_IDS.get(app_id))
return categories[0] if categories else None
def language_ascii(self):
lang = trans_real.to_language(self.default_locale)
return settings.LANGUAGES.get(lang)
@property
def valid_file_statuses(self):
if self.status == amo.STATUS_PUBLIC:
return [amo.STATUS_PUBLIC]
return amo.VALID_FILE_STATUSES
def find_latest_public_listed_version(self):
"""Retrieve the latest public listed version of an addon.
If the add-on is not public, it can return a listed version awaiting
review (since non-public add-ons should not have public versions)."""
if self.type == amo.ADDON_PERSONA:
return
try:
statuses = self.valid_file_statuses
status_list = ','.join(map(str, statuses))
fltr = {
'channel': amo.RELEASE_CHANNEL_LISTED,
'files__status__in': statuses
}
return self.versions.no_cache().filter(**fltr).extra(
where=["""
NOT EXISTS (
SELECT 1 FROM files AS f2
WHERE f2.version_id = versions.id AND
f2.status NOT IN (%s))
""" % status_list])[0]
except (IndexError, Version.DoesNotExist):
return None
def find_latest_version(self, channel, ignore=None):
"""Retrieve the latest non-disabled version of an add-on for the
specified channel. If channel is None, versions from either channel are considered.
Accepts an optional ignore argument to ignore a specific version."""
# If the add-on is deleted or hasn't been saved yet, it should not
# have a latest version.
if not self.id or self.status == amo.STATUS_DELETED:
return None
# We can't use .exclude(files__status=STATUS_DISABLED) because this
# excludes a version if any of the files are disabled, and there may
# be files we do want to include. Having a single beta file /does/
# mean we want the whole version disqualified though.
statuses_without_disabled = (
set(amo.STATUS_CHOICES_FILE.keys()) -
{amo.STATUS_DISABLED, amo.STATUS_BETA})
try:
latest_qs = (
Version.objects.filter(addon=self)
.exclude(files__status=amo.STATUS_BETA)
.filter(files__status__in=statuses_without_disabled))
if ignore is not None:
latest_qs = latest_qs.exclude(pk=ignore.pk)
if channel is not None:
latest_qs = latest_qs.filter(channel=channel)
latest = latest_qs.latest()
latest.addon = self
except Version.DoesNotExist:
latest = None
return latest
def find_latest_version_including_rejected(self, channel):
"""Similar to latest_version but includes rejected versions. Used so
we correctly attach review content to the last version reviewed. If
channel is None, versions from either channel are considered."""
try:
latest_qs = self.versions.exclude(files__status=amo.STATUS_BETA)
if channel is not None:
latest_qs = latest_qs.filter(channel=channel)
latest = latest_qs.latest()
except Version.DoesNotExist:
latest = None
return latest
@write
def update_version(self, ignore=None, _signal=True):
"""
Update the current_version field on this add-on if necessary.
Returns True if we updated the current_version field.
The optional ``ignore`` parameter, if present, is a version
to not consider as part of the update, since it may be in the
process of being deleted.
Pass ``_signal=False`` if you want no signals fired at all.
"""
if self.is_persona():
# Themes should only have a single version. So, if there is no
# current version set, we just need to copy over the latest version
# to current_version and we should never have to set it again.
if not self._current_version:
latest_version = self.find_latest_version(None)
if latest_version:
self.update(_current_version=latest_version, _signal=False)
return True
return False
new_current_version = self.find_latest_public_listed_version()
updated = {}
send_signal = False
if self._current_version != new_current_version:
updated['_current_version'] = new_current_version
send_signal = True
# update_version can be called by a post_delete signal (such
# as File's) when deleting a version. If so, we should avoid putting
# that version-being-deleted in any fields.
if ignore is not None:
updated = {k: v for k, v in updated.iteritems() if v != ignore}
if updated:
diff = [self._current_version, new_current_version]
# Pass along _signal to the .update() to prevent it from firing
# signals if we don't want them.
updated['_signal'] = _signal
try:
self.update(**updated)
if send_signal and _signal:
signals.version_changed.send(sender=self)
log.info(u'Version changed from current: %s to %s '
u'for addon %s'
% tuple(diff + [self]))
except Exception, e:
log.error(u'Could not save version changes current: %s to %s '
u'for addon %s (%s)' %
tuple(diff + [self, e]))
return bool(updated)
def increment_theme_version_number(self):
"""Increment theme version number by 1."""
latest_version = self.find_latest_version(None)
version = latest_version or self.current_version
version.version = str(float(version.version) + 1)
# Set the current version.
self.update(_current_version=version.save())
def invalidate_d2c_versions(self):
"""Invalidates the cache of compatible versions.
Call this when there is an event that may change what compatible
versions are returned so they are recalculated.
"""
key = cache_ns_key('d2c-versions:%s' % self.id, increment=True)
log.info('Incrementing d2c-versions namespace for add-on [%s]: %s' % (
self.id, key))
@property
def current_version(self):
"""Return the latest public listed version of an addon
If the add-on is not public, it can return a listed version awaiting
review (since non-public add-ons should not have public versions).
If the add-on has not been created yet or is deleted, it returns None.
"""
if not self.id or self.status == amo.STATUS_DELETED:
return None
try:
return self._current_version
except ObjectDoesNotExist:
pass
return None
@amo.cached_property(writable=True)
def latest_unlisted_version(self):
"""Shortcut property for Addon.find_latest_version(
channel=RELEASE_CHANNEL_UNLISTED)."""
return self.find_latest_version(channel=amo.RELEASE_CHANNEL_UNLISTED)
@amo.cached_property
def binary(self):
"""Returns if the current version has binary files."""
version = self.current_version
if version:
return version.files.filter(binary=True).exists()
return False
@amo.cached_property
def binary_components(self):
"""Returns if the current version has files with binary_components."""
version = self.current_version
if version:
return version.files.filter(binary_components=True).exists()
return False
def get_icon_dir(self):
return os.path.join(helpers.user_media_path('addon_icons'),
'%s' % (self.id / 1000))
def get_icon_url(self, size, use_default=True):
"""
Returns the addon's icon url according to icon_type.
If it's a persona, it will return the icon_url of the associated
Persona instance.
If it's a theme and there is no icon set, it will return the default
theme icon.
If it's something else, it will return the default add-on icon, unless
use_default is False, in which case it will return None.
"""
icon_type_split = []
if self.icon_type:
icon_type_split = self.icon_type.split('/')
# Get the closest allowed size without going over
if (size not in amo.ADDON_ICON_SIZES and
size >= amo.ADDON_ICON_SIZES[0]):
size = [s for s in amo.ADDON_ICON_SIZES if s < size][-1]
elif size < amo.ADDON_ICON_SIZES[0]:
size = amo.ADDON_ICON_SIZES[0]
# Figure out what to return for an image URL
if self.type == amo.ADDON_PERSONA:
return self.persona.icon_url
if not self.icon_type:
if self.type == amo.ADDON_THEME:
icon = amo.ADDON_ICONS[amo.ADDON_THEME]
return "%simg/icons/%s" % (settings.STATIC_URL, icon)
else:
if not use_default:
return None
return self.get_default_icon_url(size)
elif icon_type_split[0] == 'icon':
return '{0}img/addon-icons/{1}-{2}.png'.format(
settings.STATIC_URL,
icon_type_split[1],
size
)
else:
# [1] is the whole ID, [2] is the directory
split_id = re.match(r'((\d*?)\d{1,3})$', str(self.id))
modified = int(time.mktime(self.modified.timetuple()))
path = '/'.join([
split_id.group(2) or '0',
'{0}-{1}.png?modified={2}'.format(self.id, size, modified),
])
return helpers.user_media_url('addon_icons') + path
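# Example of the path split above: for id 12345 the regex captures
# group(2) == '12', so a 32px icon lands under
# '12/12345-32.png?modified=...'; ids below 1000 fall back to the '0'
# directory.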
def get_default_icon_url(self, size):
return '{0}img/addon-icons/{1}-{2}.png'.format(
settings.STATIC_URL, 'default', size
)
@write
def update_status(self, ignore_version=None):
self.reload()
if (self.status in [amo.STATUS_NULL, amo.STATUS_DELETED] or
self.is_disabled or self.is_persona()):
self.update_version(ignore=ignore_version)
return
versions = self.versions.filter(channel=amo.RELEASE_CHANNEL_LISTED)
status = None
if not versions.exists():
status = amo.STATUS_NULL
reason = 'no listed versions'
elif not versions.filter(
files__status__in=amo.VALID_FILE_STATUSES).exists():
status = amo.STATUS_NULL
reason = 'no listed version with valid file'
elif (self.status == amo.STATUS_PUBLIC and
not versions.filter(files__status=amo.STATUS_PUBLIC).exists()):
if versions.filter(
files__status=amo.STATUS_AWAITING_REVIEW).exists():
status = amo.STATUS_NOMINATED
reason = 'only an unreviewed file'
else:
status = amo.STATUS_NULL
reason = 'no reviewed files'
elif self.status == amo.STATUS_PUBLIC:
latest_version = self.find_latest_version(
channel=amo.RELEASE_CHANNEL_LISTED)
if (latest_version and latest_version.has_files and
(latest_version.all_files[0].status ==
amo.STATUS_AWAITING_REVIEW)):
# Addon is public, but its latest file is not (it's the case on
# a new file upload). So, call update, to trigger watch_status,
# which takes care of setting nomination time when needed.
status = self.status
reason = 'triggering watch_status'
if status is not None:
log.info('Changing add-on status [%s]: %s => %s (%s).'
% (self.id, self.status, status, reason))
self.update(status=status)
amo.log(amo.LOG.CHANGE_STATUS, self.get_status_display(), self)
self.update_version(ignore=ignore_version)
@staticmethod
def attach_related_versions(addons, addon_dict=None):
if addon_dict is None:
addon_dict = dict((a.id, a) for a in addons)
all_ids = set(filter(None, (a._current_version_id for a in addons)))
versions = list(Version.objects.filter(id__in=all_ids).order_by())
for version in versions:
try:
addon = addon_dict[version.addon_id]
except KeyError:
log.debug('Version %s has an invalid add-on id.' % version.id)
continue
if addon._current_version_id == version.id:
addon._current_version = version
version.addon = addon
@staticmethod
def attach_listed_authors(addons, addon_dict=None):
if addon_dict is None:
addon_dict = dict((a.id, a) for a in addons)
qs = (UserProfile.objects.no_cache()
.filter(addons__in=addons, addonuser__listed=True)
.extra(select={'addon_id': 'addons_users.addon_id',
'position': 'addons_users.position'}))
qs = sorted(qs, key=lambda u: (u.addon_id, u.position))
for addon_id, users in itertools.groupby(qs, key=lambda u: u.addon_id):
addon_dict[addon_id].listed_authors = list(users)
# FIXME: set listed_authors to empty list on addons without listed
# authors.
@staticmethod
def attach_previews(addons, addon_dict=None, no_transforms=False):
if addon_dict is None:
addon_dict = dict((a.id, a) for a in addons)
qs = Preview.objects.filter(addon__in=addons,
position__gte=0).order_by()
if no_transforms:
qs = qs.no_transforms()
qs = sorted(qs, key=lambda x: (x.addon_id, x.position, x.created))
for addon, previews in itertools.groupby(qs, lambda x: x.addon_id):
addon_dict[addon].all_previews = list(previews)
# FIXME: set all_previews to empty list on addons without previews.
@staticmethod
def attach_static_categories(addons, addon_dict=None):
if addon_dict is None:
addon_dict = dict((a.id, a) for a in addons)
qs = AddonCategory.objects.values_list(
'addon', 'category').filter(addon__in=addon_dict)
qs = sorted(qs, key=lambda x: (x[0], x[1]))
for addon_id, cats_iter in itertools.groupby(qs, key=lambda x: x[0]):
# The second value of each tuple in cats_iter are the category ids
# we want.
addon_dict[addon_id].category_ids = [c[1] for c in cats_iter]
addon_dict[addon_id].all_categories = [
CATEGORIES_BY_ID[cat_id] for cat_id
in addon_dict[addon_id].category_ids
if cat_id in CATEGORIES_BY_ID]
@staticmethod
@timer
def transformer(addons):
if not addons:
return
addon_dict = {a.id: a for a in addons}
# Attach categories. This needs to be done before separating addons
# from personas, because Personas need categories for the theme_data
# JSON dump, rest of the add-ons need the first category to be
# displayed in detail page / API.
Addon.attach_static_categories(addons, addon_dict=addon_dict)
personas = [a for a in addons if a.type == amo.ADDON_PERSONA]
addons = [a for a in addons if a.type != amo.ADDON_PERSONA]
# Set _current_version.
Addon.attach_related_versions(addons, addon_dict=addon_dict)
# Attach listed authors.
Addon.attach_listed_authors(addons, addon_dict=addon_dict)
# Persona-specific stuff
for persona in Persona.objects.no_cache().filter(addon__in=personas):
addon = addon_dict[persona.addon_id]
addon.persona = persona
addon.weekly_downloads = persona.popularity
# Attach previews.
Addon.attach_previews(addons, addon_dict=addon_dict)
return addon_dict
@property
def show_beta(self):
return self.status == amo.STATUS_PUBLIC and self.current_beta_version
def show_adu(self):
return self.type != amo.ADDON_SEARCH
@amo.cached_property(writable=True)
def current_beta_version(self):
"""Retrieves the latest version of an addon, in the beta channel."""
versions = self.versions.filter(files__status=amo.STATUS_BETA)[:1]
if versions:
return versions[0]
@property
def icon_url(self):
return self.get_icon_url(32)
def authors_other_addons(self, app=None):
"""
Return other addons by the author(s) of this addon,
optionally takes an app.
"""
if app:
qs = Addon.objects.listed(app)
else:
qs = Addon.objects.valid()
return (qs.exclude(id=self.id)
.filter(addonuser__listed=True,
authors__in=self.listed_authors)
.distinct())
@property
def contribution_url(self, lang=settings.LANGUAGE_CODE,
app=settings.DEFAULT_APP):
return reverse('addons.contribute', args=[self.slug])
@property
def thumbnail_url(self):
"""
Returns the addon's thumbnail url or a default.
"""
try:
preview = self.all_previews[0]
return preview.thumbnail_url
except IndexError:
return settings.STATIC_URL + '/img/icons/no-preview.png'<|fim▁hole|> def can_request_review(self):
"""Return whether an add-on can request a review or not."""
if (self.is_disabled or
self.status in (amo.STATUS_PUBLIC,
amo.STATUS_NOMINATED,
amo.STATUS_DELETED)):
return False
latest_version = self.find_latest_version_including_rejected(
channel=amo.RELEASE_CHANNEL_LISTED)
return latest_version is not None and latest_version.files.exists()
def is_persona(self):
return self.type == amo.ADDON_PERSONA
@property
def is_disabled(self):
"""True if this Addon is disabled.
It could be disabled by an admin or disabled by the developer
"""
return self.status == amo.STATUS_DISABLED or self.disabled_by_user
@property
def is_deleted(self):
return self.status == amo.STATUS_DELETED
def is_unreviewed(self):
return self.status in amo.UNREVIEWED_ADDON_STATUSES
def is_public(self):
return self.status == amo.STATUS_PUBLIC and not self.disabled_by_user
def has_complete_metadata(self, has_listed_versions=None):
"""See get_required_metadata for has_listed_versions details."""
return all(self.get_required_metadata(
has_listed_versions=has_listed_versions))
def get_required_metadata(self, has_listed_versions=None):
"""If has_listed_versions is not specified this method will return the
current (required) metadata (truthy values if present) for this Addon.
If has_listed_versions is specified then the method will act as if
Addon.has_listed_versions() returns that value. Used to predict if the
addon will require extra metadata before a version is created."""
if has_listed_versions is None:
has_listed_versions = self.has_listed_versions()
if not has_listed_versions:
# Add-ons with only unlisted versions have no required metadata.
return []
latest_version = self.find_latest_version_including_rejected(
channel=amo.RELEASE_CHANNEL_LISTED)
return [
self.all_categories,
self.summary,
(latest_version and latest_version.license),
]
def should_redirect_to_submit_flow(self):
return (
self.status == amo.STATUS_NULL and
not self.has_complete_metadata() and
self.find_latest_version(channel=amo.RELEASE_CHANNEL_LISTED))
def is_pending(self):
return self.status == amo.STATUS_PENDING
def is_rejected(self):
return self.status == amo.STATUS_REJECTED
def can_be_deleted(self):
return not self.is_deleted
def has_listed_versions(self):
return self.versions.filter(
channel=amo.RELEASE_CHANNEL_LISTED).exists()
def has_unlisted_versions(self):
return self.versions.filter(
channel=amo.RELEASE_CHANNEL_UNLISTED).exists()
@classmethod
def featured_random(cls, app, lang):
return get_featured_ids(app, lang)
@property
def requires_restart(self):
"""Whether the add-on current version requires a browser restart to
work."""
return self.current_version and self.current_version.requires_restart
def is_featured(self, app, lang=None):
"""Is add-on globally featured for this app and language?"""
if app:
return self.id in get_featured_ids(app, lang)
def has_full_profile(self):
"""Is developer profile public (completed)?"""
return self.the_reason and self.the_future
def has_profile(self):
"""Is developer profile (partially or entirely) completed?"""
return self.the_reason or self.the_future
@amo.cached_property
def tags_partitioned_by_developer(self):
"""Returns a tuple of developer tags and user tags for this addon."""
tags = self.tags.not_denied()
if self.is_persona():
return [], tags
user_tags = tags.exclude(addon_tags__user__in=self.listed_authors)
dev_tags = tags.exclude(id__in=[t.id for t in user_tags])
return dev_tags, user_tags
@amo.cached_property(writable=True)
def compatible_apps(self):
"""Shortcut to get compatible apps for the current version."""
# Search providers and personas don't list their supported apps.
if self.type in amo.NO_COMPAT:
return dict((app, None) for app in
amo.APP_TYPE_SUPPORT[self.type])
if self.current_version:
return self.current_version.compatible_apps
else:
return {}
def accepts_compatible_apps(self):
"""True if this add-on lists compatible apps."""
return self.type not in amo.NO_COMPAT
def incompatible_latest_apps(self):
"""Returns a list of applications with which this add-on is
incompatible (based on the latest version of each app).
"""
return [app for app, ver in self.compatible_apps.items() if ver and
version_int(ver.max.version) < version_int(app.latest_version)]
def has_author(self, user, roles=None):
"""True if ``user`` is an author with any of the specified ``roles``.
``roles`` should be a list of valid roles (see amo.AUTHOR_ROLE_*). If
not specified, has_author will return true if the user has any role.
"""
if user is None or user.is_anonymous():
return False
if roles is None:
roles = dict(amo.AUTHOR_CHOICES).keys()
return AddonUser.objects.filter(addon=self, user=user,
role__in=roles).exists()
@property
def takes_contributions(self):
return (self.status == amo.STATUS_PUBLIC and
self.wants_contributions and
(self.paypal_id or self.charity_id))
@classmethod
def _last_updated_queries(cls):
"""
Get the queries used to calculate addon.last_updated.
"""
status_change = Max('versions__files__datestatuschanged')
public = (
Addon.objects.no_cache().filter(
status=amo.STATUS_PUBLIC,
versions__files__status=amo.STATUS_PUBLIC)
.exclude(type=amo.ADDON_PERSONA)
.values('id').annotate(last_updated=status_change))
stati = amo.VALID_ADDON_STATUSES
exp = (Addon.objects.no_cache().exclude(status__in=stati)
.filter(versions__files__status__in=amo.VALID_FILE_STATUSES)
.values('id')
.annotate(last_updated=Max('versions__files__created')))
personas = (Addon.objects.no_cache().filter(type=amo.ADDON_PERSONA)
.extra(select={'last_updated': 'created'}))
return dict(public=public, exp=exp, personas=personas)
@amo.cached_property(writable=True)
def all_categories(self):
return filter(
None, [cat.to_static_category() for cat in self.categories.all()])
@amo.cached_property(writable=True)
def all_previews(self):
return list(self.get_previews())
def get_previews(self):
"""Exclude promo graphics."""
return self.previews.exclude(position=-1)
@property
def app_categories(self):
app_cats = {}
categories = sorted_groupby(
sorted(self.all_categories, key=attrgetter('weight', 'name')),
key=lambda x: amo.APP_IDS.get(x.application))
for app, cats in categories:
app_cats[app] = list(cats)
return app_cats
def remove_locale(self, locale):
"""NULLify strings in this locale for the add-on and versions."""
for o in itertools.chain([self], self.versions.all()):
Translation.objects.remove_for(o, locale)
def get_localepicker(self):
"""For language packs, gets the contents of localepicker."""
if (self.type == amo.ADDON_LPAPP and
self.status == amo.STATUS_PUBLIC and
self.current_version):
files = (self.current_version.files
.filter(platform=amo.PLATFORM_ANDROID.id))
try:
return unicode(files[0].get_localepicker(), 'utf-8')
except IndexError:
pass
return ''
def can_review(self, user):
        return not (user and self.has_author(user))
@property
def all_dependencies(self):
"""Return all the (valid) add-ons this add-on depends on."""
return list(self.dependencies.valid().all()[:3])
def has_installed(self, user):
if not user or not isinstance(user, UserProfile):
return False
return self.installed.filter(user=user).exists()
def get_latest_file(self):
"""Get the latest file from the current version."""
cur = self.current_version
if cur:
res = cur.files.order_by('-created')
if res:
return res[0]
def in_escalation_queue(self):
return self.escalationqueue_set.exists()
def check_ownership(self, request, require_owner, require_author,
ignore_disabled, admin):
"""
Used by acl.check_ownership to see if request.user has permissions for
the addon.
"""
if require_author:
require_owner = False
ignore_disabled = True
admin = False
return acl.check_addon_ownership(request, self, admin=admin,
viewer=(not require_owner),
ignore_disabled=ignore_disabled)
@property
def feature_compatibility(self):
try:
feature_compatibility = self.addonfeaturecompatibility
except AddonFeatureCompatibility.DoesNotExist:
            # If it does not exist, return a blank one; no need to create it.
            # It's the caller's responsibility to create one when needed, to
            # avoid unexpected database writes.
feature_compatibility = AddonFeatureCompatibility()
return feature_compatibility
dbsignals.pre_save.connect(save_signal, sender=Addon,
dispatch_uid='addon_translations')
@receiver(signals.version_changed, dispatch_uid='version_changed')
def version_changed(sender, **kw):
from . import tasks
tasks.version_changed.delay(sender.id)
@receiver(dbsignals.post_save, sender=Addon,
dispatch_uid='addons.search.index')
def update_search_index(sender, instance, **kw):
from . import tasks
if not kw.get('raw'):
tasks.index_addons.delay([instance.id])
@Addon.on_change
def watch_status(old_attr=None, new_attr=None, instance=None,
sender=None, **kwargs):
"""
Set nomination date if the addon is new in queue or updating.
The nomination date cannot be reset, say, when a developer cancels
their request for review and re-requests review.
If a version is rejected after nomination, the developer has
to upload a new version.
"""
if old_attr is None:
old_attr = {}
if new_attr is None:
new_attr = {}
new_status = new_attr.get('status')
old_status = old_attr.get('status')
latest_version = instance.find_latest_version(
channel=amo.RELEASE_CHANNEL_LISTED)
if (new_status not in amo.VALID_ADDON_STATUSES or
not new_status or not latest_version):
return
if old_status not in amo.UNREVIEWED_ADDON_STATUSES:
# New: will (re)set nomination only if it's None.
latest_version.reset_nomination_time()
elif latest_version.has_files:
# Updating: inherit nomination from last nominated version.
# Calls `inherit_nomination` manually given that signals are
# deactivated to avoid circular calls.
inherit_nomination(None, latest_version)
@Addon.on_change
def watch_disabled(old_attr=None, new_attr=None, instance=None, sender=None,
**kwargs):
if old_attr is None:
old_attr = {}
if new_attr is None:
new_attr = {}
attrs = dict((k, v) for k, v in old_attr.items()
if k in ('disabled_by_user', 'status'))
if Addon(**attrs).is_disabled and not instance.is_disabled:
for f in File.objects.filter(version__addon=instance.id):
f.unhide_disabled_file()
if instance.is_disabled and not Addon(**attrs).is_disabled:
for f in File.objects.filter(version__addon=instance.id):
f.hide_disabled_file()
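    # In short (descriptive sketch, not in the original source): files are
    # unhidden on a disabled -> enabled transition and hidden on enabled ->
    # disabled, where the old state is reconstructed from the previous field
    # values via a throwaway Addon(**attrs).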
@Addon.on_change
def watch_developer_notes(old_attr=None, new_attr=None, instance=None,
sender=None, **kwargs):
if old_attr is None:
old_attr = {}
if new_attr is None:
new_attr = {}
whiteboard_changed = (
new_attr.get('whiteboard') and
old_attr.get('whiteboard') != new_attr.get('whiteboard'))
developer_comments_changed = (new_attr.get('_developer_comments_cache') and
old_attr.get('_developer_comments_cache') !=
new_attr.get('_developer_comments_cache'))
if whiteboard_changed or developer_comments_changed:
instance.versions.update(has_info_request=False)
def attach_translations(addons):
"""Put all translations into a translations dict."""
attach_trans_dict(Addon, addons)
def attach_tags(addons):
addon_dict = dict((a.id, a) for a in addons)
qs = (Tag.objects.not_denied().filter(addons__in=addon_dict)
.values_list('addons__id', 'tag_text'))
for addon, tags in sorted_groupby(qs, lambda x: x[0]):
addon_dict[addon].tag_list = [t[1] for t in tags]
class Persona(caching.CachingMixin, models.Model):
"""Personas-specific additions to the add-on model."""
STATUS_CHOICES = amo.STATUS_CHOICES_PERSONA
addon = models.OneToOneField(Addon, null=True)
persona_id = models.PositiveIntegerField(db_index=True)
# name: deprecated in favor of Addon model's name field
# description: deprecated, ditto
header = models.CharField(max_length=64, null=True)
footer = models.CharField(max_length=64, null=True)
accentcolor = models.CharField(max_length=10, null=True)
textcolor = models.CharField(max_length=10, null=True)
author = models.CharField(max_length=255, null=True)
display_username = models.CharField(max_length=255, null=True)
submit = models.DateTimeField(null=True)
approve = models.DateTimeField(null=True)
movers = models.FloatField(null=True, db_index=True)
popularity = models.IntegerField(null=False, default=0, db_index=True)
license = models.PositiveIntegerField(
choices=amo.PERSONA_LICENSES_CHOICES, null=True, blank=True)
# To spot duplicate submissions.
checksum = models.CharField(max_length=64, blank=True, default='')
dupe_persona = models.ForeignKey('self', null=True)
objects = caching.CachingManager()
class Meta:
db_table = 'personas'
def __unicode__(self):
return unicode(self.addon.name)
def is_new(self):
return self.persona_id == 0
def _image_url(self, filename):
host = helpers.user_media_url('addons')
image_url = posixpath.join(host, str(self.addon.id), filename or '')
# TODO: Bust the cache on the hash of the image contents or something.
if self.addon.modified is not None:
modified = int(time.mktime(self.addon.modified.timetuple()))
else:
modified = 0
return '%s?%s' % (image_url, modified)
def _image_path(self, filename):
return os.path.join(helpers.user_media_path('addons'),
str(self.addon.id), filename)
@amo.cached_property
def thumb_url(self):
"""
Handles deprecated GetPersonas URL.
In days of yore, preview.jpg used to be a separate image.
In modern days, we use the same image for big preview + thumb.
"""
if self.is_new():
return self._image_url('preview.png')
else:
return self._image_url('preview.jpg')
@amo.cached_property
def thumb_path(self):
"""
Handles deprecated GetPersonas path.
In days of yore, preview.jpg used to be a separate image.
In modern days, we use the same image for big preview + thumb.
"""
if self.is_new():
return self._image_path('preview.png')
else:
return self._image_path('preview.jpg')
@amo.cached_property
def icon_url(self):
"""URL to personas square preview."""
if self.is_new():
return self._image_url('icon.png')
else:
return self._image_url('preview_small.jpg')
@amo.cached_property
def icon_path(self):
"""Path to personas square preview."""
if self.is_new():
return self._image_path('icon.png')
else:
return self._image_path('preview_small.jpg')
@amo.cached_property
def preview_url(self):
"""URL to Persona's big, 680px, preview."""
if self.is_new():
return self._image_url('preview.png')
else:
return self._image_url('preview_large.jpg')
@amo.cached_property
def preview_path(self):
"""Path to Persona's big, 680px, preview."""
if self.is_new():
return self._image_path('preview.png')
else:
return self._image_path('preview_large.jpg')
@amo.cached_property
def header_url(self):
return self._image_url(self.header)
@amo.cached_property
def footer_url(self):
return self.footer and self._image_url(self.footer) or ''
@amo.cached_property
def header_path(self):
return self._image_path(self.header)
@amo.cached_property
def footer_path(self):
return self.footer and self._image_path(self.footer) or ''
@amo.cached_property
def update_url(self):
locale = settings.LANGUAGE_URL_MAP.get(trans_real.get_language())
return settings.NEW_PERSONAS_UPDATE_URL % {
'locale': locale or settings.LANGUAGE_CODE,
'id': self.addon.id
}
@amo.cached_property
def theme_data(self):
"""Theme JSON Data for Browser/extension preview."""
def hexcolor(color):
return '#%s' % color
addon = self.addon
return {
'id': unicode(self.addon.id), # Personas dislikes ints
'name': unicode(addon.name),
'accentcolor': hexcolor(self.accentcolor),
'textcolor': hexcolor(self.textcolor),
'category': (unicode(addon.all_categories[0].name) if
addon.all_categories else ''),
# TODO: Change this to be `addons_users.user.display_name`.
'author': self.display_username,
'description': unicode(addon.description),
'header': self.header_url,
'footer': self.footer_url or '',
'headerURL': self.header_url,
'footerURL': self.footer_url or '',
'previewURL': self.preview_url,
'iconURL': self.icon_url,
'updateURL': self.update_url,
'detailURL': helpers.absolutify(self.addon.get_url_path()),
'version': '1.0'
}
@property
def json_data(self):
"""Persona JSON Data for Browser/extension preview."""
return json.dumps(self.theme_data,
separators=(',', ':'), cls=AMOJSONEncoder)
def authors_other_addons(self, app=None):
"""
Return other addons by the author(s) of this addon,
optionally takes an app.
"""
qs = (Addon.objects.valid()
.exclude(id=self.addon.id)
.filter(type=amo.ADDON_PERSONA))
return (qs.filter(addonuser__listed=True,
authors__in=self.addon.listed_authors)
.distinct())
@amo.cached_property(writable=True)
def listed_authors(self):
return self.addon.listed_authors
class AddonCategory(caching.CachingMixin, models.Model):
addon = models.ForeignKey(Addon, on_delete=models.CASCADE)
category = models.ForeignKey('Category')
feature = models.BooleanField(default=False)
feature_locales = models.CharField(max_length=255, default='', null=True)
objects = caching.CachingManager()
class Meta:
db_table = 'addons_categories'
unique_together = ('addon', 'category')
@classmethod
def creatured_random(cls, category, lang):
return get_creatured_ids(category, lang)
class AddonUser(caching.CachingMixin, models.Model):
addon = models.ForeignKey(Addon, on_delete=models.CASCADE)
user = UserForeignKey()
role = models.SmallIntegerField(default=amo.AUTHOR_ROLE_OWNER,
choices=amo.AUTHOR_CHOICES)
listed = models.BooleanField(_(u'Listed'), default=True)
position = models.IntegerField(default=0)
objects = caching.CachingManager()
def __init__(self, *args, **kwargs):
super(AddonUser, self).__init__(*args, **kwargs)
self._original_role = self.role
self._original_user_id = self.user_id
class Meta:
db_table = 'addons_users'
class AddonDependency(models.Model):
addon = models.ForeignKey(Addon, related_name='addons_dependencies')
dependent_addon = models.ForeignKey(Addon, related_name='dependent_on')
class Meta:
db_table = 'addons_dependencies'
unique_together = ('addon', 'dependent_addon')
class AddonFeatureCompatibility(ModelBase):
addon = models.OneToOneField(
Addon, primary_key=True, on_delete=models.CASCADE)
e10s = models.PositiveSmallIntegerField(
choices=amo.E10S_COMPATIBILITY_CHOICES, default=amo.E10S_UNKNOWN)
def __unicode__(self):
return unicode(self.addon) if self.pk else u""
def get_e10s_classname(self):
return amo.E10S_COMPATIBILITY_CHOICES_API[self.e10s]
class DeniedGuid(ModelBase):
guid = models.CharField(max_length=255, unique=True)
comments = models.TextField(default='', blank=True)
class Meta:
db_table = 'denied_guids'
def __unicode__(self):
return self.guid
class Category(OnChangeMixin, ModelBase):
    # Old name translations. We now have constants translated via gettext, but
    # this is kept for backwards compatibility, for categories with a weird
    # type/application/slug combo that is not in the constants.
db_name = TranslatedField(db_column='name')
slug = SlugField(max_length=50,
help_text='Used in Category URLs.')
type = models.PositiveIntegerField(db_column='addontype_id',
choices=do_dictsort(amo.ADDON_TYPE))
application = models.PositiveIntegerField(choices=amo.APPS_CHOICES,
null=True, blank=True,
db_column='application_id')
count = models.IntegerField('Addon count', default=0)
weight = models.IntegerField(
default=0, help_text='Category weight used in sort ordering')
misc = models.BooleanField(default=False)
addons = models.ManyToManyField(Addon, through='AddonCategory')
class Meta:
db_table = 'categories'
verbose_name_plural = 'Categories'
@property
def name(self):
try:
value = CATEGORIES[self.application][self.type][self.slug].name
except KeyError:
# If we can't find the category in the constants dict, fall back
# to the db field.
value = self.db_name
return unicode(value)
def __unicode__(self):
return unicode(self.name)
def get_url_path(self):
try:
type = amo.ADDON_SLUGS[self.type]
except KeyError:
type = amo.ADDON_SLUGS[amo.ADDON_EXTENSION]
return reverse('browse.%s' % type, args=[self.slug])
def to_static_category(self):
"""Return the corresponding StaticCategory instance from a Category."""
try:
staticcategory = CATEGORIES[self.application][self.type][self.slug]
except KeyError:
staticcategory = None
return staticcategory
@classmethod
def from_static_category(cls, static_category):
"""Return a Category instance created from a StaticCategory.
Does not save it into the database. Useful in tests."""
return cls(**static_category.__dict__)
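    # Illustrative sketch (hypothetical app/type/slug): this mirrors the
    # CATEGORIES lookup used by the `name` property above, e.g. in a test:
    #
    #   static_cat = CATEGORIES[amo.FIREFOX.id][amo.ADDON_EXTENSION]['bookmarks']
    #   cat = Category.from_static_category(static_cat)  # unsaved instance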
dbsignals.pre_save.connect(save_signal, sender=Category,
dispatch_uid='category_translations')
class Preview(ModelBase):
addon = models.ForeignKey(Addon, related_name='previews')
caption = TranslatedField()
position = models.IntegerField(default=0)
sizes = JSONField(max_length=25, default={})
class Meta:
db_table = 'previews'
ordering = ('position', 'created')
def _image_url(self, url_template):
if self.modified is not None:
modified = int(time.mktime(self.modified.timetuple()))
else:
modified = 0
args = [self.id / 1000, self.id, modified]
return url_template % tuple(args)
def _image_path(self, url_template):
args = [self.id / 1000, self.id]
return url_template % tuple(args)
def as_dict(self, src=None):
d = {'full': urlparams(self.image_url, src=src),
'thumbnail': urlparams(self.thumbnail_url, src=src),
'caption': unicode(self.caption)}
return d
@property
def is_landscape(self):
size = self.image_size
if not size:
return False
return size[0] > size[1]
@property
def thumbnail_url(self):
template = (
helpers.user_media_url('previews') +
'thumbs/%s/%d.png?modified=%s')
return self._image_url(template)
@property
def image_url(self):
template = (
helpers.user_media_url('previews') +
'full/%s/%d.png?modified=%s')
return self._image_url(template)
@property
def thumbnail_path(self):
template = os.path.join(
helpers.user_media_path('previews'),
'thumbs',
'%s',
'%d.png'
)
return self._image_path(template)
@property
def image_path(self):
template = os.path.join(
helpers.user_media_path('previews'),
'full',
'%s',
'%d.png'
)
return self._image_path(template)
@property
def thumbnail_size(self):
return self.sizes.get('thumbnail', []) if self.sizes else []
@property
def image_size(self):
return self.sizes.get('image', []) if self.sizes else []
dbsignals.pre_save.connect(save_signal, sender=Preview,
dispatch_uid='preview_translations')
def delete_preview_files(sender, instance, **kw):
"""On delete of the Preview object from the database, unlink the image
and thumb on the file system """
for filename in [instance.image_path, instance.thumbnail_path]:
if storage.exists(filename):
log.info('Removing filename: %s for preview: %s'
% (filename, instance.pk))
storage.delete(filename)
models.signals.post_delete.connect(delete_preview_files,
sender=Preview,
dispatch_uid='delete_preview_files')
class AppSupport(ModelBase):
"""Cache to tell us if an add-on's current version supports an app."""
addon = models.ForeignKey(Addon, on_delete=models.CASCADE)
app = models.PositiveIntegerField(choices=amo.APPS_CHOICES,
db_column='app_id')
min = models.BigIntegerField("Minimum app version", null=True)
max = models.BigIntegerField("Maximum app version", null=True)
class Meta:
db_table = 'appsupport'
unique_together = ('addon', 'app')
class Charity(ModelBase):
name = models.CharField(max_length=255)
url = models.URLField()
paypal = models.CharField(max_length=255)
class Meta:
db_table = 'charities'
@property
def outgoing_url(self):
if self.pk == amo.FOUNDATION_ORG:
return self.url
return get_outgoing_url(unicode(self.url))
class DeniedSlug(ModelBase):
name = models.CharField(max_length=255, unique=True, default='')
class Meta:
db_table = 'addons_denied_slug'
def __unicode__(self):
return self.name
@classmethod
def blocked(cls, slug):
return slug.isdigit() or cls.objects.filter(name=slug).exists()
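    # Illustrative examples (hypothetical data): purely numeric slugs such as
    # '12345' are always blocked; any other slug is blocked only if a
    # DeniedSlug row with that name exists.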
class FrozenAddon(models.Model):
"""Add-ons in this table never get a hotness score."""
addon = models.ForeignKey(Addon)
class Meta:
db_table = 'frozen_addons'
def __unicode__(self):
return 'Frozen: %s' % self.addon_id
@receiver(dbsignals.post_save, sender=FrozenAddon)
def freezer(sender, instance, **kw):
# Adjust the hotness of the FrozenAddon.
if instance.addon_id:
Addon.objects.get(id=instance.addon_id).update(hotness=0)
class CompatOverride(ModelBase):
"""Helps manage compat info for add-ons not hosted on AMO."""
name = models.CharField(max_length=255, blank=True, null=True)
guid = models.CharField(max_length=255, unique=True)
addon = models.ForeignKey(Addon, blank=True, null=True,
help_text='Fill this out to link an override '
'to a hosted add-on')
class Meta:
db_table = 'compat_override'
unique_together = ('addon', 'guid')
def save(self, *args, **kw):
if not self.addon:
qs = Addon.objects.filter(guid=self.guid)
if qs:
self.addon = qs[0]
return super(CompatOverride, self).save(*args, **kw)
def __unicode__(self):
if self.addon:
return unicode(self.addon)
elif self.name:
return '%s (%s)' % (self.name, self.guid)
else:
return self.guid
def is_hosted(self):
"""Am I talking about an add-on on AMO?"""
return bool(self.addon_id)
@staticmethod
def transformer(overrides):
if not overrides:
return
id_map = dict((o.id, o) for o in overrides)
qs = CompatOverrideRange.objects.filter(compat__in=id_map)
for compat_id, ranges in sorted_groupby(qs, 'compat_id'):
id_map[compat_id].compat_ranges = list(ranges)
# May be filled in by a transformer for performance.
@amo.cached_property(writable=True)
def compat_ranges(self):
return list(self._compat_ranges.all())
def collapsed_ranges(self):
"""Collapse identical version ranges into one entity."""
Range = collections.namedtuple('Range', 'type min max apps')
AppRange = collections.namedtuple('AppRange', 'app min max')
rv = []
def sort_key(x):
return (x.min_version, x.max_version, x.type)
for key, compats in sorted_groupby(self.compat_ranges, key=sort_key):
compats = list(compats)
first = compats[0]
item = Range(first.override_type(), first.min_version,
first.max_version, [])
for compat in compats:
app = AppRange(amo.APPS_ALL[compat.app],
compat.min_app_version, compat.max_app_version)
item.apps.append(app)
rv.append(item)
return rv
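    # Illustrative sketch (hypothetical data): two CompatOverrideRange rows
    # sharing the same (min_version, max_version, type) but targeting
    # different applications collapse into a single Range whose .apps list
    # holds one AppRange (app, min_app_version, max_app_version) per app.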
OVERRIDE_TYPES = (
(0, 'Compatible (not supported)'),
(1, 'Incompatible'),
)
class CompatOverrideRange(ModelBase):
"""App compatibility for a certain version range of a RemoteAddon."""
compat = models.ForeignKey(CompatOverride, related_name='_compat_ranges')
type = models.SmallIntegerField(choices=OVERRIDE_TYPES, default=1)
min_version = models.CharField(
max_length=255, default='0',
help_text=u'If not "0", version is required to exist for the override'
u' to take effect.')
max_version = models.CharField(
max_length=255, default='*',
help_text=u'If not "*", version is required to exist for the override'
u' to take effect.')
app = models.PositiveIntegerField(choices=amo.APPS_CHOICES,
db_column='app_id')
min_app_version = models.CharField(max_length=255, default='0')
max_app_version = models.CharField(max_length=255, default='*')
class Meta:
db_table = 'compat_override_range'
def override_type(self):
"""This is what Firefox wants to see in the XML output."""
return {0: 'compatible', 1: 'incompatible'}[self.type]
class IncompatibleVersions(ModelBase):
"""
Denormalized table to join against for fast compat override filtering.
    This was created to be able to join against a specific version record,
    since a CompatOverrideRange can be wildcarded (e.g. 0 to *, or 1.0 to 1.*)
    and add-on versioning isn't consistent enough, compared to Firefox
    versioning, to trust `version_int` in all cases. Extra logic is therefore
    needed to decide when a particular version falls within the range of a
    compatibility override.
"""
version = models.ForeignKey(Version, related_name='+')
app = models.PositiveIntegerField(choices=amo.APPS_CHOICES,
db_column='app_id')
min_app_version = models.CharField(max_length=255, blank=True, default='0')
max_app_version = models.CharField(max_length=255, blank=True, default='*')
min_app_version_int = models.BigIntegerField(blank=True, null=True,
editable=False, db_index=True)
max_app_version_int = models.BigIntegerField(blank=True, null=True,
editable=False, db_index=True)
class Meta:
db_table = 'incompatible_versions'
def __unicode__(self):
return u'<IncompatibleVersion V:%s A:%s %s-%s>' % (
            self.version.id, self.app, self.min_app_version,
self.max_app_version)
def save(self, *args, **kw):
self.min_app_version_int = version_int(self.min_app_version)
self.max_app_version_int = version_int(self.max_app_version)
return super(IncompatibleVersions, self).save(*args, **kw)
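# Illustrative sketch (not from the original source; values are hypothetical):
# persisting the integer forms means a range check such as
#   min_app_version_int <= version_int('3.6.4') <= max_app_version_int
# can be evaluated with plain integer comparisons, including inside SQL joins.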
def update_incompatible_versions(sender, instance, **kw):
if not instance.compat.addon_id:
return
    if instance.compat.addon.type != amo.ADDON_EXTENSION:
return
from . import tasks
versions = instance.compat.addon.versions.values_list('id', flat=True)
for chunk in chunked(versions, 50):
tasks.update_incompatible_appversions.delay(chunk)
models.signals.post_save.connect(update_incompatible_versions,
sender=CompatOverrideRange,
dispatch_uid='cor_update_incompatible')
models.signals.post_delete.connect(update_incompatible_versions,
sender=CompatOverrideRange,
dispatch_uid='cor_update_incompatible')
def track_new_status(sender, instance, *args, **kw):
if kw.get('raw'):
        # The addon is being loaded from a fixture.
return
if kw.get('created'):
track_addon_status_change(instance)
models.signals.post_save.connect(track_new_status,
sender=Addon,
dispatch_uid='track_new_addon_status')
@Addon.on_change
def track_status_change(old_attr=None, new_attr=None, **kw):
if old_attr is None:
old_attr = {}
if new_attr is None:
new_attr = {}
new_status = new_attr.get('status')
old_status = old_attr.get('status')
if new_status != old_status:
track_addon_status_change(kw['instance'])
def track_addon_status_change(addon):
statsd.incr('addon_status_change.all.status_{}'
.format(addon.status))<|fim▁end|> | |
<|file_name|>RateCalculationSwapLeg.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2014 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.strata.product.swap;
import java.io.Serializable;
import java.time.LocalDate;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import org.joda.beans.Bean;
import org.joda.beans.ImmutableBean;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaBean;
import org.joda.beans.MetaProperty;
import org.joda.beans.gen.BeanDefinition;
import org.joda.beans.gen.DerivedProperty;
import org.joda.beans.gen.PropertyDefinition;
import org.joda.beans.impl.direct.DirectFieldsBeanBuilder;
import org.joda.beans.impl.direct.DirectMetaBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import org.joda.beans.impl.direct.DirectMetaPropertyMap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.opengamma.strata.basics.ReferenceData;
import com.opengamma.strata.basics.ReferenceDataNotFoundException;
import com.opengamma.strata.basics.currency.Currency;
import com.opengamma.strata.basics.date.AdjustableDate;
import com.opengamma.strata.basics.date.DayCount;
import com.opengamma.strata.basics.index.Index;
import com.opengamma.strata.basics.schedule.PeriodicSchedule;
import com.opengamma.strata.basics.schedule.Schedule;
import com.opengamma.strata.product.common.PayReceive;
/**
* A rate swap leg defined using a parameterized schedule and calculation.
* <p>
* This defines a single swap leg paying a rate, such as an interest rate.
* The rate may be fixed or floating, see {@link FixedRateCalculation},
* {@link IborRateCalculation} and {@link OvernightRateCalculation}.
* <p>
* Interest is calculated based on <i>accrual periods</i> which follow a regular schedule
* with optional initial and final stubs. Coupon payments are based on <i>payment periods</i>
* which are typically the same as the accrual periods.
* If the payment period is longer than the accrual period then compounding may apply.
* The schedule of periods is defined using {@link PeriodicSchedule}, {@link PaymentSchedule},
* {@link NotionalSchedule} and {@link ResetSchedule}.
* <p>
* If the schedule needs to be manually specified, or there are other unusual calculation
* rules then the {@link RatePeriodSwapLeg} class should be used instead.
*/
@BeanDefinition
public final class RateCalculationSwapLeg
implements ScheduledSwapLeg, ImmutableBean, Serializable {
/**
* Whether the leg is pay or receive.
* <p>
* A value of 'Pay' implies that the resulting amount is paid to the counterparty.
* A value of 'Receive' implies that the resulting amount is received from the counterparty.
* Note that negative interest rates can result in a payment in the opposite
* direction to that implied by this indicator.
*/
@PropertyDefinition(validate = "notNull", overrideGet = true)
private final PayReceive payReceive;
/**
* The accrual schedule.
* <p>
* This is used to define the accrual periods.
* These are used directly or indirectly to determine other dates in the swap.
*/
@PropertyDefinition(validate = "notNull", overrideGet = true)
private final PeriodicSchedule accrualSchedule;
/**
* The payment schedule.
* <p>
* This is used to define the payment periods, including any compounding.
* The payment period dates are based on the accrual schedule.
*/
@PropertyDefinition(validate = "notNull", overrideGet = true)
private final PaymentSchedule paymentSchedule;
/**
* The notional schedule.
* <p>
* The notional amount schedule, which can vary during the lifetime of the swap.
* In most cases, the notional amount is not exchanged, with only the net difference being exchanged.
* However, in certain cases, initial, final or intermediate amounts are exchanged.
*/
@PropertyDefinition(validate = "notNull")
private final NotionalSchedule notionalSchedule;
/**
* The interest rate accrual calculation.
* <p>
* Different kinds of swap leg are determined by the subclass used here.
* See {@link FixedRateCalculation}, {@link IborRateCalculation} and {@link OvernightRateCalculation}.
*/
@PropertyDefinition(validate = "notNull")
private final RateCalculation calculation;
//-------------------------------------------------------------------------
@Override<|fim▁hole|> @DerivedProperty
public SwapLegType getType() {
return calculation.getType();
}
@Override
@DerivedProperty
public AdjustableDate getStartDate() {
return accrualSchedule.calculatedStartDate();
}
@Override
@DerivedProperty
public AdjustableDate getEndDate() {
return accrualSchedule.calculatedEndDate();
}
@Override
@DerivedProperty
public Currency getCurrency() {
return notionalSchedule.getCurrency();
}
@Override
public void collectCurrencies(ImmutableSet.Builder<Currency> builder) {
builder.add(getCurrency());
calculation.collectCurrencies(builder);
notionalSchedule.getFxReset().ifPresent(fxReset -> builder.add(fxReset.getReferenceCurrency()));
}
@Override
public void collectIndices(ImmutableSet.Builder<Index> builder) {
calculation.collectIndices(builder);
notionalSchedule.getFxReset().ifPresent(fxReset -> builder.add(fxReset.getIndex()));
}
//-------------------------------------------------------------------------
/**
* Returns an instance based on this leg with the start date replaced.
* <p>
* This uses {@link PeriodicSchedule#replaceStartDate(LocalDate)}.
*
   * @param adjustedStartDate  the new start date of this leg
   * @return a copy of this leg with the new start date
   * @throws IllegalArgumentException if the start date cannot be replaced with the proposed start date
   */
@Override
public RateCalculationSwapLeg replaceStartDate(LocalDate adjustedStartDate) {
return toBuilder().accrualSchedule(accrualSchedule.replaceStartDate(adjustedStartDate)).build();
}
/**
* Converts this swap leg to the equivalent {@code ResolvedSwapLeg}.
* <p>
   * A {@link ResolvedSwapLeg} represents the same data as this leg, but with
* a complete schedule of dates defined using {@link RatePaymentPeriod}.
*
* @return the equivalent resolved swap leg
* @throws ReferenceDataNotFoundException if an identifier cannot be resolved in the reference data
* @throws RuntimeException if unable to resolve due to an invalid swap schedule or definition
*/
@Override
public ResolvedSwapLeg resolve(ReferenceData refData) {
DayCount dayCount = calculation.getDayCount();
Schedule resolvedAccruals = accrualSchedule.createSchedule(refData);
Schedule resolvedPayments = paymentSchedule.createSchedule(resolvedAccruals, refData);
List<RateAccrualPeriod> accrualPeriods = calculation.createAccrualPeriods(resolvedAccruals, resolvedPayments, refData);
List<NotionalPaymentPeriod> payPeriods = paymentSchedule.createPaymentPeriods(
resolvedAccruals, resolvedPayments, accrualPeriods, dayCount, notionalSchedule, payReceive, refData);
LocalDate startDate = accrualPeriods.get(0).getStartDate();
ImmutableList<SwapPaymentEvent> payEvents = notionalSchedule.createEvents(payPeriods, startDate, refData);
return new ResolvedSwapLeg(getType(), payReceive, payPeriods, payEvents, getCurrency());
}
//------------------------- AUTOGENERATED START -------------------------
/**
* The meta-bean for {@code RateCalculationSwapLeg}.
* @return the meta-bean, not null
*/
public static RateCalculationSwapLeg.Meta meta() {
return RateCalculationSwapLeg.Meta.INSTANCE;
}
static {
MetaBean.register(RateCalculationSwapLeg.Meta.INSTANCE);
}
/**
* The serialization version id.
*/
private static final long serialVersionUID = 1L;
/**
* Returns a builder used to create an instance of the bean.
* @return the builder, not null
*/
public static RateCalculationSwapLeg.Builder builder() {
return new RateCalculationSwapLeg.Builder();
}
private RateCalculationSwapLeg(
PayReceive payReceive,
PeriodicSchedule accrualSchedule,
PaymentSchedule paymentSchedule,
NotionalSchedule notionalSchedule,
RateCalculation calculation) {
JodaBeanUtils.notNull(payReceive, "payReceive");
JodaBeanUtils.notNull(accrualSchedule, "accrualSchedule");
JodaBeanUtils.notNull(paymentSchedule, "paymentSchedule");
JodaBeanUtils.notNull(notionalSchedule, "notionalSchedule");
JodaBeanUtils.notNull(calculation, "calculation");
this.payReceive = payReceive;
this.accrualSchedule = accrualSchedule;
this.paymentSchedule = paymentSchedule;
this.notionalSchedule = notionalSchedule;
this.calculation = calculation;
}
@Override
public RateCalculationSwapLeg.Meta metaBean() {
return RateCalculationSwapLeg.Meta.INSTANCE;
}
//-----------------------------------------------------------------------
/**
* Gets whether the leg is pay or receive.
* <p>
* A value of 'Pay' implies that the resulting amount is paid to the counterparty.
* A value of 'Receive' implies that the resulting amount is received from the counterparty.
* Note that negative interest rates can result in a payment in the opposite
* direction to that implied by this indicator.
* @return the value of the property, not null
*/
@Override
public PayReceive getPayReceive() {
return payReceive;
}
//-----------------------------------------------------------------------
/**
* Gets the accrual schedule.
* <p>
* This is used to define the accrual periods.
* These are used directly or indirectly to determine other dates in the swap.
* @return the value of the property, not null
*/
@Override
public PeriodicSchedule getAccrualSchedule() {
return accrualSchedule;
}
//-----------------------------------------------------------------------
/**
* Gets the payment schedule.
* <p>
* This is used to define the payment periods, including any compounding.
* The payment period dates are based on the accrual schedule.
* @return the value of the property, not null
*/
@Override
public PaymentSchedule getPaymentSchedule() {
return paymentSchedule;
}
//-----------------------------------------------------------------------
/**
* Gets the notional schedule.
* <p>
* The notional amount schedule, which can vary during the lifetime of the swap.
* In most cases, the notional amount is not exchanged, with only the net difference being exchanged.
* However, in certain cases, initial, final or intermediate amounts are exchanged.
* @return the value of the property, not null
*/
public NotionalSchedule getNotionalSchedule() {
return notionalSchedule;
}
//-----------------------------------------------------------------------
/**
* Gets the interest rate accrual calculation.
* <p>
* Different kinds of swap leg are determined by the subclass used here.
* See {@link FixedRateCalculation}, {@link IborRateCalculation} and {@link OvernightRateCalculation}.
* @return the value of the property, not null
*/
public RateCalculation getCalculation() {
return calculation;
}
//-----------------------------------------------------------------------
/**
* Returns a builder that allows this bean to be mutated.
* @return the mutable builder, not null
*/
public Builder toBuilder() {
return new Builder(this);
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (obj != null && obj.getClass() == this.getClass()) {
RateCalculationSwapLeg other = (RateCalculationSwapLeg) obj;
return JodaBeanUtils.equal(payReceive, other.payReceive) &&
JodaBeanUtils.equal(accrualSchedule, other.accrualSchedule) &&
JodaBeanUtils.equal(paymentSchedule, other.paymentSchedule) &&
JodaBeanUtils.equal(notionalSchedule, other.notionalSchedule) &&
JodaBeanUtils.equal(calculation, other.calculation);
}
return false;
}
@Override
public int hashCode() {
int hash = getClass().hashCode();
hash = hash * 31 + JodaBeanUtils.hashCode(payReceive);
hash = hash * 31 + JodaBeanUtils.hashCode(accrualSchedule);
hash = hash * 31 + JodaBeanUtils.hashCode(paymentSchedule);
hash = hash * 31 + JodaBeanUtils.hashCode(notionalSchedule);
hash = hash * 31 + JodaBeanUtils.hashCode(calculation);
return hash;
}
@Override
public String toString() {
StringBuilder buf = new StringBuilder(320);
buf.append("RateCalculationSwapLeg{");
buf.append("payReceive").append('=').append(JodaBeanUtils.toString(payReceive)).append(',').append(' ');
buf.append("accrualSchedule").append('=').append(JodaBeanUtils.toString(accrualSchedule)).append(',').append(' ');
buf.append("paymentSchedule").append('=').append(JodaBeanUtils.toString(paymentSchedule)).append(',').append(' ');
buf.append("notionalSchedule").append('=').append(JodaBeanUtils.toString(notionalSchedule)).append(',').append(' ');
buf.append("calculation").append('=').append(JodaBeanUtils.toString(calculation)).append(',').append(' ');
buf.append("type").append('=').append(JodaBeanUtils.toString(getType())).append(',').append(' ');
buf.append("startDate").append('=').append(JodaBeanUtils.toString(getStartDate())).append(',').append(' ');
buf.append("endDate").append('=').append(JodaBeanUtils.toString(getEndDate())).append(',').append(' ');
buf.append("currency").append('=').append(JodaBeanUtils.toString(getCurrency()));
buf.append('}');
return buf.toString();
}
//-----------------------------------------------------------------------
/**
* The meta-bean for {@code RateCalculationSwapLeg}.
*/
public static final class Meta extends DirectMetaBean {
/**
* The singleton instance of the meta-bean.
*/
static final Meta INSTANCE = new Meta();
/**
* The meta-property for the {@code payReceive} property.
*/
private final MetaProperty<PayReceive> payReceive = DirectMetaProperty.ofImmutable(
this, "payReceive", RateCalculationSwapLeg.class, PayReceive.class);
/**
* The meta-property for the {@code accrualSchedule} property.
*/
private final MetaProperty<PeriodicSchedule> accrualSchedule = DirectMetaProperty.ofImmutable(
this, "accrualSchedule", RateCalculationSwapLeg.class, PeriodicSchedule.class);
/**
* The meta-property for the {@code paymentSchedule} property.
*/
private final MetaProperty<PaymentSchedule> paymentSchedule = DirectMetaProperty.ofImmutable(
this, "paymentSchedule", RateCalculationSwapLeg.class, PaymentSchedule.class);
/**
* The meta-property for the {@code notionalSchedule} property.
*/
private final MetaProperty<NotionalSchedule> notionalSchedule = DirectMetaProperty.ofImmutable(
this, "notionalSchedule", RateCalculationSwapLeg.class, NotionalSchedule.class);
/**
* The meta-property for the {@code calculation} property.
*/
private final MetaProperty<RateCalculation> calculation = DirectMetaProperty.ofImmutable(
this, "calculation", RateCalculationSwapLeg.class, RateCalculation.class);
/**
* The meta-property for the {@code type} property.
*/
private final MetaProperty<SwapLegType> type = DirectMetaProperty.ofDerived(
this, "type", RateCalculationSwapLeg.class, SwapLegType.class);
/**
* The meta-property for the {@code startDate} property.
*/
private final MetaProperty<AdjustableDate> startDate = DirectMetaProperty.ofDerived(
this, "startDate", RateCalculationSwapLeg.class, AdjustableDate.class);
/**
* The meta-property for the {@code endDate} property.
*/
private final MetaProperty<AdjustableDate> endDate = DirectMetaProperty.ofDerived(
this, "endDate", RateCalculationSwapLeg.class, AdjustableDate.class);
/**
* The meta-property for the {@code currency} property.
*/
private final MetaProperty<Currency> currency = DirectMetaProperty.ofDerived(
this, "currency", RateCalculationSwapLeg.class, Currency.class);
/**
* The meta-properties.
*/
private final Map<String, MetaProperty<?>> metaPropertyMap$ = new DirectMetaPropertyMap(
this, null,
"payReceive",
"accrualSchedule",
"paymentSchedule",
"notionalSchedule",
"calculation",
"type",
"startDate",
"endDate",
"currency");
/**
* Restricted constructor.
*/
private Meta() {
}
@Override
protected MetaProperty<?> metaPropertyGet(String propertyName) {
switch (propertyName.hashCode()) {
case -885469925: // payReceive
return payReceive;
case 304659814: // accrualSchedule
return accrualSchedule;
case -1499086147: // paymentSchedule
return paymentSchedule;
case 1447860727: // notionalSchedule
return notionalSchedule;
case -934682935: // calculation
return calculation;
case 3575610: // type
return type;
case -2129778896: // startDate
return startDate;
case -1607727319: // endDate
return endDate;
case 575402001: // currency
return currency;
}
return super.metaPropertyGet(propertyName);
}
@Override
public RateCalculationSwapLeg.Builder builder() {
return new RateCalculationSwapLeg.Builder();
}
@Override
public Class<? extends RateCalculationSwapLeg> beanType() {
return RateCalculationSwapLeg.class;
}
@Override
public Map<String, MetaProperty<?>> metaPropertyMap() {
return metaPropertyMap$;
}
//-----------------------------------------------------------------------
/**
* The meta-property for the {@code payReceive} property.
* @return the meta-property, not null
*/
public MetaProperty<PayReceive> payReceive() {
return payReceive;
}
/**
* The meta-property for the {@code accrualSchedule} property.
* @return the meta-property, not null
*/
public MetaProperty<PeriodicSchedule> accrualSchedule() {
return accrualSchedule;
}
/**
* The meta-property for the {@code paymentSchedule} property.
* @return the meta-property, not null
*/
public MetaProperty<PaymentSchedule> paymentSchedule() {
return paymentSchedule;
}
/**
* The meta-property for the {@code notionalSchedule} property.
* @return the meta-property, not null
*/
public MetaProperty<NotionalSchedule> notionalSchedule() {
return notionalSchedule;
}
/**
* The meta-property for the {@code calculation} property.
* @return the meta-property, not null
*/
public MetaProperty<RateCalculation> calculation() {
return calculation;
}
/**
* The meta-property for the {@code type} property.
* @return the meta-property, not null
*/
public MetaProperty<SwapLegType> type() {
return type;
}
/**
* The meta-property for the {@code startDate} property.
* @return the meta-property, not null
*/
public MetaProperty<AdjustableDate> startDate() {
return startDate;
}
/**
* The meta-property for the {@code endDate} property.
* @return the meta-property, not null
*/
public MetaProperty<AdjustableDate> endDate() {
return endDate;
}
/**
* The meta-property for the {@code currency} property.
* @return the meta-property, not null
*/
public MetaProperty<Currency> currency() {
return currency;
}
//-----------------------------------------------------------------------
@Override
protected Object propertyGet(Bean bean, String propertyName, boolean quiet) {
switch (propertyName.hashCode()) {
case -885469925: // payReceive
return ((RateCalculationSwapLeg) bean).getPayReceive();
case 304659814: // accrualSchedule
return ((RateCalculationSwapLeg) bean).getAccrualSchedule();
case -1499086147: // paymentSchedule
return ((RateCalculationSwapLeg) bean).getPaymentSchedule();
case 1447860727: // notionalSchedule
return ((RateCalculationSwapLeg) bean).getNotionalSchedule();
case -934682935: // calculation
return ((RateCalculationSwapLeg) bean).getCalculation();
case 3575610: // type
return ((RateCalculationSwapLeg) bean).getType();
case -2129778896: // startDate
return ((RateCalculationSwapLeg) bean).getStartDate();
case -1607727319: // endDate
return ((RateCalculationSwapLeg) bean).getEndDate();
case 575402001: // currency
return ((RateCalculationSwapLeg) bean).getCurrency();
}
return super.propertyGet(bean, propertyName, quiet);
}
@Override
protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) {
metaProperty(propertyName);
if (quiet) {
return;
}
throw new UnsupportedOperationException("Property cannot be written: " + propertyName);
}
}
//-----------------------------------------------------------------------
/**
* The bean-builder for {@code RateCalculationSwapLeg}.
*/
public static final class Builder extends DirectFieldsBeanBuilder<RateCalculationSwapLeg> {
private PayReceive payReceive;
private PeriodicSchedule accrualSchedule;
private PaymentSchedule paymentSchedule;
private NotionalSchedule notionalSchedule;
private RateCalculation calculation;
/**
* Restricted constructor.
*/
private Builder() {
}
/**
* Restricted copy constructor.
* @param beanToCopy the bean to copy from, not null
*/
private Builder(RateCalculationSwapLeg beanToCopy) {
this.payReceive = beanToCopy.getPayReceive();
this.accrualSchedule = beanToCopy.getAccrualSchedule();
this.paymentSchedule = beanToCopy.getPaymentSchedule();
this.notionalSchedule = beanToCopy.getNotionalSchedule();
this.calculation = beanToCopy.getCalculation();
}
//-----------------------------------------------------------------------
@Override
public Object get(String propertyName) {
switch (propertyName.hashCode()) {
case -885469925: // payReceive
return payReceive;
case 304659814: // accrualSchedule
return accrualSchedule;
case -1499086147: // paymentSchedule
return paymentSchedule;
case 1447860727: // notionalSchedule
return notionalSchedule;
case -934682935: // calculation
return calculation;
default:
throw new NoSuchElementException("Unknown property: " + propertyName);
}
}
@Override
public Builder set(String propertyName, Object newValue) {
switch (propertyName.hashCode()) {
case -885469925: // payReceive
this.payReceive = (PayReceive) newValue;
break;
case 304659814: // accrualSchedule
this.accrualSchedule = (PeriodicSchedule) newValue;
break;
case -1499086147: // paymentSchedule
this.paymentSchedule = (PaymentSchedule) newValue;
break;
case 1447860727: // notionalSchedule
this.notionalSchedule = (NotionalSchedule) newValue;
break;
case -934682935: // calculation
this.calculation = (RateCalculation) newValue;
break;
default:
throw new NoSuchElementException("Unknown property: " + propertyName);
}
return this;
}
@Override
public Builder set(MetaProperty<?> property, Object value) {
super.set(property, value);
return this;
}
@Override
public RateCalculationSwapLeg build() {
return new RateCalculationSwapLeg(
payReceive,
accrualSchedule,
paymentSchedule,
notionalSchedule,
calculation);
}
//-----------------------------------------------------------------------
/**
* Sets whether the leg is pay or receive.
* <p>
* A value of 'Pay' implies that the resulting amount is paid to the counterparty.
* A value of 'Receive' implies that the resulting amount is received from the counterparty.
* Note that negative interest rates can result in a payment in the opposite
* direction to that implied by this indicator.
* @param payReceive the new value, not null
* @return this, for chaining, not null
*/
public Builder payReceive(PayReceive payReceive) {
JodaBeanUtils.notNull(payReceive, "payReceive");
this.payReceive = payReceive;
return this;
}
/**
* Sets the accrual schedule.
* <p>
* This is used to define the accrual periods.
* These are used directly or indirectly to determine other dates in the swap.
* @param accrualSchedule the new value, not null
* @return this, for chaining, not null
*/
public Builder accrualSchedule(PeriodicSchedule accrualSchedule) {
JodaBeanUtils.notNull(accrualSchedule, "accrualSchedule");
this.accrualSchedule = accrualSchedule;
return this;
}
/**
* Sets the payment schedule.
* <p>
* This is used to define the payment periods, including any compounding.
* The payment period dates are based on the accrual schedule.
* @param paymentSchedule the new value, not null
* @return this, for chaining, not null
*/
public Builder paymentSchedule(PaymentSchedule paymentSchedule) {
JodaBeanUtils.notNull(paymentSchedule, "paymentSchedule");
this.paymentSchedule = paymentSchedule;
return this;
}
/**
* Sets the notional schedule.
* <p>
* The notional amount schedule, which can vary during the lifetime of the swap.
* In most cases, the notional amount is not exchanged, with only the net difference being exchanged.
* However, in certain cases, initial, final or intermediate amounts are exchanged.
* @param notionalSchedule the new value, not null
* @return this, for chaining, not null
*/
public Builder notionalSchedule(NotionalSchedule notionalSchedule) {
JodaBeanUtils.notNull(notionalSchedule, "notionalSchedule");
this.notionalSchedule = notionalSchedule;
return this;
}
/**
* Sets the interest rate accrual calculation.
* <p>
* Different kinds of swap leg are determined by the subclass used here.
* See {@link FixedRateCalculation}, {@link IborRateCalculation} and {@link OvernightRateCalculation}.
* @param calculation the new value, not null
* @return this, for chaining, not null
*/
public Builder calculation(RateCalculation calculation) {
JodaBeanUtils.notNull(calculation, "calculation");
this.calculation = calculation;
return this;
}
//-----------------------------------------------------------------------
@Override
public String toString() {
StringBuilder buf = new StringBuilder(320);
buf.append("RateCalculationSwapLeg.Builder{");
buf.append("payReceive").append('=').append(JodaBeanUtils.toString(payReceive)).append(',').append(' ');
buf.append("accrualSchedule").append('=').append(JodaBeanUtils.toString(accrualSchedule)).append(',').append(' ');
buf.append("paymentSchedule").append('=').append(JodaBeanUtils.toString(paymentSchedule)).append(',').append(' ');
buf.append("notionalSchedule").append('=').append(JodaBeanUtils.toString(notionalSchedule)).append(',').append(' ');
buf.append("calculation").append('=').append(JodaBeanUtils.toString(calculation)).append(',').append(' ');
buf.append("type").append('=').append(JodaBeanUtils.toString(null)).append(',').append(' ');
buf.append("startDate").append('=').append(JodaBeanUtils.toString(null)).append(',').append(' ');
buf.append("endDate").append('=').append(JodaBeanUtils.toString(null)).append(',').append(' ');
buf.append("currency").append('=').append(JodaBeanUtils.toString(null));
buf.append('}');
return buf.toString();
}
}
//-------------------------- AUTOGENERATED END --------------------------
}<|fim▁end|> | |
<|file_name|>modulebonusafterburner.py<|end_file_name|><|fim▁begin|># moduleBonusAfterburner<|fim▁hole|>#
# Used by:
# Modules from group: Propulsion Module (62 of 127)
type = "active"
runTime = "late"
def handler(fit, module, context):
fit.ship.increaseItemAttr("mass", module.getModifiedItemAttr("massAddition"))
speedBoost = module.getModifiedItemAttr("speedFactor")
mass = fit.ship.getModifiedItemAttr("mass")
thrust = module.getModifiedItemAttr("speedBoostFactor")
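    # Worked example (hypothetical numbers, not from the original source):
    # with speedFactor 135, speedBoostFactor 1500000 and total ship mass
    # 1500000 kg, the boost is 135 * 1500000 / 1500000 = 135% extra velocity;
    # a heavier hull gains proportionally less from the same module.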
fit.ship.boostItemAttr("maxVelocity", speedBoost * thrust / mass)<|fim▁end|> | |
<|file_name|>viewFactory.js<|end_file_name|><|fim▁begin|>// namespaces
var dwv = dwv || {};
dwv.image = dwv.image || {};
/**
* {@link dwv.image.View} factory.
*
* @class
*/
dwv.image.ViewFactory = function () {};
/**
* {@link dwv.image.View} factory. Defaults to local one.
*
* @see dwv.image.ViewFactory
*/
dwv.ViewFactory = dwv.image.ViewFactory;
/**
 * Get a View object from the read DICOM file.
*
* @param {object} dicomElements The DICOM tags.<|fim▁hole|> */
dwv.image.ViewFactory.prototype.create = function (dicomElements, image) {
// view
var view = new dwv.image.View(image);
// default color map
if (image.getPhotometricInterpretation() === 'MONOCHROME1') {
view.setDefaultColourMap(dwv.image.lut.invPlain);
} else if (image.getPhotometricInterpretation() === 'PALETTE COLOR') {
var paletteLut = image.getMeta().paletteLut;
if (typeof (paletteLut) !== 'undefined') {
view.setDefaultColourMap(paletteLut);
}
}
// window level presets
var windowPresets = {};
// image presets
if (typeof image.getMeta().windowPresets !== 'undefined') {
windowPresets = image.getMeta().windowPresets;
}
// min/max
  // Not filled yet since it is still too costly to calculate min/max
// for each slice... It will be filled at first use
// (see view.setWindowLevelPreset).
  // Order is important: if there is no wl from DICOM, this will be the default.
windowPresets.minmax = {name: 'minmax'};
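  // Sketch of the resulting map (hypothetical preset names): modality entries
  // added below look like {wl: WindowLevel, name: key}, while this minmax
  // entry starts with only a name and gets its WindowLevel lazily on first
  // use (see the note above).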
// optional modality presets
if (typeof dwv.tool !== 'undefined' &&
typeof dwv.tool.defaultpresets !== 'undefined') {
var modality = image.getMeta().Modality;
for (var key in dwv.tool.defaultpresets[modality]) {
var preset = dwv.tool.defaultpresets[modality][key];
windowPresets[key] = {
wl: new dwv.image.WindowLevel(preset.center, preset.width),
name: key
};
}
}
// store
view.setWindowPresets(windowPresets);
// set the initial position
view.setInitialPosition();
return view;
};<|fim▁end|> | * @param {dwv.image.Image} image The associated image.
* @returns {dwv.image.View} The new View. |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>var parentchild = require('../'),
assert = require('assert');
assert.deepEqual(parentchild('foo'), []);
assert.deepEqual(parentchild({}), []);
assert.deepEqual(parentchild([1, 2, 3]), [['inarray', undefined, 1],
['inarray', undefined, 2],
['inarray', undefined, 3]]);
assert.deepEqual(parentchild({
a: {
b: 'foo'
},
c: ['a', 'b']
}), [['child', undefined, 'a'],
['child', 'a', 'b'],
['value', 'b', 'foo'],
['child', undefined, 'c'],
['inarray', 'c', 'a'],
['inarray', 'c', 'b']]);
assert.deepEqual(parentchild({
a: {
b: 'foo'
},
c: {<|fim▁hole|> d: {
e: 'bar'
},
g: 'darp',
h: {
i: 'derp',
o: {
p: {
q: 'qak'
},
r: 'rez'
}
},
j: 'gar'
},
k: {
l: 'one',
m: 'two',
n: 'three',
s: [1, 2, 3]
}
}), [['child', undefined, 'a'],
['child', 'a', 'b'],
['value', 'b', 'foo'],
['child', undefined, 'c'],
['child', 'c', 'f'],
['value', 'f', 'baz'],
['child', 'c', 'd'],
['child', 'd', 'e'],
['value', 'e', 'bar'],
['child', 'c', 'g'],
['value', 'g', 'darp'],
['child', 'c', 'h'],
['child', 'h', 'i'],
['value', 'i', 'derp'],
['child', 'h', 'o'],
['child', 'o', 'p'],
['child', 'p', 'q'],
['value', 'q', 'qak'],
['child', 'o', 'r'],
['value', 'r', 'rez'],
['child', 'c', 'j'],
['value', 'j', 'gar'],
['child', undefined, 'k'],
['child', 'k', 'l'],
['value', 'l', 'one'],
['child', 'k', 'm'],
['value', 'm', 'two'],
['child', 'k', 'n'],
['value', 'n', 'three'],
['child', 'k', 's'],
['inarray', 's', 1],
['inarray', 's', 2],
['inarray', 's', 3]]);
console.log('ok');<|fim▁end|> | f: 'baz', |
<|file_name|>main.py<|end_file_name|><|fim▁begin|>from tc_python.arule import ARule
from t_core.messages import Packet
from HA import HA
from HAb import HAb
from HTopClass2TableLHS import HTopClass2TableLHS
from HTopClass2TableRHS import HTopClass2TableRHS
r1 = ARule(HTopClass2TableLHS(), HTopClass2TableRHS())
p = Packet()
p.graph = HA()
p1 = r1.packet_in(p)
print p1
print r1.is_success
if r1.exception:
raise r1.exception
<|fim▁hole|>p.graph = HAb()
p2 = r2.packet_in(p)
print p2
print r2.is_success
if r2.exception:
raise r2.exception<|fim▁end|> |
r2 = ARule(HTopClass2TableLHS(), HTopClass2TableRHS())
p = Packet()
|
<|file_name|>forms.py<|end_file_name|><|fim▁begin|>from django import forms
from oldcontrib.media.document.models import Document
class DocumentUpload(forms.ModelForm):
class Meta:
model = Document<|fim▁hole|><|fim▁end|> | fields = ('document',) |
<|file_name|>CustomerConversionGoalOperation.java<|end_file_name|><|fim▁begin|>// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v10/services/customer_conversion_goal_service.proto
package com.google.ads.googleads.v10.services;
/**
* <pre>
* A single operation (update) on a customer conversion goal.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v10.services.CustomerConversionGoalOperation}
*/
public final class CustomerConversionGoalOperation extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v10.services.CustomerConversionGoalOperation)
CustomerConversionGoalOperationOrBuilder {
private static final long serialVersionUID = 0L;
// Use CustomerConversionGoalOperation.newBuilder() to construct.
private CustomerConversionGoalOperation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CustomerConversionGoalOperation() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new CustomerConversionGoalOperation();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private CustomerConversionGoalOperation(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
com.google.ads.googleads.v10.resources.CustomerConversionGoal.Builder subBuilder = null;
if (operationCase_ == 1) {
subBuilder = ((com.google.ads.googleads.v10.resources.CustomerConversionGoal) operation_).toBuilder();
}
operation_ =
input.readMessage(com.google.ads.googleads.v10.resources.CustomerConversionGoal.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom((com.google.ads.googleads.v10.resources.CustomerConversionGoal) operation_);
operation_ = subBuilder.buildPartial();
}
operationCase_ = 1;
break;
}
case 18: {
com.google.protobuf.FieldMask.Builder subBuilder = null;
if (updateMask_ != null) {
subBuilder = updateMask_.toBuilder();
}
updateMask_ = input.readMessage(com.google.protobuf.FieldMask.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(updateMask_);
updateMask_ = subBuilder.buildPartial();
}
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v10.services.CustomerConversionGoalServiceProto.internal_static_google_ads_googleads_v10_services_CustomerConversionGoalOperation_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v10.services.CustomerConversionGoalServiceProto.internal_static_google_ads_googleads_v10_services_CustomerConversionGoalOperation_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v10.services.CustomerConversionGoalOperation.class, com.google.ads.googleads.v10.services.CustomerConversionGoalOperation.Builder.class);
}
private int operationCase_ = 0;
private java.lang.Object operation_;
public enum OperationCase
implements com.google.protobuf.Internal.EnumLite,
com.google.protobuf.AbstractMessage.InternalOneOfEnum {
UPDATE(1),
OPERATION_NOT_SET(0);
private final int value;
private OperationCase(int value) {
this.value = value;
}
/**
* @param value The number of the enum to look for.
* @return The enum associated with the given number.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static OperationCase valueOf(int value) {
return forNumber(value);
}
public static OperationCase forNumber(int value) {
switch (value) {
case 1: return UPDATE;
case 0: return OPERATION_NOT_SET;
default: return null;
}
}
public int getNumber() {
return this.value;
}
};
public OperationCase
getOperationCase() {
return OperationCase.forNumber(
operationCase_);
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
* <pre>
* FieldMask that determines which resource fields are modified in an update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return updateMask_ != null;
}
/**
* <pre>
* FieldMask that determines which resource fields are modified in an update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
* <pre>
* FieldMask that determines which resource fields are modified in an update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return getUpdateMask();
}
public static final int UPDATE_FIELD_NUMBER = 1;
/**
* <pre>
* Update operation: The customer conversion goal is expected to have a
* valid resource name.
* </pre>
*
* <code>.google.ads.googleads.v10.resources.CustomerConversionGoal update = 1;</code>
* @return Whether the update field is set.
*/
@java.lang.Override
public boolean hasUpdate() {
return operationCase_ == 1;
}
/**
* <pre>
* Update operation: The customer conversion goal is expected to have a
* valid resource name.
* </pre>
*
* <code>.google.ads.googleads.v10.resources.CustomerConversionGoal update = 1;</code>
* @return The update.
*/
@java.lang.Override
public com.google.ads.googleads.v10.resources.CustomerConversionGoal getUpdate() {
if (operationCase_ == 1) {
return (com.google.ads.googleads.v10.resources.CustomerConversionGoal) operation_;
}
return com.google.ads.googleads.v10.resources.CustomerConversionGoal.getDefaultInstance();
}
/**
* <pre>
* Update operation: The customer conversion goal is expected to have a
* valid resource name.
* </pre>
*
* <code>.google.ads.googleads.v10.resources.CustomerConversionGoal update = 1;</code>
*/
@java.lang.Override
public com.google.ads.googleads.v10.resources.CustomerConversionGoalOrBuilder getUpdateOrBuilder() {
if (operationCase_ == 1) {
return (com.google.ads.googleads.v10.resources.CustomerConversionGoal) operation_;
}
return com.google.ads.googleads.v10.resources.CustomerConversionGoal.getDefaultInstance();
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (operationCase_ == 1) {
output.writeMessage(1, (com.google.ads.googleads.v10.resources.CustomerConversionGoal) operation_);
}
if (updateMask_ != null) {
output.writeMessage(2, getUpdateMask());
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (operationCase_ == 1) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, (com.google.ads.googleads.v10.resources.CustomerConversionGoal) operation_);
}
if (updateMask_ != null) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, getUpdateMask());
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v10.services.CustomerConversionGoalOperation)) {
return super.equals(obj);
}
com.google.ads.googleads.v10.services.CustomerConversionGoalOperation other = (com.google.ads.googleads.v10.services.CustomerConversionGoalOperation) obj;
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask()
.equals(other.getUpdateMask())) return false;
}
if (!getOperationCase().equals(other.getOperationCase())) return false;
switch (operationCase_) {
case 1:
if (!getUpdate()
.equals(other.getUpdate())) return false;
break;
case 0:
default:
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
switch (operationCase_) {
case 1:
hash = (37 * hash) + UPDATE_FIELD_NUMBER;
hash = (53 * hash) + getUpdate().hashCode();
break;
case 0:
default:
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.ads.googleads.v10.services.CustomerConversionGoalOperation parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v10.services.CustomerConversionGoalOperation parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v10.services.CustomerConversionGoalOperation parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v10.services.CustomerConversionGoalOperation parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v10.services.CustomerConversionGoalOperation parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v10.services.CustomerConversionGoalOperation parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v10.services.CustomerConversionGoalOperation parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v10.services.CustomerConversionGoalOperation parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)<|fim▁hole|> }
public static com.google.ads.googleads.v10.services.CustomerConversionGoalOperation parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v10.services.CustomerConversionGoalOperation parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v10.services.CustomerConversionGoalOperation parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v10.services.CustomerConversionGoalOperation parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v10.services.CustomerConversionGoalOperation prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* A single operation (update) on a customer conversion goal.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v10.services.CustomerConversionGoalOperation}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v10.services.CustomerConversionGoalOperation)
com.google.ads.googleads.v10.services.CustomerConversionGoalOperationOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v10.services.CustomerConversionGoalServiceProto.internal_static_google_ads_googleads_v10_services_CustomerConversionGoalOperation_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v10.services.CustomerConversionGoalServiceProto.internal_static_google_ads_googleads_v10_services_CustomerConversionGoalOperation_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v10.services.CustomerConversionGoalOperation.class, com.google.ads.googleads.v10.services.CustomerConversionGoalOperation.Builder.class);
}
// Construct using com.google.ads.googleads.v10.services.CustomerConversionGoalOperation.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
if (updateMaskBuilder_ == null) {
updateMask_ = null;
} else {
updateMask_ = null;
updateMaskBuilder_ = null;
}
operationCase_ = 0;
operation_ = null;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v10.services.CustomerConversionGoalServiceProto.internal_static_google_ads_googleads_v10_services_CustomerConversionGoalOperation_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v10.services.CustomerConversionGoalOperation getDefaultInstanceForType() {
return com.google.ads.googleads.v10.services.CustomerConversionGoalOperation.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v10.services.CustomerConversionGoalOperation build() {
com.google.ads.googleads.v10.services.CustomerConversionGoalOperation result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v10.services.CustomerConversionGoalOperation buildPartial() {
com.google.ads.googleads.v10.services.CustomerConversionGoalOperation result = new com.google.ads.googleads.v10.services.CustomerConversionGoalOperation(this);
if (updateMaskBuilder_ == null) {
result.updateMask_ = updateMask_;
} else {
result.updateMask_ = updateMaskBuilder_.build();
}
if (operationCase_ == 1) {
if (updateBuilder_ == null) {
result.operation_ = operation_;
} else {
result.operation_ = updateBuilder_.build();
}
}
result.operationCase_ = operationCase_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v10.services.CustomerConversionGoalOperation) {
return mergeFrom((com.google.ads.googleads.v10.services.CustomerConversionGoalOperation)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v10.services.CustomerConversionGoalOperation other) {
if (other == com.google.ads.googleads.v10.services.CustomerConversionGoalOperation.getDefaultInstance()) return this;
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
switch (other.getOperationCase()) {
case UPDATE: {
mergeUpdate(other.getUpdate());
break;
}
case OPERATION_NOT_SET: {
break;
}
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.ads.googleads.v10.services.CustomerConversionGoalOperation parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.ads.googleads.v10.services.CustomerConversionGoalOperation) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int operationCase_ = 0;
private java.lang.Object operation_;
public OperationCase
getOperationCase() {
return OperationCase.forNumber(
operationCase_);
}
public Builder clearOperation() {
operationCase_ = 0;
operation_ = null;
onChanged();
return this;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_;
/**
* <pre>
* FieldMask that determines which resource fields are modified in an update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return updateMaskBuilder_ != null || updateMask_ != null;
}
/**
* <pre>
* FieldMask that determines which resource fields are modified in an update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
* <pre>
* FieldMask that determines which resource fields are modified in an update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
onChanged();
} else {
updateMaskBuilder_.setMessage(value);
}
return this;
}
/**
* <pre>
* FieldMask that determines which resource fields are modified in an update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(
com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
onChanged();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
* <pre>
* FieldMask that determines which resource fields are modified in an update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (updateMask_ != null) {
updateMask_ =
com.google.protobuf.FieldMask.newBuilder(updateMask_).mergeFrom(value).buildPartial();
} else {
updateMask_ = value;
}
onChanged();
} else {
updateMaskBuilder_.mergeFrom(value);
}
return this;
}
/**
* <pre>
* FieldMask that determines which resource fields are modified in an update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder clearUpdateMask() {
if (updateMaskBuilder_ == null) {
updateMask_ = null;
onChanged();
} else {
updateMask_ = null;
updateMaskBuilder_ = null;
}
return this;
}
/**
* <pre>
* FieldMask that determines which resource fields are modified in an update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
* <pre>
* FieldMask that determines which resource fields are modified in an update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null ?
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
}
/**
* <pre>
* FieldMask that determines which resource fields are modified in an update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(),
getParentForChildren(),
isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
private com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v10.resources.CustomerConversionGoal, com.google.ads.googleads.v10.resources.CustomerConversionGoal.Builder, com.google.ads.googleads.v10.resources.CustomerConversionGoalOrBuilder> updateBuilder_;
/**
* <pre>
* Update operation: The customer conversion goal is expected to have a
* valid resource name.
* </pre>
*
* <code>.google.ads.googleads.v10.resources.CustomerConversionGoal update = 1;</code>
* @return Whether the update field is set.
*/
@java.lang.Override
public boolean hasUpdate() {
return operationCase_ == 1;
}
/**
* <pre>
* Update operation: The customer conversion goal is expected to have a
* valid resource name.
* </pre>
*
* <code>.google.ads.googleads.v10.resources.CustomerConversionGoal update = 1;</code>
* @return The update.
*/
@java.lang.Override
public com.google.ads.googleads.v10.resources.CustomerConversionGoal getUpdate() {
if (updateBuilder_ == null) {
if (operationCase_ == 1) {
return (com.google.ads.googleads.v10.resources.CustomerConversionGoal) operation_;
}
return com.google.ads.googleads.v10.resources.CustomerConversionGoal.getDefaultInstance();
} else {
if (operationCase_ == 1) {
return updateBuilder_.getMessage();
}
return com.google.ads.googleads.v10.resources.CustomerConversionGoal.getDefaultInstance();
}
}
/**
* <pre>
* Update operation: The customer conversion goal is expected to have a
* valid resource name.
* </pre>
*
* <code>.google.ads.googleads.v10.resources.CustomerConversionGoal update = 1;</code>
*/
public Builder setUpdate(com.google.ads.googleads.v10.resources.CustomerConversionGoal value) {
if (updateBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
operation_ = value;
onChanged();
} else {
updateBuilder_.setMessage(value);
}
operationCase_ = 1;
return this;
}
/**
* <pre>
* Update operation: The customer conversion goal is expected to have a
* valid resource name.
* </pre>
*
* <code>.google.ads.googleads.v10.resources.CustomerConversionGoal update = 1;</code>
*/
public Builder setUpdate(
com.google.ads.googleads.v10.resources.CustomerConversionGoal.Builder builderForValue) {
if (updateBuilder_ == null) {
operation_ = builderForValue.build();
onChanged();
} else {
updateBuilder_.setMessage(builderForValue.build());
}
operationCase_ = 1;
return this;
}
/**
* <pre>
* Update operation: The customer conversion goal is expected to have a
* valid resource name.
* </pre>
*
* <code>.google.ads.googleads.v10.resources.CustomerConversionGoal update = 1;</code>
*/
public Builder mergeUpdate(com.google.ads.googleads.v10.resources.CustomerConversionGoal value) {
if (updateBuilder_ == null) {
if (operationCase_ == 1 &&
operation_ != com.google.ads.googleads.v10.resources.CustomerConversionGoal.getDefaultInstance()) {
operation_ = com.google.ads.googleads.v10.resources.CustomerConversionGoal.newBuilder((com.google.ads.googleads.v10.resources.CustomerConversionGoal) operation_)
.mergeFrom(value).buildPartial();
} else {
operation_ = value;
}
onChanged();
} else {
if (operationCase_ == 1) {
updateBuilder_.mergeFrom(value);
}
updateBuilder_.setMessage(value);
}
operationCase_ = 1;
return this;
}
/**
* <pre>
* Update operation: The customer conversion goal is expected to have a
* valid resource name.
* </pre>
*
* <code>.google.ads.googleads.v10.resources.CustomerConversionGoal update = 1;</code>
*/
public Builder clearUpdate() {
if (updateBuilder_ == null) {
if (operationCase_ == 1) {
operationCase_ = 0;
operation_ = null;
onChanged();
}
} else {
if (operationCase_ == 1) {
operationCase_ = 0;
operation_ = null;
}
updateBuilder_.clear();
}
return this;
}
/**
* <pre>
* Update operation: The customer conversion goal is expected to have a
* valid resource name.
* </pre>
*
* <code>.google.ads.googleads.v10.resources.CustomerConversionGoal update = 1;</code>
*/
public com.google.ads.googleads.v10.resources.CustomerConversionGoal.Builder getUpdateBuilder() {
return getUpdateFieldBuilder().getBuilder();
}
/**
* <pre>
* Update operation: The customer conversion goal is expected to have a
* valid resource name.
* </pre>
*
* <code>.google.ads.googleads.v10.resources.CustomerConversionGoal update = 1;</code>
*/
@java.lang.Override
public com.google.ads.googleads.v10.resources.CustomerConversionGoalOrBuilder getUpdateOrBuilder() {
if ((operationCase_ == 1) && (updateBuilder_ != null)) {
return updateBuilder_.getMessageOrBuilder();
} else {
if (operationCase_ == 1) {
return (com.google.ads.googleads.v10.resources.CustomerConversionGoal) operation_;
}
return com.google.ads.googleads.v10.resources.CustomerConversionGoal.getDefaultInstance();
}
}
/**
* <pre>
* Update operation: The customer conversion goal is expected to have a
* valid resource name.
* </pre>
*
* <code>.google.ads.googleads.v10.resources.CustomerConversionGoal update = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v10.resources.CustomerConversionGoal, com.google.ads.googleads.v10.resources.CustomerConversionGoal.Builder, com.google.ads.googleads.v10.resources.CustomerConversionGoalOrBuilder>
getUpdateFieldBuilder() {
if (updateBuilder_ == null) {
if (!(operationCase_ == 1)) {
operation_ = com.google.ads.googleads.v10.resources.CustomerConversionGoal.getDefaultInstance();
}
updateBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v10.resources.CustomerConversionGoal, com.google.ads.googleads.v10.resources.CustomerConversionGoal.Builder, com.google.ads.googleads.v10.resources.CustomerConversionGoalOrBuilder>(
(com.google.ads.googleads.v10.resources.CustomerConversionGoal) operation_,
getParentForChildren(),
isClean());
operation_ = null;
}
operationCase_ = 1;
onChanged();
return updateBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v10.services.CustomerConversionGoalOperation)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v10.services.CustomerConversionGoalOperation)
private static final com.google.ads.googleads.v10.services.CustomerConversionGoalOperation DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v10.services.CustomerConversionGoalOperation();
}
public static com.google.ads.googleads.v10.services.CustomerConversionGoalOperation getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<CustomerConversionGoalOperation>
PARSER = new com.google.protobuf.AbstractParser<CustomerConversionGoalOperation>() {
@java.lang.Override
public CustomerConversionGoalOperation parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new CustomerConversionGoalOperation(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<CustomerConversionGoalOperation> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CustomerConversionGoalOperation> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v10.services.CustomerConversionGoalOperation getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}<|fim▁end|> | throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry); |
<|file_name|>LakeBTCMarketDataJsonTest.java<|end_file_name|><|fim▁begin|>package com.xeiam.xchange.lakebtc.marketdata;
import static org.fest.assertions.api.Assertions.assertThat;
import java.io.IOException;
import java.io.InputStream;
import java.math.BigDecimal;
import org.junit.Test;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.xeiam.xchange.lakebtc.dto.marketdata.LakeBTCOrderBook;
import com.xeiam.xchange.lakebtc.dto.marketdata.LakeBTCTicker;
import com.xeiam.xchange.lakebtc.dto.marketdata.LakeBTCTickers;
public class LakeBTCMarketDataJsonTest {
@Test
public void testDeserializeTicker() throws IOException {
// Read in the JSON from the example resources
InputStream is = LakeBTCMarketDataJsonTest.class.getResourceAsStream("/marketdata/example-ticker-data.json");
// Use Jackson to parse it
ObjectMapper mapper = new ObjectMapper();
LakeBTCTickers tickers = mapper.readValue(is, LakeBTCTickers.class);
LakeBTCTicker cnyTicker = tickers.getCny();
assertThat(cnyTicker.getAsk()).isEqualTo("3524.07");
assertThat(cnyTicker.getBid()).isEqualTo("3517.13");
assertThat(cnyTicker.getLast()).isEqualTo("3524.07");
assertThat(cnyTicker.getHigh()).isEqualTo("3584.97");
assertThat(cnyTicker.getLow()).isEqualTo("3480.07");
assertThat(cnyTicker.getVolume()).isEqualTo("5964.7677");
LakeBTCTicker usdTicker = tickers.getUsd();
assertThat(usdTicker.getAsk()).isEqualTo("564.63");
assertThat(usdTicker.getBid()).isEqualTo("564.63");
assertThat(usdTicker.getLast()).isEqualTo("564.4");
assertThat(usdTicker.getHigh()).isEqualTo("573.83");
assertThat(usdTicker.getLow()).isEqualTo("557.7");
assertThat(usdTicker.getVolume()).isEqualTo("3521.2782");
}
@Test
public void testDeserializeOrderBook() throws IOException {
// Read in the JSON from the example resources
InputStream is = LakeBTCMarketDataJsonTest.class.getResourceAsStream("/marketdata/example-orderbook-data.json");
// Use Jackson to parse it
ObjectMapper mapper = new ObjectMapper();
LakeBTCOrderBook orderBook = mapper.readValue(is, LakeBTCOrderBook.class);
BigDecimal[][] asks = orderBook.getAsks();
assertThat(asks).hasSize(3);
assertThat(asks[0][0]).isEqualTo("564.87");
assertThat(asks[0][1]).isEqualTo("22.371");
BigDecimal[][] bids = orderBook.getBids();
assertThat(bids).hasSize(3);
assertThat(bids[2][0]).isEqualTo("558.08");
assertThat(bids[2][1]).isEqualTo("0.9878");
}<|fim▁hole|><|fim▁end|> | } |
<|file_name|>mode.rs<|end_file_name|><|fim▁begin|>use state::State;
use std::marker::PhantomData;
use typeahead::Parse;
pub trait Transition<K>
where
K: Ord,
K: Copy,
K: Parse,
{
fn name(&self) -> &'static str;
fn transition(&self, state: &mut State<K>) -> Mode<K>;
}
#[derive(Clone, Copy, Debug)]
pub struct NormalMode<K> {
t: PhantomData<K>,
}
/// Used by `PendingMode` to remember what mode to transition to next.
#[derive(Clone, Copy, Debug)]
pub enum NextMode {
Normal,
Insert,
}
#[derive(Clone, Copy, Debug)]
pub struct PendingMode<K> {
t: PhantomData<K>,
pub next_mode: NextMode, // Mode to return to after motion or text object.
}
#[derive(Clone, Copy, Debug)]
pub struct InsertMode<K> {
t: PhantomData<K>,
replace_mode: bool,
}
#[derive(Clone, Copy, Debug)]
pub enum Mode<K> {
Normal(NormalMode<K>),
Pending(PendingMode<K>),
Insert(InsertMode<K>),
}
pub fn normal<K>() -> Mode<K> {
Mode::Normal(NormalMode::<K> { t: PhantomData::<K> {} })
}
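/// Rebuilds a fresh `Mode::Normal`; the argument is never inspected, since
/// `NormalMode` carries only zero-sized `PhantomData`.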
pub fn recast_normal<K>(_orig: &NormalMode<K>) -> Mode<K> {
Mode::Normal(NormalMode::<K> { t: PhantomData::<K> {} })
}
pub fn pending<K>(next_mode: NextMode) -> Mode<K> {
Mode::Pending(PendingMode::<K> {
t: PhantomData::<K> {},
next_mode: next_mode,
})<|fim▁hole|>pub fn recast_pending<K>(orig: &PendingMode<K>) -> Mode<K> {
Mode::Pending(PendingMode::<K> {
t: PhantomData::<K> {},
next_mode: orig.next_mode,
})
}
pub fn insert<K>() -> Mode<K> {
Mode::Insert(InsertMode::<K> {
t: PhantomData::<K> {},
replace_mode: false,
})
}
pub fn replace<K>() -> Mode<K> {
Mode::Insert(InsertMode::<K> {
t: PhantomData::<K> {},
replace_mode: true,
})
}
impl<K> Transition<K> for Mode<K>
where
K: Ord,
K: Copy,
K: Parse,
{
fn name(&self) -> &'static str {
match *self {
Mode::Normal(x) => x.name(),
Mode::Pending(x) => x.name(),
Mode::Insert(x) => x.name(),
}
}
fn transition(&self, state: &mut State<K>) -> Mode<K> {
match *self {
Mode::Normal(x) => x.transition(state),
Mode::Pending(x) => x.transition(state),
Mode::Insert(x) => x.transition(state),
}
}
}<|fim▁end|> | }
|
<|file_name|>widgets.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import re
from django.utils.safestring import mark_safe
from django.contrib.admin.widgets import AdminFileWidget
from django.template.defaultfilters import slugify
from django.utils.encoding import smart_text
from unidecode import unidecode
from django.forms.widgets import FILE_INPUT_CONTRADICTION, CheckboxInput, ClearableFileInput
class ImagePreviewWidget(AdminFileWidget):
template_name = 'admin/attachment/widgets/preview_image_input.html'
def render(self, name, value, attrs=None, renderer=None):
output = []
output.append(super(AdminFileWidget, self).render(name, value, attrs))  # call ClearableFileInput.render(), deliberately skipping AdminFileWidget's own markup
instance = getattr(value, 'instance', None)
if instance is not None and value:
output = ['<a target="_blank" href="%s"><img src="%s" alt="%s"/></a>' % \
(instance.image.url, instance.thumb.url, instance.image)] + output
return mark_safe(u''.join(output))
def value_from_datadict(self, data, files, name):
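# Sanitize every uploaded filename before the normal handling: split off
# the extension, replace punctuation with '-', transliterate to ASCII and
# slugify (a summary of the loop below).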
for key, file in files.items():
filename = file._get_name()
ext = u""
if '.' in filename:
ext = u"." + filename.rpartition('.')[2]
filename = filename.rpartition('.')[0]
filename = re.sub(r'[_.,:;@#$%^&?*|()\[\]]', '-', filename)
filename = slugify(unidecode(smart_text(filename))) + ext
files[key]._set_name(filename)
upload = super(ImagePreviewWidget, self).value_from_datadict(data, files, name)
if not self.is_required and CheckboxInput().value_from_datadict(
data, files, self.clear_checkbox_name(name)):
if upload:
# If the user contradicts themselves (uploads a new file AND
# checks the "clear" checkbox), we return a unique marker
<|fim▁hole|> # False signals to clear any existing value, as opposed to just None
return False
return upload
class ImagePreviewWidgetHorizontal(ImagePreviewWidget):
template_name = 'admin/attachment/widgets/preview_image_input_horizontal.html'
class ImagePreviewWidgetVertical(ImagePreviewWidget):
template_name = 'admin/attachment/widgets/preview_image_input_vertical.html'
class FileWidget(ClearableFileInput):
def value_from_datadict(self, data, files, name):
for key, file in files.items():
filename = file._get_name()
ext = u""
if '.' in filename:
ext = u"." + filename.rpartition('.')[2]
filename = filename.rpartition('.')[0]
filename = re.sub(r'[_.,:;@#$%^&?*|()\[\]]', '-', filename)
filename = slugify(unidecode(smart_text(filename))) + ext
files[key]._set_name(filename)
return files.get(name, None)<|fim▁end|> | # object that FileField will turn into a ValidationError.
return FILE_INPUT_CONTRADICTION
|
<|file_name|>bigquery.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations<|fim▁hole|>import re
from datetime import datetime
from typing import Any, Dict, List, Optional, Tuple, TYPE_CHECKING
import pandas as pd
from sqlalchemy import literal_column
from sqlalchemy.sql.expression import ColumnClause
from superset.db_engine_specs.base import BaseEngineSpec
from superset.utils import core as utils
if TYPE_CHECKING:
from superset.models.core import Database # pragma: no cover
class BigQueryEngineSpec(BaseEngineSpec):
"""Engine spec for Google's BigQuery
As contributed by @mxmzdlv on issue #945"""
engine = "bigquery"
engine_name = "Google BigQuery"
max_column_name_length = 128
"""
https://www.python.org/dev/peps/pep-0249/#arraysize
raw_connections bypass the pybigquery query execution context and deal with
the raw DBAPI connection directly.
If this value is not set, the default value is 1, as described here:
https://googlecloudplatform.github.io/google-cloud-python/latest/_modules/google/cloud/bigquery/dbapi/cursor.html#Cursor
The default value of 5000 is derived from pybigquery:
https://github.com/mxmzdlv/pybigquery/blob/d214bb089ca0807ca9aaa6ce4d5a01172d40264e/pybigquery/sqlalchemy_bigquery.py#L102
"""
arraysize = 5000
_date_trunc_functions = {
"DATE": "DATE_TRUNC",
"DATETIME": "DATETIME_TRUNC",
"TIME": "TIME_TRUNC",
"TIMESTAMP": "TIMESTAMP_TRUNC",
}
_time_grain_expressions = {
None: "{col}",
"PT1S": "{func}({col}, SECOND)",
"PT1M": "{func}({col}, MINUTE)",
"PT1H": "{func}({col}, HOUR)",
"P1D": "{func}({col}, DAY)",
"P1W": "{func}({col}, WEEK)",
"P1M": "{func}({col}, MONTH)",
"P0.25Y": "{func}({col}, QUARTER)",
"P1Y": "{func}({col}, YEAR)",
}
@classmethod
def convert_dttm(cls, target_type: str, dttm: datetime) -> Optional[str]:
tt = target_type.upper()
if tt == utils.TemporalType.DATE:
return f"CAST('{dttm.date().isoformat()}' AS DATE)"
if tt == utils.TemporalType.DATETIME:
return f"""CAST('{dttm.isoformat(timespec="microseconds")}' AS DATETIME)"""
if tt == utils.TemporalType.TIME:
return f"""CAST('{dttm.strftime("%H:%M:%S.%f")}' AS TIME)"""
if tt == utils.TemporalType.TIMESTAMP:
return f"""CAST('{dttm.isoformat(timespec="microseconds")}' AS TIMESTAMP)"""
return None
@classmethod
def fetch_data(
cls, cursor: Any, limit: Optional[int] = None
) -> List[Tuple[Any, ...]]:
data = super().fetch_data(cursor, limit)
# Support type BigQuery Row, introduced here PR #4071
# google.cloud.bigquery.table.Row
if data and type(data[0]).__name__ == "Row":
data = [r.values() for r in data] # type: ignore
return data
@staticmethod
def _mutate_label(label: str) -> str:
"""
BigQuery field_name should start with a letter or underscore and contain only
alphanumeric characters. Labels that start with a number are prefixed with an
underscore. Any unsupported characters are replaced with underscores and an
md5 hash is added to the end of the label to avoid possible collisions.
:param label: Expected expression label
:return: Conditionally mutated label
"""
label_hashed = "_" + hashlib.md5(label.encode("utf-8")).hexdigest()
# if label starts with number, add underscore as first character
label_mutated = "_" + label if re.match(r"^\d", label) else label
# replace non-alphanumeric characters with underscores
label_mutated = re.sub(r"[^\w]+", "_", label_mutated)
if label_mutated != label:
# add first 5 chars from md5 hash to label to avoid possible collisions
label_mutated += label_hashed[:6]
return label_mutated
@classmethod
def _truncate_label(cls, label: str) -> str:
"""BigQuery requires column names start with either a letter or
underscore. To make sure this is always the case, an underscore is prefixed
to the md5 hash of the original label.
:param label: expected expression label
:return: truncated label
"""
return "_" + hashlib.md5(label.encode("utf-8")).hexdigest()
@classmethod
def extra_table_metadata(
cls, database: "Database", table_name: str, schema_name: str
) -> Dict[str, Any]:
indexes = database.get_indexes(table_name, schema_name)
if not indexes:
return {}
partitions_columns = [
index.get("column_names", [])
for index in indexes
if index.get("name") == "partition"
]
cluster_columns = [
index.get("column_names", [])
for index in indexes
if index.get("name") == "clustering"
]
return {
"partitions": {"cols": partitions_columns},
"clustering": {"cols": cluster_columns},
}
@classmethod
def _get_fields(cls, cols: List[Dict[str, Any]]) -> List[ColumnClause]:
"""
The BigQuery dialect requires us not to use backticks in field names that
are nested.
Using literal_column handles that issue.
https://docs.sqlalchemy.org/en/latest/core/tutorial.html#using-more-specific-text-with-table-literal-column-and-column
We also explicitly specify column names so we don't encounter duplicate
column names in the result.
"""
return [
literal_column(c["name"]).label(c["name"].replace(".", "__")) for c in cols
]
@classmethod
def epoch_to_dttm(cls) -> str:
return "TIMESTAMP_SECONDS({col})"
@classmethod
def epoch_ms_to_dttm(cls) -> str:
return "TIMESTAMP_MILLIS({col})"
@classmethod
def df_to_sql(cls, df: pd.DataFrame, **kwargs: Any) -> None:
"""
Upload data from a Pandas DataFrame to BigQuery. Calls
`DataFrame.to_gbq()` which requires `pandas_gbq` to be installed.
:param df: Dataframe with data to be uploaded
:param kwargs: kwargs to be passed to to_gbq() method. Requires that `schema`,
`name` and `con` are present in kwargs. `name` and `schema` are combined
and passed to `to_gbq()` as `destination_table`.
"""
try:
import pandas_gbq
from google.oauth2 import service_account
except ImportError:
raise Exception(
"Could not import libraries `pandas_gbq` or `google.oauth2`, which are "
"required to be installed in your environment in order "
"to upload data to BigQuery"
)
if not ("name" in kwargs and "schema" in kwargs and "con" in kwargs):
raise Exception("name, schema and con need to be defined in kwargs")
gbq_kwargs = {}
gbq_kwargs["project_id"] = kwargs["con"].engine.url.host
gbq_kwargs["destination_table"] = f"{kwargs.pop('schema')}.{kwargs.pop('name')}"
# add credentials if they are set on the SQLAlchemy Dialect:
creds = kwargs["con"].dialect.credentials_info
if creds:
credentials = service_account.Credentials.from_service_account_info(creds)
gbq_kwargs["credentials"] = credentials
# Only pass through supported kwargs
supported_kwarg_keys = {"if_exists"}
for key in supported_kwarg_keys:
if key in kwargs:
gbq_kwargs[key] = kwargs[key]
pandas_gbq.to_gbq(df, **gbq_kwargs)<|fim▁end|> | # under the License.
import hashlib |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from base import MediaFile
<|fim▁hole|>from fields import MediaFileField
from widgets import AdminMediaFileWidget<|fim▁end|> | |
<|file_name|>dump_sdcard.py<|end_file_name|><|fim▁begin|>from goprocam import GoProCamera, constants<|fim▁hole|>gpCam.downloadAll()
gpCam.delete("all")<|fim▁end|> | gpCam = GoProCamera.GoPro()
## Downloads all of the SD card's contents and then formats (erases) the SD card.
|
<|file_name|>E0116.rs<|end_file_name|><|fim▁begin|>impl Vec<u8> {}
//~^ ERROR E0116
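// (E0116: an inherent `impl` is only allowed on types defined in the
// current crate; `Vec<u8>` is defined in the standard library.)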
<|fim▁hole|><|fim▁end|> | fn main() {
} |
<|file_name|>sqlx_test.go<|end_file_name|><|fim▁begin|>// The following environment variables, if set, will be used:
//
// * SQLX_SQLITE_DSN
// * SQLX_POSTGRES_DSN
// * SQLX_MYSQL_DSN
//
// Set any of these variables to 'skip' to skip them. Note that for MySQL,
// the string '?parseTime=true' will be appended to the DSN if it's not there
// already.
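//
// Example invocation (hypothetical DSNs):
//   SQLX_SQLITE_DSN=':memory:' \
//   SQLX_POSTGRES_DSN='user=test dbname=sqlxtest sslmode=disable' \
//   SQLX_MYSQL_DSN='root:pass@tcp(localhost:3306)/sqlxtest' go test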
//
package sqlx
import (
"database/sql"
"database/sql/driver"
"encoding/json"
"fmt"
"log"
"os"
"reflect"
"strings"
"testing"
"time"
_ "github.com/go-sql-driver/mysql"
"github.com/jmoiron/sqlx/reflectx"
_ "github.com/lib/pq"
_ "github.com/mattn/go-sqlite3"
)
/* compile time checks that Db, Tx, Stmt (qStmt) implement expected interfaces */
var _, _ Ext = &DB{}, &Tx{}
var _, _ ColScanner = &Row{}, &Rows{}
var _ Queryer = &qStmt{}
var _ Execer = &qStmt{}
var TestPostgres = true
var TestSqlite = true
var TestMysql = true
var sldb *DB
var pgdb *DB
var mysqldb *DB
var active = []*DB{}
func init() {
ConnectAll()
}
func ConnectAll() {
var err error
pgdsn := os.Getenv("SQLX_POSTGRES_DSN")
mydsn := os.Getenv("SQLX_MYSQL_DSN")
sqdsn := os.Getenv("SQLX_SQLITE_DSN")
TestPostgres = pgdsn != "skip"
TestMysql = mydsn != "skip"
TestSqlite = sqdsn != "skip"
if !strings.Contains(mydsn, "parseTime=true") {
mydsn += "?parseTime=true"
}
if TestPostgres {
pgdb, err = Connect("postgres", pgdsn)
if err != nil {
fmt.Printf("Disabling PG tests:\n %v\n", err)
TestPostgres = false
}
} else {
fmt.Println("Disabling Postgres tests.")
}
if TestMysql {
mysqldb, err = Connect("mysql", mydsn)
if err != nil {
fmt.Printf("Disabling MySQL tests:\n %v", err)
TestMysql = false
}
} else {
fmt.Println("Disabling MySQL tests.")
}
if TestSqlite {
sldb, err = Connect("sqlite3", sqdsn)
if err != nil {
fmt.Printf("Disabling SQLite:\n %v", err)
TestSqlite = false
}
} else {
fmt.Println("Disabling SQLite tests.")
}
}
type Schema struct {
create string
drop string
}
func (s Schema) Postgres() (string, string) {
return s.create, s.drop
}
func (s Schema) MySQL() (string, string) {
return strings.Replace(s.create, `"`, "`", -1), s.drop
}
func (s Schema) Sqlite3() (string, string) {
return strings.Replace(s.create, `now()`, `CURRENT_TIMESTAMP`, -1), s.drop
}
var defaultSchema = Schema{
create: `
CREATE TABLE person (
first_name text,
last_name text,
email text,
added_at timestamp default now()
);
CREATE TABLE place (
country text,
city text NULL,
telcode integer
);
CREATE TABLE capplace (
"COUNTRY" text,
"CITY" text NULL,
"TELCODE" integer
);
CREATE TABLE nullperson (
first_name text NULL,
last_name text NULL,
email text NULL
);
CREATE TABLE employees (
name text,
id integer,
boss_id integer
);
`,
drop: `
drop table person;
drop table place;
drop table capplace;
drop table nullperson;
drop table employees;
`,
}
type Person struct {
FirstName string `db:"first_name"`
LastName string `db:"last_name"`
Email string
AddedAt time.Time `db:"added_at"`
}
type Person2 struct {
FirstName sql.NullString `db:"first_name"`
LastName sql.NullString `db:"last_name"`
Email sql.NullString
}
type Place struct {
Country string
City sql.NullString
TelCode int
}
type PlacePtr struct {
Country string
City *string
TelCode int
}
type PersonPlace struct {
Person
Place
}
type PersonPlacePtr struct {
*Person
*Place
}
type EmbedConflict struct {
FirstName string `db:"first_name"`
Person
}
type SliceMember struct {
Country string
City sql.NullString
TelCode int
People []Person `db:"-"`
Addresses []Place `db:"-"`
}
// Note that because of field map caching, we need a new type here
// if we've used Place already somewhere in sqlx
type CPlace Place
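// MultiExec splits query on ";\n" and executes each statement in turn,
// printing (but not aborting on) individual statement errors.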
func MultiExec(e Execer, query string) {
stmts := strings.Split(query, ";\n")
if len(strings.Trim(stmts[len(stmts)-1], " \n\t\r")) == 0 {
stmts = stmts[:len(stmts)-1]
}
for _, s := range stmts {
_, err := e.Exec(s)
if err != nil {
fmt.Println(err, s)
}
}
}
func RunWithSchema(schema Schema, t *testing.T, test func(db *DB, t *testing.T)) {
runner := func(db *DB, t *testing.T, create, drop string) {
defer func() {
MultiExec(db, drop)
}()
MultiExec(db, create)
test(db, t)
}
if TestPostgres {
create, drop := schema.Postgres()
runner(pgdb, t, create, drop)
}
if TestSqlite {
create, drop := schema.Sqlite3()
runner(sldb, t, create, drop)
}
if TestMysql {
create, drop := schema.MySQL()
runner(mysqldb, t, create, drop)
}
}
func loadDefaultFixture(db *DB, t *testing.T) {
tx := db.MustBegin()
tx.MustExec(tx.Rebind("INSERT INTO person (first_name, last_name, email) VALUES (?, ?, ?)"), "Jason", "Moiron", "[email protected]")
tx.MustExec(tx.Rebind("INSERT INTO person (first_name, last_name, email) VALUES (?, ?, ?)"), "John", "Doe", "[email protected]")
tx.MustExec(tx.Rebind("INSERT INTO place (country, city, telcode) VALUES (?, ?, ?)"), "United States", "New York", "1")
tx.MustExec(tx.Rebind("INSERT INTO place (country, telcode) VALUES (?, ?)"), "Hong Kong", "852")
tx.MustExec(tx.Rebind("INSERT INTO place (country, telcode) VALUES (?, ?)"), "Singapore", "65")
if db.DriverName() == "mysql" {
tx.MustExec(tx.Rebind("INSERT INTO capplace (`COUNTRY`, `TELCODE`) VALUES (?, ?)"), "Sarf Efrica", "27")
} else {
tx.MustExec(tx.Rebind("INSERT INTO capplace (\"COUNTRY\", \"TELCODE\") VALUES (?, ?)"), "Sarf Efrica", "27")
}
tx.MustExec(tx.Rebind("INSERT INTO employees (name, id) VALUES (?, ?)"), "Peter", "4444")
tx.MustExec(tx.Rebind("INSERT INTO employees (name, id, boss_id) VALUES (?, ?, ?)"), "Joe", "1", "4444")
tx.MustExec(tx.Rebind("INSERT INTO employees (name, id, boss_id) VALUES (?, ?, ?)"), "Martin", "2", "4444")
tx.Commit()
}
// Test a new backwards-compatible feature: missing scan destinations
// will silently scan into sql.RawBytes rather than failing/panicking
func TestMissingNames(t *testing.T) {
RunWithSchema(defaultSchema, t, func(db *DB, t *testing.T) {
loadDefaultFixture(db, t)
type PersonPlus struct {
FirstName string `db:"first_name"`
LastName string `db:"last_name"`
Email string
//AddedAt time.Time `db:"added_at"`
}
// test Select first
pps := []PersonPlus{}
// pps lacks added_at destination
err := db.Select(&pps, "SELECT * FROM person")
if err == nil {
t.Error("Expected missing name from Select to fail, but it did not.")
}
// test Get
pp := PersonPlus{}
err = db.Get(&pp, "SELECT * FROM person LIMIT 1")
if err == nil {
t.Error("Expected missing name Get to fail, but it did not.")
}
// test naked StructScan
pps = []PersonPlus{}
rows, err := db.Query("SELECT * FROM person LIMIT 1")
if err != nil {
t.Fatal(err)
}
rows.Next()
err = StructScan(rows, &pps)
if err == nil {
t.Error("Expected missing name in StructScan to fail, but it did not.")
}
rows.Close()
// now try various things with unsafe set.
db = db.Unsafe()
pps = []PersonPlus{}
err = db.Select(&pps, "SELECT * FROM person")
if err != nil {
t.Error(err)
}
// test Get
pp = PersonPlus{}
err = db.Get(&pp, "SELECT * FROM person LIMIT 1")
if err != nil {
t.Error(err)
}
// test naked StructScan
pps = []PersonPlus{}
rowsx, err := db.Queryx("SELECT * FROM person LIMIT 1")
if err != nil {
t.Fatal(err)
}
rowsx.Next()
err = StructScan(rowsx, &pps)
if err != nil {
t.Error(err)
}
rowsx.Close()
// test Named stmt
if !isUnsafe(db) {
t.Error("Expected db to be unsafe, but it isn't")
}
nstmt, err := db.PrepareNamed(`SELECT * FROM person WHERE first_name != :name`)
if err != nil {
t.Fatal(err)
}
// its internal stmt should be marked unsafe
if !nstmt.Stmt.unsafe {
t.Error("expected NamedStmt to be unsafe but its underlying stmt did not inherit safety")
}
pps = []PersonPlus{}
err = nstmt.Select(&pps, map[string]interface{}{"name": "Jason"})
if err != nil {
t.Fatal(err)
}
if len(pps) != 1 {
t.Errorf("Expected 1 person back, got %d", len(pps))
}
// test it with a safe db
db.unsafe = false
if isUnsafe(db) {
t.Error("expected db to be safe but it isn't")
}
nstmt, err = db.PrepareNamed(`SELECT * FROM person WHERE first_name != :name`)
if err != nil {
t.Fatal(err)
}
// it should be safe
if isUnsafe(nstmt) {
t.Error("NamedStmt did not inherit safety")
}
nstmt.Unsafe()
if !isUnsafe(nstmt) {
t.Error("expected newly unsafed NamedStmt to be unsafe")
}
pps = []PersonPlus{}
err = nstmt.Select(&pps, map[string]interface{}{"name": "Jason"})
if err != nil {
t.Fatal(err)
}
if len(pps) != 1 {
t.Errorf("Expected 1 person back, got %d", len(pps))
}
})
}
func TestEmbeddedStructs(t *testing.T) {
type Loop1 struct{ Person }
type Loop2 struct{ Loop1 }
type Loop3 struct{ Loop2 }
RunWithSchema(defaultSchema, t, func(db *DB, t *testing.T) {
loadDefaultFixture(db, t)
peopleAndPlaces := []PersonPlace{}
err := db.Select(
&peopleAndPlaces,
`SELECT person.*, place.* FROM
person natural join place`)
if err != nil {
t.Fatal(err)
}
for _, pp := range peopleAndPlaces {
if len(pp.Person.FirstName) == 0 {
t.Errorf("Expected non zero lengthed first name.")
}
if len(pp.Place.Country) == 0 {
t.Errorf("Expected non zero lengthed country.")
}
}
// test embedded structs with StructScan
rows, err := db.Queryx(
`SELECT person.*, place.* FROM
person natural join place`)
if err != nil {
t.Error(err)
}
perp := PersonPlace{}
rows.Next()
err = rows.StructScan(&perp)
if err != nil {
t.Error(err)
}
if len(perp.Person.FirstName) == 0 {
t.Errorf("Expected non zero lengthed first name.")
}
if len(perp.Place.Country) == 0 {
t.Errorf("Expected non zero lengthed country.")
}
rows.Close()
// test the same for embedded pointer structs
peopleAndPlacesPtrs := []PersonPlacePtr{}
err = db.Select(
&peopleAndPlacesPtrs,
`SELECT person.*, place.* FROM
person natural join place`)
if err != nil {
t.Fatal(err)
}
for _, pp := range peopleAndPlacesPtrs {
if len(pp.Person.FirstName) == 0 {
t.Errorf("Expected non zero lengthed first name.")
}
if len(pp.Place.Country) == 0 {
t.Errorf("Expected non zero lengthed country.")
}
}
// test "deep nesting"
l3s := []Loop3{}
err = db.Select(&l3s, `select * from person`)
if err != nil {
t.Fatal(err)
}
for _, l3 := range l3s {
if len(l3.Loop2.Loop1.Person.FirstName) == 0 {
t.Errorf("Expected non zero lengthed first name.")
}
}
// test "embed conflicts"
ec := []EmbedConflict{}
err = db.Select(&ec, `select * from person`)
// I'm torn between erroring here or having some kind of working behavior
// in order to allow for more flexibility in destination structs
if err != nil {
t.Errorf("Was not expecting an error on embed conflicts.")
}
})
}
func TestJoinQuery(t *testing.T) {
type Employee struct {
Name string
ID int64
// BossID is an id into the employee table
BossID sql.NullInt64 `db:"boss_id"`
}
type Boss Employee
RunWithSchema(defaultSchema, t, func(db *DB, t *testing.T) {
loadDefaultFixture(db, t)
var employees []struct {
Employee
Boss `db:"boss"`
}
err := db.Select(
&employees,
`SELECT employees.*, boss.id "boss.id", boss.name "boss.name" FROM employees
JOIN employees AS boss ON employees.boss_id = boss.id`)
if err != nil {
t.Fatal(err)
}
for _, em := range employees {
if len(em.Employee.Name) == 0 {
t.Errorf("Expected non zero lengthed name.")
}
if em.Employee.BossID.Int64 != em.Boss.ID {
t.Errorf("Expected boss ids to match")
}
}
})
}
func TestJoinQueryNamedPointerStructs(t *testing.T) {
type Employee struct {
Name string
ID int64
// BossID is an id into the employee table
BossID sql.NullInt64 `db:"boss_id"`
}
type Boss Employee
RunWithSchema(defaultSchema, t, func(db *DB, t *testing.T) {
loadDefaultFixture(db, t)
var employees []struct {
Emp1 *Employee `db:"emp1"`
Emp2 *Employee `db:"emp2"`
*Boss `db:"boss"`
}
err := db.Select(
&employees,
`SELECT emp.name "emp1.name", emp.id "emp1.id", emp.boss_id "emp1.boss_id",
emp.name "emp2.name", emp.id "emp2.id", emp.boss_id "emp2.boss_id",
boss.id "boss.id", boss.name "boss.name" FROM employees AS emp
JOIN employees AS boss ON emp.boss_id = boss.id
`)
if err != nil {
t.Fatal(err)
}
for _, em := range employees {
if len(em.Emp1.Name) == 0 || len(em.Emp2.Name) == 0 {
t.Errorf("Expected non zero lengthed name.")
}
if em.Emp1.BossID.Int64 != em.Boss.ID || em.Emp2.BossID.Int64 != em.Boss.ID {
t.Errorf("Expected boss ids to match")
}
}
})
}
func TestSelectSliceMapTime(t *testing.T) {
RunWithSchema(defaultSchema, t, func(db *DB, t *testing.T) {
loadDefaultFixture(db, t)
rows, err := db.Queryx("SELECT * FROM person")
if err != nil {
t.Fatal(err)
}
for rows.Next() {
_, err := rows.SliceScan()
if err != nil {
t.Error(err)
}
}
rows, err = db.Queryx("SELECT * FROM person")
if err != nil {
t.Fatal(err)
}
for rows.Next() {
m := map[string]interface{}{}
err := rows.MapScan(m)
if err != nil {
t.Error(err)
}
}
})
}
func TestNilReceiver(t *testing.T) {
RunWithSchema(defaultSchema, t, func(db *DB, t *testing.T) {
loadDefaultFixture(db, t)
var p *Person
err := db.Get(p, "SELECT * FROM person LIMIT 1")
if err == nil {
t.Error("Expected error when getting into nil struct ptr.")
}
var pp *[]Person
err = db.Select(pp, "SELECT * FROM person")
if err == nil {
t.Error("Expected an error when selecting into nil slice ptr.")
}
})
}
func TestNamedQuery(t *testing.T) {
var schema = Schema{
create: `
CREATE TABLE place (
id integer PRIMARY KEY,
name text NULL
);
CREATE TABLE person (
first_name text NULL,
last_name text NULL,
email text NULL
);
CREATE TABLE placeperson (
first_name text NULL,
last_name text NULL,
email text NULL,
place_id integer NULL
);
CREATE TABLE jsperson (
"FIRST" text NULL,
last_name text NULL,
"EMAIL" text NULL
);`,
drop: `
drop table person;
drop table jsperson;
drop table place;
drop table placeperson;
`,
}
RunWithSchema(schema, t, func(db *DB, t *testing.T) {
type Person struct {
FirstName sql.NullString `db:"first_name"`
LastName sql.NullString `db:"last_name"`
Email sql.NullString
}
p := Person{
FirstName: sql.NullString{String: "ben", Valid: true},
LastName: sql.NullString{String: "doe", Valid: true},
Email: sql.NullString{String: "[email protected]", Valid: true},
}
q1 := `INSERT INTO person (first_name, last_name, email) VALUES (:first_name, :last_name, :email)`
_, err := db.NamedExec(q1, p)
if err != nil {
log.Fatal(err)
}
p2 := &Person{}
rows, err := db.NamedQuery("SELECT * FROM person WHERE first_name=:first_name", p)
if err != nil {
log.Fatal(err)
}<|fim▁hole|> }
if p2.FirstName.String != "ben" {
t.Error("Expected first name of `ben`, got " + p2.FirstName.String)
}
if p2.LastName.String != "doe" {
t.Error("Expected first name of `doe`, got " + p2.LastName.String)
}
}
// these are tests for #73; they verify that named queries work if you've
// changed the db mapper. This code checks both NamedQuery "ad-hoc" style
// queries and NamedStmt queries, which use different code paths internally.
old := *db.Mapper
type JSONPerson struct {
FirstName sql.NullString `json:"FIRST"`
LastName sql.NullString `json:"last_name"`
Email sql.NullString
}
jp := JSONPerson{
FirstName: sql.NullString{String: "ben", Valid: true},
LastName: sql.NullString{String: "smith", Valid: true},
Email: sql.NullString{String: "[email protected]", Valid: true},
}
db.Mapper = reflectx.NewMapperFunc("json", strings.ToUpper)
// prepare queries for case sensitivity to test our ToUpper function.
// postgres and sqlite accept "", but mysql uses ``; since Go's multi-line
// strings are `` we use "" by default and swap out for MySQL
pdb := func(s string, db *DB) string {
if db.DriverName() == "mysql" {
return strings.Replace(s, `"`, "`", -1)
}
return s
}
q1 = `INSERT INTO jsperson ("FIRST", last_name, "EMAIL") VALUES (:FIRST, :last_name, :EMAIL)`
_, err = db.NamedExec(pdb(q1, db), jp)
if err != nil {
t.Fatal(err, db.DriverName())
}
// Checks that a person pulled out of the db matches the one we put in
check := func(t *testing.T, rows *Rows) {
jp = JSONPerson{}
for rows.Next() {
err = rows.StructScan(&jp)
if err != nil {
t.Error(err)
}
if jp.FirstName.String != "ben" {
t.Errorf("Expected first name of `ben`, got `%s` (%s) ", jp.FirstName.String, db.DriverName())
}
if jp.LastName.String != "smith" {
t.Errorf("Expected LastName of `smith`, got `%s` (%s)", jp.LastName.String, db.DriverName())
}
if jp.Email.String != "[email protected]" {
t.Errorf("Expected first name of `doe`, got `%s` (%s)", jp.Email.String, db.DriverName())
}
}
}
ns, err := db.PrepareNamed(pdb(`
SELECT * FROM jsperson
WHERE
"FIRST"=:FIRST AND
last_name=:last_name AND
"EMAIL"=:EMAIL
`, db))
if err != nil {
t.Fatal(err)
}
rows, err = ns.Queryx(jp)
if err != nil {
t.Fatal(err)
}
check(t, rows)
// Check exactly the same thing, but with db.NamedQuery, which does not go
// through the PrepareNamed/NamedStmt path.
rows, err = db.NamedQuery(pdb(`
SELECT * FROM jsperson
WHERE
"FIRST"=:FIRST AND
last_name=:last_name AND
"EMAIL"=:EMAIL
`, db), jp)
if err != nil {
t.Fatal(err)
}
check(t, rows)
db.Mapper = &old
// Test nested structs
type Place struct {
ID int `db:"id"`
Name sql.NullString `db:"name"`
}
type PlacePerson struct {
FirstName sql.NullString `db:"first_name"`
LastName sql.NullString `db:"last_name"`
Email sql.NullString
Place Place `db:"place"`
}
pl := Place{
Name: sql.NullString{String: "myplace", Valid: true},
}
pp := PlacePerson{
FirstName: sql.NullString{String: "ben", Valid: true},
LastName: sql.NullString{String: "doe", Valid: true},
Email: sql.NullString{String: "[email protected]", Valid: true},
}
q2 := `INSERT INTO place (id, name) VALUES (1, :name)`
_, err = db.NamedExec(q2, pl)
if err != nil {
log.Fatal(err)
}
id := 1
pp.Place.ID = id
q3 := `INSERT INTO placeperson (first_name, last_name, email, place_id) VALUES (:first_name, :last_name, :email, :place.id)`
_, err = db.NamedExec(q3, pp)
if err != nil {
log.Fatal(err)
}
pp2 := &PlacePerson{}
rows, err = db.NamedQuery(`
SELECT
first_name,
last_name,
email,
place.id AS "place.id",
place.name AS "place.name"
FROM placeperson
INNER JOIN place ON place.id = placeperson.place_id
WHERE
place.id=:place.id`, pp)
if err != nil {
log.Fatal(err)
}
for rows.Next() {
err = rows.StructScan(pp2)
if err != nil {
t.Error(err)
}
if pp2.FirstName.String != "ben" {
t.Error("Expected first name of `ben`, got " + pp2.FirstName.String)
}
if pp2.LastName.String != "doe" {
t.Error("Expected first name of `doe`, got " + pp2.LastName.String)
}
if pp2.Place.Name.String != "myplace" {
t.Error("Expected place name of `myplace`, got " + pp2.Place.Name.String)
}
if pp2.Place.ID != pp.Place.ID {
t.Errorf("Expected place name of %v, got %v", pp.Place.ID, pp2.Place.ID)
}
}
})
}
func TestNilInserts(t *testing.T) {
var schema = Schema{
create: `
CREATE TABLE tt (
id integer,
value text NULL DEFAULT NULL
);`,
drop: "drop table tt;",
}
RunWithSchema(schema, t, func(db *DB, t *testing.T) {
type TT struct {
ID int
Value *string
}
var v, v2 TT
r := db.Rebind
db.MustExec(r(`INSERT INTO tt (id) VALUES (1)`))
db.Get(&v, r(`SELECT * FROM tt`))
if v.ID != 1 {
t.Errorf("Expecting id of 1, got %v", v.ID)
}
if v.Value != nil {
t.Errorf("Expecting NULL to map to nil, got %s", *v.Value)
}
v.ID = 2
// NOTE: this incidentally uncovered a bug which was that named queries with
// pointer destinations would not work if the passed value here was not addressable,
// as reflectx.FieldByIndexes attempts to allocate nil pointer receivers for
// writing. This was fixed by creating & using the reflectx.FieldByIndexesReadOnly
// function. This next line is important as it provides the only coverage for this.
db.NamedExec(`INSERT INTO tt (id, value) VALUES (:id, :value)`, v)
db.Get(&v2, r(`SELECT * FROM tt WHERE id=2`))
if v.ID != v2.ID {
t.Errorf("%v != %v", v.ID, v2.ID)
}
if v2.Value != nil {
t.Errorf("Expecting NULL to map to nil, got %s", *v.Value)
}
})
}
func TestScanError(t *testing.T) {
var schema = Schema{
create: `
CREATE TABLE kv (
k text,
v integer
);`,
drop: `drop table kv;`,
}
RunWithSchema(schema, t, func(db *DB, t *testing.T) {
type WrongTypes struct {
K int
V string
}
_, err := db.Exec(db.Rebind("INSERT INTO kv (k, v) VALUES (?, ?)"), "hi", 1)
if err != nil {
t.Error(err)
}
rows, err := db.Queryx("SELECT * FROM kv")
if err != nil {
t.Error(err)
}
for rows.Next() {
var wt WrongTypes
err := rows.StructScan(&wt)
if err == nil {
t.Errorf("%s: Scanning wrong types into keys should have errored.", db.DriverName())
}
}
})
}
// FIXME: this function is kinda big but it slows things down to be constantly
// loading and reloading the schema..
func TestUsage(t *testing.T) {
RunWithSchema(defaultSchema, t, func(db *DB, t *testing.T) {
loadDefaultFixture(db, t)
slicemembers := []SliceMember{}
err := db.Select(&slicemembers, "SELECT * FROM place ORDER BY telcode ASC")
if err != nil {
t.Fatal(err)
}
people := []Person{}
err = db.Select(&people, "SELECT * FROM person ORDER BY first_name ASC")
if err != nil {
t.Fatal(err)
}
jason, john := people[0], people[1]
if jason.FirstName != "Jason" {
t.Errorf("Expecting FirstName of Jason, got %s", jason.FirstName)
}
if jason.LastName != "Moiron" {
t.Errorf("Expecting LastName of Moiron, got %s", jason.LastName)
}
if jason.Email != "[email protected]" {
t.Errorf("Expecting Email of [email protected], got %s", jason.Email)
}
if john.FirstName != "John" || john.LastName != "Doe" || john.Email != "[email protected]" {
t.Errorf("John Doe's person record not what expected: Got %v\n", john)
}
jason = Person{}
err = db.Get(&jason, db.Rebind("SELECT * FROM person WHERE first_name=?"), "Jason")
if err != nil {
t.Fatal(err)
}
if jason.FirstName != "Jason" {
t.Errorf("Expecting to get back Jason, but got %v\n", jason.FirstName)
}
err = db.Get(&jason, db.Rebind("SELECT * FROM person WHERE first_name=?"), "Foobar")
if err == nil {
t.Errorf("Expecting an error, got nil\n")
}
if err != sql.ErrNoRows {
t.Errorf("Expected sql.ErrNoRows, got %v\n", err)
}
// The following tests check statement reuse, which was actually a problem
// due to copying being done when creating Stmt's which was eventually removed
stmt1, err := db.Preparex(db.Rebind("SELECT * FROM person WHERE first_name=?"))
if err != nil {
t.Fatal(err)
}
jason = Person{}
row := stmt1.QueryRowx("DoesNotExist")
row.Scan(&jason)
row = stmt1.QueryRowx("DoesNotExist")
row.Scan(&jason)
err = stmt1.Get(&jason, "DoesNotExist User")
if err == nil {
t.Error("Expected an error")
}
err = stmt1.Get(&jason, "DoesNotExist User 2")
if err == nil {
t.Fatal("Expected an error")
}
stmt2, err := db.Preparex(db.Rebind("SELECT * FROM person WHERE first_name=?"))
if err != nil {
t.Fatal(err)
}
jason = Person{}
tx, err := db.Beginx()
if err != nil {
t.Fatal(err)
}
tstmt2 := tx.Stmtx(stmt2)
row2 := tstmt2.QueryRowx("Jason")
err = row2.StructScan(&jason)
if err != nil {
t.Error(err)
}
tx.Commit()
places := []*Place{}
err = db.Select(&places, "SELECT telcode FROM place ORDER BY telcode ASC")
if err != nil {
t.Fatal(err)
}
usa, singsing, honkers := places[0], places[1], places[2]
if usa.TelCode != 1 || honkers.TelCode != 852 || singsing.TelCode != 65 {
t.Errorf("Expected integer telcodes to work, got %#v", places)
}
placesptr := []PlacePtr{}
err = db.Select(&placesptr, "SELECT * FROM place ORDER BY telcode ASC")
if err != nil {
t.Error(err)
}
//fmt.Printf("%#v\n%#v\n%#v\n", placesptr[0], placesptr[1], placesptr[2])
// if you have null fields and use SELECT *, you must use sql.Null* in your struct
// this test also verifies that you can use either a []Struct{} or a []*Struct{}
places2 := []Place{}
err = db.Select(&places2, "SELECT * FROM place ORDER BY telcode ASC")
if err != nil {
t.Fatal(err)
}
usa, singsing, honkers = &places2[0], &places2[1], &places2[2]
// this should return a type error that &p is not a pointer to a struct slice
p := Place{}
err = db.Select(&p, "SELECT * FROM place ORDER BY telcode ASC")
if err == nil {
t.Errorf("Expected an error, argument to select should be a pointer to a struct slice")
}
// this should be an error
pl := []Place{}
err = db.Select(pl, "SELECT * FROM place ORDER BY telcode ASC")
if err == nil {
t.Errorf("Expected an error, argument to select should be a pointer to a struct slice, not a slice.")
}
if usa.TelCode != 1 || honkers.TelCode != 852 || singsing.TelCode != 65 {
t.Errorf("Expected integer telcodes to work, got %#v", places)
}
stmt, err := db.Preparex(db.Rebind("SELECT country, telcode FROM place WHERE telcode > ? ORDER BY telcode ASC"))
if err != nil {
t.Error(err)
}
places = []*Place{}
err = stmt.Select(&places, 10)
if len(places) != 2 {
t.Error("Expected 2 places, got 0.")
}
if err != nil {
t.Fatal(err)
}
singsing, honkers = places[0], places[1]
if singsing.TelCode != 65 || honkers.TelCode != 852 {
t.Errorf("Expected the right telcodes, got %#v", places)
}
rows, err := db.Queryx("SELECT * FROM place")
if err != nil {
t.Fatal(err)
}
place := Place{}
for rows.Next() {
err = rows.StructScan(&place)
if err != nil {
t.Fatal(err)
}
}
rows, err = db.Queryx("SELECT * FROM place")
if err != nil {
t.Fatal(err)
}
m := map[string]interface{}{}
for rows.Next() {
err = rows.MapScan(m)
if err != nil {
t.Fatal(err)
}
_, ok := m["country"]
if !ok {
t.Errorf("Expected key `country` in map but could not find it (%#v)\n", m)
}
}
rows, err = db.Queryx("SELECT * FROM place")
if err != nil {
t.Fatal(err)
}
for rows.Next() {
s, err := rows.SliceScan()
if err != nil {
t.Error(err)
}
if len(s) != 3 {
t.Errorf("Expected 3 columns in result, got %d\n", len(s))
}
}
// test advanced querying
// test that NamedExec works with a map as well as a struct
_, err = db.NamedExec("INSERT INTO person (first_name, last_name, email) VALUES (:first, :last, :email)", map[string]interface{}{
"first": "Bin",
"last": "Smuth",
"email": "[email protected]",
})
if err != nil {
t.Fatal(err)
}
// ensure that if the named param happens right at the end it still works
// ensure that NamedQuery works with a map[string]interface{}
rows, err = db.NamedQuery("SELECT * FROM person WHERE first_name=:first", map[string]interface{}{"first": "Bin"})
if err != nil {
t.Fatal(err)
}
ben := &Person{}
for rows.Next() {
err = rows.StructScan(ben)
if err != nil {
t.Fatal(err)
}
if ben.FirstName != "Bin" {
t.Fatal("Expected first name of `Bin`, got " + ben.FirstName)
}
if ben.LastName != "Smuth" {
t.Fatal("Expected first name of `Smuth`, got " + ben.LastName)
}
}
ben.FirstName = "Ben"
ben.LastName = "Smith"
ben.Email = "[email protected]"
// Insert via a named query using the struct
_, err = db.NamedExec("INSERT INTO person (first_name, last_name, email) VALUES (:first_name, :last_name, :email)", ben)
if err != nil {
t.Fatal(err)
}
rows, err = db.NamedQuery("SELECT * FROM person WHERE first_name=:first_name", ben)
if err != nil {
t.Fatal(err)
}
for rows.Next() {
err = rows.StructScan(ben)
if err != nil {
t.Fatal(err)
}
if ben.FirstName != "Ben" {
t.Fatal("Expected first name of `Ben`, got " + ben.FirstName)
}
if ben.LastName != "Smith" {
t.Fatal("Expected first name of `Smith`, got " + ben.LastName)
}
}
// ensure that Get does not panic on empty result set
person := &Person{}
err = db.Get(person, "SELECT * FROM person WHERE first_name=$1", "does-not-exist")
if err == nil {
t.Fatal("Should have got an error for Get on non-existant row.")
}
// lets test prepared statements some more
stmt, err = db.Preparex(db.Rebind("SELECT * FROM person WHERE first_name=?"))
if err != nil {
t.Fatal(err)
}
rows, err = stmt.Queryx("Ben")
if err != nil {
t.Fatal(err)
}
for rows.Next() {
err = rows.StructScan(ben)
if err != nil {
t.Fatal(err)
}
if ben.FirstName != "Ben" {
t.Fatal("Expected first name of `Ben`, got " + ben.FirstName)
}
if ben.LastName != "Smith" {
t.Fatal("Expected first name of `Smith`, got " + ben.LastName)
}
}
john = Person{}
stmt, err = db.Preparex(db.Rebind("SELECT * FROM person WHERE first_name=?"))
if err != nil {
t.Error(err)
}
err = stmt.Get(&john, "John")
if err != nil {
t.Error(err)
}
// test name mapping
// THIS USED TO WORK BUT WILL NO LONGER WORK.
db.MapperFunc(strings.ToUpper)
rsa := CPlace{}
err = db.Get(&rsa, "SELECT * FROM capplace;")
if err != nil {
t.Error(err, "in db:", db.DriverName())
}
db.MapperFunc(strings.ToLower)
// create a copy and change the mapper, then verify the copy behaves
// differently from the original.
dbCopy := NewDb(db.DB, db.DriverName())
dbCopy.MapperFunc(strings.ToUpper)
err = dbCopy.Get(&rsa, "SELECT * FROM capplace;")
if err != nil {
fmt.Println(db.DriverName())
t.Error(err)
}
err = db.Get(&rsa, "SELECT * FROM cappplace;")
if err == nil {
t.Error("Expected no error, got ", err)
}
// test base type slices
var sdest []string
rows, err = db.Queryx("SELECT email FROM person ORDER BY email ASC;")
if err != nil {
t.Error(err)
}
err = scanAll(rows, &sdest, false)
if err != nil {
t.Error(err)
}
// test Get with base types
var count int
err = db.Get(&count, "SELECT count(*) FROM person;")
if err != nil {
t.Error(err)
}
if count != len(sdest) {
t.Errorf("Expected %d == %d (count(*) vs len(SELECT ..)", count, len(sdest))
}
// test Get and Select with time.Time, #84
var addedAt time.Time
err = db.Get(&addedAt, "SELECT added_at FROM person LIMIT 1;")
if err != nil {
t.Error(err)
}
var addedAts []time.Time
err = db.Select(&addedAts, "SELECT added_at FROM person;")
if err != nil {
t.Error(err)
}
// test it on a double pointer
var pcount *int
err = db.Get(&pcount, "SELECT count(*) FROM person;")
if err != nil {
t.Error(err)
}
if *pcount != count {
t.Errorf("expected %d = %d", *pcount, count)
}
// test Select...
sdest = []string{}
err = db.Select(&sdest, "SELECT first_name FROM person ORDER BY first_name ASC;")
if err != nil {
t.Error(err)
}
expected := []string{"Ben", "Bin", "Jason", "John"}
for i, got := range sdest {
if got != expected[i] {
t.Errorf("Expected %d result to be %s, but got %s", i, expected[i], got)
}
}
var nsdest []sql.NullString
err = db.Select(&nsdest, "SELECT city FROM place ORDER BY city ASC")
if err != nil {
t.Error(err)
}
for _, val := range nsdest {
if val.Valid && val.String != "New York" {
t.Errorf("expected single valid result to be `New York`, but got %s", val.String)
}
}
})
}
type Product struct {
ProductID int
}
// tests that sqlx will not panic when the wrong driver is passed because
// of an automatic nil dereference in sqlx.Open(), which was fixed.
func TestDoNotPanicOnConnect(t *testing.T) {
db, err := Connect("bogus", "hehe")
if err == nil {
t.Errorf("Should return error when using bogus driverName")
}
if db != nil {
t.Errorf("Should not return the db on a connect failure")
}
}
func TestRebind(t *testing.T) {
q1 := `INSERT INTO foo (a, b, c, d, e, f, g, h, i) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`
q2 := `INSERT INTO foo (a, b, c) VALUES (?, ?, "foo"), ("Hi", ?, ?)`
s1 := Rebind(DOLLAR, q1)
s2 := Rebind(DOLLAR, q2)
if s1 != `INSERT INTO foo (a, b, c, d, e, f, g, h, i) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)` {
t.Errorf("q1 failed")
}
if s2 != `INSERT INTO foo (a, b, c) VALUES ($1, $2, "foo"), ("Hi", $3, $4)` {
t.Errorf("q2 failed")
}
s1 = Rebind(NAMED, q1)
s2 = Rebind(NAMED, q2)
ex1 := `INSERT INTO foo (a, b, c, d, e, f, g, h, i) VALUES ` +
`(:arg1, :arg2, :arg3, :arg4, :arg5, :arg6, :arg7, :arg8, :arg9, :arg10)`
if s1 != ex1 {
t.Error("q1 failed on Named params")
}
ex2 := `INSERT INTO foo (a, b, c) VALUES (:arg1, :arg2, "foo"), ("Hi", :arg3, :arg4)`
if s2 != ex2 {
t.Error("q2 failed on Named params")
}
}
func TestBindMap(t *testing.T) {
// Test that it works..
q1 := `INSERT INTO foo (a, b, c, d) VALUES (:name, :age, :first, :last)`
am := map[string]interface{}{
"name": "Jason Moiron",
"age": 30,
"first": "Jason",
"last": "Moiron",
}
bq, args, _ := bindMap(QUESTION, q1, am)
expect := `INSERT INTO foo (a, b, c, d) VALUES (?, ?, ?, ?)`
if bq != expect {
t.Errorf("Interpolation of query failed: got `%v`, expected `%v`\n", bq, expect)
}
if args[0].(string) != "Jason Moiron" {
t.Errorf("Expected `Jason Moiron`, got %v\n", args[0])
}
if args[1].(int) != 30 {
t.Errorf("Expected 30, got %v\n", args[1])
}
if args[2].(string) != "Jason" {
t.Errorf("Expected Jason, got %v\n", args[2])
}
if args[3].(string) != "Moiron" {
t.Errorf("Expected Moiron, got %v\n", args[3])
}
}
// Test for #117, embedded nil maps
type Message struct {
Text string `db:"string"`
Properties PropertyMap `db:"properties"` // Stored as JSON in the database
}
type PropertyMap map[string]string
// Implement driver.Valuer and sql.Scanner interfaces on PropertyMap
func (p PropertyMap) Value() (driver.Value, error) {
if len(p) == 0 {
return nil, nil
}
return json.Marshal(p)
}
func (p PropertyMap) Scan(src interface{}) error {
v := reflect.ValueOf(src)
if !v.IsValid() || v.CanAddr() && v.IsNil() {
return nil
}
switch ts := src.(type) {
case []byte:
return json.Unmarshal(ts, &p)
case string:
return json.Unmarshal([]byte(ts), &p)
default:
return fmt.Errorf("Could not not decode type %T -> %T", src, p)
}
}
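// Note: the value receivers above still round-trip correctly because sqlx's
// reflectx pre-allocates nil map fields before scanning (see
// reflectx.FieldByIndexes), so json.Unmarshal fills the backing store shared
// with the struct field rather than a local copy that would be discarded.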
func TestEmbeddedMaps(t *testing.T) {
var schema = Schema{
create: `
CREATE TABLE message (
string text,
properties text
);`,
drop: `drop table message;`,
}
RunWithSchema(schema, t, func(db *DB, t *testing.T) {
messages := []Message{
{"Hello, World", PropertyMap{"one": "1", "two": "2"}},
{"Thanks, Joy", PropertyMap{"pull": "request"}},
}
q1 := `INSERT INTO message (string, properties) VALUES (:string, :properties);`
for _, m := range messages {
_, err := db.NamedExec(q1, m)
if err != nil {
t.Fatal(err)
}
}
var count int
err := db.Get(&count, "SELECT count(*) FROM message")
if err != nil {
t.Fatal(err)
}
if count != len(messages) {
t.Fatalf("Expected %d messages in DB, found %d", len(messages), count)
}
var m Message
err = db.Get(&m, "SELECT * FROM message LIMIT 1;")
if err != nil {
t.Fatal(err)
}
if m.Properties == nil {
t.Fatal("Expected m.Properties to not be nil, but it was.")
}
})
}
func TestIssue197(t *testing.T) {
// this test actually tests for a bug in database/sql:
// https://github.com/golang/go/issues/13905
// this potentially makes _any_ named type that is an alias for []byte
// unsafe to use in a lot of different ways (basically, unsafe to hold
// onto after loading from the database).
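// Roughly (per the issue above): database/sql clones driver-owned bytes for
// plain *[]byte destinations, but named byte-slice types may be assigned via
// reflection without a copy, so they can alias a buffer the driver reuses on
// the next row.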
t.Skip()
type mybyte []byte
type Var struct{ Raw json.RawMessage }
type Var2 struct{ Raw []byte }
type Var3 struct{ Raw mybyte }
RunWithSchema(defaultSchema, t, func(db *DB, t *testing.T) {
var err error
var v, q Var
if err = db.Get(&v, `SELECT '{"a": "b"}' AS raw`); err != nil {
t.Fatal(err)
}
if err = db.Get(&q, `SELECT 'null' AS raw`); err != nil {
t.Fatal(err)
}
var v2, q2 Var2
if err = db.Get(&v2, `SELECT '{"a": "b"}' AS raw`); err != nil {
t.Fatal(err)
}
if err = db.Get(&q2, `SELECT 'null' AS raw`); err != nil {
t.Fatal(err)
}
var v3, q3 Var3
if err = db.QueryRow(`SELECT '{"a": "b"}' AS raw`).Scan(&v3.Raw); err != nil {
t.Fatal(err)
}
if err = db.QueryRow(`SELECT '{"c": "d"}' AS raw`).Scan(&q3.Raw); err != nil {
t.Fatal(err)
}
t.Fail()
})
}
func TestIn(t *testing.T) {
// some quite normal situations
type tr struct {
q string
args []interface{}
c int
}
tests := []tr{
{"SELECT * FROM foo WHERE x = ? AND v in (?) AND y = ?",
[]interface{}{"foo", []int{0, 5, 7, 2, 9}, "bar"},
7},
{"SELECT * FROM foo WHERE x in (?)",
[]interface{}{[]int{1, 2, 3, 4, 5, 6, 7, 8}},
8},
{"SELECT * FROM foo WHERE x = ? AND y in (?)",
[]interface{}{[]byte("foo"), []int{0, 5, 3}},
4},
}
for _, test := range tests {
q, a, err := In(test.q, test.args...)
if err != nil {
t.Error(err)
}
if len(a) != test.c {
t.Errorf("Expected %d args, but got %d (%+v)", test.c, len(a), a)
}
if strings.Count(q, "?") != test.c {
t.Errorf("Expected %d bindVars, got %d", test.c, strings.Count(q, "?"))
}
}
// too many bindVars, but no slices, so short circuits parsing
// i'm not sure if this is the right behavior; this query/arg combo
// might not work, but we shouldn't parse if we don't need to
{
orig := "SELECT * FROM foo WHERE x = ? AND y = ?"
q, a, err := In(orig, "foo", "bar", "baz")
if err != nil {
t.Error(err)
}
if len(a) != 3 {
t.Errorf("Expected 3 args, but got %d (%+v)", len(a), a)
}
if q != orig {
t.Error("Expected unchanged query.")
}
}
tests = []tr{
// too many bindvars; slice present so should return error during parse
{"SELECT * FROM foo WHERE x = ? and y = ?",
[]interface{}{"foo", []int{1, 2, 3}, "bar"},
0},
// empty slice, should return error before parse
{"SELECT * FROM foo WHERE x = ?",
[]interface{}{[]int{}},
0},
// too *few* bindvars, should return an error
{"SELECT * FROM foo WHERE x = ? AND y in (?)",
[]interface{}{[]int{1, 2, 3}},
0},
}
for _, test := range tests {
_, _, err := In(test.q, test.args...)
if err == nil {
t.Error("Expected an error, but got nil.")
}
}
RunWithSchema(defaultSchema, t, func(db *DB, t *testing.T) {
loadDefaultFixture(db, t)
//tx.MustExec(tx.Rebind("INSERT INTO place (country, city, telcode) VALUES (?, ?, ?)"), "United States", "New York", "1")
//tx.MustExec(tx.Rebind("INSERT INTO place (country, telcode) VALUES (?, ?)"), "Hong Kong", "852")
//tx.MustExec(tx.Rebind("INSERT INTO place (country, telcode) VALUES (?, ?)"), "Singapore", "65")
telcodes := []int{852, 65}
q := "SELECT * FROM place WHERE telcode IN(?) ORDER BY telcode"
query, args, err := In(q, telcodes)
if err != nil {
t.Error(err)
}
query = db.Rebind(query)
places := []Place{}
err = db.Select(&places, query, args...)
if err != nil {
t.Error(err)
}
if len(places) != 2 {
t.Fatalf("Expecting 2 results, got %d", len(places))
}
if places[0].TelCode != 65 {
t.Errorf("Expecting singapore first, but got %#v", places[0])
}
if places[1].TelCode != 852 {
t.Errorf("Expecting hong kong second, but got %#v", places[1])
}
})
}
func TestBindStruct(t *testing.T) {
var err error
q1 := `INSERT INTO foo (a, b, c, d) VALUES (:name, :age, :first, :last)`
type tt struct {
Name string
Age int
First string
Last string
}
type tt2 struct {
Field1 string `db:"field_1"`
Field2 string `db:"field_2"`
}
type tt3 struct {
tt2
Name string
}
am := tt{"Jason Moiron", 30, "Jason", "Moiron"}
bq, args, _ := bindStruct(QUESTION, q1, am, mapper())
expect := `INSERT INTO foo (a, b, c, d) VALUES (?, ?, ?, ?)`
if bq != expect {
t.Errorf("Interpolation of query failed: got `%v`, expected `%v`\n", bq, expect)
}
if args[0].(string) != "Jason Moiron" {
t.Errorf("Expected `Jason Moiron`, got %v\n", args[0])
}
if args[1].(int) != 30 {
t.Errorf("Expected 30, got %v\n", args[1])
}
if args[2].(string) != "Jason" {
t.Errorf("Expected Jason, got %v\n", args[2])
}
if args[3].(string) != "Moiron" {
t.Errorf("Expected Moiron, got %v\n", args[3])
}
am2 := tt2{"Hello", "World"}
bq, args, _ = bindStruct(QUESTION, "INSERT INTO foo (a, b) VALUES (:field_2, :field_1)", am2, mapper())
expect = `INSERT INTO foo (a, b) VALUES (?, ?)`
if bq != expect {
t.Errorf("Interpolation of query failed: got `%v`, expected `%v`\n", bq, expect)
}
if args[0].(string) != "World" {
t.Errorf("Expected 'World', got %s\n", args[0].(string))
}
if args[1].(string) != "Hello" {
t.Errorf("Expected 'Hello', got %s\n", args[1].(string))
}
am3 := tt3{Name: "Hello!"}
am3.Field1 = "Hello"
am3.Field2 = "World"
bq, args, err = bindStruct(QUESTION, "INSERT INTO foo (a, b, c) VALUES (:name, :field_1, :field_2)", am3, mapper())
if err != nil {
t.Fatal(err)
}
expect = `INSERT INTO foo (a, b, c) VALUES (?, ?, ?)`
if bq != expect {
t.Errorf("Interpolation of query failed: got `%v`, expected `%v`\n", bq, expect)
}
if args[0].(string) != "Hello!" {
t.Errorf("Expected 'Hello!', got %s\n", args[0].(string))
}
if args[1].(string) != "Hello" {
t.Errorf("Expected 'Hello', got %s\n", args[1].(string))
}
if args[2].(string) != "World" {
t.Errorf("Expected 'World', got %s\n", args[0].(string))
}
}
func TestEmbeddedLiterals(t *testing.T) {
var schema = Schema{
create: `
CREATE TABLE x (
k text
);`,
drop: `drop table x;`,
}
RunWithSchema(schema, t, func(db *DB, t *testing.T) {
type t1 struct {
K *string
}
type t2 struct {
Inline struct {
F string
}
K *string
}
db.MustExec(db.Rebind("INSERT INTO x (k) VALUES (?), (?), (?);"), "one", "two", "three")
target := t1{}
err := db.Get(&target, db.Rebind("SELECT * FROM x WHERE k=?"), "one")
if err != nil {
t.Error(err)
}
if *target.K != "one" {
t.Error("Expected target.K to be `one`, got ", target.K)
}
target2 := t2{}
err = db.Get(&target2, db.Rebind("SELECT * FROM x WHERE k=?"), "one")
if err != nil {
t.Error(err)
}
if *target2.K != "one" {
t.Errorf("Expected target2.K to be `one`, got `%v`", target2.K)
}
})
}
func BenchmarkBindStruct(b *testing.B) {
b.StopTimer()
q1 := `INSERT INTO foo (a, b, c, d) VALUES (:name, :age, :first, :last)`
type t struct {
Name string
Age int
First string
Last string
}
am := t{"Jason Moiron", 30, "Jason", "Moiron"}
b.StartTimer()
for i := 0; i < b.N; i++ {
bindStruct(DOLLAR, q1, am, mapper())
}
}
func BenchmarkBindMap(b *testing.B) {
b.StopTimer()
q1 := `INSERT INTO foo (a, b, c, d) VALUES (:name, :age, :first, :last)`
am := map[string]interface{}{
"name": "Jason Moiron",
"age": 30,
"first": "Jason",
"last": "Moiron",
}
b.StartTimer()
for i := 0; i < b.N; i++ {
bindMap(DOLLAR, q1, am)
}
}
func BenchmarkIn(b *testing.B) {
q := `SELECT * FROM foo WHERE x = ? AND v in (?) AND y = ?`
for i := 0; i < b.N; i++ {
_, _, _ = In(q, []interface{}{"foo", []int{0, 5, 7, 2, 9}, "bar"}...)
}
}
func BenchmarkIn1k(b *testing.B) {
q := `SELECT * FROM foo WHERE x = ? AND v in (?) AND y = ?`
var vals [1000]interface{}
for i := 0; i < b.N; i++ {
_, _, _ = In(q, []interface{}{"foo", vals[:], "bar"}...)
}
}
func BenchmarkIn1kInt(b *testing.B) {
q := `SELECT * FROM foo WHERE x = ? AND v in (?) AND y = ?`
var vals [1000]int
for i := 0; i < b.N; i++ {
_, _, _ = In(q, []interface{}{"foo", vals[:], "bar"}...)
}
}
func BenchmarkIn1kString(b *testing.B) {
q := `SELECT * FROM foo WHERE x = ? AND v in (?) AND y = ?`
var vals [1000]string
for i := 0; i < b.N; i++ {
_, _, _ = In(q, []interface{}{"foo", vals[:], "bar"}...)
}
}
func BenchmarkRebind(b *testing.B) {
b.StopTimer()
q1 := `INSERT INTO foo (a, b, c, d, e, f, g, h, i) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`
q2 := `INSERT INTO foo (a, b, c) VALUES (?, ?, "foo"), ("Hi", ?, ?)`
b.StartTimer()
for i := 0; i < b.N; i++ {
Rebind(DOLLAR, q1)
Rebind(DOLLAR, q2)
}
}
func BenchmarkRebindBuffer(b *testing.B) {
b.StopTimer()
q1 := `INSERT INTO foo (a, b, c, d, e, f, g, h, i) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`
q2 := `INSERT INTO foo (a, b, c) VALUES (?, ?, "foo"), ("Hi", ?, ?)`
b.StartTimer()
for i := 0; i < b.N; i++ {
rebindBuff(DOLLAR, q1)
rebindBuff(DOLLAR, q2)
}
}<|fim▁end|> | for rows.Next() {
err = rows.StructScan(p2)
if err != nil {
t.Error(err) |
<|file_name|>func_geometric.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# The MIT License (MIT)
#
# Copyright (c) 2014-2018 Shi Chi(Mack Stone)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sys
import math
from .func_exponential import *
if sys.version_info > (3, 0):
long = int
def length(x):
"""Returns the length of x, i.e., sqrt(x * x).
:param x: Floating-point vector types.
.. seealso::
`GLSL length man page <http://www.opengl.org/sdk/docs/manglsl/xhtml/length.xml>`_
`GLSL 4.20.8 specification, section 8.5 Geometric Functions <http://www.opengl.org/registry/doc/GLSLangSpec.4.20.8.pdf>`_"""
# TODO: implement vec2 type
# if isinstance(x, Vec2):
# sqr = x.x * x.x + x.y * x.y
# return math.sqrt(sqr)
if isinstance(x, Vec3):
sqr = x.x * x.x + x.y * x.y + x.z * x.z
return math.sqrt(sqr)
elif isinstance(x, Vec4):
sqr = x.x * x.x + x.y * x.y + x.z * x.z + x.w * x.w
return math.sqrt(sqr)
elif isinstance(x, float) or isinstance(x, int) or isinstance(x, long):
return abs(x)
else:
raise TypeError('unsupported type %s' % type(x))
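# Illustrative usage (hypothetical values; Vec3 comes from this package):
# length(Vec3(3.0, 4.0, 0.0)) -> 5.0
# length(-7) -> 7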
def dot(x, y):
"""Returns the dot product of x and y, i.e., result = x * y.
:param x: Floating-point vector types.
:param y: Floating-point vector types.
.. seealso::
`GLSL dot man page <http://www.opengl.org/sdk/docs/manglsl/xhtml/dot.xml>`_
`GLSL 4.20.8 specification, section 8.5 Geometric Functions <http://www.opengl.org/registry/doc/GLSLangSpec.4.20.8.pdf>`_"""
# TODO: implement vec2
# if isinstance(x, Vec2) and isinstance(y, Vec2):
# tmp = Vec2(x * y)
# return tmp.x + tmp.y
if isinstance(x, Vec3) and isinstance(y, Vec3):
tmp = Vec3(x * y)
return tmp.x + tmp.y + tmp.z
elif isinstance(x, Vec4) and isinstance(y, Vec4):
tmp = Vec4(x * y)
return (tmp.x + tmp.y) + (tmp.z + tmp.w)
elif isinstance(x, float) or isinstance(x, int) or isinstance(x, long):
return x * y
else:
raise TypeError('unsupported type %s' % type(x))
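# Illustrative usage (hypothetical values; assumes this package's Vec3
# supports component-wise multiplication, as used above):
# dot(Vec3(1.0, 2.0, 3.0), Vec3(4.0, -5.0, 6.0)) -> 12.0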
def normalize(x):
"""Returns a vector in the same direction as x but with length of 1.
.. seealso::
`GLSL normalize man page <http://www.opengl.org/sdk/docs/manglsl/xhtml/normalize.xml>`_
`GLSL 4.20.8 specification, section 8.5 Geometric Functions <http://www.opengl.org/registry/doc/GLSLangSpec.4.20.8.pdf>`_"""
if isinstance(x, float) or isinstance(x, int) or isinstance(x, long):
return -1.0 if x < 0.0 else 1.0
#elif isinstance(x, Vec2):
#sqr = x.x * x.x + x.y * x.y
#return x * inversesqrt(sqr)
elif isinstance(x, Vec3):
sqr = x.x * x.x + x.y * x.y + x.z * x.z
return x * inversesqrt(sqr)<|fim▁hole|> elif isinstance(x, Vec4):
sqr = x.x * x.x + x.y * x.y + x.z * x.z + x.w * x.w
return x * inversesqrt(sqr)<|fim▁end|> | |
<|file_name|>bitcoin_eu_ES.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="eu_ES" version="2.0">
<defaultcodec>UTF-8</defaultcodec>
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About Tradecoin</source>
<translation>Tradecoin-i buruz</translation>
</message>
<message>
<location line="+39"/>
<source><b>Tradecoin</b> version</source>
<translation><b>Tradecoin</b> bertsioa</translation>
</message>
<message>
<location line="+57"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../aboutdialog.cpp" line="+14"/>
<source>Copyright</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The Tradecoin developers</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation>Helbide-liburua</translation>
</message>
<message>
<location line="+19"/>
<source>Double-click to edit address or label</source>
<translation>Klik bikoitza helbidea edo etiketa editatzeko</translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>Sortu helbide berria</translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Kopiatu hautatutako helbidea sistemaren arbelera</translation>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+63"/>
<source>These are your Tradecoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>&Copy Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation>Erakutsi &QR kodea</translation>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a Tradecoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Export the data in the current tab to a file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Export</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>Verify a message to ensure it was signed with a specified Tradecoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation>&Ezabatu</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="-5"/>
<source>These are your Tradecoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Copy &Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>&Edit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send &Coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+260"/>
<source>Export Address Book Data</source>
<translation>Esportatu Helbide-liburuaren datuak</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Komaz bereizitako artxiboa (*.csv)</translation>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation>Errorea esportatzean</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>Ezin idatzi %1 artxiboan.</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation>Etiketa</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Helbidea</translation>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation>(etiketarik ez)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation>Sartu pasahitza</translation>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation>Pasahitz berria</translation>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation>Errepikatu pasahitz berria</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+33"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Sartu zorrorako pasahitz berria.<br/> Mesedez erabili <b>gutxienez ausazko 10 karaktere</b>, edo <b>gutxienez zortzi hitz</b> pasahitza osatzeko.</translation>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation>Enkriptatu zorroa</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Eragiketa honek zorroaren pasahitza behar du zorroa desblokeatzeko.</translation>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation>Desblokeatu zorroa</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Eragiketa honek zure zorroaren pasahitza behar du, zorroa desenkriptatzeko.</translation>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation>Desenkriptatu zorroa</translation>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation>Aldatu pasahitza</translation>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>Sartu zorroaren pasahitz zaharra eta berria.</translation>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation>Berretsi zorroaren enkriptazioa</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR BITCOINS</b>!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-130"/>
<location line="+58"/>
<source>Wallet encrypted</source>
<translation>Zorroa enkriptatuta</translation>
</message>
<message>
<location line="-56"/>
<source>Tradecoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your tradecoins from being stolen by malware infecting your computer.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+42"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation>Zorroaren enkriptazioak huts egin du</translation>
</message>
<message>
<location line="-54"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>Zorroaren enkriptazioak huts egin du barne-errore baten ondorioz. Zure zorroa ez da enkriptatu.</translation>
</message>
<message>
<location line="+7"/>
<location line="+48"/>
<source>The supplied passphrases do not match.</source>
<translation>Eman dituzun pasahitzak ez datoz bat.</translation>
</message>
<message>
<location line="-37"/>
<source>Wallet unlock failed</source>
<translation>Zorroaren desblokeoak huts egin du</translation>
</message>
<message>
<location line="+1"/>
<location line="+11"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>Zorroa desenkriptatzeko sartutako pasahitza okerra da.</translation>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation>Zorroaren desenkriptazioak huts egin du</translation>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TradecoinGUI</name>
<message>
<location filename="../tradecoingui.cpp" line="+233"/>
<source>Sign &message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+280"/>
<source>Synchronizing with network...</source>
<translation>Sarearekin sinkronizatzen...</translation>
</message>
<message>
<location line="-349"/>
<source>&Overview</source>
<translation>&Gainbegiratu</translation>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation>Ikusi zorroaren begirada orokorra</translation>
</message>
<message>
<location line="+20"/>
<source>&Transactions</source>
<translation>&Transakzioak</translation>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation>Ikusi transakzioen historia</translation>
</message>
<message>
<location line="+7"/>
<source>Edit the list of stored addresses and labels</source>
<translation>Editatu gordetako helbide eta etiketen zerrenda</translation>
</message>
<message>
<location line="-14"/>
<source>Show the list of addresses for receiving payments</source>
<translation>Erakutsi ordainketak jasotzeko helbideen zerrenda</translation>
</message>
<message>
<location line="+31"/>
<source>E&xit</source>
<translation>Irten</translation>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation>Irten aplikaziotik</translation>
</message>
<message>
<location line="+4"/>
<source>Show information about Tradecoin</source>
<translation>Erakutsi Tradecoin-i buruzko informazioa</translation>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation>&Qt-ari buruz</translation>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation>Erakutsi Tradecoin-i buruzko informazioa</translation>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation>&Aukerak...</translation>
</message>
<message>
<location line="+6"/>
<source>&Encrypt Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+285"/>
<source>Importing blocks from disk...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Reindexing blocks on disk...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-347"/>
<source>Send coins to a Tradecoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Modify configuration options for Tradecoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Backup wallet to another location</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation>Aldatu zorroa enkriptatzeko erabilitako pasahitza</translation>
</message>
<message>
<location line="+6"/>
<source>&Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-4"/>
<source>&Verify message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-165"/>
<location line="+530"/>
<source>Tradecoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-530"/>
<source>Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+101"/>
<source>&Send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Receive</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>&Addresses</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>&About Tradecoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show or hide the main Window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Encrypt the private keys that belong to your wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Sign messages with your Tradecoin addresses to prove you own them</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Verify messages to ensure they were signed with specified Tradecoin addresses</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>&File</source>
<translation>&Artxiboa</translation>
</message>
<message>
<location line="+7"/>
<source>&Settings</source>
<translation>&Ezarpenak</translation>
</message>
<message>
<location line="+6"/>
<source>&Help</source>
<translation>&Laguntza</translation>
</message>
<message>
<location line="+9"/>
<source>Tabs toolbar</source>
<translation>Fitxen tresna-barra</translation>
</message>
<message>
<location line="+17"/>
<location line="+10"/>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
<message>
<location line="+47"/>
<source>Tradecoin client</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+141"/>
<source>%n active connection(s) to Tradecoin network</source>
<translation><numerusform>Konexio aktibo %n Tradecoin-en sarera</numerusform><numerusform>%n konexio aktibo Tradecoin-en sarera</numerusform></translation>
</message>
<message>
<location line="+22"/>
<source>No block source available...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Processed %1 of %2 (estimated) blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Processed %1 blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+20"/>
<source>%n hour(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n week(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>%1 behind</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Last received block was generated %1 ago.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Transactions after this will not yet be visible.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>Error</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+70"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-140"/>
<source>Up to date</source>
<translation>Egunean</translation>
</message>
<message>
<location line="+31"/>
<source>Catching up...</source>
<translation>Eguneratzen...</translation>
</message>
<message>
<location line="+113"/>
<source>Confirm transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Sent transaction</source>
<translation>Bidalitako transakzioa</translation>
</message>
<message>
<location line="+0"/>
<source>Incoming transaction</source>
<translation>Sarrerako transakzioa</translation>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+33"/>
<location line="+23"/>
<source>URI handling</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-23"/>
<location line="+23"/>
<source>URI can not be parsed! This can be caused by an invalid Tradecoin address or malformed URI parameters.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Zorroa <b>enkriptatuta</b> eta <b>desblokeatuta</b> dago une honetan</translation>
</message>
<message>
<location line="+8"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Zorroa <b>enkriptatuta</b> eta <b>blokeatuta</b> dago une honetan</translation>
</message>
<message>
<location filename="../tradecoin.cpp" line="+111"/>
<source>A fatal error occurred. Tradecoin can no longer continue safely and will quit.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+104"/>
<source>Network Alert</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation>Editatu helbidea</translation>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation>&Etiketa</translation>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation>Helbide-liburuko sarrera honekin lotutako etiketa</translation>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation>&Helbidea</translation>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation>Helbide-liburuko sarrera honekin lotutako helbidea. Bidaltzeko helbideeta soilik alda daiteke.</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+21"/>
<source>New receiving address</source>
<translation>Jasotzeko helbide berria</translation>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation>Bidaltzeko helbide berria</translation>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation>Editatu jasotzeko helbidea</translation>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation>Editatu bidaltzeko helbidea</translation>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation>Sartu berri den helbidea, "%1", helbide-liburuan dago jadanik.</translation>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid Tradecoin address.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation>Ezin desblokeatu zorroa.</translation>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation>Gako berriaren sorrerak huts egin du.</translation>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+424"/>
<location line="+12"/>
<source>Tradecoin-Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation>Aukerak</translation>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Automatically start Tradecoin after logging in to the system.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Start Tradecoin on system login</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Reset all client options to default.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Reset Options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>&Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Automatically open the Tradecoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Connect to the Tradecoin network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting Tradecoin.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Whether to show Tradecoin addresses in the transaction list or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+53"/>
<source>default</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+130"/>
<source>Confirm options reset</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Some settings may require a client restart to take effect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Do you want to proceed?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<location line="+9"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting Tradecoin.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation>Inprimakia</translation>
</message>
<message>
<location line="+50"/>
<location line="+166"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the Tradecoin network after a connection is established, but this process has not completed yet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-124"/>
<source>Balance:</source>
<translation>Saldoa:</translation>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation>Konfirmatu gabe:</translation>
</message>
<message>
<location line="-78"/>
<source>Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+107"/>
<source>Immature:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation><b>Azken transakzioak</b></translation>
</message>
<message>
<location line="-101"/>
<source>Your current balance</source>
<translation>Zure uneko saldoa</translation>
</message>
<message>
<location line="+29"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation>Oraindik konfirmatu gabe daudenez, uneko saldoan kontatu gabe dagoen transakzio kopurua</translation>
</message>
<message>
<location filename="../overviewpage.cpp" line="+116"/>
<location line="+1"/>
<source>out of sync</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>PaymentServer</name>
<message>
<location filename="../paymentserver.cpp" line="+107"/>
<source>Cannot start tradecoin: click-to-pay handler</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation>Kopurua:</translation>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation>Etiketa:</translation>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation>Mezua:</translation>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation>Gorde honela...</translation>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+339"/>
<source>N/A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Show the Tradecoin-Qt help message to get a list with possible Tradecoin command-line options.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-104"/>
<source>Tradecoin - Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Tradecoin Core</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Open the Tradecoin debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-30"/>
<source>Welcome to the Tradecoin RPC console.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+124"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation>Bidali txanponak</translation>
</message>
<message>
<location line="+50"/>
<source>Send to multiple recipients at once</source>
<translation>Bidali hainbat jasotzaileri batera</translation>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>Balance:</source>
<translation>Saldoa:</translation>
</message>
<message>
<location line="+10"/>
<source>123.456 TRD</source>
<translation>123.456 TRD</translation>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation>Berretsi bidaltzeko ekintza</translation>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-59"/>
<source><b>%1</b> to %2 (%3)</source>
<translation><b>%1</b> honi: %2 (%3)</translation>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation>Berretsi txanponak bidaltzea</translation>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation>Ziur zaude %1 bidali nahi duzula?</translation>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation> eta </translation>
</message>
<message>
<location line="+23"/>
<source>The recipient address is not valid, please recheck.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation>Ordaintzeko kopurua 0 baino handiagoa izan behar du.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation>Inprimakia</translation>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation>K&opurua:</translation>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation>Ordaindu &honi:</translation>
</message>
<message>
<location line="+34"/>
<source>The address to send the payment to (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+60"/>
<location filename="../sendcoinsentry.cpp" line="+26"/>
<source>Enter a label for this address to add it to your address book</source>
<translation>Sartu etiketa bat helbide honetarako, eta gehitu zure helbide-liburuan</translation>
</message>
<message>
<location line="-78"/>
<source>&Label:</source>
<translation>&Etiketa:</translation>
</message>
<message>
<location line="+28"/>
<source>Choose address from address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation>Itsatsi helbidea arbeletik</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation>Ezabatu jasotzaile hau</translation>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a Tradecoin address (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation>Sartu Tradecoin helbide bat (adb.: 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>&Sign Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+213"/>
<source>Choose an address from the address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-203"/>
<location line="+213"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="-203"/>
<source>Paste address from clipboard</source>
<translation>Itsatsi helbidea arbeletik</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Copy the current signature to the system clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this Tradecoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Reset all sign message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified Tradecoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Verify &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Reset all verify message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a Tradecoin address (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation>Sartu Tradecoin helbide bat (adb.: 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Enter Tradecoin signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<location filename="../splashscreen.cpp" line="+22"/>
<source>The Tradecoin developers</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>[testnet]</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+20"/>
<source>Open until %1</source>
<translation>Zabalik %1 arte</translation>
</message>
<message>
<location line="+6"/>
<source>%1/offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation>%1/konfirmatu gabe</translation>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation>%1 konfirmazioak</translation>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Inputs</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation>Kopurua</translation>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-209"/>
<source>, has not been successfully broadcast yet</source>
<translation>, ez da arrakastaz emititu oraindik</translation>
</message>
<message numerus="yes">
<location line="-35"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+70"/>
<source>unknown</source>
<translation>ezezaguna</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation>Transakzioaren xehetasunak</translation>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation>Panel honek transakzioaren deskribapen xehea erakusten du</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+225"/>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation>Mota</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Helbidea</translation>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation>Kopurua</translation>
</message>
<message numerus="yes">
<location line="+57"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+3"/>
<source>Open until %1</source>
<translation>Zabalik %1 arte</translation>
</message>
<message>
<location line="+3"/>
<source>Offline (%1 confirmations)</source>
<translation>Offline (%1 konfirmazio)</translation>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed (%1 of %2 confirmations)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirmed (%1 confirmations)</source>
<translation>Konfirmatuta (%1 konfirmazio)</translation>
</message>
<message numerus="yes">
<location line="+8"/>
<source>Mined balance will be available when it matures in %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+5"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>Bloke hau ez du beste inongo nodorik jaso, eta seguruenik ez da onartuko!</translation>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation>Sortua, baina ez onartua</translation>
</message>
<message>
<location line="+43"/>
<source>Received with</source>
<translation>Jasoa honekin: </translation>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation type="unfinished"/>
</message>
<message><|fim▁hole|> </message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation>Ordainketa zeure buruari</translation>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation>Bildua</translation>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation>(n/a)</translation>
</message>
<message>
<location line="+199"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>Transakzioaren egoera. Pasatu sagua gainetik konfirmazio kopurua ikusteko.</translation>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation>Transakzioa jasotako data eta ordua.</translation>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation>Transakzio mota.</translation>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation>Transakzioaren xede-helbidea.</translation>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation>Saldoan kendu edo gehitutako kopurua.</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+52"/>
<location line="+16"/>
<source>All</source>
<translation>Denak</translation>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation>Gaur</translation>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation>Aste honetan</translation>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation>Hil honetan</translation>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation>Azken hilean</translation>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation>Aurten</translation>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation>Muga...</translation>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation>Jasota honekin: </translation>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation>Hona bidalia: </translation>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation>Zeure buruari</translation>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation>Bildua</translation>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation>Beste</translation>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation>Sartu bilatzeko helbide edo etiketa</translation>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation>Kopuru minimoa</translation>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation>Kopiatu helbidea</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>Kopiatu etiketa</translation>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+139"/>
<source>Export Transaction Data</source>
<translation>Esportatu transakzioen datuak</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Komaz bereizitako artxiboa (*.csv)</translation>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation>Mota</translation>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation>Etiketa</translation>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation>Helbidea</translation>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation>Kopurua</translation>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation>Errorea esportatzean</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>Ezin idatzi %1 artxiboan.</translation>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+193"/>
<source>Send Coins</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>WalletView</name>
<message>
<location filename="../walletview.cpp" line="+42"/>
<source>&Export</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Export the data in the current tab to a file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+193"/>
<source>Backup Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Backup Successful</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The wallet data was successfully saved to the new location.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>tradecoin-core</name>
<message>
<location filename="../tradecoinstrings.cpp" line="+94"/>
<source>Tradecoin version</source>
<translation>Tradecoin bertsioa</translation>
</message>
<message>
<location line="+102"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Send command to -server or tradecoind</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-23"/>
<source>List commands</source>
<translation>Komandoen lista</translation>
</message>
<message>
<location line="-12"/>
<source>Get help for a command</source>
<translation>Laguntza komando batean</translation>
</message>
<message>
<location line="+24"/>
<source>Options:</source>
<translation>Aukerak:</translation>
</message>
<message>
<location line="+24"/>
<source>Specify configuration file (default: tradecoin.conf)</source>
<translation>Ezarpen fitxategia aukeratu (berezkoa: tradecoin.conf)</translation>
</message>
<message>
<location line="+3"/>
<source>Specify pid file (default: tradecoind.pid)</source>
<translation>pid fitxategia aukeratu (berezkoa: tradecoind.pid)</translation>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-28"/>
<source>Listen for connections on <port> (default: 9335 or testnet: 19335)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-48"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<source>Specify your own public address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-134"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Listen for JSON-RPC connections on <port> (default: 9334 or testnet: 19334)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<source>Accept command line and JSON-RPC commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+76"/>
<source>Run in the background as a daemon and accept commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<source>Use the test network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-112"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-80"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=tradecoinrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "Tradecoin Alert" [email protected]
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Cannot obtain a lock on data directory %s. Tradecoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong Tradecoin will not work properly.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Block creation options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Connect only to the specified node(s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Corrupted block database detected</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Do you want to rebuild the block database now?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error initializing block database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error initializing wallet database environment %s!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading block database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error opening block database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error: Disk space is low!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error: Wallet locked, unable to create transaction!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error: system error: </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to read block info</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to read block</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to sync block index</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write block index</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write block info</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write block</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write file info</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write to coin database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write transaction index</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write undo data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Find peers using DNS lookup (default: 1 unless -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Generate coins (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 288, 0 = all)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-4, default: 3)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Not enough file descriptors available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Rebuild block chain index from current blk000??.dat files</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Set the number of threads to service RPC calls (default: 4)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+26"/>
<source>Verifying blocks...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Verifying wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-69"/>
<source>Imports blocks from external blk000??.dat file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-76"/>
<source>Set the number of script verification threads (up to 16, 0 = auto, <0 = leave that many cores free, default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+77"/>
<source>Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Invalid -tor address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -minrelaytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -mintxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Maintain a full transaction index (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Only accept block chain matching built-in checkpoints (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Prepend debug output with timestamp</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>SSL options: (see the Tradecoin Wiki for SSL setup instructions)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Signing transaction failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>System error: </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Transaction amount too small</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transaction amounts must be positive</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transaction too large</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Username for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>You need to rebuild the databases using -reindex to change -txindex</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-50"/>
<source>Password for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-67"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+76"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-120"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+147"/>
<source>Upgrade wallet to latest format</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-21"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-26"/>
<source>Server certificate file (default: server.cert)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-151"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+165"/>
<source>This help message</source>
<translation>Laguntza mezu hau</translation>
</message>
<message>
<location line="+6"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-91"/>
<source>Connect through socks proxy</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-10"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+55"/>
<source>Loading addresses...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-35"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat: Wallet requires newer version of Tradecoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+93"/>
<source>Wallet needed to be rewritten: restart Tradecoin to complete</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-95"/>
<source>Error loading wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Invalid -proxy address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+56"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-96"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Invalid amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-6"/>
<source>Insufficient funds</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Loading block index...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-57"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-25"/>
<source>Unable to bind to %s on this computer. Tradecoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+64"/>
<source>Fee per KB to add to transactions you send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Loading wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-52"/>
<source>Cannot downgrade wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Cannot write default address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+64"/>
<source>Rescanning...</source>
<translation>Birbilatzen...</translation>
</message>
<message>
<location line="-57"/>
<source>Done loading</source>
<translation>Zamaketa amaitua</translation>
</message>
<message>
<location line="+82"/>
<source>To use the %s option</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-74"/>
<source>Error</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-31"/>
<source>You must set rpcpassword=&lt;password&gt; in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation type="unfinished"/>
</message>
</context>
</TS><|fim▁end|> | <location line="+3"/>
<source>Sent to</source>
<translation>Honi bidalia: </translation> |
<|file_name|>application.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# author: J.Y Han
# start
# spawn-fcgi -d /users/hanjiyun/project/geeksoho -f /users/hanjiyun/project/geeksoho/application.py -a 127.0.0.1 -p 9001
#stop
# kill `pgrep -f "/users/hanjiyun/project/geeksoho/application.py"`
import os
import web
import rediswebpy
from web.contrib.template import render_jinja
import misc
db = web.database(dbn='mysql', db='geeksoho', user='geeksoho', passwd='geeksoho')
urls = (
'/', 'index',
'/test', 'test'
)
# controllers
# ===============
class index:
"""Home"""
def GET(self):
# return pjax('jobs.html')
jobsList = GetJobs()
return render.jobs(jobsList=jobsList)
def POST(self):
data = web.input(title='', link='', company='', company_weibo='', company_website='', city='', salary='', intro='')
CreatNewJob(data)
raise web.seeother('/')
class test:
"""test"""
def GET(self):
# return pjax('test.html')
return render.test()
# models
# =============
def CreatNewJob(data):
db.insert(
'jobs',
title = data.title,
link = data.link,
company = data.company,
company_weibo = data.company_weibo,
company_website = data.company_website,
city = data.city,
salary = data.salary,
intro = data.intro)
def GetJobs():
return db.select('jobs', limit = 100, order='id DESC')
# globals = get_all_functions(misc)
app = web.application(urls, globals())
web.config.debug = True
cache = False
session = web.session.Session(app, rediswebpy.RedisStore(), initializer={'count': 0})
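# sessions are stored in Redis via rediswebpy, so they survive FastCGI
# worker restarts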
render = render_jinja(
'templates', # 设置模板路径.
encoding = 'utf-8', # 编码.
)
myFilters = {'filter_tags': misc.filter_tags,}
render._lookup.filters.update(myFilters)
<|fim▁hole|> web.wsgi.runwsgi = lambda func, addr=None: web.wsgi.runfcgi(func, addr)
app.run()<|fim▁end|> |
if __name__ == "__main__":
|
<|file_name|>types.ts<|end_file_name|><|fim▁begin|>import {
featureCollection,
lineString,
multiLineString,
// Typescript Definitions
Polygon,
LineString,
MultiLineString,
MultiPolygon,
Feature,
FeatureCollection
} from '@turf/helpers'
import lineStringToPolygon from './'
// Fixtures
const coords = [[125, -30], [145, -30], [145, -20], [125, -20], [125, -30]];
const line = lineString(coords);
const multiLine = multiLineString([coords, coords]);
const fc = featureCollection([line, multiLine]);
// Assert results with types
const poly1: Feature<Polygon> = lineStringToPolygon(line);
const poly2: Feature<Polygon> = lineStringToPolygon(multiLine);<|fim▁hole|><|fim▁end|> | const poly3: Feature<MultiPolygon> = lineStringToPolygon(fc); |
<|file_name|>destination.py<|end_file_name|><|fim▁begin|># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------<|fim▁hole|>
class Destination(Model):
"""Capture storage details for capture description.
:param name: Name for capture destination
:type name: str
:param storage_account_resource_id: Resource id of the storage account to
be used to create the blobs
:type storage_account_resource_id: str
:param blob_container: Blob container Name
:type blob_container: str
:param archive_name_format: Blob naming convention for archive, e.g.
{Namespace}/{EventHub}/{PartitionId}/{Year}/{Month}/{Day}/{Hour}/{Minute}/{Second}.
Here all the parameters (Namespace,EventHub .. etc) are mandatory
irrespective of order
:type archive_name_format: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'storage_account_resource_id': {'key': 'properties.storageAccountResourceId', 'type': 'str'},
'blob_container': {'key': 'properties.blobContainer', 'type': 'str'},
'archive_name_format': {'key': 'properties.archiveNameFormat', 'type': 'str'},
}
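    # msrest flattens keys prefixed with 'properties.' into the nested ARM
    # 'properties' object during (de)serialization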
def __init__(self, name=None, storage_account_resource_id=None, blob_container=None, archive_name_format=None):
self.name = name
self.storage_account_resource_id = storage_account_resource_id
self.blob_container = blob_container
self.archive_name_format = archive_name_format<|fim▁end|> |
from msrest.serialization import Model |
<|file_name|>fe_field_function.cc<|end_file_name|><|fim▁begin|>// ---------------------------------------------------------------------
//
// Copyright (C) 2007 - 2013 by the deal.II authors
//
// This file is part of the deal.II library.
//
// The deal.II library is free software; you can use it, redistribute
// it, and/or modify it under the terms of the GNU Lesser General
// Public License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
// The full text of the license can be found in the file LICENSE at
// the top level of the deal.II distribution.
//
// ---------------------------------------------------------------------
#include <deal.II/numerics/fe_field_function.templates.h>
#include <deal.II/multigrid/mg_dof_handler.h>
#include <deal.II/dofs/dof_handler.h>
#include <deal.II/dofs/dof_accessor.h>
#include <deal.II/hp/dof_handler.h>
#include <deal.II/lac/vector.h>
#include <deal.II/lac/block_vector.h>
#include <deal.II/lac/parallel_vector.h><|fim▁hole|>#include <deal.II/lac/trilinos_block_vector.h>
DEAL_II_NAMESPACE_OPEN
namespace Functions
{
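  // fe_field_function.inst is generated by the build system and provides the
  // explicit template instantiations for the vector types included above.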
# include "fe_field_function.inst"
}
DEAL_II_NAMESPACE_CLOSE<|fim▁end|> | #include <deal.II/lac/parallel_block_vector.h>
#include <deal.II/lac/petsc_vector.h>
#include <deal.II/lac/petsc_block_vector.h>
#include <deal.II/lac/trilinos_vector.h> |
<|file_name|>device_mojo_mock_to_media_adapter.cc<|end_file_name|><|fim▁begin|>// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "services/video_capture/device_mojo_mock_to_media_adapter.h"
#include "services/video_capture/device_client_media_to_mojo_mock_adapter.h"
namespace video_capture {
<|fim▁hole|>DeviceMojoMockToMediaAdapter::DeviceMojoMockToMediaAdapter(
mojom::MockMediaDevicePtr* device)
: device_(device) {}
DeviceMojoMockToMediaAdapter::~DeviceMojoMockToMediaAdapter() = default;
void DeviceMojoMockToMediaAdapter::AllocateAndStart(
const media::VideoCaptureParams& params,
std::unique_ptr<Client> client) {
mojom::MockDeviceClientPtr client_proxy;
auto client_request = mojo::GetProxy(&client_proxy);
mojo::MakeStrongBinding(
base::MakeUnique<DeviceClientMediaToMojoMockAdapter>(std::move(client)),
std::move(client_request));
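  // The strong binding ties the client adapter's lifetime to the message
  // pipe; it is destroyed automatically when the connection closes.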
(*device_)->AllocateAndStart(std::move(client_proxy));
}
void DeviceMojoMockToMediaAdapter::RequestRefreshFrame() {}
void DeviceMojoMockToMediaAdapter::StopAndDeAllocate() {
(*device_)->StopAndDeAllocate();
}
void DeviceMojoMockToMediaAdapter::GetPhotoCapabilities(
GetPhotoCapabilitiesCallback callback) {}
void DeviceMojoMockToMediaAdapter::SetPhotoOptions(
media::mojom::PhotoSettingsPtr settings,
SetPhotoOptionsCallback callback) {}
void DeviceMojoMockToMediaAdapter::TakePhoto(TakePhotoCallback callback) {}
} // namespace video_capture<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>import socket
from .attrtree import AttrTree
from .checks import Checks
config = AttrTree()
# the list of checks
config.install_attr('checks', Checks())
# This is the base granularity (in seconds) for polling
# Each check may then individually be configured to run every N * tick
config.install_attr('base_tick', 60)
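# e.g. with base_tick=60, a check declaring every=5 runs once every 300 seconds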
# Default "every" check parameter, can be overridden on a per-check basis
config.install_attr('default_every', 1)
# Default "error_every" (how often we retry checks that are in error) parameter
# -1 disables the feature (same as regular "every"), can also be overridden
config.install_attr('default_error_every', -1)
# Verbosity level (one of CRITICAL, ERROR, WARNING, INFO, DEBUG)
config.install_attr('verb_level', 'INFO')
# Email addresses to send to when an alert is triggered
config.install_attr('emails.to', [])
# The From: address
config.install_attr('emails.addr_from',
'Picomon <picomon@%s>' % socket.getfqdn())
# The SMTP host, with optional :port suffix
config.install_attr('emails.smtp_host', 'localhost:25')
# The inactive timeout after which to close the SMTP connection
config.install_attr('emails.smtp_keepalive_timeout', 60)
# Timeout after which to retry sending emails after a failure
config.install_attr('emails.smtp_retry_timeout', 60)
# Interval in seconds between global reports when some checks are in error
# 0 disables reports
config.install_attr('emails.report.every', 0)
# Subject template for state change email notifications
# available substitutions:
# - state ("Problem" or "OK")
# - check (check's name, like "CheckDNSRec6")
# - dest (the target of the check ie. an IP or a Host's 'name'
# parameter)
config.install_attr('emails.subject_tpl',
'[DOMAIN] {state}: {check} on {dest}')
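# e.g. with state="Problem", check="CheckDNSRec6" and an illustrative
# dest of "198.51.100.1", this renders as:
#   "[DOMAIN] Problem: CheckDNSRec6 on 198.51.100.1"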
# reports email subject
config.install_attr('emails.report.subject', '[DOMAIN] Picomon error report')
# watchdog error email subject<|fim▁hole|><|fim▁end|> | config.install_attr('emails.watchdog_subject', '[DOMAIN] Picomon stopped') |
<|file_name|>gen.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
from math import *
import sys
import string
k = (sqrt(2.)-1.)*4./3.
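# Cubic Bezier approximation of a quarter circle: the control handles are
# placed at 4/3*(sqrt(2)-1) ~= 0.5523 of the radius.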
chars = []
encoding = []
count = 1
first = 1
def append(s):
chars.append(s)
def rint(x):
return int(round(x))
"""
if x>=0:
return int(x+0.5)
else:
return int(x-0.5)
"""
class vec:
def __init__(self, x, y=0):
if type(x) is type(()):
self.x, self.y = x
else:
self.x = x
self.y = y
def set(self, x, y):
self.__init__(x, y)
def move(self, x, y):
self.x = self.x + x
self.y = self.y + y
def __add__(self, v):
return vec(self.x+v.x, self.y+v.y)
def __sub__(self, v):
return vec(self.x-v.x, self.y-v.y)
def int(self):
return vec(rint(self.x), rint(self.y))
def t(self):
return (self.x, self.y)
class pvec(vec):
def __init__(self, l, a):
self.x = l * cos(a)
self.y = l * sin(a)
pen = vec(0,0)
def moveto(x, y=0):
global first
dx = rint(x-pen.x)
dy = rint(y-pen.y)
if dx!=0:
if dy!=0:
append("\t%i %i rmoveto" % (dx, dy))
else:
append("\t%i hmoveto" % (dx))
elif dy!=0:
append("\t%i vmoveto" % (dy))
elif first:
append("\t0 hmoveto")
first = 0
pen.x = pen.x+dx
    pen.y = pen.y+dy
def rlineto(v):
if v.x!=0:
if v.y!=0:
append("\t%i %i rlineto" % (v.x, v.y))
else:
append("\t%i hlineto" % (v.x))
elif v.y!=0:
append("\t%i vlineto" % (v.y))
def closepath():
append("\tclosepath")
history = []
def movebase(x, y=0):
history.append((x,y))
pen.move(-x, -y)
def moveback():
x, y = history.pop()
pen.move(x, y)
def ellipse(rx, ry = None, half=0):
# rx>0 => counter-clockwise (filled)
# rx<0 => clockwise
if ry==None: ry = abs(rx)
dx1 = rint(k*rx)
dx2 = rx-dx1
dy1 = rint(k*ry)
dy2 = ry-dy1
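    # Split each radius into the Bezier handle length (k*r, rounded) and the
    # remainder, so the rrcurveto deltas below stay integral and sum back to r.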
rx = abs(rx)
moveto(0, -ry)
append("\t%i 0 %i %i 0 %i rrcurveto" % (+dx1, +dx2, +dy2, +dy1))
append("\t0 %i %i %i %i 0 rrcurveto" % (+dy1, -dx2, +dy2, -dx1))
if not half:
append("\t%i 0 %i %i 0 %i rrcurveto" % (-dx1, -dx2, -dy2, -dy1))
append("\t0 %i %i %i %i 0 rrcurveto" % (-dy1, +dx2, -dy2, +dx1))
closepath()
if half:
pen.set(0, ry)
else:
pen.set(0, -ry)
circle = ellipse
def rect(w, h):
moveto(0, 0)
if w>0:
append("\t%i hlineto" % (w))
append("\t%i vlineto" % (h))
append("\t%i hlineto" % (-w))
pen.set(0, h)
else:
append("\t%i vlineto" % (h))
append("\t%i hlineto" % (-w))
append("\t%i vlineto" % (-h))
pen.set(-w, 0)
closepath()
def poly(p):
moveto(0, 0)
prev = vec(0, 0)
for q in p:
rlineto(vec(q)-prev)
prev = vec(q)
closepath()
pen.set(prev.x, prev.y)
def line(w, l, a):
vw = pvec(w*.5, a-pi*.5)
vl = pvec(l, a)
p = vw
moveto(p.x, p.y)
p0 = p
#print '%%wla %i %i %.3f: %.3f %.3f' % (w, l, a, p0.x, p0.y)
p = p+vl
rlineto((p-p0).int())
p0 = p
#print '%%wla %i %i %.3f: %.3f %.3f' % (w, l, a, p0.x, p0.y)
p = p-vw-vw
rlineto((p-p0).int())
p0 = p
#print '%%wla %i %i %.3f: %.3f %.3f' % (w, l, a, p0.x, p0.y)
p = p-vl
#print '%%wla %i %i %.3f: %.3f %.3f' % (w, l, a, p.x, p.y)
rlineto((p-p0).int())
closepath()
pen.set(p.x, p.y)
def begin(name, code, hsb, w):
global first, count, history
history = []
pen.set(0, 0)
append("""\
/uni%04X { %% %s
%i %i hsbw""" % (code+0xE000, name, hsb, w))
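    # code+0xE000 maps every glyph into the Unicode Private Use Area; the loop
    # below pads the Encoding array with /.notdef entries up to that slot.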
i = len(encoding)
while i<code:
encoding.append('dup %i /.notdef put' % (i,))
i = i+1
encoding.append('dup %i /uni%04X put' % (code, code+0xE000))
count = count + 1
first = 1
def end():
append("""\
endchar
} ND""")
########################################
r = 400
s = 375
hsb = 200 # horizontal side bearing
hsb2 = 30
over = 10 # overshoot
width = 2*r+2*over+2*hsb2
########################################
begin('play', 0x01, hsb, width)
poly(( (s,r),
(0, 2*r),))
end()
########################################
w=150
begin('pause', 0x02, hsb, width)
rect(w, 2*r)
movebase(2*w)
rect(w, 2*r)
end()
########################################
begin('stop', 0x03, hsb, width)
rect(665, 720)
end()
########################################
begin('rewind', 0x04, hsb/2, width)
movebase(2*s+15)
poly(( (0, 2*r),
(-s, r),))
movebase(-s-15)
poly(( (0, 2*r),
(-s, r),))
end()
########################################
begin('fast forward', 0x05, hsb/2, width)
poly(( (s,r),
(0, 2*r),))
movebase(s+15)
poly(( (s,r),
(0, 2*r),))
end()
########################################
begin('clock', 0x06, hsb2, width)
movebase(r, r)
circle(r+over)
wc = 65
r0 = r-3*wc
n = 4
movebase(-wc/2, -wc/2)
rect(-wc, wc)
moveback()
for i in range(n):
a = i*2*pi/n
v = pvec(r0, a)
movebase(v.x, v.y)
line(-wc, r-r0, a)
moveback()
hh = 11
mm = 8
line(-50, r*.5, pi/2-2*pi*(hh+mm/60.)/12)
line(-40, r*.9, pi/2-2*pi*mm/60.)
end()
########################################
begin('contrast', 0x07, hsb2, width)
movebase(r, r)
circle(r+over)
circle(-(r+over-80), half=1)
end()
########################################
begin('saturation', 0x08, hsb2, width)
movebase(r, r)
circle(r+over)
circle(-(r+over-80))
v = pvec(160, pi/2)
movebase(v.x, v.y)
circle(80)
moveback()
v = pvec(160, pi/2+pi*2/3)
movebase(v.x, v.y)
circle(80)
moveback()
v = pvec(160, pi/2-pi*2/3)
movebase(v.x, v.y)
circle(80)
end()
########################################
begin('volume', 0x09, 0, 1000)
poly(( (1000, 0),
(1000, 500),))
end()
########################################
begin('brightness', 0x0A, hsb2, width)
movebase(r, r)
circle(150)
circle(-100)
rb = 375
wb = 50
l = 140
n = 8
for i in range(n):
a = i*2*pi/n
v = pvec(l, a)
movebase(v.x, v.y)
line(wb, rb-l, a)
moveback()
end()
########################################
begin('hue', 0x0B, hsb2, width)
movebase(r, r)
circle(r+over)
ellipse(-(322), 166)
movebase(0, 280)
circle(-(60))
end()
<|fim▁hole|>begin('progress [', 0x10, (334-182)/2, 334)
poly(( (182, 0),
(182, 90),
(145, 90),
(145, 550),
(182, 550),
(182, 640),
(0, 640),
))
end()
########################################
begin('progress |', 0x11, (334-166)/2, 334)
rect(166, 640)
end()
########################################
begin('progress ]', 0x12, (334-182)/2, 334)
poly(( (182, 0),
(182, 640),
(0, 640),
(0, 550),
(37, 550),
(37, 90),
(0, 90),
))
end()
########################################
begin('progress .', 0x13, (334-130)/2, 334)
movebase(0, (640-130)/2)
rect(130, 130)
end()
########################################
print """\
%!PS-AdobeFont-1.0: OSD 1.00
%%CreationDate: Sun Jul 22 12:38:28 2001
%
%%EndComments
12 dict begin
/FontInfo 9 dict dup begin
/version (Version 1.00) readonly def
/Notice (This is generated file.) readonly def
/FullName (OSD) readonly def
/FamilyName (OSD) readonly def
/Weight (Regular) readonly def
/ItalicAngle 0.000000 def
/isFixedPitch false def
/UnderlinePosition -133 def
/UnderlineThickness 49 def
end readonly def
/FontName /OSD def
/PaintType 0 def
/StrokeWidth 0 def
/FontMatrix [0.001 0 0 0.001 0 0] def
/FontBBox {0 -10 1000 810} readonly def
/Encoding 256 array"""
print string.join(encoding, '\n')
i = len(encoding)
while i<256:
print 'dup %i /.notdef put' % i
i = i+1
print """\
readonly def
currentdict end
currentfile eexec
dup /Private 15 dict dup begin
/RD{string currentfile exch readstring pop}executeonly def
/ND{noaccess def}executeonly def
/NP{noaccess put}executeonly def
/ForceBold false def
/BlueValues [ -10 0 800 810 640 650 720 730 ] def
/StdHW [ 65 ] def
/StdVW [ 65 ] def
/StemSnapH [ 65 800 ] def
/StemSnapV [ 65 150 ] def
/MinFeature {16 16} def
/password 5839 def
/Subrs 1 array
dup 0 {
return
} NP
ND
2 index
/CharStrings %i dict dup begin""" % count
print """\
/.notdef {
0 400 hsbw
endchar
} ND"""
print string.join(chars, '\n')
print """\
end
end
readonly put
noaccess put
dup/FontName get exch definefont pop
mark currentfile closefile"""<|fim▁end|> | ######################################## |
<|file_name|>index.aot.ts<|end_file_name|><|fim▁begin|>import "./polyfills"
import "self/common/libs/prismjs/prism"
<|fim▁hole|>import { platformBrowser } from "@angular/platform-browser"
import { DemoModuleNgFactory } from "./demo.module.ngfactory"
const platform = platformBrowser()
if (process.env.NODE_ENV === 'production') {
enableProdMode()
}
window.onload = () => platform.bootstrapModuleFactory(DemoModuleNgFactory)<|fim▁end|> | import { enableProdMode } from "@angular/core" |
<|file_name|>libsubstkeyword.py<|end_file_name|><|fim▁begin|>#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2010 Craig J. Anderson
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# $Id$
"""
Provide the SubstKeywords class that will replace keywords in a passed
string with information about the person/marriage/spouse. For example:
foo = SubstKeywords(database, person_handle)
print foo.replace_and_clean(['$n was born on $b.'])
Will return a value such as:
Mary Smith was born on 3/28/1923.
"""
from __future__ import print_function
#------------------------------------------------------------------------
#
# Gramps modules
#
#------------------------------------------------------------------------
from gramps.gen.display.name import displayer as name_displayer
from gramps.gen.datehandler import displayer
from gramps.gen.lib import EventType
from gramps.gen.utils.db import get_birth_or_fallback, get_death_or_fallback
from gramps.gen.constfunc import STRTYPE, cuni
#------------------------------------------------------------------------
#
# Local constants
#
#------------------------------------------------------------------------
class TextTypes():
"""Four enumerations that are used to for the four main parts of a string.
and used for states. Separator is not used in states.
text -> remove or display
remove -> display
"""
separator, text, remove, display = list(range(4))
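    # The numeric order matters: VarString.__update_state() keeps the highest
    # value, so display outranks remove, which outranks text.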
TXT = TextTypes()
#------------------------------------------------------------------------
#
# Formatting classes
#
#------------------------------------------------------------------------
class GenericFormat(object):
"""A Generic parsing class. Will be subclassed by specific format strings
"""
def __init__(self, string_in):
self.string_in = string_in
def _default_format(self, item):
""" The default format if there is no format string """
pass
def is_blank(self, item):
""" if the information is not known (item is None), remove the format
string information from the input string if any.
"""
if item is None:
self.string_in.remove_start_end("(", ")")
return True
return False
def generic_format(self, item, code, uppr, function):
"""the main parsing engine.
Needed are the following: the input string
code - List of one character (string) codes (all lowercase)
uppr - list of one character (string) codes that can be uppercased
each needs to have a lowercase equivalent in code
function - list of functions.
there is a one to one relationship with character codes and functions.
"""
if self.string_in.this != "(":
return self._default_format(item)
self.string_in.step()
main = VarString()
separator = SeparatorParse(self.string_in)
#code given in args
#function given in args
while self.string_in.this and self.string_in.this != ")":
#Check to see if _in.this is in code
to_upper = False
if uppr.find(self.string_in.this) != -1:
#and the result should be uppercased.
to_upper = True
where = code.find(self.string_in.this.lower())
else:
where = code.find(self.string_in.this)
if where != -1:
self.string_in.step()
tmp = function[where]()
if to_upper:
tmp = tmp.upper()
if tmp == "" or tmp is None:
main.add_remove()
elif isinstance(tmp, VarString): #events cause this
main.extend(tmp)
else:
main.add_variable(tmp)
elif separator.is_a():
main.add_separator(separator.parse_format())
else:
main.add_text(self.string_in.parse_format())
if self.string_in.this == ")":
self.string_in.step()
return main
#------------------------------------------------------------------------
# Name Format strings
#------------------------------------------------------------------------
class NameFormat(GenericFormat):
""" The name format class.
If no format string, the name is displayed as per preference options
otherwise, parse through a format string and put the name parts in
"""
def get_name(self, person):
""" A helper method for retrieving the person's name """
if person:
return person.get_primary_name()
return None
def _default_format(self, name):
""" display the name as set in preferences """
return name_displayer.sorted_name(name)
def parse_format(self, name):
""" Parse the name """
if self.is_blank(name):
return
def common():
""" return the common name of the person """
return (name.get_call_name() or
name.get_first_name().split(' ')[0])
code = "tfcnxslg"
upper = code.upper()
function = [name.get_title, #t
name.get_first_name, #f
name.get_call_name, #c
name.get_nick_name, #n
common, #x
name.get_suffix, #s
name.get_surname, #l
name.get_family_nick_name #g
]
return self.generic_format(name, code, upper, function)
#------------------------------------------------------------------------
# Date Format strings
#------------------------------------------------------------------------
class DateFormat(GenericFormat):
""" The date format class.
If no format string, the date is displayed as per preference options
otherwise, parse through a format string and put the date parts in
"""
def get_date(self, event):
""" A helper method for retrieving a date from an event """
if event:
return event.get_date_object()
return None
def _default_format(self, date):
return displayer.display(date)
def __count_chars(self, char, max_amount):
""" count the year/month/day codes """
count = 1 #already have seen/passed one
while count < max_amount and self.string_in.this == char:
self.string_in.step()
count = count +1
return count
def parse_format(self, date):
""" Parse the name """
if self.is_blank(date):
return
def year():
""" The year part only """
year = cuni(date.get_year())
count = self.__count_chars("y", 4)
if year == "0":
return
if count == 1: #found 'y'
if len(year) == 1:
return year
elif year[-2] == "0":
return year[-1]
else:
return year[-2:]
elif count == 2: #found 'yy'
tmp = "0" + year
return tmp[-2:]
elif count == 3: #found 'yyy'
if len(year) > 2:
return year
else:
tmp = "00" + year
return tmp[-3:]
else: #count == 4 #found 'yyyy'
tmp = "000" + year
return tmp[-4:]
def month(char_found = "m"):
""" The month part only """
month = cuni(date.get_month())
count = self.__count_chars(char_found, 4)
if month == "0":
return
if count == 1:
return month
elif count == 2: #found 'mm'
tmp = "0" + month
return tmp[-2:]
elif count == 3: #found 'mmm'
return displayer.short_months[int(month)]
else: #found 'mmmm'
return displayer.long_months[int(month)]
def month_up():
return month("M").upper()
def day():
""" The day part only """
day = cuni(date.get_day())
count = self.__count_chars("d", 2)
if day == "0": #0 means not defined!
return
if count == 1: #found 'd'
return day
else: #found 'dd'
tmp = "0" + day
return tmp[-2:]
def modifier():
#ui_mods taken from date.py def lookup_modifier(self, modifier):
ui_mods = ["", _("before"), _("after"), _("about"),
"", "", ""]
return ui_mods[date.get_modifier()].capitalize()
code = "ymdMo"
upper = "O"
function = [year, month, day, month_up, modifier]
return self.generic_format(date, code, upper, function)
#------------------------------------------------------------------------
# Place Format strings
#------------------------------------------------------------------------
class PlaceFormat(GenericFormat):
""" The place format class.
If no format string, the place is displayed as per preference options
otherwise, parse through a format string and put the place parts in
"""
def get_place(self, database, event):
""" A helper method for retrieving a place from an event """
if event:
bplace_handle = event.get_place_handle()
if bplace_handle:
return database.get_place_from_handle(bplace_handle)
return None
def _default_format(self, place):
return place.get_title()
def parse_format(self, place):
""" Parse the place """
if self.is_blank(place):
return
code = "elcuspnitxy"
upper = code.upper()
function = [place.get_main_location().get_street,
place.get_main_location().get_locality,
place.get_main_location().get_city,
place.get_main_location().get_county,
place.get_main_location().get_state,
place.get_main_location().get_postal_code,
place.get_main_location().get_country,
place.get_main_location().get_parish,
place.get_title,
place.get_longitude,
place.get_latitude
]
return self.generic_format(place, code, upper, function)
#------------------------------------------------------------------------
# Event Format strings
#------------------------------------------------------------------------
class EventFormat(GenericFormat):
""" The event format class.
If no format string, the event description is displayed
otherwise, parse through the format string and put in the parts
dates and places can have their own format strings
"""
def __init__(self, database, _in):
self.database = database
GenericFormat.__init__(self, _in)
def _default_format(self, event):
if event is None:
return
else:
return event.get_description()
def __empty_format(self):
""" clear out a sub format string """
self.string_in.remove_start_end("(", ")")
return
def __empty_attrib(self):
""" clear out an attribute name """
self.string_in.remove_start_end("[", "]")
return
def parse_format(self, event):
""" Parse the event format string.
let the date or place classes handle any sub-format strings """
if self.is_blank(event):
return
def format_date():
""" start formatting a date in this event """
date_format = DateFormat(self.string_in)
return date_format.parse_format(date_format.get_date(event))
def format_place():
""" start formatting a place in this event """
place_format = PlaceFormat(self.string_in)
place = place_format.get_place(self.database, event)
return place_format.parse_format(place)
def format_attrib():
""" Get the name and then get the attributes value """
            #Event's Attribute
attrib_parse = AttributeParse(self.string_in)
#self.string_in.step()
name = attrib_parse.get_name()
if name:
return attrib_parse.get_attribute(event.get_attribute_list(),
name)
else:
return
code = "ndDia"
upper = ""
function = [event.get_description,
format_date,
format_place,
event.get_gramps_id,
format_attrib
]
return self.generic_format(event, code, upper, function)
def parse_empty(self):
""" remove the format string """
code = "dDa"
function = [self.__empty_format, self.__empty_format,
self.__empty_attrib]
return self.generic_format(None, code, "", function)
#------------------------------------------------------------------------
# Gallery Format strings
#------------------------------------------------------------------------
class GalleryFormat(GenericFormat):
""" The gallery format class.
If no format string, the photo description is displayed
otherwise, parse through the format string and put in the parts
dates (no places) can have their own format strings
"""
def __init__(self, database, _in):
self.database = database
GenericFormat.__init__(self, _in)
def _default_format(self, photo):
if photo is None:
return
else:
return photo.get_description()
def __empty_format(self):
""" clear out a sub format string """
self.string_in.remove_start_end("(", ")")
return
def __empty_attrib(self):
""" clear out an attribute name """
self.string_in.remove_start_end("[", "]")
return
def parse_format(self, photo):
""" Parse the photo format string.
let the date or place classes handle any sub-format strings """
if self.is_blank(photo):
return
def format_date():
""" start formatting a date in this photo """
date_format = DateFormat(self.string_in)
return date_format.parse_format(date_format.get_date(photo))
def format_attrib():
""" Get the name and then get the attributes value """
            #photo's Attribute
attrib_parse = AttributeParse(self.string_in)
name = attrib_parse.get_name()
if name:
return attrib_parse.get_attribute(photo.get_attribute_list(),
name)
else:
return
code = "ndia"
upper = ""
function = [photo.get_description,
format_date,
photo.get_gramps_id,
format_attrib
]
return self.generic_format(photo, code, upper, function)
def parse_empty(self):
""" remove the format string """
code = "da"
function = [self.__empty_format, self.__empty_attrib]
return self.generic_format(None, code, "", function)
#------------------------------------------------------------------------
#
# ConsumableString - The Input string class
#
#------------------------------------------------------------------------
class ConsumableString(object):
"""
A simple string implementation with extras to help with parsing.
    This will contain the string to be parsed (the 'string in').
There will only be one of these for each processed line.
"""
def __init__(self, string):
self.__this_string = string
self.__setup()
def __setup(self):
""" update class attributes this and next """
if len(self.__this_string) > 0:
self.this = self.__this_string[0]
else:
self.this = None
if len(self.__this_string) > 1:
self.next = self.__this_string[1]
else:
self.next = None
def step(self):
""" remove the first char from the string """
self.__this_string = self.__this_string[1:]
self.__setup()
return self.this
def step2(self):
""" remove the first two chars from the string """
self.__this_string = self.__this_string[2:]
self.__setup()
return self.this
def remove_start_end(self, start, end):
""" Removes a start, end block from the string if there """
if self.this == start:
self.text_to_next(end)
def __get_a_char_of_text(self):
""" Removes one char of TEXT from the string and returns it. """
if self.this == "\\":
if self.next == None:
rtrn = "\\"
else:
rtrn = self.next
self.step2()
else:
rtrn = self.this
self.step()
return rtrn
def text_to_next(self, char):
""" return/remove a format strings from here """
new_str = ""
while self.this is not None and self.this != char:
new_str += self.__get_a_char_of_text()
if self.this == char:
self.step()
return new_str
def is_a(self):
return True
def parse_format(self):
rtrn = self.__get_a_char_of_text()
if rtrn:
return rtrn
return ''
#------------------------------------------------------------------------
#
# VarString class - The Output string class
#
#------------------------------------------------------------------------
class VarString(object):
"""
The current state of the entire string (integer from TextTypes)
    A list to hold tuple objects (integer from TextTypes, string).
    This will contain the string that will be displayed (the 'string out').
    It is used for groups and format strings.
"""
def __init__(self, start_state = TXT.remove):
self.state = start_state #overall state of the string.
self._text = [] #list of tuples (TXT.?, string)
def __update_state(self, new_status):
if new_status > self.state:
self.state = new_status
def add_text(self, text):
self._text.append((TXT.text, text))
def add_variable(self, text):
self.state = TXT.display
self._text.append((TXT.text, text))
def add_remove(self):
self.__update_state(TXT.remove)
self._text.append((TXT.remove, ""))
def add_separator(self, text):
self._text.append((TXT.separator, text))
def get_final(self):
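        # Merge the token list into a single string, dropping separators that
        # sit next to removed (unknown) variables so "a, b" does not collapse
        # into a dangling "a, " when b is empty.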
#if self.state == TXT.remove:
# return (TXT.remove, "")
curr_string = ""
index = 0
while index < len(self._text):
if self._text[index][0] == TXT.text:
curr_string += self._text[index][1]
index = index + 1
continue #while self._text:
if index +1 == len(self._text):
if self._text[index][0] == TXT.separator and curr_string != '':
curr_string += self._text[index][1]
index = index + 1
break #while self._text:
type_0_1 = (self._text[index][0], self._text[index+1][0])
#if type_0_1 == (TXT.remove, TXT.remove):
# pass
if type_0_1 == (TXT.remove, TXT.separator):
index = index + 1
#elif type_0_1 == (TXT.remove, TXT.text):
# pass
elif type_0_1 == (TXT.separator, TXT.remove):
index = index + 1
#elif type_0_1 == (TXT.separator, TXT.separator):
# pass
elif type_0_1 == (TXT.separator, TXT.text):
curr_string += self._text[index][1]
#else:
# print "#oops Should never get here."
index = index + 1
#return what we have
return (self.state, curr_string)
print("===" + str(self.state) + " '" + str(curr_string) + "'")
def extend(self, acquisition):
"""
acquisition is a VarString object
Merge the content of acquisition into this place.
"""
self.__update_state(acquisition.state)
if acquisition.state != TXT.display:
#The sub {} was TXT.remove. We don't want to simply ignore it.
self.add_remove() #add a remove que here to note it.
return
self._text.extend(acquisition._text)
#------------------------------------------------------------------------
#
# Parsers
#
#------------------------------------------------------------------------
#------------------------------------------------------------------------
# SeparatorParse
#------------------------------------------------------------------------
class SeparatorParse(object):
""" parse out a separator """
def __init__(self, consumer_in):
self._in = consumer_in
def is_a(self):
return self._in.this == "<"
def parse_format(self):
if not self.is_a():
return
""" get the text and return it """
self._in.step()
return self._in.text_to_next(">")
#------------------------------------------------------------------------
# AttributeParse
#------------------------------------------------------------------------
class AttributeParse(object):
""" Parse attributes """
def __init__(self, consumer_in):
self._in = consumer_in
def get_name(self):
""" Gets a name inside a [] block """
if self._in.this != "[":
return
self._in.step()
return self._in.text_to_next("]")
def get_attribute(self, attrib_list, attrib_name):
""" Get an attribute by name """
if attrib_name == "":
return
for attr in attrib_list:
if str(attr.get_type()) == attrib_name:
return str(attr.get_value())
return
def is_a(self):
""" check """
return self._in.this == "a"
def parse_format(self, attrib_list):
""" Get the attribute and add it to the string out """
name = self.get_name()
return self.get_attribute(attrib_list, name)
#------------------------------------------------------------------------
# VariableParse
#------------------------------------------------------------------------
class VariableParse(object):
""" Parse the individual variables """
def __init__(self, friend, database, consumer_in):
self.friend = friend
self.database = database
self._in = consumer_in
def is_a(self):
""" check """
return self._in.this == "$" and self._in.next is not None and \
"nsijbBdDmMvVauetTpP".find(self._in.next) != -1
def get_event_by_type(self, marriage, e_type):
""" get an event from a type """
if marriage is None:
return None
for e_ref in marriage.get_event_ref_list():
if not e_ref:
continue
event = self.friend.database.get_event_from_handle(e_ref.ref)
if event.get_type() == e_type:
return event
return None
def get_event_by_name(self, person, event_name):
""" get an event from a name. """
if not person:
return None
for e_ref in person.get_event_ref_list():
if not e_ref:
continue
event = self.friend.database.get_event_from_handle(e_ref.ref)
if event.get_type().is_type(event_name):
return event
return None
def empty_item(self, item):
""" return false if there is a valid item(date or place).
Otherwise
add a TXT.remove marker in the output string
remove any format strings from the input string
"""
if item is not None:
return False
self._in.remove_start_end("(", ")")
return True
def empty_attribute(self, person):
""" return false if there is a valid person.
Otherwise
add a TXT.remove marker in the output string
remove any attribute name from the input string
"""
if person:
return False
self._in.remove_start_end("[", "]")
return True
def __parse_date(self, event):
""" sub to process a date
Given an event, get the date object, process the format,
return the result """
date_f = DateFormat(self._in)
date = date_f.get_date(event)
if self.empty_item(date):
return
return date_f.parse_format(date)
def __parse_place(self, event):
""" sub to process a date
Given an event, get the place object, process the format,
return the result """
place_f = PlaceFormat(self._in)
place = place_f.get_place(self.database, event)
if self.empty_item(place):
return
return place_f.parse_format(place)
def __parse_name(self, person):
name_format = NameFormat(self._in)
name = name_format.get_name(person)
return name_format.parse_format(name)
def __parse_id(self, first_class_object):
if first_class_object is not None:<|fim▁hole|> return first_class_object.get_gramps_id()
else:
return
def __parse_event(self, person, attrib_parse):
event = self.get_event_by_name(person, attrib_parse.get_name())
event_f = EventFormat(self.database, self._in)
if event:
return event_f.parse_format(event)
else:
event_f.parse_empty()
return
def __get_photo(self, person_or_marriage):
""" returns the first photo in the media list or None """
media_list = person_or_marriage.get_media_list()
for media_ref in media_list:
media_handle = media_ref.get_reference_handle()
media = self.database.get_object_from_handle(media_handle)
mime_type = media.get_mime_type()
if mime_type and mime_type.startswith("image"):
return media
return None
def __parse_photo(self, person_or_marriage):
photo_f = GalleryFormat(self.database, self._in)
if person_or_marriage is None:
return photo_f.parse_empty()
photo = self.__get_photo(person_or_marriage)
if photo:
return photo_f.parse_format(photo)
else:
return photo_f.parse_empty()
def parse_format(self):
"""Parse the $ variables. """
if not self.is_a():
return
attrib_parse = AttributeParse(self._in)
next_char = self._in.next
self._in.step2()
if next_char == "n":
#Person's name
return self.__parse_name(self.friend.person)
elif next_char == "s":
            #Spouse's name
return self.__parse_name(self.friend.spouse)
elif next_char == "i":
#Person's Id
return self.__parse_id(self.friend.person)
elif next_char == "j":
#Marriage Id
return self.__parse_id(self.friend.family)
elif next_char == "b":
#Person's Birth date
if self.empty_item(self.friend.person):
return
return self.__parse_date(
get_birth_or_fallback(self.friend.database, self.friend.person))
elif next_char == "d":
#Person's Death date
if self.empty_item(self.friend.person):
return
return self.__parse_date(
get_death_or_fallback(self.friend.database, self.friend.person))
elif next_char == "m":
#Marriage date
if self.empty_item(self.friend.family):
return
return self.__parse_date(
self.get_event_by_type(self.friend.family,
EventType.MARRIAGE))
elif next_char == "v":
#Divorce date
if self.empty_item(self.friend.family):
return
return self.__parse_date(
self.get_event_by_type(self.friend.family,
EventType.DIVORCE))
elif next_char == "T":
#Todays date
date_f = DateFormat(self._in)
from gramps.gen.lib.date import Today
date = Today()
if self.empty_item(date):
return
return date_f.parse_format(date)
elif next_char == "B":
#Person's birth place
if self.empty_item(self.friend.person):
return
return self.__parse_place(
get_birth_or_fallback(self.friend.database, self.friend.person))
elif next_char == "D":
#Person's death place
if self.empty_item(self.friend.person):
return
return self.__parse_place(
get_death_or_fallback(self.friend.database, self.friend.person))
elif next_char == "M":
#Marriage place
if self.empty_item(self.friend.family):
return
return self.__parse_place(
self.get_event_by_type(self.friend.family,
EventType.MARRIAGE))
elif next_char == "V":
#Divorce place
if self.empty_item(self.friend.family):
return
return self.__parse_place(
self.get_event_by_type(self.friend.family,
EventType.DIVORCE))
elif next_char == "a":
            #Person's Attribute
if self.empty_attribute(self.friend.person):
return
return attrib_parse.parse_format(
self.friend.person.get_attribute_list())
elif next_char == "u":
            #Marriage Attribute
if self.empty_attribute(self.friend.family):
return
return attrib_parse.parse_format(
self.friend.family.get_attribute_list())
elif next_char == "e":
#person event
return self.__parse_event(self.friend.person, attrib_parse)
elif next_char == "t":
#person event
return self.__parse_event(self.friend.family, attrib_parse)
elif next_char == 'p':
#photo for the person
return self.__parse_photo(self.friend.person)
elif next_char == 'P':
#photo for the marriage
return self.__parse_photo(self.friend.family)
#------------------------------------------------------------------------
#
# SubstKeywords
#
#------------------------------------------------------------------------
class SubstKeywords(object):
"""Accepts a person/family with format lines and returns a new set of lines
using variable substitution to make it.
The individual variables are defined with the classes that look for them.
Needed:
Database object
person_handle
This will be the center person for the display
family_handle
this will specify the specific family/spouse to work with.
If none given, then the first/preferred family/spouse is used
"""
def __init__(self, database, person_handle, family_handle=None):
"""get the person and find the family/spouse to use for this display"""
self.database = database
self.person = database.get_person_from_handle(person_handle)
self.family = None
self.spouse = None
self.line = None #Consumable_string - set below
if self.person is None:
return
fam_hand_list = self.person.get_family_handle_list()
if fam_hand_list:
if family_handle in fam_hand_list:
self.family = database.get_family_from_handle(family_handle)
else:
#Error. fam_hand_list[0] below may give wrong marriage info.
#only here because of OLD specifications. Specs read:
# * $S/%S
# Displays the name of the person's preferred ...
# 'preferred' means FIRST.
#The first might not be the correct marriage to display.
#else: clause SHOULD be removed.
self.family = database.get_family_from_handle(fam_hand_list[0])
father_handle = self.family.get_father_handle()
mother_handle = self.family.get_mother_handle()
self.spouse = None
if father_handle == person_handle:
if mother_handle:
self.spouse = database.get_person_from_handle(mother_handle)
else:
if father_handle:
self.spouse = database.get_person_from_handle(father_handle)
def __parse_line(self):
"""parse each line of text and return the new displayable line
There are four things we can find here
A {} group which will make/end as needed.
A <> separator
A $ variable - Handled separately
or text
"""
stack_var = []
curr_var = VarString(TXT.text)
#First we are going take care of all variables/groups
#break down all {} (groups) and $ (vars) into either
#(TXT.text, resulting_string) or (TXT.remove, '')
variable = VariableParse(self, self.database, self.line) # $
while self.line.this:
if self.line.this == "{":
#Start of a group
#push what we have onto the stack
stack_var.append(curr_var)
#Setup
curr_var = VarString()
#step
self.line.step()
elif self.line.this == "}" and len(stack_var) > 0: #End of a group
#add curr to what is on the (top) stack and pop into current
#or pop the stack into current and add TXT.remove
direction = curr_var.state
if direction == TXT.display:
#add curr onto the top slot of the stack
stack_var[-1].extend(curr_var)
#pop what we have on the stack
curr_var = stack_var.pop()
if direction == TXT.remove:
#add remove que
curr_var.add_remove()
#step
self.line.step()
elif variable.is_a(): # $ (variables)
rtrn = variable.parse_format()
if rtrn is None:
curr_var.add_remove()
elif isinstance(rtrn, VarString):
curr_var.extend(rtrn)
else:
curr_var.add_variable(rtrn)
elif self.line.this == "<": # separator
self.line.step()
curr_var.add_separator(self.line.text_to_next(">"))
else: #regular text
curr_var.add_text(self.line.parse_format())
#the stack is for groups/subgroup and may contain items
#if the user does not close his/her {}
#squash down the stack
while stack_var:
direction = curr_var.state
if direction == TXT.display:
#add curr onto the top slot of the stack
stack_var[-1].extend(curr_var)
#pop what we have on the stack
curr_var = stack_var.pop()
if direction == TXT.remove:
#add remove que
curr_var.add_remove()
#step
self.line.step()
#return what we have
return curr_var.get_final()
def __main_level(self):
#Check only if the user wants to not display the line if TXT.remove
remove_line_tag = False
if self.line.this == "-":
remove_line_tag = True
self.line.step()
state, line = self.__parse_line()
if state is TXT.remove and remove_line_tag:
return None
return line
def replace_and_clean(self, lines):
"""
return a new array of lines with all of the substitutions done
"""
new = []
for this_line in lines:
if this_line == "":
new.append(this_line)
continue
#print "- ", this_line
self.line = ConsumableString(this_line)
new_line = self.__main_level()
#print "+ ", new_line
if new_line is not None:
new.append(new_line)
if new == []:
new = [""]
return new
#Acts 20:35 (New International Version)
#In everything I did, I showed you that by this kind of hard work
#we must help the weak, remembering the words the Lord Jesus himself
#said: 'It is more blessed to give than to receive.'
if __name__ == '__main__':
#-------------------------------------------------------------------------
#
# For Testing everything except VariableParse, SubstKeywords and EventFormat
# apply it as a script:
#
# ==> in command line do "PYTHONPATH=??? python libsubstkeyword.py"
#
# You will need to put in your own path to the src directory
#
#-------------------------------------------------------------------------
# pylint: disable-msg=C0103
def combinations(c, r):
# combinations('ABCD', 2) --> AB AC AD BC BD CD
# combinations(range(4), 3) --> 012 013 023 123
pool = tuple(range(c))
n = len(pool)
if r > n:
return
indices = list(range(r))
yield tuple(pool[i] for i in indices)
while True:
for i in reversed(list(range(r))):
if indices[i] != i + n - r:
break
else:
return
indices[i] += 1
for j in range(i+1, r):
indices[j] = indices[j-1] + 1
yield tuple(pool[i] for i in indices)
def main_level_test(_in, testing_class, testing_what):
"""This is a mini def __main_level(self):
"""
main = LevelParse(_in)
sepa = SeparatorParse(_in)
test = testing_class(_in)
while _in.this:
if main.is_a():
main.parse_format(_in)
elif sepa.is_a():
sepa.parse_format(main)
elif _in.this == "$":
_in.step()
main.add_variable(
test.parse_format(testing_what))
else:
_in.parse_format(main)
main.combine_all()
state, line = main.get_string()
if state is TXT.remove:
return None
else:
return line
from gramps.gen.lib.date import Date
y_or_n = ()
date_to_test = Date()
def date_set():
date_to_test.set_yr_mon_day(
1970 if 0 in y_or_n else 0,
9 if 1 in y_or_n else 0,
3 if 2 in y_or_n else 0
)
#print date_to_test
line_in = "<Z>$(yyy) <a>$(<Z>Mm)<b>$(mm){<c>$(d)}{<d>$(yyyy)<e>}<f>$(yy)"
consume_str = ConsumableString(line_in)
print(line_in)
print("#None are known")
tmp = main_level_test(consume_str, DateFormat, date_to_test)
print(tmp)
print("Good" if tmp == " " else "!! bad !!")
print()
print()
print("#One is known")
answer = []
for y_or_n in combinations(3, 1):
date_set()
consume_str = ConsumableString(line_in)
tmp = main_level_test(consume_str, DateFormat, date_to_test)
print(tmp)
answer.append(tmp)
print("Good" if answer == [
"1970 d1970f70",
" a99b09",
" c3"
] else "!! bad !!")
print()
print()
print("#Two are known")
answer = []
for y_or_n in combinations(3, 2):
date_set()
consume_str = ConsumableString(line_in)
tmp = main_level_test(consume_str, DateFormat, date_to_test)
print(tmp)
answer.append(tmp)
print("Good" if answer == [
"1970 a99b09d1970f70",
"1970 c3d1970f70",
" a99b09c3"
] else "!! bad !!")
print()
print()
print("#All are known")
answer = []
y_or_n = (0, 1, 2)
date_set()
consume_str = ConsumableString(line_in)
tmp = main_level_test(consume_str, DateFormat, date_to_test)
print(tmp)
answer.append(tmp)
print("Good" if answer == ["1970 a99b09c3d1970f70"
] else "!! bad !!")
import sys
sys.exit()
print()
print()
print("=============")
print("=============")
from gramps.gen.lib.name import Name
y_or_n = ()
name_to_test = Name()
def name_set():
#code = "tfcnxslg"
name_to_test.set_call_name("Bob" if 0 in y_or_n else "")
name_to_test.set_title("Dr." if 1 in y_or_n else "")
name_to_test.set_first_name("Billy" if 2 in y_or_n else "")
name_to_test.set_nick_name("Buck" if 3 in y_or_n else "")
name_to_test.set_suffix("IV" if 4 in y_or_n else "")
#now can we put something in for the last name?
name_to_test.set_family_nick_name("The Clubs" if 5 in y_or_n else "")
line_in = "{$(c)$(t)<1>{<2>$(f)}{<3>$(n){<0> <0>}<4>$(x)}$(s)<5>$(l)<6>$(g)<0>"
consume_str = ConsumableString(line_in)
print()
print()
print(line_in)
print("#None are known")
tmp = main_level_test(consume_str, NameFormat, name_to_test)
print(tmp)
print("Good" if tmp == None else "!! bad !!")
print()
print()
print("#Two are known")
answer = []
for y_or_n in combinations(6, 2):
name_set()
consume_str = ConsumableString(line_in)
tmp = main_level_test(consume_str, NameFormat, name_to_test)
print(tmp)
answer.append(tmp)
print("Good" if answer == [
"BobDr.4Bob",
"Bob2Billy4Bob",
"Bob3Buck4Bob",
"Bob4BobIV",
"Bob4BobThe Clubs",
"Dr.2Billy4Billy",
"Dr.3Buck",
"Dr.1IV",
"Dr.6The Clubs",
"Billy3Buck4Billy",
"Billy4BillyIV",
"Billy4BillyThe Clubs",
"BuckIV",
"BuckThe Clubs",
"IV6The Clubs"
] else "!! bad !!")
print()
print()
print("#All are known")
y_or_n = (0, 1, 2, 3, 4, 5)
name_set()
consume_str = ConsumableString(line_in)
answer = main_level_test(consume_str, NameFormat, name_to_test)
print(answer)
print("Good" if answer == "BobDr.2Billy3Buck4BobIV6The Clubs" \
else "!! bad !!")
print()
print()
print("=============")
print("=============")
from gramps.gen.lib.place import Place
y_or_n = ()
place_to_test = Place()
def place_set():
#code = "elcuspnitxy"
main_loc = place_to_test.get_main_location()
main_loc.set_street(
"Lost River Ave." if 0 in y_or_n else ""
)
main_loc.set_locality(
"Second district" if 1 in y_or_n else ""
)
main_loc.set_city(
"Arco" if 2 in y_or_n else ""
)
main_loc.set_county(
"Butte" if 3 in y_or_n else ""
)
main_loc.set_state(
"Idaho" if 4 in y_or_n else ""
)
main_loc.set_postal_code(
"83213" if 5 in y_or_n else ""
)
main_loc.set_country(
"USA" if 6 in y_or_n else ""
)
main_loc.set_parish(
"St Anns" if 7 in y_or_n else ""
)
place_to_test.set_title(
"Atomic City" if 8 in y_or_n else ""
)
place_to_test.set_longitude(
"N43H38'5\"N" if 9 in y_or_n else ""
)
place_to_test.set_latitude(
"W113H18'5\"W" if 10 in y_or_n else ""
)
#code = "txy"
line_in = "$(e)<1>{<2>$(l) <3> $(c)<4><0><5>{$(s)<6>$(p)<7>" + \
"{<1>$(n)<2>}<3>$(i<0>)<4>}<5>$(t)<6>$(x)<7>}<8>$(y)"
consume_str = ConsumableString(line_in)
print()
print()
print(line_in)
print("#None are known")
tmp = main_level_test(consume_str, PlaceFormat, place_to_test)
print(tmp)
print("Good" if tmp == "" else "!! bad !!")
print()
print()
print("#Three are known (string lengths only)")
answer = []
for y_or_n in combinations(11, 4):
place_set()
consume_str = ConsumableString(line_in)
tmp = main_level_test(consume_str, PlaceFormat, place_to_test)
#print tmp
answer.append(len(tmp))
print(answer)
print("Good" if answer == [38, 44, 44, 42, 46, 50, 49, 50, 40, 40, 38, 42,
46, 45, 46, 46, 44, 48, 52, 51, 52, 44, 48, 52, 51, 52, 46, 50, 49, 50,
54, 53, 54, 57, 58, 57, 28, 28, 26, 30, 34, 33, 34, 34, 32, 36, 40, 39,
40, 32, 36, 40, 39, 40, 34, 38, 37, 38, 42, 41, 42, 45, 46, 45, 30, 28,
32, 36, 35, 36, 28, 32, 36, 35, 36, 30, 34, 33, 34, 38, 37, 38, 41, 42,
41, 34, 38, 42, 41, 42, 36, 40, 39, 40, 44, 43, 44, 47, 48, 47, 36, 40,
39, 40, 44, 43, 44, 47, 48, 47, 42, 41, 42, 45, 46, 45, 49, 50, 49, 53,
28, 28, 26, 30, 34, 33, 34, 34, 32, 36, 40, 39, 40, 32, 36, 40, 39, 40,
34, 38, 37, 38, 42, 41, 42, 45, 46, 45, 30, 28, 32, 36, 35, 36, 28, 32,
36, 35, 36, 30, 34, 33, 34, 38, 37, 38, 41, 42, 41, 34, 38, 42, 41, 42,
36, 40, 39, 40, 44, 43, 44, 47, 48, 47, 36, 40, 39, 40, 44, 43, 44, 47,
48, 47, 42, 41, 42, 45, 46, 45, 49, 50, 49, 53, 19, 17, 21, 25, 24, 25,
17, 21, 25, 24, 25, 19, 23, 22, 23, 27, 26, 27, 30, 31, 30, 23, 27, 31,
30, 31, 25, 29, 28, 29, 33, 32, 33, 36, 37, 36, 25, 29, 28, 29, 33, 32,
33, 36, 37, 36, 31, 30, 31, 34, 35, 34, 38, 39, 38, 42, 19, 23, 27, 26,
27, 21, 25, 24, 25, 29, 28, 29, 32, 33, 32, 21, 25, 24, 25, 29, 28, 29,
32, 33, 32, 27, 26, 27, 30, 31, 30, 34, 35, 34, 38, 27, 31, 30, 31, 35,
34, 35, 38, 39, 38, 33, 32, 33, 36, 37, 36, 40, 41, 40, 44, 33, 32, 33,
36, 37, 36, 40, 41, 40, 44, 38, 39, 38, 42, 46] else "!! bad !!")<|fim▁end|> | |
<|file_name|>vote.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3
# Copyright (c) 2015 Davide Gessa
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from libcontractvm import Wallet, WalletExplorer, ConsensusManager
from forum import ForumManager
import sys
import time
consMan = ConsensusManager.ConsensusManager ()
consMan.bootstrap ("http://127.0.0.1:8181")
wallet = WalletExplorer.WalletExplorer (wallet_file='test.wallet')
srMan = ForumManager.ForumManager (consMan, wallet=wallet)
while True:
pollid = input ('Insert the id of the poll: ')
choice = input ('Insert the choice for the poll: ')
<|fim▁hole|><|fim▁end|> | try:
print ('Broadcasted:', srMan.vote (pollid, choice))
	except Exception:
print ('Error.') |
<|file_name|>iterar.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3
# -*- coding: utf-8 -*-<|fim▁hole|>from random import randint
from argparse import ArgumentParser
""" Exemplo de retorno de carro e passagem de argumentos. """
parser = ArgumentParser(description='Exemplo de retorno de carro e passagem de argumentos')
parser.add_argument('-s',
action='store',
type=float,
dest='segundos',
default=0.0,
                    help='Print interval in seconds, default: 0.0 seconds')
parser.add_argument('-n',
action='store',
type=int,
dest='numero',
default=100,
                    help='Number of iterations, default: 100 iterations')
args = parser.parse_args()
# the parameters are captured as attributes of the args object, according to
# the dest parameter passed to parser.add_argument()
Q = args.numero
# system('clear')
for i in range(1, Q + 1):
sleep(args.segundos)
    # carriage return so the running count prints in place, like a progress bar.
    # the print must start with the carriage return (\r) and pass flush=True.
    # the format pads the number with leading zeros to the character width of Q.
print('\r{:0>{}}'.format(i, len(str(Q))), end="", flush=True)
if i == randint(1, Q):
        # random draw
        print(' -> Number drawn')
else:
print('\nEND OF LINE.')<|fim▁end|> |
from time import sleep |