prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k) |
---|---|
<|file_name|>0004_auto_20150623_1623.py<|end_file_name|><｜fim▁begin｜># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('server', '0003_auto_20150612_1123'),
]<｜fim▁hole｜>
operations = [
migrations.RemoveField(
model_name='apikey',
name='read_only',
),
migrations.AlterField(
model_name='condition',
name='machine',
field=models.ForeignKey(related_name='conditions', to='server.Machine'),
),
migrations.AlterField(
model_name='fact',
name='machine',
field=models.ForeignKey(related_name='facts', to='server.Machine'),
),
migrations.AlterField(
model_name='historicalfact',
name='machine',
field=models.ForeignKey(related_name='historical_facts', to='server.Machine'),
),
migrations.AlterField(
model_name='pendingappleupdate',
name='machine',
field=models.ForeignKey(related_name='pending_apple_updates', to='server.Machine'),
),
migrations.AlterField(
model_name='pendingupdate',
name='machine',
field=models.ForeignKey(related_name='pending_updates', to='server.Machine'),
),
migrations.AlterField(
model_name='plugin',
name='name',
field=models.CharField(unique=True, max_length=512),
),
]<｜fim▁end｜> | |
<|file_name|>E0687.rs<|end_file_name|><｜fim▁begin｜>#![allow(warnings)]
#![feature(in_band_lifetimes)]
fn foo(x: fn(&'a u32)) {} //~ ERROR must be explicitly
fn bar(x: &Fn(&'a u32)) {} //~ ERROR must be explicitly
fn baz(x: fn(&'a u32), y: &'a u32) {} //~ ERROR must be explicitly
<｜fim▁hole｜>impl Foo<'a> {
fn bar(&self, x: fn(&'a u32)) {} //~ ERROR must be explicitly
}
fn main() {}<｜fim▁end｜> | struct Foo<'a> { x: &'a u32 }
|
<|file_name|>ValidateProjectIT.java<|end_file_name|><｜fim▁begin｜>package acceptance;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import utils.CommandStatus;
import utils.TemporaryDigdagServer;
import java.nio.file.Path;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.is;
import static utils.TestUtils.copyResource;
import static utils.TestUtils.main;
//
// This file doesn't contain the normal case.
// That case is defined in another test.
//
public class ValidateProjectIT
{
@Rule
public TemporaryFolder folder = new TemporaryFolder();
@Rule
public TemporaryDigdagServer server = TemporaryDigdagServer.builder()
.build();
<｜fim▁hole｜>
@Before
public void setUp()
throws Exception
{
projectDir = folder.getRoot().toPath().resolve("foobar");
config = folder.newFile().toPath();
}
@Test
public void uploadInvalidTaskProject()
throws Exception
{
// Create new project
CommandStatus initStatus = main("init",
"-c", config.toString(),
projectDir.toString());
assertThat(initStatus.code(), is(0));
copyResource("acceptance/error_task/invalid_at_group.dig", projectDir.resolve("invalid_at_group.dig"));
// Push the project
CommandStatus pushStatus = main(
"push",
"--project", projectDir.toString(),
"foobar",
"-c", config.toString(),
"-e", server.endpoint());
assertThat(pushStatus.code(), is(1));
assertThat(pushStatus.errUtf8(), containsString("A task can't have more than one operator"));
}
@Test
public void uploadInvalidScheduleProject()
throws Exception
{
// Create new project
CommandStatus initStatus = main("init",
"-c", config.toString(),
projectDir.toString());
assertThat(initStatus.code(), is(0));
copyResource("acceptance/schedule/invalid_schedule.dig", projectDir.resolve("invalid_schedule.dig"));
// Push the project
CommandStatus pushStatus = main(
"push",
"--project", projectDir.toString(),
"foobar",
"-c", config.toString(),
"-e", server.endpoint());
assertThat(pushStatus.code(), is(1));
assertThat(pushStatus.errUtf8(), containsString("scheduler requires mm:ss format"));
}
}<｜fim▁end｜> | private Path config;
private Path projectDir; |
<|file_name|>ResourcePropertiesEditStub.java<|end_file_name|><｜fim▁begin｜>/**********************************************************************************
* $URL: https://source.sakaiproject.org/svn/providers/tags/sakai-10.6/jldap-mock/src/java/edu/amc/sakai/user/ResourcePropertiesEditStub.java $
* $Id: ResourcePropertiesEditStub.java 105079 2012-02-24 23:08:11Z [email protected] $
***********************************************************************************
*
* Copyright (c) 2003, 2004, 2005, 2006, 2008 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package edu.amc.sakai.user;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.Properties;
import org.sakaiproject.entity.api.ResourcePropertiesEdit;
import org.sakaiproject.util.BaseResourcePropertiesEdit;
import org.apache.commons.lang.StringUtils;
/**
 * Not a stub per se, so much as a {@link BaseResourcePropertiesEdit} extension
* with enhancements for testing equality, outputting a meaningful String
 * representation, and initializing state from a set of default and<｜fim▁hole｜>
 *
* @author [email protected]
*
*/
public class ResourcePropertiesEditStub extends BaseResourcePropertiesEdit {
private static final long serialVersionUID = 1L;
public ResourcePropertiesEditStub() {
super();
}
public ResourcePropertiesEditStub(Properties defaultConfig, Properties configOverrides) {
super();
if ( defaultConfig != null && !(defaultConfig.isEmpty()) ) {
for ( Enumeration i = defaultConfig.propertyNames() ; i.hasMoreElements() ; ) {
String propertyName = (String)i.nextElement();
String propertyValue = StringUtils.trimToNull((String)defaultConfig.getProperty(propertyName));
if ( propertyValue == null ) {
continue;
}
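// Values containing ';' are split into a multi-valued property; anything else is stored as a single property.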
String[] propertyValues = propertyValue.split(";");
if ( propertyValues.length > 1 ) {
for ( String splitPropertyValue : propertyValues ) {
super.addPropertyToList(propertyName, splitPropertyValue);
}
} else {
super.addProperty(propertyName, propertyValue);
}
}
}
if ( configOverrides != null && !(configOverrides.isEmpty()) ) {
// slightly different... configOverrides are treated as complete
// overwrites of existing values.
for ( Enumeration i = configOverrides.propertyNames() ; i.hasMoreElements() ; ) {
String propertyName = (String)i.nextElement();
super.removeProperty(propertyName);
String propertyValue = StringUtils.trimToNull((String)configOverrides.getProperty(propertyName));
String[] propertyValues = propertyValue.split(";");
if ( propertyValues.length > 1 ) {
for ( String splitPropertyValue : propertyValues ) {
super.addPropertyToList(propertyName, splitPropertyValue);
}
} else {
super.addProperty(propertyName, propertyValue);
}
}
}
}
public String toString() {
StringBuilder sb = new StringBuilder();
for ( Iterator names = getPropertyNames(); names.hasNext(); ) {
String name = (String)names.next();
sb.append(name + "=" + this.getPropertyFormatted(name) + "; ");
}
return sb.toString();
}
/** Pure hack, but BaseResourceProperties doesn't have a meaningful impl */
public boolean equals(Object o) {
if ( o == this ) {
return true;
}
if ( o == null ) {
return false;
}
if ( !(o instanceof ResourcePropertiesEdit) ) {
return false;
}
ResourcePropertiesEdit otherProps = (ResourcePropertiesEdit)o;
int cnt = 0;
Iterator namesInOther = otherProps.getPropertyNames();
while ( namesInOther.hasNext() ) {
cnt++;
String nameInOther = (String)namesInOther.next();
Object valueInOther = otherProps.get(nameInOther);
Object valueInThis = get(nameInOther);
if ( valueInThis == valueInOther ) {
continue;
}
if ( valueInThis == null || valueInOther == null ) {
return false;
}
if ( !(valueInThis.equals(valueInOther)) ) {
return false;
}
}
if ( m_props.size() != cnt ) {
return false;
}
return true;
}
}<｜fim▁end｜> | * override {@link Properties}.
* |
<|file_name|>lib.rs<|end_file_name|><｜fim▁begin｜>#[macro_use]
extern crate log;
use special_fun::FloatSpecial;
use std::collections::VecDeque;
use std::f64;
#[derive(Clone, Debug)]
pub struct PhiFailureDetector {
min_stddev: f64,
history_size: usize,
buf: VecDeque<u64>,
prev_heartbeat: Option<u64>,
}
impl PhiFailureDetector {
pub fn new() -> PhiFailureDetector {
Self::default()
}
pub fn min_stddev(self, min_stddev: f64) -> PhiFailureDetector {
assert!(min_stddev > 0.0, "min_stddev must be > 0.0");
PhiFailureDetector { min_stddev, ..self }
}
pub fn history_size(self, count: usize) -> PhiFailureDetector {
assert!(count > 0, "history_size must be > 0");
PhiFailureDetector {
history_size: count,
..self
}
}
pub fn heartbeat(&mut self, t: u64) {
match &mut self.prev_heartbeat {
prev @ &mut None => {
*prev = Some(t);
}
&mut Some(ref mut prev) => {
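// Ignore out-of-order heartbeats; otherwise record the inter-arrival delta.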
if t < *prev {
return;
};
let delta = t - *prev;
self.buf.push_back(delta);
*prev = t;
if self.buf.len() > self.history_size {
let _ = self.buf.pop_front();
}
}
}
}
/// def φ(T_now) = −log10(P_later(T_now − T_last))
pub fn phi(&self, now: u64) -> f64 {
match &self.prev_heartbeat {
Some(prev_time) if now > *prev_time => {
trace!(
"now:{} - prev_heartbeat:{} = {:?}",
now,
prev_time,
now - prev_time
);
let p_later = self.p_later(now - prev_time);
-p_later.log10()
}
Some(prev_time) => {
trace!("now:{} <= prev_heartbeat:{}", now, prev_time);
0.0
}
None => 0.0,
}
}
/// Returns the time t (within epsilon) at which phi will be >= threshold.
pub fn next_crossing_at(&self, now: u64, threshold: f64) -> u64 {
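// Solve phi >= threshold for elapsed time: phi crosses the threshold when
// p_later = 10^-threshold, so the normal quantile of (1 - 10^-threshold)
// gives the required elapsed time in units of stddev above the mean.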
let phappened = 1.0 - (10.0f64).powf(-threshold);
let x = phappened.norm_inv();
let mean = stats::mean(self.buf.iter().cloned());
let stddev = stats::stddev(self.buf.iter().cloned()).max(self.min_stddev);
let diff = x * stddev + mean;
let then = now + diff.ceil() as u64;
trace!(
"threshold:{}; phappened:{}; x:{}; mean:{}; stddev:{}; diff:{}; then:{}",
threshold,
phappened,
x,
mean,
stddev,
diff,
then
);
then
}
fn p_later(&self, diff: u64) -> f64 {
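// Survival probability: the chance that the next heartbeat arrives more than
// `diff` after the previous one, assuming normally distributed deltas.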
let mean = stats::mean(self.buf.iter().cloned());
let stddev = stats::stddev(self.buf.iter().cloned()).max(self.min_stddev);
let x = (diff as f64 - mean) / stddev;
// let cdf = 0.5*(1.0+ (x/(2.0f64).sqrt()).erf())
let p = 1.0 - x.norm();
trace!(
"diff:{:e}; mean:{:e}; stddev:{:e} x:{:e}; p_later:{:e}",
diff as f64,
mean,
stddev,
x,
p
);
// We want to avoid returning zero, as we want the logarithm of the probability.
// And the log of zero is meaningless.
if p < f64::MIN_POSITIVE {
f64::MIN_POSITIVE
} else {
p
}
}
}
impl Default for PhiFailureDetector {
fn default() -> Self {
PhiFailureDetector {
min_stddev: 1.0,
history_size: 10,
buf: VecDeque::new(),
prev_heartbeat: None,
}
}
}
#[cfg(test)]
mod tests {
use super::PhiFailureDetector;
use rand::thread_rng;
use rand_distr::Distribution;
use rand_distr::LogNormal;
#[test]
fn should_fail_when_no_heartbeats() {
env_logger::try_init().unwrap_or_default();
let mut detector = PhiFailureDetector::new();
for t in 0..100 {
detector.heartbeat(t);
let phi = detector.phi(t);
trace!("at:{:?}, phi:{:?}; det: {:?}", t, phi, detector);
if t > 10 {
assert!(phi < 1.0);
}
}
for t in 100..110 {
let phi = detector.phi(t);
trace!("at:{:?}, phi:{:?}; det: {:?}", t, phi, detector);
}
for &t in &[110, 200, 300] {
let phi = detector.phi(t);
trace!("at:{:?}, phi:{:?}; det: {:?}", t, phi, detector);
assert!(phi > 1.0, "t:{:?}; phi:{:?} > 1.0", t, phi);
}
}
#[test]
fn should_recover() {<｜fim▁hole｜>
let mut detector = PhiFailureDetector::new().history_size(3);
for t in 0..10 {
detector.heartbeat(t);
let phi = detector.phi(t);
trace!("at:{:?}, phi:{:?}; det: {:?}", t, phi, detector);
}
for t in 20..30 {
detector.heartbeat(t);
let phi = detector.phi(t);
trace!("at:{:?}, phi:{:?}; det: {:?}", t, phi, detector);
if t > 10 {
assert!(phi < 1.0);
}
}
}
#[test]
fn should_estimate_threshold_times() {
env_logger::try_init().unwrap_or_default();
let epsilon = 2;
let mut detector = PhiFailureDetector::new().history_size(3);
let mut t = 0;
for n in 0u64..10 {
let dist = LogNormal::new(10.0, 100.0).expect("lognormal");
let diff = dist.sample(&mut thread_rng());
t = n * 1000;
trace!(
"at:{:?}, diff:{:e}; phi:{:?}; det: {:?}",
t,
diff,
detector.phi(t),
detector
);
detector.heartbeat(t);
}
// Estimate the point at which
let threshold = 1.0;
let est_1 = detector.next_crossing_at(t, threshold);
let pre = detector.phi(est_1 - epsilon);
let at = detector.phi(est_1);
assert!(
pre < threshold && at >= threshold,
"phi({}):{:?} < {:?} && phi({}):{:?} >= {:?}",
est_1 - epsilon,
pre,
threshold,
est_1,
at,
threshold
);
}
}<｜fim▁end｜> | env_logger::try_init().unwrap_or_default(); |
<|file_name|>package.py<|end_file_name|><｜fim▁begin｜># Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Prokka(Package):
"""Prokka is a software tool to annotate bacterial, archaeal and viral
genomes quickly and produce standards-compliant output files."""
homepage = "https://github.com/tseemann/prokka"
url = "https://github.com/tseemann/prokka/archive/v1.14.5.tar.gz"
version('1.14.6', sha256='f730b5400ea9e507bfe6c5f3d22ce61960a897195c11571c2e1308ce2533faf8')
depends_on('perl', type='run')
depends_on('perl-bioperl', type='run')
depends_on('perl-xml-simple', type='run')
depends_on('perl-bio-searchio-hmmer', type='run')
depends_on('hmmer', type='run')
depends_on('blast-plus', type='run')
depends_on('prodigal', type='run')<｜fim▁hole｜>
depends_on('tbl2asn', type='run')
def install(self, spec, prefix):
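# Prokka is distributed with prebuilt binaries and databases; install just copies the trees into the prefix.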
install_tree('bin', prefix.bin)
install_tree('binaries', prefix.binaries)
install_tree('db', prefix.db)
install_tree('doc', prefix.doc)<｜fim▁end｜> | |
<|file_name|>mtmap.rs<|end_file_name|><｜fim▁begin｜>#[deriving(Clone)]
#[deriving(ToStr)]
type LinkedList = Option<~Node>;
#[deriving(Clone)]
struct Node {
val: int,
tail: LinkedList
}
trait Length {
fn length(&self) -> int;
}
#[allow(dead_code)]
fn length(p: LinkedList) -> int {
match p {
None => { 0 }
Some(node) => { 1 + length(node.tail) }
}
}
impl Length for LinkedList {
fn length(&self) -> int {
match self {
&Some(ref node) => { 1 + node.tail.length() }
&None => 0
}
}
}
fn construct_list(n: int, x: int) -> LinkedList {
match n {
0 => { None }
_ => { Some(~Node{val: x, tail: construct_list(n - 1, x + 1)}) }
}
}
fn print_list(p: LinkedList) -> ~str {
match p {
None => { ~"" }
Some(node) => { node.val.to_str() + ", " + print_list(node.tail) }
}
}
trait Map {
fn mapr(&mut self, fn(int) -> int);
}
impl Map for LinkedList {
fn mapr(&mut self, f: fn(int) -> int) {
match(*self) {
None => { }
Some(ref mut current) => {
let (port, chan) : (Port<int>, Chan<int>) = Chan::new();
let val = current.val; // Can't use current inside task!
spawn(proc() { chan.send(f(val)); });
current.tail.mapr(f); // Make sure to do this first, so we're not waiting for recv!
current.val = port.recv();
}
}
}
}
<｜fim▁hole｜>
for _ in range(0, 10000) {
for _ in range(0, 1000000) {
a = a + 1;
}
}
println!("finished inc: {:d} ({:d})", n, a);
n + 1
}
fn main() {
let mut l10 : LinkedList = construct_list(5, 10);
l10.mapr(expensive_inc);
println!("List: {:s}", print_list(l10.clone()));
}<｜fim▁end｜> | fn expensive_inc(n: int) -> int {
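// Busy-loop to make each increment expensive, so the parallel map's speedup is observable.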
let mut a = 1;
println!("starting inc: {:d}", n); |
<|file_name|>main.ts<|end_file_name|><｜fim▁begin｜>function pyramid (a:number):void{
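// Row i: (a - i) leading spaces, then (2*i - 1) stars.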
for(var i:number = 1; i <= a; i++){
var b:string = '';
for(var j:number = 0; j <= a-i-1 ; j++){
b += ' ';
}
for(var k:number = 1; k <= 2*i-1; k++){<｜fim▁hole｜>
b += '*';
}
console.log(b);
}
}
//var numS:string = prompt("Please enter a number","");
//var num:number = +numS;
var numS:string = process.argv[2];
//var num:number = parseInt(numS);
pyramid(parseInt(numS));<｜fim▁end｜> | |
<|file_name|>bitcoin_he.ts<|end_file_name|><｜fim▁begin｜><?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS language="he" version="2.0">
<defaultcodec>UTF-8</defaultcodec>
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About Speedcoin</source>
<translation>ืืืืืช ืืืืืงืืื</translation>
</message>
<message>
<location line="+39"/>
<source><b>Speedcoin</b> version</source>
<translation>ืืจืกืช <b>ืืืืืงืืื</b></translation>
</message>
<message>
<location line="+57"/>
<source>
Speedcoin Official Website: https://www.speedcoin.org
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation>
ืืืื ืชืืื ื ื ืืกืืื ืืช.
ืืืคืฆืช ืชืืช ืจืืฉืืื ืืชืืื ื MIT/X11, ืจืื ืืช ืืงืืืฅ ืืืฆืืจืฃ COPYING ืื http://www.opensource.org/licenses/mit-license.php.
ืืืืฆืจ ืืื ืืืื ืชืืื ื ืฉืคืืชืื ืข"ื ืคืจืืืงื OpenSSL ืืฉืืืืฉ ืืชืืืช ืืืืื OpenSSL (http://www.openssl.org/) ืืชืืื ื ืงืจืืคืืืืจืคืืช ืฉื ืืชืื ืข"ื ืืจืืง ืืื ื ([email protected]) ืืชืืื ืช UPnP ืฉื ืืชืื ืข"ื ืชืืืก ืืจื ืจื.</translation>
</message>
<message>
<location filename="../aboutdialog.cpp" line="+14"/>
<source>Copyright</source>
<translation>ืืืืืืช ืืืฆืจืื</translation>
</message>
<message>
<location line="+0"/>
<source>The Speedcoin developers</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation>ืคื ืงืก ืืชืืืืช</translation>
</message>
<message>
<location line="+19"/>
<source>Double-click to edit address or label</source>
<translation>ืืืฅ ืืืืฆื ืืคืืื ืืขืจืื ืืชืืืช ืื ืชืืืช</translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>ืืฆืืจืช ืืชืืืช ืืืฉื</translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>ืืขืชืง ืืช ืืืชืืืช ืืืกืืื ืช ืืืื ืืขืจืืื</translation>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation>ืืชืืืช ืืืฉื</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+63"/>
<source>These are your Speedcoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation>ืืื ืืชืืืช ืืืืืืงืืื ืฉืื ืขืืืจ ืงืืืช ืชืฉืืืืื. ืืืชืื ืืชืจืฆื ืืชืช ืืชืืืช ืฉืื ื ืืื ืฉืืื ืืื ืฉืชืืื ืืขืงืื ืืืจ ืื ืืฉืื ืื.</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>&Copy Address</source>
<translation>ืืขืชืง ืืชืืืช</translation>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation>ืืฆื &ืงืื QR</translation>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a Speedcoin address</source>
<translation>ืืชืื ืขื ืืืืขื ืืืื ืืืืืื ืื ืืชื ืืืขืืื ืฉื ืืชืืืช ืืืืืงืืื.</translation>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation>ืืชืื ืขื ืืืืขื</translation>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation>ืืืง ืืช ืืืชืืืช ืฉื ืืืจื ืืืจืฉืืื</translation>
</message>
<message>
<location line="+27"/>
<source>Export the data in the current tab to a file</source>
<translation>ืืฆืื ืื ืชืื ืื ืืืื ืื ืืืื ืืงืืืฅ</translation>
</message>
<message>
<location line="+3"/>
<source>&Export</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>Verify a message to ensure it was signed with a specified Speedcoin address</source>
<translation>ืืืช ืืืืขื ืืืื ืืืืืื ืฉืืื ื ืืชืื ืขื ืืชืืืช ืืืืืงืืื ืืกืืืืช.</translation>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation>ืืืช ืืืืขื</translation>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation>ืืืง</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="-5"/>
<source>These are your Speedcoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
<translation>ืืื ืืชืืืช ืืืืืืงืืื ืฉืื ืขืืืจ ืฉืืืืช ืชืฉืืืืื. ืชืืื ืืืืง ืืช ืืกืคืจ ืืืช ืืชืืืืช ืืงืืื ืืชืฉืืืืื ืืคื ื ืฉืืืืช ืืืืขืืช.</translation>
</message>
<message>
<location line="+13"/>
<source>Copy &Label</source>
<translation>ืืขืชืง ืชืืืช</translation>
</message>
<message>
<location line="+1"/>
<source>&Edit</source>
<translation>ืขืจืืื</translation>
</message>
<message>
<location line="+1"/>
<source>Send &Coins</source>
<translation>ืฉืื ืืืืขืืช</translation>
</message>
<message>
<location line="+260"/>
<source>Export Address Book Data</source>
<translation>ืืฆืื ื ืชืื ื ืคื ืงืก ืืชืืืืช</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>ืงืืืฅ ืืืคืจื ืืคืกืืงืื (*.csv)</translation>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation>ืฉืืืื ืืืฆืื</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>ืื ืืกืืื ืืืชืื ืืงืืืฅ %1.</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation>ืชืืืช</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>ืืชืืืช</translation>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation>(ืืื ืชืืืช)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation>ืฉืื ืกืืกืื</translation>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation>ืืื ืก ืกืืกืื</translation>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation>ืกืืกืื ืืืฉื</translation>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation>ืืืืจ ืขื ืืกืืกืื ืืืืฉื</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+33"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>ืืื ืก ืืช ืืกืืกืื ืืืืฉื ืืืจื ืง. <br/>ืื ื ืืฉืชืืฉ ืืกืืกืื ืืืืืื <b>10 ืชืืื ืืงืจืืืื ืื ืืืชืจ</b>, ืื <b>ืฉืืื ื ืืืืื ืื ืืืชืจ</b>.</translation>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation>ืืฆืคื ืืจื ืง</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>ืืคืขืืื ืืื ืืืจืฉืช ืืช ืกืืกืืช ืืืจื ืง ืฉืื ืืฉืืื ืืคืชืื ืืช ืืืจื ืง.</translation>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation>ืคืชืืืช ืืจื ืง</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>ืืคืขืืื ืืื ืืืจืฉืช ืืช ืกืืกืืช ืืืจื ืง ืฉืื ืืฉืืื ืืคืขื ื ืืช ืืืจื ืง.</translation>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation>ืคืขื ืื ืืจื ืง</translation>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation>ืฉืื ืื ืกืืกืื</translation>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>ืืื ืก ืืช ืืกืืกืืืช ืืืฉื ื ืืืืืฉื ืืืจื ืง.</translation>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation>ืืฉืจ ืืฆืคื ืช ืืจื ืง</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR LITECOINS</b>!</source>
<translation>ืืืืจื: ืื ืืชื ืืฆืคืื ืืช ืืืจื ืง ืืืืื ืืช ืืกืืกืื, ืืชื <b>ืชืืื ืืช ืื ืืืืืืงืืื ืื ืฉืื</b>!</translation>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>ืืื ืืชื ืืืื ืฉืืจืฆืื ื ืืืฆืคืื ืืช ืืืจื ืง?</translation>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation>ืืฉืื! ืื ืืืืื ืงืืื ืฉืขืฉืืช ืืืจื ืง ืฉืื ืืฉ ืืืืืืฃ ืขื ืงืืืฅ ืืืจื ืง ืืืืฆืคื ืฉืื ืขืชื ื ืืฆืจ. ืืกืืืืช ืืืืื, ืืืืืืื ืงืืืืื ืฉื ืงืืืฅ ืืืจื ืง ืืื-ืืืฆืคื ืืืคืื ืืืกืจื ืฉืืืืฉ ืืจืืข ืฉืชืชืืื ืืืฉืชืืฉ ืืืจื ืง ืืืืฉ ืืืืฆืคื.</translation>
</message>
<message>
<location line="+100"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation>ืืืืจืืช: ืืงืฉ Caps Lock ืืืคืขื!</translation>
</message>
<message>
<location line="-130"/>
<location line="+58"/>
<source>Wallet encrypted</source>
<translation>ืืืจื ืง ืืืฆืคื</translation>
</message>
<message>
<location line="-56"/>
<source>Speedcoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your litecoins from being stolen by malware infecting your computer.</source>
<translation>ืืืืืงืืื ืืืกืืจ ืขืืฉืื ืืื ืืืฉืืื ืืช ืชืืืื ืืืฆืคื ื. ืืืืจ ืฉืืฆืคื ืช ืืืจื ืง ืฉืื ืืื ื ืืืื ืืืื ืืืืคื ืืื ืขื ืืืืืืงืืื ืื ืฉืื ืืชืืื ืืช ืืืื ืืืช ืืืืฉืชืืืช ืขื ืืืืฉื.</translation>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+42"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation>ืืฆืคื ืช ืืืจื ืง ื ืืฉืื</translation>
</message>
<message>
<location line="-54"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>ืืฆืคื ืช ืืืจื ืง ื ืืฉืื ืขืงื ืฉืืืื ืคื ืืืืช. ืืืจื ืง ืฉืื ืื ืืืฆืคื.</translation>
</message>
<message>
<location line="+7"/>
<location line="+48"/>
<source>The supplied passphrases do not match.</source>
<translation>ืืกืืกืืืช ืฉื ืืชื ื ืืื ื ืชืืืืืช.</translation>
</message>
<message>
<location line="-37"/>
<source>Wallet unlock failed</source>
<translation>ืคืชืืืช ืืืจื ืง ื ืืฉืื</translation>
</message>
<message>
<location line="+1"/>
<location line="+11"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>ืืกืืกืื ืฉืืืื ืกื ืืคืขื ืื ืืืจื ืง ืฉืืืื.</translation>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation>ืคืขื ืื ืืืจื ืง ื ืืฉื</translation>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation>ืกืืกืืช ืืืจื ืง ืฉืื ืชื ืืืฆืืื.</translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+233"/>
<source>Sign &message...</source>
<translation>ืืชืื ืขื ืืืืขื</translation>
</message>
<message>
<location line="+280"/>
<source>Synchronizing with network...</source>
<translation>ืืกืชื ืืจื ืขื ืืจืฉืช...</translation>
</message>
<message>
<location line="-349"/>
<source>&Overview</source>
<translation>&ืกืงืืจื</translation>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation>ืืฆื ืกืงืืจื ืืืืืช ืฉื ืืืจื ืง</translation>
</message>
<message>
<location line="+20"/>
<source>&Transactions</source>
<translation>&ืคืขืืืืช</translation>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation>ืืคืืฃ ืืืืกืืืจืืืช ืืคืขืืืืช</translation>
</message>
<message>
<location line="+7"/>
<source>Edit the list of stored addresses and labels</source>
<translation>ืขืจืื ืืช ืจืฉืืืช ืืืชืืืืช ืืืชืืืืช</translation>
</message>
<message>
<location line="-14"/>
<source>Show the list of addresses for receiving payments</source>
<translation>ืืฆื ืืช ืจืฉืืืช ืืืชืืืืช ืืงืืืช ืชืฉืืืืื</translation>
</message>
<message>
<location line="+31"/>
<source>E&xit</source>
<translation>ื&ืฆืืื</translation>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation>ืกืืืจ ืชืืื ื</translation>
</message>
<message>
<location line="+4"/>
<source>Show information about Speedcoin</source>
<translation>ืืฆื ืืืืข ืขื ืืืืืงืืื</translation>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation>ืืืืืช Qt</translation>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation>ืืฆื ืืืืข ืขื Qt</translation>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation>&ืืคืฉืจืืืืช</translation>
</message>
<message>
<location line="+6"/>
<source>&Encrypt Wallet...</source>
<translation>ืืฆืคื ืืจื ืง</translation>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation>ืืืืื ืืจื ืง</translation>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation>ืฉื ื ืกืืกืื</translation>
</message>
<message>
<location line="+285"/>
<source>Importing blocks from disk...</source>
<translation>ืืืืื ืืืืงืื ืืืืืกืง...</translation>
</message>
<message>
<location line="+3"/>
<source>Reindexing blocks on disk...</source>
<translation>ืืืืฉ ืืช ืืื ืืงืก ืืืืืงืื ืืืืกืง...</translation>
</message>
<message>
<location line="-347"/>
<source>Send coins to a Speedcoin address</source>
<translation>ืฉืื ืืืืขืืช ืืืชืืืช ืืืืืงืืื</translation>
</message>
<message>
<location line="+49"/>
<source>Modify configuration options for Speedcoin</source>
<translation>ืฉื ื ืืคืฉืจืืืืช ืชืฆืืจื ืขืืืจ ืืืืืงืืื</translation>
</message>
<message>
<location line="+9"/>
<source>Backup wallet to another location</source>
<translation>ืืืืื ืืืจื ืง ืืืงืื ืืืจ</translation>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation>ืฉื ื ืืช ืืกืืกืื ืืืฆืคื ืช ืืืจื ืง</translation>
</message>
<message>
<location line="+6"/>
<source>&Debug window</source>
<translation>ืืืื ื ืืคืื</translation>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation>ืคืชื ืืช ืืื ืืืงืจื ืืืืืื ืื ืืคืื</translation>
</message>
<message>
<location line="-4"/>
<source>&Verify message...</source>
<translation>ืืืช ืืืืขื...</translation>
</message>
<message>
<location line="-165"/>
<location line="+530"/>
<source>Speedcoin</source>
<translation>ืืืืืงืืื</translation>
</message>
<message>
<location line="-530"/>
<source>Wallet</source>
<translation>ืืจื ืง</translation>
</message>
<message>
<location line="+101"/>
<source>&Send</source>
<translation>ืืฉืื</translation>
</message>
<message>
<location line="+7"/>
<source>&Receive</source>
<translation>ืืงืื</translation>
</message>
<message>
<location line="+14"/>
<source>&Addresses</source>
<translation>ืืืชืืืืช</translation>
</message>
<message>
<location line="+22"/>
<source>&About Speedcoin</source>
<translation>ืืืืืช ืืืืืงืืื</translation>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation>ืืฆื / ืืกืชืจ</translation>
</message>
<message>
<location line="+1"/>
<source>Show or hide the main Window</source>
<translation>ืืฆื ืื ืืกืชืจ ืืช ืืืืื ืืจืืฉื</translation>
</message>
<message>
<location line="+3"/>
<source>Encrypt the private keys that belong to your wallet</source>
<translation>ืืฆืคื ืืช ืืืคืชืืืช ืืคืจืืืื ืฉืฉืืืืื ืืืจื ืง ืฉืื</translation>
</message>
<message>
<location line="+7"/>
<source>Sign messages with your Speedcoin addresses to prove you own them</source>
<translation>ืืชืื ืขื ืืืืขืืช ืขื ืืชืืืืช ืืืืืืงืืื ืฉืื ืืื ืืืืืื ืฉืื ืืืขืืืชื</translation>
</message>
<message>
<location line="+2"/>
<source>Verify messages to ensure they were signed with specified Speedcoin addresses</source>
<translation>ืืืช ืืืืขืืช ืืื ืืืืืื ืฉืื ื ืืชืื ืขื ืืชืืืช ืืืืืงืืื ืืกืืืืืช</translation>
</message>
<message>
<location line="+28"/>
<source>&File</source>
<translation>&ืงืืืฅ</translation>
</message>
<message>
<location line="+7"/>
<source>&Settings</source>
<translation>ื&ืืืจืืช</translation>
</message>
<message>
<location line="+6"/>
<source>&Help</source>
<translation>&ืขืืจื</translation>
</message>
<message>
<location line="+9"/>
<source>Tabs toolbar</source>
<translation>ืกืจืื ืืืื ืืืืื</translation>
</message>
<message>
<location line="+17"/>
<location line="+10"/>
<source>[testnet]</source>
<translation>[ืจืฉืช-ืืืืงื]</translation>
</message>
<message>
<location line="+47"/>
<source>Speedcoin client</source>
<translation>ืชืืื ืช ืืืืืงืืื</translation>
</message>
<message numerus="yes">
<location line="+141"/>
<source>%n active connection(s) to Speedcoin network</source>
<translation><numerusform>ืืืืืจ ืคืขืื ืืื ืืจืฉืช ืืืืืืงืืื</numerusform><numerusform>%n ืืืืืจืื ืคืขืืืื ืืจืฉืช ืืืืืืงืืื</numerusform></translation>
</message>
<message>
<location line="+22"/>
<source>No block source available...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Processed %1 of %2 (estimated) blocks of transaction history.</source>
<translation>1% ืืชืื 2% (ืืฉืืขืจ) ืืืืงืื ืฉื ืืกืืืจืืืช ืคืขืืืช ืขืืืื </translation>
</message>
<message>
<location line="+4"/>
<source>Processed %1 blocks of transaction history.</source>
<translation>ืืืฉืื ืขืืืื ืฉื %1 ืืืืงืื ืฉื ืืืกืืืจืืืช ืคืขืืืืช.</translation>
</message>
<message numerus="yes">
<location line="+20"/>
<source>%n hour(s)</source>
<translation><numerusform>%n ืฉืขื</numerusform><numerusform>%n ืฉืขืืช</numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation><numerusform>%n ืืื</numerusform><numerusform>%n ืืืื</numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n week(s)</source>
<translation><numerusform>%n ืฉืืืข</numerusform><numerusform>%n ืฉืืืขืืช</numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>%1 behind</source>
<translation>1% ืืืืืจ</translation>
</message>
<message>
<location line="+14"/>
<source>Last received block was generated %1 ago.</source>
<translation>ืืืืืง ืืืืจืื ืฉืืชืงืื ื ืืฆืจ ืืคื ื %1</translation>
</message>
<message>
<location line="+2"/>
<source>Transactions after this will not yet be visible.</source>
<translation>ืืืืจ ืืืช ืคืขืืืืช ื ืกืคืืช ืืจื ืืืื ืืืืืืช</translation>
</message>
<message>
<location line="+22"/>
<source>Error</source>
<translation>ืฉืืืื</translation>
</message>
<message>
<location line="+3"/>
<source>Warning</source>
<translation>ืืืืจื</translation>
</message>
<message>
<location line="+3"/>
<source>Information</source>
<translation>ืืืืข</translation>
</message>
<message>
<location line="+70"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation>ืคืขืืื ืื ืืืจืืช ืืืืืืืช ืืืืื. ืขืืืื ืืืคืฉืจืืชื ืืฉืืื ืืืชื ืชืืืจืช ืขืืื ืฉื %1, ืืืืืขืืช ืืฆืืชืื ืฉืืขืืืื ืืช ืืคืขืืื ืฉืื ืืขืืืจืช ืืชืืื ืืจืฉืช. ืืื ืืจืฆืื ื ืืฉืื ืืช ืืขืืื?</translation>
</message>
<message>
<location line="-140"/>
<source>Up to date</source>
<translation>ืขืืื ื</translation>
</message>
<message>
<location line="+31"/>
<source>Catching up...</source>
<translation>ืืชืขืืื...</translation>
</message>
<message>
<location line="+113"/>
<source>Confirm transaction fee</source>
<translation>ืืฉืจ ืขืืืช ืคืขืืื</translation>
</message>
<message>
<location line="+8"/>
<source>Sent transaction</source>
<translation>ืคืขืืื ืฉื ืฉืืื</translation>
</message>
<message>
<location line="+0"/>
<source>Incoming transaction</source>
<translation>ืคืขืืื ืฉืืชืงืืื</translation>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation>ืชืืจืื: %1
ืืืืช: %2
ืกืื: %3
ืืชืืืช: %4</translation>
</message>
<message>
<location line="+33"/>
<location line="+23"/>
<source>URI handling</source>
<translation>ืชืคืขืื URI</translation>
</message>
<message>
<location line="-23"/>
<location line="+23"/>
<source>URI can not be parsed! This can be caused by an invalid Speedcoin address or malformed URI parameters.</source>
<translation>ืื ื ืืชื ืื ืชื URI! ืื ืืืื ืืืืืจื ืืชืืฆืื ืืืชืืืช ืืืืืงืืื ืื ืชืงืื ื ืื ืคืจืืืจื URI ืืกืจื ืฆืืจื ืชืงืื ื.</translation>
</message>
<message>
<location line="+17"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>ืืืจื ืง <b>ืืืฆืคื</b> ืืืจืืข <b>ืคืชืื</b></translation>
</message>
<message>
<location line="+8"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>ืืืจื ืง <b>ืืืฆืคื</b> ืืืจืืข <b>ื ืขืื</b></translation>
</message>
<message>
<location filename="../bitcoin.cpp" line="+111"/>
<source>A fatal error occurred. Speedcoin can no longer continue safely and will quit.</source>
<translation>ืฉืืืื ืกืืคื ืืช ืืืจืขื. ืืืืืงืืื ืืื ื ืืืื ืืืืฉืื ืืคืขืื ืืืืื ืืืื ืืืกืืจ.</translation>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+104"/>
<source>Network Alert</source>
<translation>ืืืขืงืช ืจืฉืช</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation>ืขืจืื ืืชืืืช</translation>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation>ืช&ืืืช</translation>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation>ืืชืืืช ืืืฉืืืืช ืืจืฉืืื ืืื ืืคื ืงืก ืืืชืืืืช</translation>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation>&ืืชืืืช</translation>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation>ืืืชืืืช ืืืฉืืืืช ืืจืฉืืื ืื ืืคื ืงืก ืืืชืืืืช. ื ืืชื ืืฉื ืืช ืืืช ืจืง ืขืืืจ ืืชืืืืช ืืฉืืืื.</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+21"/>
<source>New receiving address</source>
<translation>ืืชืืืช ืืืฉื ืืงืืื</translation>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation>ืืชืืืช ืืืฉื ืืฉืืืื</translation>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation>ืขืจืื ืืชืืืช ืืงืืื</translation>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation>ืขืจืื ืืชืืืช ืืฉืืืื</translation>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation>ืืืชืืืช ืฉืืื ืกืช "%1" ืืืจ ื ืืฆืืช ืืคื ืงืก ืืืชืืืืช.</translation>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid Speedcoin address.</source>
<translation>ืืืชืืืช ืฉืืืื ืกื "%1" ืืื ื ืืชืืืช ืืืืืงืืื ืชืงืื ื.</translation>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation>ืคืชืืืช ืืืจื ืง ื ืืฉืื.</translation>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation>ืืฆืืจืช ืืคืชื ืืืฉ ื ืืฉืื.</translation>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+424"/>
<location line="+12"/>
<source>Speedcoin-Qt</source>
<translation>Speedcoin-Qt</translation>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation>ืืจืกื</translation>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation>ืฉืืืืฉ:</translation>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation>ืืคืฉืจืืืืช ืฉืืจืช ืคืงืืื</translation>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation>ืืคืฉืจืืืืช ืืืฉืง</translation>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation>ืงืืข ืฉืคื, ืืืฉื "he_il" (ืืจืืจืช ืืืื: ืฉืคืช ืืืขืจืืช)</translation>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation>ืืชืื ืืืืืขืจ</translation>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation>ืืฆื ืืกื ืคืชืืื ืืขืช ืืคืขืื (ืืจืืจืช ืืืื: 1)</translation>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation>ืืคืฉืจืืืืช</translation>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation>ืจืืฉื</translation>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation>ืฉืื &ืขืืืช ืคืขืืื</translation>
</message>
<message>
<location line="+31"/>
<source>Automatically start Speedcoin after logging in to the system.</source>
<translation>ืืคืขื ืืช ืืืืืงืืื ืืืืคื ืขืฆืืื ืืืืจ ืืชืืืจืืช ืืืขืจืืช.</translation>
</message>
<message>
<location line="+3"/>
<source>&Start Speedcoin on system login</source>
<translation>ืืชืื ืืช ืืืืืงืืื ืืขืช ืืชืืืจืืช ืืืขืจืืช</translation>
</message>
<message>
<location line="+35"/>
<source>Reset all client options to default.</source>
<translation>ืืคืก ืื ืืคืฉืจืืืืช ืืชืืื ื ืืืจืืจืช ืืืืื.</translation>
</message>
<message>
<location line="+3"/>
<source>&Reset Options</source>
<translation>ืืืคืืก ืืคืฉืจืืืืช</translation>
</message>
<message>
<location line="+13"/>
<source>&Network</source>
<translation>ืจืฉืช</translation>
</message>
<message>
<location line="+6"/>
<source>Automatically open the Speedcoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation>ืคืชื ืืช ืคืืจื ืืืืืงืืื ืื ืชื ืืืืคื ืืืืืืื. ืขืืื ืจืง ืื UPnP ืืืืคืฉืจ ืื ืชืื ืข"ื ืื ืชื.</translation>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation>ืืืคืื ืคืืจื ืืืืฆืขืืช UPnP</translation>
</message>
<message>
<location line="+7"/>
<source>Connect to the Speedcoin network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation>ืืชืืืจ ืืจืฉืช ืืืืืืงืืื ืืจื ืคืจืืงืกื SOCKS (ืืืฉื ืืขืช ืืชืืืจืืช ืืจื Tor).</translation>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation>ืืชืืืจ ืืจื ืคืจืืงืกื SOCKS</translation>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation>ืืชืืืช IP ืฉื ืคืจืืงืกื:</translation>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation>ืืชืืืช ืืืื ืืจื ื ืฉื ืืคืจืืงืกื (ืืืฉื 127.0.0.1)</translation>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation>ืคืืจื:</translation>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation>ืืคืืจื ืฉื ืืคืจืืงืกื (ืืืฉื 9050)</translation>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation>ืืจืกืช SOCKS:</translation>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation>ืืจืกืช SOCKS ืฉื ืืคืจืืงืกื (ืืืฉื 5)</translation>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation>ืืืื</translation>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation>ืืฆื ืกืื ืืืฉ ืืืื ืืืืจ ืืืขืืจ ืืืืื.</translation>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>ื&ืืขืจ ืืืืฉ ืืืงืื ืืฉืืจืช ืืืฉืืืืช</translation>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation>ืืืขืจ ืืช ืืชืืื ื ืืืงืื ืืฆืืช ืืื ื ืืฉืืืืื ื ืกืืจ. ืืฉืืคืฉืจืืช ืื ืคืขืืื, ืืชืืื ื ืชืืกืืจ ืจืง ืืืืจ ืืืืจืช ืืฆืืื ืืืชืคืจืื.</translation>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation>ืืืขืจ ืืขืช ืกืืืจื</translation>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation>ืชืฆืืื</translation>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation>ืฉืคืช ืืืฉืง ืืืฉืชืืฉ:</translation>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting Speedcoin.</source>
<translation>ื ืืชื ืืงืืืข ืืื ืืช ืฉืคืช ืืืฉืง ืืืฉืชืืฉ. ืืืืจื ืื ืชืืื ืืืืจ ืืคืขืื ืืืืฉ ืฉื ืืืืืงืืื.</translation>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation>ืืืืืช ืืืืื ืืืฆืืช ืืืืืืช:</translation>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation>ืืืจ ืืช ืืจืืจืช ืืืืื ืืืืืืช ืืืืืงื ืืฉืจ ืชืืฆื ืืืืฉืง ืืืขืช ืฉืืืืช ืืืืขืืช.</translation>
</message>
<message>
<location line="+9"/>
<source>Whether to show Speedcoin addresses in the transaction list or not.</source>
<translation>ืืื ืืืฆืื ืืชืืืืช ืืืืืงืืื ืืจืฉืืืช ืืคืขืืืืช ืื ืื.</translation>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation>ืืฆื ืืชืืืืช ืืจืฉืืืช ืืคืขืืืืช</translation>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation>ืืืฉืืจ</translation>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation>ืืืืื</translation>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation>ืืืฉืื</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+53"/>
<source>default</source>
<translation>ืืจืืจืช ืืืื</translation>
</message>
<message>
<location line="+130"/>
<source>Confirm options reset</source>
<translation>ืืฉืจ ืืช ืืืคืืก ืืืคืฉืจืืืืช</translation>
</message>
<message>
<location line="+1"/>
<source>Some settings may require a client restart to take effect.</source>
<translation>ืืื ืืืืืืจืืช ืขืฉืืืืช ืืืจืืฉ ืืชืืื ืืชืืื ื ืืื ืืืืื ืก ืืคืืขื.</translation>
</message>
<message>
<location line="+0"/>
<source>Do you want to proceed?</source>
<translation>ืืื ืืจืฆืื ื ืืืืฉืื?</translation>
</message>
<message>
<location line="+42"/>
<location line="+9"/>
<source>Warning</source>
<translation>ืืืืจื</translation>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting Speedcoin.</source>
<translation>ืืืืจื ืื ืชืืื ืืืืจ ืืคืขืื ืืืืฉ ืฉื ืืืืืงืืื.</translation>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation>ืืชืืืช ืืคืจืืงืกื ืฉืกืืคืงื ืืื ื ืชืงืื ื.</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation>ืืืคืก</translation>
</message>
<message>
<location line="+50"/>
<location line="+166"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the Speedcoin network after a connection is established, but this process has not completed yet.</source>
<translation>ืืืืืข ืืืืฆื ืขืฉืื ืืืืืช ืืืืฉื. ืืืจื ืง ืฉืื ืืกืชื ืืจื ืืืืคื ืืืืืืื ืขื ืจืฉืช ืืืืืืงืืื ืืืืจ ืืื ืื ืืืืืจ, ืื ืืชืืืื ืืจื ืืกืชืืื.</translation>
</message>
<message>
<location line="-124"/>
<source>Balance:</source>
<translation>ืืชืจื:</translation>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation>ืืืชืื ืืืืฉืืจ:</translation>
</message>
<message>
<location line="-78"/>
<source>Wallet</source>
<translation>ืืจื ืง</translation>
</message>
<message>
<location line="+107"/>
<source>Immature:</source>
<translation>ืื ืืฉื:</translation>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation>ืืืื ืฉื ืืจื ืืืจื ืืืฉืื</translation>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation><b>ืคืขืืืืช ืืืจืื ืืช</b></translation>
</message>
<message>
<location line="-101"/>
<source>Your current balance</source>
<translation>ืืืชืจื ืื ืืืืืช ืฉืื</translation>
</message>
<message>
<location line="+29"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation>ืืกืืื ืืืืื ืฉื ืคืขืืืืช ืฉืืจื ืืืฉืจื, ืืขืื ืืื ื ื ืกืคืจืืช ืืืืฉืื ืืืชืจื ืื ืืืืืช</translation>
</message>
<message>
<location filename="../overviewpage.cpp" line="+116"/>
<location line="+1"/>
<source>out of sync</source>
<translation>ืื ืืกืื ืืจื</translation>
</message>
</context>
<context>
<name>PaymentServer</name>
<message>
<location filename="../paymentserver.cpp" line="+107"/>
<source>Cannot start litecoin: click-to-pay handler</source>
<translation>ืื ื ืืชื ืืืชืืื ืืช ืืืืืงืืื: ืืคืขืื ืืืฅ-ืืชืฉืืื </translation>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation>ืฉืื ืงืื QR</translation>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation>ืืงืฉ ืชืฉืืื</translation>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation>ืืืืช:</translation>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation>ืชืืืช:</translation>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation>ืืืืขื:</translation>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation>&ืฉืืืจ ืืฉื...</translation>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation>ืฉืืืื ืืงืืืื URI ืืงืื QR</translation>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation>ืืืืืช ืฉืืืื ืกื ืืื ื ืชืงืื ื, ืื ื ืืื.</translation>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation>ืืืืื ืืืชืงืื ืืจืื ืืื, ื ืกื ืืืคืืืช ืืช ืืืงืกื ืืชืืืช / ืืืืขื.</translation>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation>ืฉืืืจ ืงืื QR</translation>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation>ืชืืื ืืช PNG (*.png)</translation>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation>ืฉื ืืืฉืง</translation>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+339"/>
<source>N/A</source>
<translation>N/A</translation>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation>ืืจืกืช ืืืฉืง</translation>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation>ืืืืข</translation>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation>ืืฉืชืืฉ ื-OpenSSL ืืจืกื</translation>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation>ืืื ืืชืืื</translation>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation>ืจืฉืช</translation>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation>ืืกืคืจ ืืืืืจืื</translation>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation>ืืจืฉืช ืืืืืงื</translation>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation>ืฉืจืฉืจืช ืืืืืงืื</translation>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation>ืืกืคืจ ืืืืืงืื ืื ืืืื</translation>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation>ืืกืคืจ ืืืื ืืฉืืขืจ ืฉื ืืืืงืื</translation>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation>ืืื ืืืืืง ืืืืจืื</translation>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation>ืคืชื</translation>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation>ืืคืฉืจืืืืช ืฉืืจืช ืคืงืืื</translation>
</message>
<message>
<location line="+7"/>
<source>Show the Speedcoin-Qt help message to get a list with possible Speedcoin command-line options.</source>
<translation>ืืฆื ืืช ืืืืขื ืืขืืจื ืฉื litecoin-qt ืืื ืืงืื ืจืฉืืื ืฉื ืืคืฉืจืืืืช ืฉืืจืช ืคืงืืื ืฉื ืืืืืงืืื.</translation>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation>ืืฆื</translation>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation>ืืื ืืงืจื</translation>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation>ืชืืจืื ืื ืื</translation>
</message>
<message>
<location line="-104"/>
<source>Speedcoin - Debug window</source>
<translation>ืืืืืงืืื - ืืืื ื ืืคืื</translation>
</message>
<message>
<location line="+25"/>
<source>Speedcoin Core</source>
<translation>ืืืืช ืืืืืงืืื</translation>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation>ืงืืืฅ ืืืื ื ืืคืื</translation>
</message>
<message>
<location line="+7"/>
<source>Open the Speedcoin debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation>ืคืชื ืืช ืงืืืฅ ืืืื ืื ืืคืื ืืชืืงืืืช ืื ืชืื ืื ืื ืืืืืช. ืื ืขืฉืื ืืงืืช ืืกืคืจ ืฉื ืืืช ืขืืืจ ืงืืืฆื ืืืื ืืืืืื.</translation>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation>ื ืงื ืืื ืืงืจื</translation>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-30"/>
<source>Welcome to the Speedcoin RPC console.</source>
<translation>ืืจืืืื ืืืืื ืืืื ืืงืจืช RPC ืฉื ืืืืืงืืื</translation>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation>ืืฉืชืืฉ ืืืืฆืื ืืืขืื ืืืืื ืืื ืื ืืื ืืืืกืืืจืื, ื- <b>Ctrl-L</b> ืืื ืื ืงืืช ืืช ืืืกื.</translation>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation>ืืงืื <b>help</b> ืืฉืืื ืกืงืืจื ืฉื ืืคืงืืืืช ืืืืื ืืช.</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+124"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation>ืฉืื ืืืืขืืช</translation>
</message>
<message>
<location line="+50"/>
<source>Send to multiple recipients at once</source>
<translation>ืฉืื ืืืกืคืจ ืืงืืืื ืื-ืืื ืืช</translation>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation>ืืืกืฃ ืืงืื</translation>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation>ืืกืจ ืืช ืื ืืฉืืืช ืืคืขืืื</translation>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation>ื ืงื ืืื</translation>
</message>
<message>
<location line="+22"/>
<source>Balance:</source>
<translation>ืืชืจื:</translation>
</message>
<message>
<location line="+10"/>
<source>123.456 SPD</source>
<translation>123.456 SPD</translation>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation>ืืฉืจ ืืช ืคืขืืืช ืืฉืืืื</translation>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation>ืฉืื</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-59"/>
<source><b>%1</b> to %2 (%3)</source>
<translation><b>%1</b> ื- %2 (%3)</translation>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation>ืืฉืจ ืฉืืืืช ืืืืขืืช</translation>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation>ืืื ืืชื ืืืื ืฉืืจืฆืื ื ืืฉืืื %1?</translation>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation> ื- </translation>
</message>
<message>
<location line="+23"/>
<source>The recipient address is not valid, please recheck.</source>
<translation>ืืชืืืช ืืืงืื ืืื ื ืชืงืื ื, ืื ื ืืืืง ืฉื ืืช.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation>ืืืืืช ืืฉืื ืืืืืช ืืืืืช ืืืืื ื-0.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation>ืืืืืช ืขืืื ืขื ืืืืื ืฉืื.</translation>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation>ืืืืืช ืืืืืืช, ืืืืืื ืขืืืช ืคืขืืื ืืกื %1, ืขืืื ืขื ืืืืื ืฉืื.</translation>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation>ืืชืืืช ืืคืืื ื ืืฆืื, ื ืืชื ืืฉืืื ืืื ืืชืืืช ืจืง ืคืขื ืืืช ืืื ืคืขืืืช ืฉืืืื.</translation>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed!</source>
<translation>ืฉืืืื: ืืฆืืจืช ืืคืขืืื ื ืืฉืื!</translation>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>ืฉืืืื: ืืคืขืืื ื ืืืชื. ืื ืขืฉืื ืืงืจืืช ืขื ืืืง ืืืืืืขืืช ืืืจื ืง ืฉืื ืืืจ ื ืืฆืื, ืืืฉื ืื ืืฉืชืืฉืช ืืขืืชืง ืฉื wallet.dat ืืืืืขืืช ื ืืฆืื ืืขืืชืง ืื ืื ืกืืื ื ืืื ืืฆืืืช ืืื.</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation>ืืืคืก</translation>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation>ื&ืืืช:</translation>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation>ืฉืื &ื:</translation>
</message>
<message>
<location line="+34"/>
<source>The address to send the payment to (e.g. SbJtSLn5C6NsuBqfYPTfZ3rGD7idcfRXM4)</source>
<translation>ืืืชืืืช ืฉืืืื ืืฉืื ืืชืฉืืื (ืืืฉื SbJtSLn5C6NsuBqfYPTfZ3rGD7idcfRXM4)</translation>
</message>
<message>
<location line="+60"/>
<location filename="../sendcoinsentry.cpp" line="+26"/>
<source>Enter a label for this address to add it to your address book</source>
<translation>ืืื ืก ืชืืืช ืืืชืืืช ืืืืช ืืื ืืืื ืืก ืืคื ืงืก ืืืชืืืืช</translation>
</message>
<message>
<location line="-78"/>
<source>&Label:</source>
<translation>ืช&ืืืช:</translation>
</message>
<message>
<location line="+28"/>
<source>Choose address from address book</source>
<translation>ืืืจ ืืชืืืช ืืคื ืงืก ืืืชืืืืช</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation>ืืืืง ืืชืืืช ืืืืื</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation>ืืกืจ ืืช ืืืงืื ืืื</translation>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a Speedcoin address (e.g. SbJtSLn5C6NsuBqfYPTfZ3rGD7idcfRXM4)</source>
<translation>ืืื ืก ืืชืืืช Speedcoin (ืืืฉื SbJtSLn5C6NsuBqfYPTfZ3rGD7idcfRXM4)</translation>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation>ืืชืืืืช - ืืชืื ืื ืืืช ืืืืขื</translation>
</message>
<message>
<location line="+13"/>
<source>&Sign Message</source>
<translation>ืืชืื ืขื ืื&ืืขื</translation>
</message>
<message>
<location line="+6"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation>ืืชื ืืืื ืืืชืื ืขื ืืืืขืืช ืขื ืืืชืืืืช ืฉืื ืืื ืืืืืื ืฉืื ืืืขืืืชื. ืืืืืจ ืื ืืืชืื ืขื ืืฉืื ืืขืืจืคื, ืฉืื ืืชืงืคืืช ืคืืฉืื ื ืขืฉืืืืช ืืืจืื ืื ืืขืืจืื ืืืกืืจ ืืช ืืืืชื. ืืชืื ืจืง ืขื ืืืจืืช ืืคืืจืืืช ืืืืืืื ืฉืืชื ืืกืืื ืขืืื.</translation>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. SbJtSLn5C6NsuBqfYPTfZ3rGD7idcfRXM4)</source>
<translation>ืืืชืืืช ืืืชื ืืืชืื ืขื ืืืืืขื (ืืืฉื SbJtSLn5C6NsuBqfYPTfZ3rGD7idcfRXM4)</translation>
</message>
<message>
<location line="+10"/>
<location line="+213"/>
<source>Choose an address from the address book</source>
<translation>ืืืจ ืืชืืืช ืืคื ืงืก ืืืชืืืืช</translation>
</message>
<message>
<location line="-203"/>
<location line="+213"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="-203"/>
<source>Paste address from clipboard</source>
<translation>ืืืืง ืืชืืืช ืืืืื</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation>ืืื ืก ืืื ืืช ืืืืืขื ืฉืขืืื ืืจืฆืื ื ืืืชืื</translation>
</message>
<message>
<location line="+7"/>
<source>Signature</source>
<translation>ืืชืืื</translation>
</message>
<message>
<location line="+27"/>
<source>Copy the current signature to the system clipboard</source>
<translation>ืืขืชืง ืืช ืืืชืืื ืื ืืืืืช ืืืื ืืืขืจืืช</translation><๏ฝfimโhole๏ฝ> <location line="+21"/>
<source>Sign the message to prove you own this Speedcoin address</source>
<translation>ืืชืื ืขื ืืืืืขื ืืื ืืืืืื ืฉืืชืืืช ื-Speedcoin ืืื ืืื ืืืขืืืชื.</translation>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation>ืืชืื ืขื ืืืืขื</translation>
</message>
<message>
<location line="+14"/>
<source>Reset all sign message fields</source>
<translation>ืืคืก ืืช ืื ืฉืืืช ืืืชืืื ืขื ืืืืขื</translation>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation>ื ืงื ืืื</translation>
</message>
<message>
<location line="-87"/>
<source>&Verify Message</source>
<translation>ืืืช ืืืืขื</translation>
</message>
<message>
<location line="+6"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation>ืืื ืก ืืืื ืืช ืืืชืืืช ืืืืชืืช, ืืืืืขื (ืืื ืฉืืชื ืืขืชืืง ืืขืืจื ืฉืืจื, ืจืืืืื, ืืืืื ืืื' ืืืืคื ืืืืืง) ืืืืชืืื ืืื ืืืืช ืืช ืืืืืขื. ืืืืืจ ืื ืืคืจืฉ ืืช ืืืชืืื ืืืืชืจ ืืื ืฉืืืคืืข ืืืืืขื ืืืชืืื ืืขืฆืื, ืืื ืืืืื ืข ืืืืคืื ืงืืจืื ืืืชืงืคืช ืืืฉ-ืืืืฆืข.</translation>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. SbJtSLn5C6NsuBqfYPTfZ3rGD7idcfRXM4)</source>
<translation>ืืืชืืืช ืืืชื ืืืืืขื ื ืืชืื (ืืืฉื SbJtSLn5C6NsuBqfYPTfZ3rGD7idcfRXM4)</translation>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified Speedcoin address</source>
<translation>ืืืช ืืช ืืืืืขื ืืื ืืืืืื ืฉืืื ื ืืชืื ืขื ืืชืืืช ื-Speedcoin ืื ืชืื ื</translation>
</message>
<message>
<location line="+3"/>
<source>Verify &Message</source>
<translation>ืืืช ืืืืขื</translation>
</message>
<message>
<location line="+14"/>
<source>Reset all verify message fields</source>
<translation>ืืคืก ืืช ืื ืฉืืืช ืืืืืช ืืืืขื</translation>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a Speedcoin address (e.g. SbJtSLn5C6NsuBqfYPTfZ3rGD7idcfRXM4)</source>
<translation>ืืื ืก ืืชืืืช Speedcoin (ืืืฉื SbJtSLn5C6NsuBqfYPTfZ3rGD7idcfRXM4)</translation>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation>ืืืฅ "ืืชืื ืขื ืืืืืขื" ืืื ืืืืื ืืชืืื</translation>
</message>
<message>
<location line="+3"/>
<source>Enter Speedcoin signature</source>
<translation>ืืื ืก ืืชืืืช Speedcoin</translation>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation>ืืืชืืืช ืฉืืืื ืกื ืืื ื ืชืงืื ื.</translation>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation>ืื ื ืืืืง ืืช ืืืชืืืช ืื ืกื ืฉื ืืช.</translation>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation>ืืืชืืืช ืฉืืืื ืกื ืืื ื ืืชืืืืกืช ืืืคืชื.</translation>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation>ืคืชืืืช ืืืจื ืง ืืืืื.</translation>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation>ืืืคืชื ืืคืจืื ืขืืืจ ืืืชืืืช ืฉืืืื ืกื ืืื ื ืืืื.</translation>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation>ืืืชืืื ืขื ืืืืืขื ื ืืฉืื.</translation>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation>ืืืืืขื ื ืืชืื.</translation>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation>ืื ื ืืชื ืืคืขื ื ืืช ืืืชืืื.</translation>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation>ืื ื ืืืืง ืืช ืืืชืืื ืื ืกื ืฉื ืืช.</translation>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation>ืืืชืืื ืื ืชืืืืช ืืช ืชืงืฆืืจ ืืืืืขื.</translation>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation>ืืืืืช ืืืืืขื ื ืืฉื.</translation>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation>ืืืืืขื ืืืืชื.</translation>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<location filename="../splashscreen.cpp" line="+22"/>
<source>The Speedcoin developers</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>[testnet]</source>
<translation>[ืจืฉืช-ืืืืงื]</translation>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+20"/>
<source>Open until %1</source>
<translation>ืคืชืื ืขื %1</translation>
</message>
<message>
<location line="+6"/>
<source>%1/offline</source>
<translation>%1/ืื ืืชืง</translation>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation>%1/ืืืชืื ืืืืฉืืจ</translation>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation>%1 ืืืฉืืจืื</translation>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation>ืืฆื</translation>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation><numerusform>, ืืืคืฅ ืืจื ืฆืืืช ืืื</numerusform><numerusform>, ืืืคืฅ ืืจื %n ืฆืืชืื</numerusform></translation>
</message>
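<!-- Format note: <message numerus="yes"> entries carry one <numerusform> per plural form of the target language, in the order Qt Linguist defines for that language; for Hebrew that is the singular form first, then the plural. -->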
<message>
<location line="+4"/>
<source>Date</source>
<translation>ืชืืจืื</translation>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation>ืืงืืจ</translation>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation>ื ืืฆืจ</translation>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation>ืืืช</translation>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation>ืื</translation>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation>ืืชืืืช ืขืฆืืืช</translation>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation>ืชืืืช</translation>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation>ืืืืื</translation>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation><numerusform>ืืืฉืื ืืขืื ืืืืง ืืื</numerusform><numerusform>ืืืฉืื ืืขืื %n ืืืืงืื</numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation>ืื ืืชืงืื</translation>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation>ืืืื</translation>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation>ืขืืืช ืคืขืืื</translation>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation>ืืืืช ื ืงืื</translation>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation>ืืืืขื</translation>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation>ืืขืจื</translation>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation>ืืืืื ืคืขืืื</translation>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation>ืืืืขืืช ืฉื ืืฆืจืื ืืืืืื ืืืืฉืื ืืืฉื 120 ืืืืงืื ืืคื ื ืฉื ืืชื ืื ืฆื ืืืชื. ืืฉืืฆืจืช ืืช ืืืืืง ืืื, ืืื ืืืคืฅ ืืจืฉืช ืืื ืืืชืืืกืฃ ืืฉืจืฉืจืช ืืืืืงืื. ืื ืืื ืืื ื ืืฆืืื ืืืืข ืืฉืจืฉืจืช, ืืืฆื ืฉืื ืืฉืชื ื ื"ืื ืืชืงืื" ืืื ื ืืชื ืืืื ืื ืฆื ืืืชื. ืื ืขืฉืื ืืงืจืืช ืืขืช ืืขืช ืื ืฆืืืช ืืืจ ืืืฆืจ ืืืืง ืืืืื ืฉื ืืกืคืจ ืฉื ืืืช ืืืืืืง ืฉืื.</translation>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation>ืืืืข ื ืืคืื</translation>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation>ืคืขืืื</translation>
</message>
<message>
<location line="+3"/>
<source>Inputs</source>
<translation>ืงืืืื</translation>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation>ืืืืช</translation>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation>ืืืช</translation>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation>ืฉืงืจ</translation>
</message>
<message>
<location line="-209"/>
<source>, has not been successfully broadcast yet</source>
<translation>, ืืจื ืฉืืืจ ืืืฆืืื</translation>
</message>
<message numerus="yes">
<location line="-35"/>
<source>Open for %n more block(s)</source>
<translation><numerusform>ืคืชื ืืืฉื ืืืืง %n ืืืชืจ</numerusform><numerusform>ืคืชื ืืืฉื %n ืืืืงืื ื ืืกืคืื</numerusform></translation>
</message>
<message>
<location line="+70"/>
<source>unknown</source>
<translation>ืื ืืืืข</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation>ืคืจืื ืืคืขืืื</translation>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation>ืืืื ืืช ืื ืืฆืืื ืชืืืืจ ืืคืืจื ืฉื ืืคืขืืื</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+225"/>
<source>Date</source>
<translation>ืชืืจืื</translation>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation>ืกืื</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>ืืชืืืช</translation>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation>ืืืืช</translation>
</message>
<message numerus="yes">
<location line="+57"/>
<source>Open for %n more block(s)</source>
<translation><numerusform>ืคืชื ืืืฉื ืืืืง %n ืืืชืจ</numerusform><numerusform>ืคืชื ืืืฉื %n ืืืืงืื ื ืืกืคืื</numerusform></translation>
</message>
<message>
<location line="+3"/>
<source>Open until %1</source>
<translation>ืคืชืื ืขื %1</translation>
</message>
<message>
<location line="+3"/>
<source>Offline (%1 confirmations)</source>
<translation>ืื ืืืืืจ (%1 ืืืฉืืจืื)</translation>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed (%1 of %2 confirmations)</source>
<translation>ืืืชืื ืืืืฉืืจ (%1 ืืชืื %2 ืืืฉืืจืื)</translation>
</message>
<message>
<location line="+3"/>
<source>Confirmed (%1 confirmations)</source>
<translation>ืืืืฉืจ (%1 ืืืฉืืจืื)</translation>
</message>
<message numerus="yes">
<location line="+8"/>
<source>Mined balance will be available when it matures in %n more block(s)</source>
<translation><numerusform>ืืืืื ืฉื ืืจื ืืืื ืืืื ืืฉืืื ืืืฉืื ืืขืื ืืืืง ืืื</numerusform><numerusform>ืืืืื ืฉื ืืจื ืืืื ืืืื ืืฉืืื ืืืฉืื ืืขืื %n ืืืืงืื</numerusform></translation>
</message>
<message>
<location line="+5"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>ืืืืืง ืืื ืื ื ืงืื ืขื ืืื ืืฃ ืฆืืืช ืืืจ, ืืื ืจืื ืื ืืชืงืื!</translation>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation>ื ืืฆืจ ืื ืื ืืชืงืื</translation>
</message>
<message>
<location line="+43"/>
<source>Received with</source>
<translation>ืืชืงืื ืขื</translation>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation>ืืชืงืื ืืืช</translation>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation>ื ืฉืื ื</translation>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation>ืชืฉืืื ืืขืฆืื</translation>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation>ื ืืจื</translation>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation>(n/a)</translation>
</message>
<message>
<location line="+199"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>ืืฆื ืืคืขืืื. ืืฉืื ืืช ืืกืื ืืขื ืฉืื ืื ืืื ืืจืืืช ืืช ืืกืคืจ ืืืืฉืืจืื.</translation>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation>ืืชืืจืื ืืืฉืขื ืื ืืคืขืืื ืืืืช ืืชืงืืื.</translation>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation>ืกืื ืืคืขืืื.</translation>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation>ืืชืืืช ืืืขื ืฉื ืืคืขืืื.</translation>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation>ืืืืืช ืฉืืชืืืกืคื ืื ืืืกืจื ืืืืชืจื.</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+52"/>
<location line="+16"/>
<source>All</source>
<translation>ืืื</translation>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation>ืืืื</translation>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation>ืืฉืืืข</translation>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation>ืืืืืฉ</translation>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation>ืืืืืฉ ืฉืขืืจ</translation>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation>ืืฉื ื</translation>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation>ืืืื...</translation>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation>ืืชืงืื ืขื</translation>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation>ื ืฉืื ื</translation>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation>ืืขืฆืื</translation>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation>ื ืืจื</translation>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation>ืืืจ</translation>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation>ืืื ืก ืืชืืืช ืื ืชืืืช ืืืคืฉ</translation>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation>ืืืืช ืืืขืจืืช</translation>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation>ืืขืชืง ืืชืืืช</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>ืืขืชืง ืชืืืช</translation>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation>ืืขืชืง ืืืืช</translation>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation>ืืขืชืง ืืืื ืคืขืืื</translation>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation>ืขืจืื ืชืืืช</translation>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation>ืืฆื ืคืจืื ืคืขืืื</translation>
</message>
<message>
<location line="+139"/>
<source>Export Transaction Data</source>
<translation>ืืฆืื ื ืชืื ื ืคืขืืืืช</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>ืงืืืฅ ืืืคืจื ืืคืกืืงืื (*.csv)</translation>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation>ืืืืฉืจ</translation>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation>ืชืืจืื</translation>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation>ืกืื</translation>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation>ืชืืืช</translation>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation>ืืชืืืช</translation>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation>ืืืืช</translation>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation>ืืืื</translation>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation>ืฉืืืื ืืืฆืื</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>ืื ืืกืืื ืืืชืื ืืงืืืฅ %1.</translation>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation>ืืืื:</translation>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation>ืขื</translation>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+193"/>
<source>Send Coins</source>
<translation>ืฉืื ืืืืขืืช</translation>
</message>
</context>
<context>
<name>WalletView</name>
<message>
<location filename="../walletview.cpp" line="+42"/>
<source>&Export</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Export the data in the current tab to a file</source>
<translation>ืืฆืื ืื ืชืื ืื ืืืื ืื ืืืื ืืงืืืฅ</translation>
</message>
<message>
<location line="+193"/>
<source>Backup Wallet</source>
<translation>ืืื ืืจื ืง</translation>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation>ื ืชืื ื ืืจื ืง (*.dat)</translation>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation>ืืืืื ื ืืฉื</translation>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation>ืืืืชื ืฉืืืื ืื ืกืืื ืืฉืืืจ ืืช ืืืืืข ืืืจื ืง ืืืืงืื ืืืืฉ.</translation>
</message>
<message>
<location line="+4"/>
<source>Backup Successful</source>
<translation>ืืืืื ืืืฉืื ืืืฆืืื</translation>
</message>
<message>
<location line="+0"/>
<source>The wallet data was successfully saved to the new location.</source>
<translation>ื ืชืื ื ืืืจื ืง ื ืฉืืจื ืืืฆืืื ืืืงืื ืืืืฉ.</translation>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+94"/>
<source>Speedcoin version</source>
<translation>ืืจืกืช Speedcoin</translation>
</message>
<message>
<location line="+102"/>
<source>Usage:</source>
<translation>ืฉืืืืฉ:</translation>
</message>
<message>
<location line="-29"/>
<source>Send command to -server or litecoind</source>
<translation>ืฉืื ืคืงืืื ื -server ืื litecoind</translation>
</message>
<message>
<location line="-23"/>
<source>List commands</source>
<translation>ืจืฉืืืช ืคืงืืืืช</translation>
</message>
<message>
<location line="-12"/>
<source>Get help for a command</source>
<translation>ืงืื ืขืืจื ืขืืืจ ืคืงืืื</translation>
</message>
<message>
<location line="+24"/>
<source>Options:</source>
<translation>ืืคืฉืจืืืืช:</translation>
</message>
<message>
<location line="+24"/>
<source>Specify configuration file (default: litecoin.conf)</source>
<translation>ืฆืืื ืงืืืฅ ืืืืจืืช (ืืจืืจืช ืืืื: litecoin.conf)</translation>
</message>
<message>
<location line="+3"/>
<source>Specify pid file (default: litecoind.pid)</source>
<translation>ืฆืืื ืงืืืฅ pid (ืืจืืจืช ืืืื: litecoind.pid)</translation>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation>ืฆืืื ืชืืงืืืช ื ืชืื ืื</translation>
</message>
<message>
<location line="-9"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation>ืงืืข ืืช ืืืื ืืืืืื ืฉื ืืกื ืื ืชืื ืื ืืืืืืืื (ืืจืืจืช ืืืื: 25)</translation>
</message>
<message>
<location line="-28"/>
<source>Listen for connections on <port> (default: 24777 or testnet: 34777)</source>
<translation>ืืืื ืืืืืืจืื ื<ืคืืจื> (ืืจืืจืช ืืืื: 24777 ืื ืืจืฉืช ืืืืืงื: 34777)</translation>
</message>
<message>
<location line="+5"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation>ืืืืง ืืื ืืืืชืจ <n> ืืืืืจืื ืืขืืืชืื (ืืจืืจืช ืืืื: 125)</translation>
</message>
<message>
<location line="-48"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation>ืืชืืืจ ืืฆืืืช ืืื ืืืืืช ืืชืืืืช ืขืืืชืื, ืืื ืืชื ืชืง</translation>
</message>
<message>
<location line="+82"/>
<source>Specify your own public address</source>
<translation>ืฆืืื ืืช ืืืชืืืช ืืคืืืืืช ืฉืื</translation>
</message>
<message>
<location line="+3"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation>ืกืฃ ืืืชื ืชืงืืช ืืขืืืชืื ืื ืืืืื ืฉืื ืืืืื (ืืจืืจืช ืืืื: 100)</translation>
</message>
<message>
<location line="-134"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation>ืืกืคืจ ืฉื ืืืช ืืื ืืข ืืขืืืชืื ืื ืืืืื ืฉืื ืืืืื ืืืืชืืืจ ืืืืฉ (ืืจืืจืช ืืืื: 86400)</translation>
</message>
<message>
<location line="-29"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation>ืืืจืขื ืฉืืืื ืืขืช ืืืืจืช ืคืืจื RPC %u ืืืืื ื ื-IPv4: %s</translation>
</message>
<message>
<location line="+27"/>
<source>Listen for JSON-RPC connections on <port> (default: 24776 or testnet: 34776)</source>
<translation>ืืืื ืืืืืืจื JSON-RPC ื- <port> (ืืจืืจืช ืืืื: 24776 ืื ืจืฉืช ืืืืงื: 34776)</translation>
</message>
<message>
<location line="+37"/>
<source>Accept command line and JSON-RPC commands</source>
<translation>ืงืื ืคืงืืืืช ืืฉืืจืช ืืคืงืืื ื- JSON-RPC</translation>
</message>
<message>
<location line="+76"/>
<source>Run in the background as a daemon and accept commands</source>
<translation>ืจืืฅ ืืจืงืข ืืืืืื ืืงืื ืคืงืืืืช</translation>
</message>
<message>
<location line="+37"/>
<source>Use the test network</source>
<translation>ืืฉืชืืฉ ืืจืฉืช ืืืืืงื</translation>
</message>
<message>
<location line="-112"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation>ืงืื ืืืืืจืื ืืืืืฅ (ืืจืืจืช ืืืื: 1 ืืื -proxy ืื -connect)</translation>
</message>
<message>
<location line="-80"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=litecoinrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "Speedcoin Alert" [email protected]
</source>
<translation>%s, ืขืืื ืืงืืืข ืกืืกืืช RPC ืืงืืืฅ ืืงืื ืคืืืืจืฆืื:
%s
ืืืืืฅ ืืืฉืชืืฉ ืืกืืกืื ืืืงืจืืืช ืืืื:
rpcuser=litecoinrpc
rpcpassword=%s
(ืืื ืฆืืจื ืืืืืจ ืืช ืืกืืกืื)
ืืกืืจ ืฉืฉื ืืืฉืชืืฉ ืืืกืืกืื ืืืื ืืืื.
ืื ืืงืืืฅ ืืื ื ืงืืื, ืฆืืจ ืืืชื ืขื ืืจืฉืืืช ืงืจืืื ืืืขืืื ืืืื.
ืื ืืืืืฅ ืืกืื alertnotify ืืื ืืงืื ืืืืื ืขื ืชืงืืืช;
ืืืฉื: alertnotify=echo %%s | mail -s "Speedcoin Alert" [email protected]
</translation>
</message>
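<!-- Format note: in the message above, the doubled %%s in the alertnotify example is a printf-style escape that reaches the shell as a literal %s, while the bare %s placeholders are substituted by the client at runtime. -->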
<message>
<location line="+17"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation>ืืืจืขื ืฉืืืื ืืขืช ืืืืจืช ืคืืจื RPC %u ืืืืื ื ื-IPv6, ื ืกืื ื-IPv4: %s</translation>
</message>
<message>
<location line="+3"/>
<source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source>
<translation>ืงืฉืืจ ืขื ืืชืืืช ื ืชืื ื ืืืืื ืื ืชืืื. ืืฉืชืืฉ ืืกืืืื [host]:port ืขืืื IPv6.</translation>
</message>
<message>
<location line="+3"/>
<source>Cannot obtain a lock on data directory %s. Speedcoin is probably running already. You need to look at the Speedcoin icon on your taskbar/system tray.</source>
<translation>ืื ืืกืืื ืืืฉืื ื ืขืืื ืขื ืชืืงืืืช ืื ืชืื ืื %s. ืื ืจืื ืฉ-Speedcoin ืืืจ ืจืฅ. ืืืืง ืืช ืกืื Speedcoin ืืฉืืจืช ืืืฉืืืืช ืื ืืืืฉ ืืืขืจืืช ืฉืื.</translation>
</message>
<message>
<location line="+3"/>
<source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>ืฉืืืื: ืืคืขืืื ื ืืืชื! ืื ืขืืื ืืงืจืืช ืื ืืื ืืืืืืขืืช ืืืจื ืง ืฉืื ืืืจ ื ืืฆืื, ืืืฉื ืื ืืฉืชืืฉืช ืืขืืชืง ืฉื wallet.dat ืืืืืขืืช ื ืฉืืื ืืขืืชืง ืื ืื ืกืืื ื ืืื ืืฆืืืช ืืื.</translation>
</message>
<message>
<location line="+4"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source>
<translation>ืฉืืืื: ืืคืขืืื ืืืืช ืืืจืฉืช ืขืืืช ืคืขืืื ืฉื ืืคืืืช %s ืขืงื ืืืืืช, ืืืืจืืืืช, ืื ืืฉืืืืฉ ืืืกืคืื ืฉืืชืงืืื ืืืืจืื ื!</translation>
</message>
<message>
<location line="+3"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation>ืืฆืข ืคืงืืื ืืืฉืจ ืืชืงืืืช ืืชืจืื ืจืืืื ืืืช (%s ืืฉืืจืช ืืคืงืืื ืืืืืฃ ืืืืืขื)</translation>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation>ืืฆืข ืคืงืืื ืืืฉืจ ืคืขืืืช ืืจื ืง ืืฉืชื ื (%s ื-cmd ืืืืืฃ ื-TxID)</translation>
</message>
<message>
<location line="+11"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation>ืงืืข ืืืื ืืงืกืืืื ืขืืืจ ืคืขืืืืช ืขืืืคืืช ืืืืื/ืขืืื ื ืืืื ืืืชืื (ืืจืืจืช ืืืื: 27000)</translation>
</message>
<message>
<location line="+6"/>
<source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source>
<translation>ืืืื ืื ืืืช ื ืืกืืื ืืจืื-ืฉืืจืืจ - ืืฉืืืืฉ ืื ืขื ืืืจืืืชื - ืืื ืืืฉืชืืฉ ืืฆืืจื ืืจืื ืื ืืืฉืืื ืืกืืจ</translation>
</message>
<message>
<location line="+5"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation>ืืืืจื: -paytxfee ื ืงืืข ืืขืจื ืืื ืืืื! ืืืื ืขืืืช ืืคืขืืื ืฉืชืฉืื ืื ืืชื ืฉืืื ืคืขืืื.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source>
<translation>ืืืืจื: ืืคืขืืืืช ืืืืฆืืืช ืขืฉืืืืช ืื ืืืืืช ื ืืื ืืช! ืืืชืื ืืืชื ืฆืจืื ืืฉืืจื, ืื ืฉืฆืืชืื ืืืจืื ืฆืจืืืื ืืฉืืจื.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong Speedcoin will not work properly.</source>
<translation>ืืืืจื: ืื ื ืืืืง ืฉืืชืืจืื ืืืฉืขื ืฉื ืืืืฉื ืฉืื ื ืืื ืื! ืื ืืฉืขืื ืฉืื ืืื ื ื ืืื, Speedcoin ืื ืืขืืื ืืจืืื.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation>ืืืืจื: ืฉืืืื ืืงืจืืืช wallet.dat! ืื ืืืชืคืืืช ื ืงืจืื ืืืืคื ืชืงืื, ืื ื ืชืื ื ืืคืขืืืืช ืื ืกืคืจ ืืืชืืืืช ืขืืืืื ืืืืืช ืืกืจืื ืื ืฉืืืืื.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation>ืืืืจื: ืงืืืฅ wallet.dat ืืืฉืืช, ืืืืืข ืืืืฅ! ืงืืืฅ wallet.dat ืืืงืืจื ื ืฉืืจ ื - wallet.{timestamp}.bak ื - %s; ืื ืืืืื ืื ืืคืขืืืืช ืฉืืืืื ืขืืื ืืฉืืืจ ืืืืื.</translation>
</message>
<message>
<location line="+14"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation>ื ืกื ืืฉืืืจ ืืคืชืืืช ืคืจืืืื ืืงืืืฅ wallet.dat ืืืฉืืช.</translation>
</message>
<message>
<location line="+2"/>
<source>Block creation options:</source>
<translation>ืืคืฉืจืืืืช ืืฆืืจืช ืืืืง:</translation>
</message>
<message>
<location line="+5"/>
<source>Connect only to the specified node(s)</source>
<translation>ืืชืืืจ ืจืง ืืฆืืชืื ืืืฆืืื ืื</translation>
</message>
<message>
<location line="+3"/>
<source>Corrupted block database detected</source>
<translation>ืืชืืื ืืกื ื ืชืื ื ืืืืงืื ืื ืชืงืื</translation>
</message>
<message>
<location line="+1"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation>ืืื ืืช ืืชืืืช ื-IP ืืขืฆืืืช (ืืจืืจืช ืืืื: 1 ืืฉืืืืื ืื ืืืื -externalip)</translation>
</message>
<message>
<location line="+1"/>
<source>Do you want to rebuild the block database now?</source>
<translation>ืืื ืชืจืฆื ืืขื ืืื ืืช ืืืืฉ ืืช ืืกื ื ืชืื ื ืืืืืงืื?</translation>
</message>
<message>
<location line="+2"/>
<source>Error initializing block database</source>
<translation>ืฉืืืื ืืืชืืื ืืกื ื ืชืื ื ืืืืืงืื</translation>
</message>
<message>
<location line="+1"/>
<source>Error initializing wallet database environment %s!</source>
<translation>ืฉืืืื ืืืชืืื ืกืืืืช ืืกื ื ืชืื ื ืืืจื ืงืื %s!</translation>
</message>
<message>
<location line="+1"/>
<source>Error loading block database</source>
<translation>ืฉืืืื ืืืขืื ืช ืืกื ื ืชืื ื ืืืืืงืื</translation>
</message>
<message>
<location line="+4"/>
<source>Error opening block database</source>
<translation>ืฉืืืื ืืคืชืืืช ืืกื ื ืชืื ื ืืืืืงืื</translation>
</message>
<message>
<location line="+2"/>
<source>Error: Disk space is low!</source>
<translation>ืฉืืืื: ืืขื ืืงืื ืคื ืื ืืืืกืง!</translation>
</message>
<message>
<location line="+1"/>
<source>Error: Wallet locked, unable to create transaction!</source>
<translation>ืฉืืืื: ืืืจื ืง ื ืขืื, ืืื ืืคืฉืจืืช ืืืฆืืจ ืคืขืืื!</translation>
</message>
<message>
<location line="+1"/>
<source>Error: system error: </source>
<translation>ืฉืืืื: ืฉืืืืช ืืขืจืืช:</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation>ืืืื ื ื ืืฉืื ืืื ืคืืจื. ืืฉืชืืฉ ื- -listen=0 ืื ืืจืฆืื ื ืืื.</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to read block info</source>
<translation>ืงืจืืืช ืืืืข ืืืืืงืื ื ืืฉืื</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to read block</source>
<translation>ืงืจืืืช ืืืืืง ื ืืฉืื</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to sync block index</source>
<translation>ืกื ืืจืื ืืื ืืงืก ืืืืืงืื ื ืืฉื</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write block index</source>
<translation>ืืชืืืช ืืื ืืงืก ืืืืืงืื ื ืืฉื</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write block info</source>
<translation>ืืชืืืช ืืืืข ืืืืืงืื ื ืืฉื</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write block</source>
<translation>ืืชืืืช ืืืืืง ื ืืฉืื</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write file info</source>
<translation>ืืชืืืช ืืืืข ืืงืืฆืื ื ืืฉืื</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write to coin database</source>
<translation>ืืชืืืช ืืกื ื ืชืื ื ืืืืืขืืช ื ืืฉืื</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write transaction index</source>
<translation>ืืชืืืช ืืื ืืงืก ืืคืขืืืืช ื ืืฉืื</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write undo data</source>
<translation>ืืชืืืช ื ืชืื ื ืืืืื ื ืืฉืื</translation>
</message>
<message>
<location line="+2"/>
<source>Find peers using DNS lookup (default: 1 unless -connect)</source>
<translation>ืืฆื ืขืืืชืื ืข"ื ืืืคืืฉ DNS (ืืจืืจืช ืืืื: 1 ืืื -connect)</translation>
</message>
<message>
<location line="+1"/>
<source>Generate coins (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 288, 0 = all)</source>
<translation>ืืกืคืจ ืืืืืงืื ืืืืืง ืืขืช ืืชืืื (ืืจืืจืช ืืืื: 288, 0 = ืืืื)</translation>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-4, default: 3)</source>
<translation>ืืืืช ืืืกืืืืืช ืฉื ืืืืืช ืืืืืงืื (0-4, ืืจืืจืช ืืืื: 3)</translation>
</message>
<message>
<location line="+19"/>
<source>Not enough file descriptors available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Rebuild block chain index from current blk000??.dat files</source>
<translation>ืื ื ืืืืฉ ืืช ืืื ืืง ืฉืจืฉืจืช ืืืืืงืื ืืงืืฆื ื-blk000??.dat ืื ืืืืืื.</translation>
</message>
<message>
<location line="+16"/>
<source>Set the number of threads to service RPC calls (default: 4)</source>
<translation>ืงืืข ืืช ืืกืคืจ ืชืืืืืื ื ืืฉืืจืืช ืงืจืืืืช RPC (ืืจืืจืช ืืืื: 4)</translation>
</message>
<message>
<location line="+26"/>
<source>Verifying blocks...</source>
<translation>ืืืืช ืืืืงืื...</translation>
</message>
<message>
<location line="+1"/>
<source>Verifying wallet...</source>
<translation>ืืืืช ืืจื ืง...</translation>
</message>
<message>
<location line="-69"/>
<source>Imports blocks from external blk000??.dat file</source>
<translation>ืืืืื ืืืืงืื ืืงืืืฆื blk000??.dat ืืืฆืื ืืื</translation>
</message>
<message>
<location line="-76"/>
<source>Set the number of script verification threads (up to 16, 0 = auto, <0 = leave that many cores free, default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+77"/>
<source>Information</source>
<translation>ืืืืข</translation>
</message>
<message>
<location line="+3"/>
<source>Invalid -tor address: '%s'</source>
<translation>ืืชืืืช ืื ืชืงืื ื ื -tor: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -minrelaytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -mintxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Maintain a full transaction index (default: 0)</source>
<translation>ืชืืืง ืืื ืืงืก ืคืขืืืืช ืืื (ืืจืืจืช ืืืื: 0)</translation>
</message>
<message>
<location line="+2"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation>ืืืฆืฅ ืงืืื ืืืจืื ืืื ืืืืืจ, <n>*1000 ืืชืื (ืืจืืจืช ืืืื: 5000)</translation>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation>ืืืฆืฅ ืฉืืืื ืืืจืื ืืื ืืืืืจ, <n>*1000 ืืชืื (ืืจืืจืช ืืืื: 1000)</translation>
</message>
<message>
<location line="+2"/>
<source>Only accept block chain matching built-in checkpoints (default: 1)</source>
<translation>ืงืื ืจืง ืฉืจืฉืจืช ืืืืงืื ืืชืืืืช ื ืงืืืืช ืืืงืืจืช ืืืื ืืช (ืืจืืจืช ืืืื: 1)</translation>
</message>
<message>
<location line="+1"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation>ืืชืืืจ ืจืง ืืฆืืชืื ืืจืฉืช <net> (IPv4, IPv6 ืื Tor)</translation>
</message>
<message>
<location line="+2"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation>ืคืืื ืืืืข ื ืืคืื ื ืืกืฃ. ื ืืืข ืืื ืื ืืคืฉืจืืืืช -debug* ืืืืจืืช.</translation>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation>ืคืืื ืืืืข ื ืืกืฃ ืื ืืคืื ืฉืืืืืช ืืจืฉืช.</translation>
</message>
<message>
<location line="+2"/>
<source>Prepend debug output with timestamp</source>
<translation>ืืืกืฃ ืืืชืืช ืืื ืืคื ื ืคืื ืืืืื</translation>
</message>
<message>
<location line="+5"/>
<source>SSL options: (see the Speedcoin Wiki for SSL setup instructions)</source>
<translation>ืืคืฉืจืืืืช SSL: (ืจืื ืืช ืืืืงื ืฉื Speedcoin ืขืืืจ ืืืจืืืช ืืืืจืช SSL)</translation>
</message>
<message>
<location line="+1"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation>ืืืจ ืืช ืืจืกืช ืคืจืืงืกื SOCKS ืืืฉืชืืฉ ืื (4-5, ืืจืืจืช ืืืื: 5)</translation>
</message>
<message>
<location line="+3"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation>ืฉืื ืืืืข ืืืืื ืืขืงืื ืืงืื ืกืืื ืืืงืื ืืงืืืฅ debug.log</translation>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation>ืฉืื ืืืืข ืืืืื ืืขืงืื ืืืื ืืืืื</translation>
</message>
<message>
<location line="+5"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation>ืงืืข ืืช ืืืื ืืืืืง ืืืืจืื ืืืชืื (ืืจืืจืช ืืืื: 250000)</translation>
</message>
<message>
<location line="+1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation>ืงืืข ืืช ืืืื ืืืืืง ืืืื ืืืื ืืืชืื (ืืจืืจืช ืืืื: 0)</translation>
</message>
<message>
<location line="+2"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation>ืืืืฅ ืืช ืงืืืฅ debug.log ืืืคืขืืช ืืงืืืื ื (ืืจืืจืช ืืืื: 1 ืืื -debug)</translation>
</message>
<message>
<location line="+1"/>
<source>Signing transaction failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation>ืฆืืื ืืืืืช ืืื ืืืืืืจ ืืืืืืฉื ืืืช (ืืจืืจืช ืืืื: 5000)</translation>
</message>
<message>
<location line="+4"/>
<source>System error: </source>
<translation>ืฉืืืืช ืืขืจืืช:</translation>
</message>
<message>
<location line="+4"/>
<source>Transaction amount too small</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transaction amounts must be positive</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transaction too large</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation>ืืฉืชืืฉ ื-UPnP ืืื ืืืคืืช ืืช ืืคืืจื ืืืืื ื (ืืจืืจืช ืืืื: 0)</translation>
</message>
<message>
<location line="+1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation>ืืฉืชืืฉ ื-UPnP ืืื ืืืคืืช ืืช ืืคืืจื ืืืืื ื (ืืจืืจืช ืืืื: 1 ืืขืช ืืืื ื)</translation>
</message>
<message>
<location line="+1"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation>ืืฉืชืืฉ ืืคืจืืงืกื ืืื ืืืืืข ืืฉืืจืืชืื ืืืืืื ื-tor (ืืจืืจืช ืืืื: ืืื ื- -proxy)</translation>
</message>
<message>
<location line="+2"/>
<source>Username for JSON-RPC connections</source>
<translation>ืฉื ืืฉืชืืฉ ืืืืืืจื JSON-RPC</translation>
</message>
<message>
<location line="+4"/>
<source>Warning</source>
<translation>ืืืืจื</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation>ืืืืจื: ืืืจืกื ืืืืช ืืืืฉื ืช, ืืฉ ืฆืืจื ืืฉืืจืื!</translation>
</message>
<message>
<location line="+1"/>
<source>You need to rebuild the databases using -reindex to change -txindex</source>
<translation>ืขืืื ืืื ืืช ืืืืฉ ืืช ืืกืื ืื ืชืื ืื ืชืื ืฉืืืืฉ ื- -reindex ืขื ืื ืช ืืฉื ืืช ืืช -txindex</translation>
</message>
<message>
<location line="+1"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation>ืงืืืฅ wallet.dat ืืืฉืืช, ืืืืืืฅ ื ืืฉื</translation>
</message>
<message>
<location line="-50"/>
<source>Password for JSON-RPC connections</source>
<translation>ืกืืกืื ืืืืืืจื JSON-RPC</translation>
</message>
<message>
<location line="-67"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation>ืืคืฉืจ ืืืืืจื JSON-RPC ืืืชืืืช ืืืื ืืจื ื ืืืฆืืื ืช</translation>
</message>
<message>
<location line="+76"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation>ืฉืื ืคืงืืืืช ืืฆืืืช ื-<ip> (ืืจืืจืช ืืืื: 127.0.0.1)</translation>
</message>
<message>
<location line="-120"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation>ืืฆืข ืคืงืืื ืื ืืฉืืืืืง ืืืื ืืืืชืจ ืืฉืชื ื (%s ืืคืงืืื ืืืืืฃ ืืืืืื ืืืืืง)</translation>
</message>
<message>
<location line="+147"/>
<source>Upgrade wallet to latest format</source>
<translation>ืฉืืจื ืืช ืืืจื ืง ืืคืืจืื ืืขืืื ื</translation>
</message>
<message>
<location line="-21"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation>ืงืืข ืืช ืืืื ืืืืจ ืืืคืชืืืช ื-<n> (ืืจืืจืช ืืืื: 100)</translation>
</message>
<message>
<location line="-12"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation>ืกืจืืง ืืืืฉ ืืช ืฉืจืฉืจืช ืืืืืงืื ืืืฆืืืช ืคืขืืืืช ืืกืจืืช ืืืจื ืง</translation>
</message>
<message>
<location line="+35"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation>ืืฉืชืืฉ ื-OpenSSL (https) ืขืืืจ ืืืืืจื JSON-RPC</translation>
</message>
<message>
<location line="-26"/>
<source>Server certificate file (default: server.cert)</source>
<translation>ืงืืืฅ ืชืขืืืช ืฉืจืช (ืืจืืจืช ืืืื: server.cert)</translation>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation>ืืคืชื ืคืจืื ืฉื ืืฉืจืช (ืืจืืจืช ืืืื: server.pem)</translation>
</message>
<message>
<location line="-151"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation>ืฆืคื ืื ืงืืืืื (ืืจืืจืช ืืืื: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</translation>
</message>
<message>
<location line="+165"/>
<source>This help message</source>
<translation>ืืืืขืช ืืขืืจื ืืื</translation>
</message>
<message>
<location line="+6"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation>ืื ืืกืืื ืืงืฉืืจ ื-%s ืืืืฉื ืื (ืืงืฉืืจื ืืืืืจื ืฉืืืื %d, %s)</translation>
</message>
<message>
<location line="-91"/>
<source>Connect through socks proxy</source>
<translation>ืืชืืืจ ืืจื ืคืจืืงืกื SOCKS</translation>
</message>
<message>
<location line="-10"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation>ืืคืฉืจ ืืืืงืช DNS ืขืืืจ -addnode, -seednode ื- -connect</translation>
</message>
<message>
<location line="+55"/>
<source>Loading addresses...</source>
<translation>ืืืขื ืืชืืืืช...</translation>
</message>
<message>
<location line="-35"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation>ืฉืืืื ืืืขืื ืช ืืงืืืฅ wallet.dat: ืืืจื ืง ืืืฉืืช</translation>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat: Wallet requires newer version of Speedcoin</source>
<translation>ืฉืืืื ืืืขืื ืช ืืงืืืฅ wallet.dat: ืืืจื ืง ืืืจืฉ ืืจืกื ืืืฉื ืืืชืจ ืฉื Speedcoin</translation>
</message>
<message>
<location line="+93"/>
<source>Wallet needed to be rewritten: restart Speedcoin to complete</source>
<translation>ืืฉ ืืืชืื ืืืืฉ ืืช ืืืจื ืง: ืืคืขื ืืช Speedcoin ืืืืฉ ืืื ืืกืืื</translation>
</message>
<message>
<location line="-95"/>
<source>Error loading wallet.dat</source>
<translation>ืฉืืืื ืืืขืื ืช ืืงืืืฅ wallet.dat</translation>
</message>
<message>
<location line="+28"/>
<source>Invalid -proxy address: '%s'</source>
<translation>ืืชืืืช -proxy ืื ืชืงืื ื: '%s'</translation>
</message>
<message>
<location line="+56"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation>ืจืฉืช ืื ืืืืขื ืฆืืื ื ื- -onlynet: '%s'</translation>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation>ืืชืืงืฉื ืืจืกืช ืคืจืืงืกื -socks ืื ืืืืขื: %i</translation>
</message>
<message>
<location line="-96"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation>ืื ืืกืืื ืืคืชืืจ ืืชืืืช -bind: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation>ืื ืืกืืื ืืคืชืืจ ืืชืืืช -externalip: '%s'</translation>
</message>
<message>
<location line="+44"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation>ืืืืช ืื ืชืงืื ื ืขืืืจ -paytxfee=<amount>: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount</source>
<translation>ืืืืช ืื ืชืงืื ื</translation>
</message>
<message>
<location line="-6"/>
<source>Insufficient funds</source>
<translation>ืืื ืืกืคืืง ืืกืคืื</translation>
</message>
<message>
<location line="+10"/>
<source>Loading block index...</source>
<translation>ืืืขื ืืช ืืื ืืงืก ืืืืืงืื...</translation>
</message>
<message>
<location line="-57"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation>ืืืกืฃ ืฆืืืช ืืืชืืืจืืช ืื ืกื ืืฉืืืจ ืืช ืืืืืืจ ืคืชืื</translation>
</message>
<message>
<location line="-25"/>
<source>Unable to bind to %s on this computer. Speedcoin is probably already running.</source>
<translation>ืื ื ืืชื ืืงืฉืืจ ื-%s ืืืืฉื ืื. Speedcoin ืื ืจืื ืืืจ ืจืฅ.</translation>
</message>
<message>
<location line="+64"/>
<source>Fee per KB to add to transactions you send</source>
<translation>ืขืืื ืืืืกืืฃ ืืคืขืืืืช ืฉืืชื ืฉืืื ืขืืืจ ืื KB</translation>
</message>
<message>
<location line="+19"/>
<source>Loading wallet...</source>
<translation>ืืืขื ืืจื ืง...</translation>
</message>
<message>
<location line="-52"/>
<source>Cannot downgrade wallet</source>
<translation>ืื ืืืื ืืืืจืื ืืจืืช ืืืจื ืง</translation>
</message>
<message>
<location line="+3"/>
<source>Cannot write default address</source>
<translation>ืื ืืืื ืืืชืื ืืช ืืชืืืช ืืจืืจืช ืืืืื</translation>
</message>
<message>
<location line="+64"/>
<source>Rescanning...</source>
<translation>ืกืืจืง ืืืืฉ...</translation>
</message>
<message>
<location line="-57"/>
<source>Done loading</source>
<translation>ืืขืื ื ืืืฉืืื</translation>
</message>
<message>
<location line="+82"/>
<source>To use the %s option</source>
<translation>ืืืฉืชืืฉ ืืืคืฉืจืืช %s</translation>
</message>
<message>
<location line="-74"/>
<source>Error</source>
<translation>ืฉืืืื</translation>
</message>
<message>
<location line="-31"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation>ืขืืื ืืงืืืข rpcpassword=yourpassword ืืงืืืฅ ืืืืืจืืช:
%s
ืื ืืงืืืฅ ืืื ื ืงืืื, ืฆืืจ ืืืชื ืขื ืืจืฉืืืช ืงืจืืื ืืืขืืื ืืืื.</translation>
</message>
</context>
</TS><๏ฝfimโend๏ฝ> | </message>
<message> |
<|file_name|>remote.py<|end_file_name|><๏ฝfimโbegin๏ฝ>import re
from contextlib import contextmanager
from tempfile import NamedTemporaryFile
from plumbum.commands import CommandNotFound, ConcreteCommand, shquote
from plumbum.lib import ProcInfo
from plumbum.machines.base import BaseMachine
from plumbum.machines.env import BaseEnv
from plumbum.machines.local import LocalPath
from plumbum.path.remote import RemotePath, RemoteWorkdir, StatRes
class RemoteEnv(BaseEnv):
"""The remote machine's environment; exposes a dict-like interface"""
__slots__ = ["_orig", "remote"]
def __init__(self, remote):
session = remote._session
# GNU env has a -0 argument; use it if present. Otherwise,
# fall back to calling printenv on each (possible) variable
# from plain env.
env0 = session.run("env -0; echo")
if env0[0] == 0 and not env0[2].rstrip():
_curr = dict(
line.split("=", 1) for line in env0[1].split("\x00") if "=" in line
)
else:
lines = session.run("env; echo")[1].splitlines()
split = (line.split("=", 1) for line in lines)
keys = (line[0] for line in split if len(line) > 1)
runs = ((key, session.run(f'printenv "{key}"; echo')) for key in keys)
_curr = {
key: run[1].rstrip("\n")
for (key, run) in runs
if run[0] == 0 and run[1].rstrip("\n") and not run[2]
}
super().__init__(remote.path, ":", _curr=_curr)
self.remote = remote
self._orig = self._curr.copy()
def __delitem__(self, name):
BaseEnv.__delitem__(self, name)
self.remote._session.run(f"unset {name}")
def __setitem__(self, name, value):
BaseEnv.__setitem__(self, name, value)
self.remote._session.run(f"export {name}={shquote(value)}")
def pop(self, name, *default):
BaseEnv.pop(self, name, *default)
self.remote._session.run(f"unset {name}")
def update(self, *args, **kwargs):
BaseEnv.update(self, *args, **kwargs)
self.remote._session.run(
"export " + " ".join(f"{k}={shquote(v)}" for k, v in self.getdict().items())
)
def expand(self, expr):
"""Expands any environment variables and home shortcuts found in ``expr``
(like ``os.path.expanduser`` combined with ``os.path.expandvars``)
:param expr: An expression containing environment variables (as ``$FOO``) or
home shortcuts (as ``~/.bashrc``)
:returns: The expanded string"""
return self.remote.expand(expr)
def expanduser(self, expr):
"""Expand home shortcuts (e.g., ``~/foo/bar`` or ``~john/foo/bar``)
:param expr: An expression containing home shortcuts
:returns: The expanded string"""
return self.remote.expanduser(expr)
# def clear(self):
# BaseEnv.clear(self, *args, **kwargs)
# self.remote._session.run("export %s" % " ".join("%s=%s" % (k, v) for k, v in self.getdict()))
def getdelta(self):
"""Returns the difference between the this environment and the original environment of
the remote machine"""
self._curr["PATH"] = self.path.join()
delta = {}
for k, v in self._curr.items():
if k not in self._orig:
delta[k] = str(v)
for k, v in self._orig.items():
if k not in self._curr:
delta[k] = ""
else:
if v != self._curr[k]:
delta[k] = self._curr[k]
return delta
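    # Illustrative usage (a sketch; ``rem`` is an already-connected machine):
    #   rem.env["FOO"] = "bar"   # added var   -> delta contains {"FOO": "bar"}
    #   del rem.env["LANG"]      # removed var -> delta contains {"LANG": ""}
    #   rem.env.getdelta()       # reports only the changes vs. the login env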
class RemoteCommand(ConcreteCommand):
__slots__ = ["remote", "executable"]
QUOTE_LEVEL = 1
def __init__(self, remote, executable, encoding="auto"):
self.remote = remote
ConcreteCommand.__init__(
self, executable, remote.custom_encoding if encoding == "auto" else encoding
)
@property
def machine(self):
return self.remote
def __repr__(self):
return f"RemoteCommand({self.remote!r}, {self.executable!r})"
def popen(self, args=(), **kwargs):
return self.remote.popen(self[args], **kwargs)
def nohup(self, cwd=".", stdout="nohup.out", stderr=None, append=True):
"""Runs a command detached."""
return self.machine.daemonic_popen(self, cwd, stdout, stderr, append)
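    # Example (a sketch): start a long-running remote job detached from the
    # session; with the defaults above, output is appended to ./nohup.out on
    # the remote machine:
    #   rem["tar"]["czf", "backup.tgz", "data"].nohup()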
class ClosedRemoteMachine(Exception):
pass
class ClosedRemote:
__slots__ = ["_obj", "__weakref__"]
def __init__(self, obj):
self._obj = obj
def close(self):
pass
def __getattr__(self, name):
raise ClosedRemoteMachine(f"{self._obj!r} has been closed")
class BaseRemoteMachine(BaseMachine):
"""Represents a *remote machine*; serves as an entry point to everything related to that
remote machine, such as working directory and environment manipulation, command creation,
etc.
Attributes:
* ``cwd`` - the remote working directory
* ``env`` - the remote environment
* ``custom_encoding`` - the remote machine's default encoding (assumed to be UTF8)
* ``connect_timeout`` - the connection timeout
    There is also a ``_cwd`` attribute that caches the working directory (it is deleted when the cwd changes).
"""
# allow inheritors to override the RemoteCommand class
RemoteCommand = RemoteCommand
@property
def cwd(self):
if not hasattr(self, "_cwd"):
self._cwd = RemoteWorkdir(self)
return self._cwd
def __init__(self, encoding="utf8", connect_timeout=10, new_session=False):
self.custom_encoding = encoding
self.connect_timeout = connect_timeout
self._session = self.session(new_session=new_session)
self.uname = self._get_uname()
self.env = RemoteEnv(self)
self._python = None
self._program_cache = {}
def _get_uname(self):
rc, out, _ = self._session.run("uname", retcode=None)
if rc == 0:
return out.strip()
rc, out, _ = self._session.run(
"python3 -c 'import platform;print(platform.uname()[0])'", retcode=None
)
if rc == 0:
return out.strip()
# all POSIX systems should have uname. make an educated guess it's Windows
return "Windows"
def __repr__(self):
return f"<{self.__class__.__name__} {self}>"
def __enter__(self):
return self
def __exit__(self, t, v, tb):
self.close()
def close(self):
"""closes the connection to the remote machine; all paths and programs will
become defunct"""
self._session.close()
self._session = ClosedRemote(self)
def path(self, *parts):
"""A factory for :class:`RemotePaths <plumbum.path.remote.RemotePath>`.
Usage: ``p = rem.path("/usr", "lib", "python2.7")``
"""
parts2 = [str(self.cwd)]
for p in parts:
if isinstance(p, LocalPath):
raise TypeError(f"Cannot construct RemotePath from {p!r}")
parts2.append(self.expanduser(str(p)))
return RemotePath(self, *parts2)
def which(self, progname):
"""Looks up a program in the ``PATH``. If the program is not found, raises
:class:`CommandNotFound <plumbum.commands.CommandNotFound>`
:param progname: The program's name. Note that if underscores (``_``) are present
in the name, and the exact name is not found, they will be replaced
in turn by hyphens (``-``) then periods (``.``), and the name will
be looked up again for each alternative
        :returns: A :class:`RemotePath <plumbum.path.remote.RemotePath>`
"""
key = (progname, self.env.get("PATH", ""))
try:
return self._program_cache[key]
except KeyError:
pass
alternatives = [progname]
if "_" in progname:
alternatives.append(progname.replace("_", "-"))
alternatives.append(progname.replace("_", "."))
for name in alternatives:
for p in self.env.path:
fn = p / name
if fn.access("x") and not fn.is_dir():<๏ฝfimโhole๏ฝ> return fn
raise CommandNotFound(progname, self.env.path)
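    # e.g. ``rem.which("apt_get")`` tries "apt_get", then "apt-get", then
    # "apt.get" along the remote PATH before raising CommandNotFound.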
def __getitem__(self, cmd):
"""Returns a `Command` object representing the given program. ``cmd`` can be a string or
a :class:`RemotePath <plumbum.path.remote.RemotePath>`; if it is a path, a command
representing this path will be returned; otherwise, the program name will be looked up in
the system's ``PATH`` (using ``which``). Usage::
r_ls = rem["ls"]
"""
if isinstance(cmd, RemotePath):
if cmd.remote is self:
return self.RemoteCommand(self, cmd)
raise TypeError(
f"Given path does not belong to this remote machine: {cmd!r}"
)
if not isinstance(cmd, LocalPath):
return self.RemoteCommand(
self, self.path(cmd) if "/" in cmd or "\\" in cmd else self.which(cmd)
)
raise TypeError(f"cmd must not be a LocalPath: {cmd!r}")
@property
def python(self):
"""A command that represents the default remote python interpreter"""
if not self._python:
self._python = self["python3"]
return self._python
def session(self, isatty=False, new_session=False):
"""Creates a new :class:`ShellSession <plumbum.session.ShellSession>` object; this invokes the user's
shell on the remote machine and executes commands on it over stdin/stdout/stderr"""
raise NotImplementedError()
def download(self, src, dst):
"""Downloads a remote file/directory (``src``) to a local destination (``dst``).
``src`` must be a string or a :class:`RemotePath <plumbum.path.remote.RemotePath>`
pointing to this remote machine, and ``dst`` must be a string or a
:class:`LocalPath <plumbum.machines.local.LocalPath>`"""
raise NotImplementedError()
def upload(self, src, dst):
"""Uploads a local file/directory (``src``) to a remote destination (``dst``).
``src`` must be a string or a :class:`LocalPath <plumbum.machines.local.LocalPath>`,
and ``dst`` must be a string or a :class:`RemotePath <plumbum.path.remote.RemotePath>`
pointing to this remote machine"""
raise NotImplementedError()
def popen(self, args, **kwargs):
"""Spawns the given command on the remote machine, returning a ``Popen``-like object;
do not use this method directly, unless you need "low-level" control on the remote
process"""
raise NotImplementedError()
def list_processes(self):
"""
Returns information about all running processes (on POSIX systems: using ``ps``)
.. versionadded:: 1.3
"""
ps = self["ps"]
lines = ps("-e", "-o", "pid,uid,stat,args").splitlines()
lines.pop(0) # header
for line in lines:
parts = line.strip().split()
yield ProcInfo(int(parts[0]), int(parts[1]), parts[2], " ".join(parts[3:]))
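    # Example (a sketch): each yielded ProcInfo carries (pid, uid, stat, args),
    # so ``[p.pid for p in rem.list_processes() if "sshd" in p.args]`` collects
    # the PIDs of all sshd processes.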
def pgrep(self, pattern):
"""
Process grep: return information about all processes whose command-line args match the given regex pattern
"""
pat = re.compile(pattern)
for procinfo in self.list_processes():
if pat.search(procinfo.args):
yield procinfo
@contextmanager
def tempdir(self):
"""A context manager that creates a remote temporary directory, which is removed when
the context exits"""
_, out, _ = self._session.run(
"mktemp -d 2>/dev/null || mktemp -d tmp.XXXXXXXXXX"
)
local_dir = self.path(out.strip())
try:
yield local_dir
finally:
local_dir.delete()
#
# Path implementation
#
def _path_listdir(self, fn):
files = self._session.run(f"ls -a {shquote(fn)}")[1].splitlines()
files.remove(".")
files.remove("..")
return files
def _path_glob(self, fn, pattern):
# shquote does not work here due to the way bash loops use space as a separator
pattern = pattern.replace(" ", r"\ ")
fn = fn.replace(" ", r"\ ")
matches = self._session.run(rf"for fn in {fn}/{pattern}; do echo $fn; done")[
1
].splitlines()
if len(matches) == 1 and not self._path_stat(matches[0]):
return [] # pattern expansion failed
return matches
def _path_getuid(self, fn):
stat_cmd = (
"stat -c '%u,%U' "
if self.uname not in ("Darwin", "FreeBSD")
else "stat -f '%u,%Su' "
)
return self._session.run(stat_cmd + shquote(fn))[1].strip().split(",")
def _path_getgid(self, fn):
stat_cmd = (
"stat -c '%g,%G' "
if self.uname not in ("Darwin", "FreeBSD")
else "stat -f '%g,%Sg' "
)
return self._session.run(stat_cmd + shquote(fn))[1].strip().split(",")
def _path_stat(self, fn):
if self.uname not in ("Darwin", "FreeBSD"):
stat_cmd = "stat -c '%F,%f,%i,%d,%h,%u,%g,%s,%X,%Y,%Z' "
else:
stat_cmd = "stat -f '%HT,%Xp,%i,%d,%l,%u,%g,%z,%a,%m,%c' "
rc, out, _ = self._session.run(stat_cmd + shquote(fn), retcode=None)
if rc != 0:
return None
statres = out.strip().split(",")
text_mode = statres.pop(0).lower()
res = StatRes((int(statres[0], 16),) + tuple(int(sr) for sr in statres[1:]))
res.text_mode = text_mode
return res
def _path_delete(self, fn):
self._session.run(f"rm -rf {shquote(fn)}")
def _path_move(self, src, dst):
self._session.run(f"mv {shquote(src)} {shquote(dst)}")
def _path_copy(self, src, dst):
self._session.run(f"cp -r {shquote(src)} {shquote(dst)}")
def _path_mkdir(
self,
fn,
mode=None, # pylint: disable=unused-argument
minus_p=True,
):
p_str = "-p " if minus_p else ""
cmd = f"mkdir {p_str}{shquote(fn)}"
self._session.run(cmd)
def _path_chmod(self, mode, fn):
self._session.run(f"chmod {mode:o} {shquote(fn)}")
def _path_touch(self, path):
self._session.run(f"touch {path}")
def _path_chown(self, fn, owner, group, recursive):
args = ["chown"]
if recursive:
args.append("-R")
if owner is not None and group is not None:
args.append(f"{owner}:{group}")
elif owner is not None:
args.append(str(owner))
elif group is not None:
args.append(f":{group}")
args.append(shquote(fn))
self._session.run(" ".join(args))
def _path_read(self, fn):
data = self["cat"](fn)
if self.custom_encoding and isinstance(data, str):
data = data.encode(self.custom_encoding)
return data
def _path_write(self, fn, data):
if self.custom_encoding and isinstance(data, str):
data = data.encode(self.custom_encoding)
with NamedTemporaryFile() as f:
f.write(data)
f.flush()
f.seek(0)
self.upload(f.name, fn)
def _path_link(self, src, dst, symlink):
symlink_str = "-s " if symlink else ""
self._session.run(f"ln {symlink_str}{shquote(src)} {shquote(dst)}")
def expand(self, expr):
return self._session.run(f"echo {expr}")[1].strip()
def expanduser(self, expr):
if not any(part.startswith("~") for part in expr.split("/")):
return expr
# we escape all $ signs to avoid expanding env-vars
expr_repl = expr.replace("$", "\\$")
return self._session.run(f"echo {expr_repl}")[1].strip()<๏ฝfimโend๏ฝ> | self._program_cache[key] = fn |
<|file_name|>seamcarver.js<|end_file_name|><๏ฝfimโbegin๏ฝ>
function SeamCarver(ctx) {
var w = ctx.canvas.width;
var h = ctx.canvas.height;
var imgd = ctx.getImageData(0, 0, w, h);
var pix = imgd.data;
var img = [];
for (var i = 0; i < h; i++) {
img.push(new Uint32Array(w));
for (var j = 0; j < w; j++) {
img[i][j] = (pix[4 * i * w + 4 * j] << 16) + (pix[4 * i * w + 4 * j + 1] << 8) + pix[4 * i * w + 4 * j + 2];
}
}
this._img = img;
this._w = w;
this._h = h;
    this._removedSeams = [];
}
SeamCarver.prototype.energy = function (x, y) {
return this._energyInternal(x, y);
}
SeamCarver.prototype.imageData = function (ctx) {
var w = this._w;
var h = this._h;
var id = ctx.createImageData(w, h);
for (var i = 0; i < h; i++) {
for (var j = 0; j < w; j++) {
var color = this._img[i][j];
var r = color >> 16 & 0xFF;
var g = color >> 8 & 0xFF;
var b = color & 0xFF;
var index = 4 * w * i + 4 * j;
id.data[index] = r;
id.data[index + 1] = g;
id.data[index + 2] = b;
id.data[index + 3] = 255;
}
}
return id;
}
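// Dynamic programming over rows: distTo[i][j] holds the cheapest cumulative
// energy of any seam ending at pixel (i, j), and edgeTo[i][j] records which of
// the three upper neighbours (j-1, j, j+1) achieved it, so the seam can be
// rebuilt bottom-up from the cheapest entry in the last row.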
SeamCarver.prototype.findVerticalSeam = function () {
var w = this._w;
var h = this._h;
var edgeTo = [];
var distTo = [];
distTo.push(new Float32Array(w));
edgeTo.push(new Int16Array(w).fill(-1));
for (var i = 1; i < h; i++) {
distTo[i] = new Float32Array(w);
edgeTo[i] = new Int16Array(w).fill(-1);
for (var j = 0; j < w; j++) {
distTo[i][j] = Number.MAX_VALUE;
}
}
for (var i = 1; i < h; i++) {
var prevRow = distTo[i - 1];
for (var j = 1; j < w - 1; j++) {
<๏ฝfimโhole๏ฝ> var dright = prevRow[j + 1];
if (dleft < dcenter && dleft < dright) {
distTo[i][j] = dleft + energy;
edgeTo[i][j] = j - 1;
}
else if (dcenter < dright) {
distTo[i][j] = dcenter + energy;
edgeTo[i][j] = j;
}
else {
distTo[i][j] = dright + energy;
edgeTo[i][j] = j + 1;
}
}
}
var min = Number.MAX_VALUE;
var minIndex = -1;
for (var i = 0; i < w; i++) {
if (distTo[h - 1][i] < min) {
min = distTo[h - 1][i];
minIndex = i;
}
}
distTo[h - 1][minIndex] = Number.MAX_VALUE;
var path = [minIndex];
var curIndex = minIndex;
for (var i = h - 1; i > 0; i--) {
        curIndex = edgeTo[i][curIndex];
path.push(curIndex);
}
return path;
}
SeamCarver.prototype.removeVerticalSeam = function () {
var seam = this.findVerticalSeam();
var h = this._h;
var res = [];
for (var i = 0; i < seam.length; i++) {
var col = seam[i];
res.push({ col: col, color: this._img[h - i - 1][col] });
for (var j = col; j < this._w - 1; j++) {
this._img[h - i - 1][j] = this._img[h - i - 1][j + 1];
}
}
this._w--;
this._removedSeams.push(res);
}
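// Each removed seam is stored as an array of {col, color} records (bottom row
// first); restoreVerticalSeam below pops the most recent record array and
// re-inserts the saved pixels, undoing removals in LIFO order.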
SeamCarver.prototype.restoreVerticalSeam = function () {
var w = this._w;
var h = this._h;
if (this._removedSeams.length == 0) {
return;
}
var seam = this._removedSeams.pop();
for (var i = 0; i < seam.length; i++) {
var row = this._img[h - i - 1];
var col = seam[i].col;
var color = seam[i].color;
for (var j = w - 1; j >= col; j--) {
row[j + 1] = row[j];
}
row[col] = color;
}
this._w++;
}
SeamCarver.prototype.width = function () {
return this._w;
}
SeamCarver.prototype._energyInternal = function (col, row) {
if (col == 0 || row == 0 || col == this._w - 1 || row == this._h - 1) {
return 1000;
}
var x1 = this._img[row][col - 1];
var x1r = x1 >> 16 & 0xFF;
var x1g = x1 >> 8 & 0xFF;
var x1b = x1 & 0xFF;
var x2 = this._img[row][col + 1];
var x2r = x2 >> 16 & 0xFF;
var x2g = x2 >> 8 & 0xFF;
var x2b = x2 & 0xFF;
var y1 = this._img[row - 1][col];
var y1r = y1 >> 16 & 0xFF;
var y1g = y1 >> 8 & 0xFF;
var y1b = y1 & 0xFF;
var y2 = this._img[row + 1][col];
var y2r = y2 >> 16 & 0xFF;
var y2g = y2 >> 8 & 0xFF;
var y2b = y2 & 0xFF;
var dx = (x1r - x2r) * (x1r - x2r) + (x1g - x2g) * (x1g - x2g) + (x1b - x2b) * (x1b - x2b);
var dy = (y1r - y2r) * (y1r - y2r) + (y1g - y2g) * (y1g - y2g) + (y1b - y2b) * (y1b - y2b);
return Math.sqrt(dx + dy);
}<๏ฝfimโend๏ฝ> | var energy = this._energyInternal(j, i);
var dleft = prevRow[j - 1];
var dcenter = prevRow[j]; |
<|file_name|>data-field.go<|end_file_name|><๏ฝfimโbegin๏ฝ>package sprucelib
// A Field is a concrete instance of a FieldType attached to a ContentType in a particular<๏ฝfimโhole๏ฝ>//
// For example, the "Department" Field on a "Staff" ContentType which should be the
// 3rd field in the UI.
//
// NOTE: This type does not store data values. In the example above, the value "Marketing"
// is attached to a Resource as a FieldValue
type Field struct {
ID string // TODO Should this be a GUID?
Name string
TypeID string
Type FieldType
Position int // Used to sort fields in the UI
MinValues int // The minimum number of FieldValues required to be assigned in the UI
MaxValues int // The maximum number of FieldValues allowed to be assigned in the UI
}<๏ฝfimโend๏ฝ> | // position. |
<|file_name|>classes_a.js<|end_file_name|><๏ฝfimโbegin๏ฝ>var searchData=
[
['map',['Map',['../d7/db0/classMap.html',1,'']]],
['memorystream',['MemoryStream',['../dd/d97/classMemoryStream.html',1,'']]],
['message',['Message',['../d6/d28/classMessage.html',1,'']]],
['module',['module',['../d0/dd3/classmodule.html',1,'']]],
['mongocollection',['MongoCollection',['../d2/d07/classMongoCollection.html',1,'']]],
['mongocursor',['MongoCursor',['../db/d7d/classMongoCursor.html',1,'']]],
['mongodb',['MongoDB',['../d6/d3d/classMongoDB.html',1,'']]],
['mongoid',['MongoID',['../d2/d6d/classMongoID.html',1,'']]],
['mq',['mq',['../d4/d86/classmq.html',1,'']]],
['mysql',['MySQL',['../d3/da8/classMySQL.html',1,'']]]<๏ฝfimโhole๏ฝ>];<๏ฝfimโend๏ฝ> | |
<|file_name|>show_license.py<|end_file_name|><๏ฝfimโbegin๏ฝ># -*- coding: utf-8 -*-
#
# license.py
#
# Displays a window with the license text.
#
import os
from kivy.clock import Clock
from kivy.uix.rst import RstDocument
from libs.uix.dialogs import dialog, card
class ShowLicense(object):
def show_license(self, *args):
def choice_language_license(on_language):
window = dialog(text=self.data.string_lang_wait, title=self.title)
Clock.schedule_once(
lambda *args: show_license(window, on_language), 0
)
choice_dialog.dismiss()
<๏ฝfimโhole๏ฝ> )
if not os.path.exists(path_to_license):
dialog(text=self.data.string_lang_not_license, title=self.title)
dialog.dismiss()
return
text_license = open(path_to_license).read()
widget_license = RstDocument(
text=text_license, background_color=self.data.alpha,
underline_color=self.data.underline_rst_color
)
card(widget_license, size=(.9, .8))
dialog.dismiss()
choice_dialog = dialog(
text=self.data.string_lang_prev_license, title=self.title,
buttons=[
[self.data.string_lang_on_russian,
lambda *x: choice_language_license(self.data.string_lang_on_russian)],
[self.data.string_lang_on_english,
lambda *x: choice_language_license(self.data.string_lang_on_english)]
]
)<๏ฝfimโend๏ฝ> | def show_license(dialog, on_language):
path_to_license = '{}/license/license_{}.rst'.format(
self.directory, self.data.dict_language[on_language] |
<|file_name|>poll.rs<|end_file_name|><๏ฝfimโbegin๏ฝ>extern crate futures_mutex;
extern crate futures;
<๏ฝfimโhole๏ฝ>fn main() {
let future = future::lazy(|| {
let lock1 = Mutex::new(0);
// Mutex can be easily cloned as long as you need
let lock2 = lock1.clone();
assert!(lock1.poll_lock().is_ready());
assert!(lock2.poll_lock().is_ready());
let mut guard = match lock1.poll_lock() {
Async::Ready(v) => v,
Async::NotReady => unreachable!()
};
*guard += 1;
assert!(lock1.poll_lock().is_not_ready());
assert!(lock2.poll_lock().is_not_ready());
drop(guard);
assert!(lock1.poll_lock().is_ready());
assert!(lock2.poll_lock().is_ready());
Ok::<(), ()>(())
});
future.wait().unwrap();
}<๏ฝfimโend๏ฝ> | use futures_mutex::Mutex;
use futures::{future, Async, Future};
|
<|file_name|>create_table_test.py<|end_file_name|><๏ฝfimโbegin๏ฝ># -*- coding: utf-8; -*-
#
# Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. Crate licenses
# this file to you under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may
# obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# However, if you have executed another commercial license agreement
# with Crate these terms will supersede the license and you may use the
# software solely pursuant to the terms of the relevant commercial agreement.
import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base
from crate.client.sqlalchemy.types import Object, ObjectArray
from crate.client.cursor import Cursor
from unittest import TestCase
from unittest.mock import patch, MagicMock
fake_cursor = MagicMock(name='fake_cursor')
FakeCursor = MagicMock(name='FakeCursor', spec=Cursor)
FakeCursor.return_value = fake_cursor
@patch('crate.client.connection.Cursor', FakeCursor)
class CreateTableTest(TestCase):
<๏ฝfimโhole๏ฝ> def test_create_table_with_basic_types(self):
class User(self.Base):
__tablename__ = 'users'
string_col = sa.Column(sa.String, primary_key=True)
unicode_col = sa.Column(sa.Unicode)
text_col = sa.Column(sa.Text)
int_col = sa.Column(sa.Integer)
long_col1 = sa.Column(sa.BigInteger)
long_col2 = sa.Column(sa.NUMERIC)
bool_col = sa.Column(sa.Boolean)
short_col = sa.Column(sa.SmallInteger)
datetime_col = sa.Column(sa.DateTime)
date_col = sa.Column(sa.Date)
float_col = sa.Column(sa.Float)
double_col = sa.Column(sa.DECIMAL)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE users (\n\tstring_col STRING, '
'\n\tunicode_col STRING, \n\ttext_col STRING, \n\tint_col INT, '
'\n\tlong_col1 LONG, \n\tlong_col2 LONG, '
'\n\tbool_col BOOLEAN, '
'\n\tshort_col SHORT, '
'\n\tdatetime_col TIMESTAMP, \n\tdate_col TIMESTAMP, '
'\n\tfloat_col FLOAT, \n\tdouble_col DOUBLE, '
'\n\tPRIMARY KEY (string_col)\n)\n\n'),
())
def test_with_obj_column(self):
class DummyTable(self.Base):
__tablename__ = 'dummy'
pk = sa.Column(sa.String, primary_key=True)
obj_col = sa.Column(Object)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE dummy (\n\tpk STRING, \n\tobj_col OBJECT, '
'\n\tPRIMARY KEY (pk)\n)\n\n'),
())
def test_with_clustered_by(self):
class DummyTable(self.Base):
__tablename__ = 't'
__table_args__ = {
'crate_clustered_by': 'p'
}
pk = sa.Column(sa.String, primary_key=True)
p = sa.Column(sa.String)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE t (\n\t'
'pk STRING, \n\t'
'p STRING, \n\t'
'PRIMARY KEY (pk)\n'
') CLUSTERED BY (p)\n\n'),
())
def test_with_partitioned_by(self):
class DummyTable(self.Base):
__tablename__ = 't'
__table_args__ = {
'crate_partitioned_by': 'p',
'invalid_option': 1
}
pk = sa.Column(sa.String, primary_key=True)
p = sa.Column(sa.String)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE t (\n\t'
'pk STRING, \n\t'
'p STRING, \n\t'
'PRIMARY KEY (pk)\n'
') PARTITIONED BY (p)\n\n'),
())
def test_with_number_of_shards_and_replicas(self):
class DummyTable(self.Base):
__tablename__ = 't'
__table_args__ = {
'crate_number_of_replicas': '2',
'crate_number_of_shards': 3
}
pk = sa.Column(sa.String, primary_key=True)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE t (\n\t'
'pk STRING, \n\t'
'PRIMARY KEY (pk)\n'
') CLUSTERED INTO 3 SHARDS WITH (NUMBER_OF_REPLICAS = 2)\n\n'),
())
def test_with_clustered_by_and_number_of_shards(self):
class DummyTable(self.Base):
__tablename__ = 't'
__table_args__ = {
'crate_clustered_by': 'p',
'crate_number_of_shards': 3
}
pk = sa.Column(sa.String, primary_key=True)
p = sa.Column(sa.String, primary_key=True)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE t (\n\t'
'pk STRING, \n\t'
'p STRING, \n\t'
'PRIMARY KEY (pk, p)\n'
') CLUSTERED BY (p) INTO 3 SHARDS\n\n'),
())
def test_table_with_object_array(self):
class DummyTable(self.Base):
__tablename__ = 't'
pk = sa.Column(sa.String, primary_key=True)
tags = sa.Column(ObjectArray)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE t (\n\t'
'pk STRING, \n\t'
'tags ARRAY(OBJECT), \n\t'
'PRIMARY KEY (pk)\n)\n\n'), ())<๏ฝfimโend๏ฝ> | def setUp(self):
self.engine = sa.create_engine('crate://')
self.Base = declarative_base(bind=self.engine)
|
<|file_name|>__init__.py<|end_file_name|><๏ฝfimโbegin๏ฝ># PPFem: An educational finite element code
# Copyright (C) 2015 Matthias Rambausek
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.<๏ฝfimโhole๏ฝ>#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import ppfem.user_elements
import ppfem.user_equations
import ppfem.quadrature
from ppfem.user_elements import *
from ppfem.user_equations import *
from ppfem.quadrature import *
from ppfem.mesh.mesh import Mesh
from ppfem.geometry import Point, Vertex, Line, Face, Cell, Mapping
from ppfem.fem.assembler import DefaultSystemAssembler
from ppfem.fem.form import Functional, LinearForm, BilinearForm, FormCollection
from ppfem.fem.function import FEFunction, FunctionEvaluator
from ppfem.fem.function_space import FunctionSpace
from ppfem.fem.partial_differential_equation import PDE
__all__ = ["Mesh", "Point", "Line", "Vertex", "Face", "Cell", "Mapping", "FunctionSpace", "Functional",
"LinearForm", "BilinearForm", "FormCollection", "DefaultSystemAssembler", "FEFunction", "FunctionEvaluator",
"PDE"]
__all__ += ppfem.user_elements.__all__ + ppfem.quadrature.__all__ + ppfem.user_equations.__all__<๏ฝfimโend๏ฝ> | |
<|file_name|>upload.py<|end_file_name|><๏ฝfimโbegin๏ฝ>#!/usr/bin/env python3
from argparse import ArgumentParser
from os import environ
from sys import argv
from requests import put
def read_file(file_path): # pragma: no cover
with open(file_path, 'rb') as f:
return f.read()
def upload(file_path, repository, repository_path, url, username, password):
url = f'{url}/repository/{repository}/{repository_path}'
data = read_file(file_path)
headers = {'Content-Type': 'application/octet-stream'}
response = put(url, data=data, headers=headers, auth=(username, password))
if response.status_code != 201:
raise OSError(f'{response.status_code}, {response.content}')
return response.status_code
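# Example invocation (illustrative; host and credentials are placeholders):
#   python upload.py myartifact-1.0.0.jar maven-releases \
#       com/myorg/myartifact/1.0.0/myartifact-1.0.0.jar \
#       -l http://nexus.example.com:8081 -u admin -p secret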
def parse_args(args):
parser = ArgumentParser(description='Get assets')
parser.add_argument('file_path', help='File to upload, e.g. ./myartifact-1.0.0.jar')<๏ฝfimโhole๏ฝ> parser.add_argument('repository_path',
help='Path within Nexus3 repository, e.g com/myorg/myartifact/1.0.0/myartifact-1.0.0.jar')
parser.add_argument('-l', '--url', default=environ.get('NEXUS3_REST_URL', None),
help='Nexus3 url, e.g. http://nexus_host:8080')
parser.add_argument('-u', '--username', default=environ.get('NEXUS3_USERNAME', None), help='Nexus3 username')
parser.add_argument('-p', '--password', default=environ.get('NEXUS3_PASSWORD', None), help='Nexus3 password')
return parser.parse_args(args)
def main(file_path, repository, repository_path, url, username, password):
print(upload(file_path, repository, repository_path, url, username, password))
if __name__ == '__main__': # pragma: no cover
main(**parse_args(argv[1:]).__dict__)<๏ฝfimโend๏ฝ> | parser.add_argument('repository', help='Nexus3 repository, e.g. maven-releases') |
<|file_name|>stylesheets.rs<|end_file_name|><๏ฝfimโbegin๏ฝ>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A collection of invalidations due to changes in which stylesheets affect a
//! document.
#![deny(unsafe_code)]
use Atom;
use LocalName as SelectorLocalName;
use dom::{TElement, TNode};
use fnv::FnvHashSet;
use invalidation::element::restyle_hints::{RESTYLE_SELF, RestyleHint};
use media_queries::Device;
use selector_parser::SelectorImpl;
use selectors::attr::CaseSensitivity;
use selectors::parser::{Component, LocalName, Selector};
use shared_lock::SharedRwLockReadGuard;
use stylesheets::{CssRule, StylesheetInDocument};<๏ฝfimโhole๏ฝ>/// need to be restyled. Whether it represents a whole subtree or just a single
/// element is determined by whether the invalidation is stored in the
/// StylesheetInvalidationSet's invalid_scopes or invalid_elements table.
#[derive(Debug, Eq, Hash, PartialEq)]
#[cfg_attr(feature = "servo", derive(MallocSizeOf))]
enum Invalidation {
/// An element with a given id.
ID(Atom),
/// An element with a given class name.
Class(Atom),
/// An element with a given local name.
LocalName { name: SelectorLocalName, lower_name: SelectorLocalName },
}
impl Invalidation {
fn is_id(&self) -> bool {
matches!(*self, Invalidation::ID(..))
}
fn is_id_or_class(&self) -> bool {
matches!(*self, Invalidation::ID(..) | Invalidation::Class(..))
}
fn matches<E>(&self, element: E) -> bool
where E: TElement,
{
match *self {
Invalidation::Class(ref class) => {
// FIXME This should look at the quirks mode of the document to
// determine case sensitivity.
element.has_class(class, CaseSensitivity::CaseSensitive)
}
Invalidation::ID(ref id) => {
match element.get_id() {
// FIXME This should look at the quirks mode of the document
// to determine case sensitivity.
Some(element_id) => element_id == *id,
None => false,
}
}
Invalidation::LocalName { ref name, ref lower_name } => {
// This could look at the quirks mode of the document, instead
// of testing against both names, but it's probably not worth
// it.
let local_name = element.get_local_name();
*local_name == **name || *local_name == **lower_name
}
}
}
}
/// A set of invalidations due to stylesheet additions.
///
/// TODO(emilio): We might be able to do the same analysis for media query
/// changes too (or even selector changes?).
#[cfg_attr(feature = "servo", derive(MallocSizeOf))]
pub struct StylesheetInvalidationSet {
/// The subtrees we know we have to restyle so far.
invalid_scopes: FnvHashSet<Invalidation>,
/// The elements we know we have to restyle so far.
invalid_elements: FnvHashSet<Invalidation>,
/// Whether the whole document should be restyled.
fully_invalid: bool,
}
impl StylesheetInvalidationSet {
/// Create an empty `StylesheetInvalidationSet`.
pub fn new() -> Self {
Self {
invalid_scopes: FnvHashSet::default(),
invalid_elements: FnvHashSet::default(),
fully_invalid: false,
}
}
/// Mark the DOM tree styles' as fully invalid.
pub fn invalidate_fully(&mut self) {
debug!("StylesheetInvalidationSet::invalidate_fully");
self.invalid_scopes.clear();
self.invalid_elements.clear();
self.fully_invalid = true;
}
/// Analyze the given stylesheet, and collect invalidations from their
/// rules, in order to avoid doing a full restyle when we style the document
/// next time.
pub fn collect_invalidations_for<S>(
&mut self,
device: &Device,
stylesheet: &S,
guard: &SharedRwLockReadGuard
)
where
S: StylesheetInDocument,
{
debug!("StylesheetInvalidationSet::collect_invalidations_for");
if self.fully_invalid {
debug!(" > Fully invalid already");
return;
}
if !stylesheet.enabled() ||
!stylesheet.is_effective_for_device(device, guard) {
debug!(" > Stylesheet was not effective");
return; // Nothing to do here.
}
for rule in stylesheet.effective_rules(device, guard) {
self.collect_invalidations_for_rule(rule, guard);
if self.fully_invalid {
self.invalid_scopes.clear();
self.invalid_elements.clear();
break;
}
}
debug!(" > resulting subtree invalidations: {:?}", self.invalid_scopes);
debug!(" > resulting self invalidations: {:?}", self.invalid_elements);
debug!(" > fully_invalid: {}", self.fully_invalid);
}
/// Clears the invalidation set, invalidating elements as needed if
/// `document_element` is provided.
///
    /// Returns true if any invalidations occurred.
pub fn flush<E>(&mut self, document_element: Option<E>) -> bool
where E: TElement,
{
let have_invalidations = match document_element {
Some(e) => self.process_invalidations(e),
None => false,
};
self.clear();
have_invalidations
}
/// Clears the invalidation set without processing.
pub fn clear(&mut self) {
self.invalid_scopes.clear();
self.invalid_elements.clear();
self.fully_invalid = false;
}
fn process_invalidations<E>(&self, element: E) -> bool
where E: TElement,
{
{
let mut data = match element.mutate_data() {
Some(data) => data,
None => return false,
};
if self.fully_invalid {
debug!("process_invalidations: fully_invalid({:?})",
element);
data.hint.insert(RestyleHint::restyle_subtree());
return true;
}
}
if self.invalid_scopes.is_empty() && self.invalid_elements.is_empty() {
debug!("process_invalidations: empty invalidation set");
return false;
}
self.process_invalidations_in_subtree(element)
}
/// Process style invalidations in a given subtree. This traverses the
/// subtree looking for elements that match the invalidations in
/// invalid_scopes and invalid_elements.
///
/// Returns whether it invalidated at least one element's style.
#[allow(unsafe_code)]
fn process_invalidations_in_subtree<E>(&self, element: E) -> bool
where E: TElement,
{
let mut data = match element.mutate_data() {
Some(data) => data,
None => return false,
};
if !data.has_styles() {
return false;
}
if data.hint.contains_subtree() {
debug!("process_invalidations_in_subtree: {:?} was already invalid",
element);
return false;
}
for invalidation in &self.invalid_scopes {
if invalidation.matches(element) {
debug!("process_invalidations_in_subtree: {:?} matched subtree {:?}",
element, invalidation);
data.hint.insert(RestyleHint::restyle_subtree());
return true;
}
}
let mut self_invalid = false;
if !data.hint.contains(RESTYLE_SELF) {
for invalidation in &self.invalid_elements {
if invalidation.matches(element) {
debug!("process_invalidations_in_subtree: {:?} matched self {:?}",
element, invalidation);
data.hint.insert(RESTYLE_SELF);
self_invalid = true;
break;
}
}
}
let mut any_children_invalid = false;
for child in element.traversal_children() {
let child = match child.as_element() {
Some(e) => e,
None => continue,
};
any_children_invalid |= self.process_invalidations_in_subtree(child);
}
if any_children_invalid {
debug!("Children of {:?} changed, setting dirty descendants",
element);
unsafe { element.set_dirty_descendants() }
}
return self_invalid || any_children_invalid
}
fn scan_component(
component: &Component<SelectorImpl>,
invalidation: &mut Option<Invalidation>)
{
match *component {
Component::LocalName(LocalName { ref name, ref lower_name }) => {
if invalidation.as_ref().map_or(true, |s| !s.is_id_or_class()) {
*invalidation = Some(Invalidation::LocalName {
name: name.clone(),
lower_name: lower_name.clone(),
});
}
}
Component::Class(ref class) => {
if invalidation.as_ref().map_or(true, |s| !s.is_id()) {
*invalidation = Some(Invalidation::Class(class.clone()));
}
}
Component::ID(ref id) => {
if invalidation.is_none() {
*invalidation = Some(Invalidation::ID(id.clone()));
}
}
_ => {
// Ignore everything else, at least for now.
}
}
}
/// Collect invalidations for a given selector.
///
/// We look at the outermost local name, class, or ID selector to the left
/// of an ancestor combinator, in order to restyle only a given subtree.
///
/// If the selector has no ancestor combinator, then we do the same for
/// the only sequence it has, but record it as an element invalidation
/// instead of a subtree invalidation.
///
    /// We prefer IDs to classes, and classes to local names, on the basis
/// that the former should be more specific than the latter. We also
/// prefer to generate subtree invalidations for the outermost part
/// of the selector, to reduce the amount of traversal we need to do
/// when flushing invalidations.
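    ///
    /// For example, `nav li` records a subtree invalidation for the local
    /// name `nav`, so only `<nav>` subtrees get restyled, while the
    /// combinator-free `div.warning` records an element invalidation for the
    /// class `warning` (the class wins over the plain local name).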
fn collect_invalidations(&mut self, selector: &Selector<SelectorImpl>) {
debug!("StylesheetInvalidationSet::collect_invalidations({:?})", selector);
let mut element_invalidation: Option<Invalidation> = None;
let mut subtree_invalidation: Option<Invalidation> = None;
let mut scan_for_element_invalidation = true;
let mut scan_for_subtree_invalidation = false;
let mut iter = selector.iter();
loop {
for component in &mut iter {
if scan_for_element_invalidation {
Self::scan_component(component, &mut element_invalidation);
} else if scan_for_subtree_invalidation {
Self::scan_component(component, &mut subtree_invalidation);
}
}
match iter.next_sequence() {
None => break,
Some(combinator) => {
scan_for_subtree_invalidation = combinator.is_ancestor();
}
}
scan_for_element_invalidation = false;
}
if let Some(s) = subtree_invalidation {
debug!(" > Found subtree invalidation: {:?}", s);
self.invalid_scopes.insert(s);
} else if let Some(s) = element_invalidation {
debug!(" > Found element invalidation: {:?}", s);
self.invalid_elements.insert(s);
} else {
// The selector was of a form that we can't handle. Any element
// could match it, so let's just bail out.
debug!(" > Can't handle selector, marking fully invalid");
self.fully_invalid = true;
}
}
/// Collects invalidations for a given CSS rule.
fn collect_invalidations_for_rule(
&mut self,
rule: &CssRule,
guard: &SharedRwLockReadGuard)
{
use stylesheets::CssRule::*;
debug!("StylesheetInvalidationSet::collect_invalidations_for_rule");
debug_assert!(!self.fully_invalid, "Not worth to be here!");
match *rule {
Style(ref lock) => {
let style_rule = lock.read_with(guard);
for selector in &style_rule.selectors.0 {
self.collect_invalidations(selector);
if self.fully_invalid {
return;
}
}
}
Document(..) |
Namespace(..) |
Import(..) |
Media(..) |
Supports(..) => {
// Do nothing, relevant nested rules are visited as part of the
// iteration.
}
FontFace(..) |
CounterStyle(..) |
Keyframes(..) |
Page(..) |
Viewport(..) |
FontFeatureValues(..) => {
debug!(" > Found unsupported rule, marking the whole subtree \
invalid.");
// TODO(emilio): Can we do better here?
//
// At least in `@page`, we could check the relevant media, I
// guess.
self.fully_invalid = true;
}
}
}
}<๏ฝfimโend๏ฝ> |
/// A style sheet invalidation represents a kind of element or subtree that may |
<|file_name|>authorise.js<|end_file_name|><๏ฝfimโbegin๏ฝ>/**
* Copyright 2013-present NightWorld.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var error = require('./error'),
runner = require('./runner'),
Client = require('./client');
module.exports = Authorise;
/**
* This is the function order used by the runner
*
* @type {Array}
*/
var fns = [
checkAuthoriseType,
checkScope
];
/**
* Authorise
*
* @param {Object} config Instance of OAuth object
* @param {Object} req
* @param {Object} res
* @param {Object} options
* @param {Function} next
*/
function Authorise (config, req, res, options, next) {
options = options || {};
this.config = config;
this.model = config.model;
this.req = req;
this.res = res;
this.options = options;
runner(fns, this, next);
}
function checkAuthoriseType(done) {
var client = Client.credsFromBasic(this.req) || Client.credsFromBody(this.req);
if (this.options.implicit) {
if (this.req.body.response_type === 'token') {
if (client.clientId) {
this.redirectUri = this.req.body.redirect_uri || this.req.query.redirect_uri;
this.clientId = client.clientId;
this.req.auth_type = 'implicit';
return checkImplicitClient.call(this, done);
}
}
}
if (this.options.client_credentials) {
if (client.clientId && client.clientSecret) {
this.client = client;
this.req.auth_type = 'client_credentials';
return getUserFromClient.call(this, done);
}
}
getBearerToken.call(this, done);
}
function getUserFromClient(done) {
var self = this;
this.model.getClient(this.client.clientId, this.client.clientSecret,
function (err, client) {
if (err) return done(error('server_error', false, err));
if (!client) {
return done(error('invalid_client', 'Client credentials are invalid'));
}
self.model.getUserFromClient(client, function (err, user) {
if (err) return done(error('server_error', false, err));
if (!user) {
return done(error('invalid_grant', 'Client credentials are invalid'));
}
self.req.oauth = { bearerToken: user };
self.req.user = { id: user.id };
done();
});
});
}
function checkImplicitClient (done) {
var self = this;
this.model.getClient(this.clientId, null, function (err, client) {
if (err) return done(error('server_error', false, err));
if (!client) {
return done(error('invalid_client', 'Invalid client credentials'));
} else if (self.redirectUri && Array.isArray(client.redirectUri)) {
if (client.redirectUri.indexOf(self.redirectUri) === -1) {
return done(error('invalid_request', 'redirect_uri does not match'));
}
client.redirectUri = self.redirectUri;
} else if (self.redirectUri && client.redirectUri !== self.redirectUri) {
return done(error('invalid_request', 'redirect_uri does not match'));
}
self.model.getUserFromClient(client, function (err, user) {
if (err) return done(error('server_error', false, err));
if (!user) {
return done(error('invalid_grant', 'Client credentials are invalid'));
}
// The request contains valid params so any errors after this point
// are redirected to the redirect_uri
self.res.redirectUri = client.redirectUri;
self.res.oauthRedirect = true;
self.req.oauth = { bearerToken: user };
self.req.user = { id: user.id };
done();
});
});
}
/**
* Get bearer token
*
* Extract token from request according to RFC6750
*
* @param {Function} done
* @this OAuth
*/
function getBearerToken (done) {<๏ฝfimโhole๏ฝ>
// Check exactly one method was used
var methodsUsed = (headerToken !== undefined) + (getToken !== undefined) +
(postToken !== undefined);
if (methodsUsed > 1) {
return done(error('invalid_request',
'Only one method may be used to authenticate at a time (Auth header, ' +
'GET or POST).'));
} else if (methodsUsed === 0) {
return done(error('invalid_request', 'The access token was not found'));
}
// Header: http://tools.ietf.org/html/rfc6750#section-2.1
if (headerToken) {
var matches = headerToken.match(/Bearer\s(\S+)/);
if (!matches) {
return done(error('invalid_request', 'Malformed auth header'));
}
headerToken = matches[1];
}
// POST: http://tools.ietf.org/html/rfc6750#section-2.2
if (postToken) {
if (this.req.method === 'GET') {
return done(error('invalid_request',
'Method cannot be GET When putting the token in the body.'));
}
if (!this.req.is('application/x-www-form-urlencoded')) {
return done(error('invalid_request', 'When putting the token in the ' +
'body, content type must be application/x-www-form-urlencoded.'));
}
}
this.bearerToken = headerToken || postToken || getToken;
checkToken.call(this, done);
}
/**
* Check token
*
* Check it against model, ensure it's not expired
* @param {Function} done
* @this OAuth
*/
function checkToken (done) {
var self = this;
this.model.getAccessToken(this.bearerToken, function (err, token) {
if (err) return done(error('server_error', false, err));
if (!token) {
return done(error('invalid_token',
'The access token provided is invalid.'));
}
if (token.expires !== null &&
(!token.expires || token.expires < new Date())) {
return done(error('invalid_token',
'The access token provided has expired.'));
}
// Expose params
self.req.oauth = { bearerToken: token };
self.req.user = token.user ? token.user : { id: token.userId };
done();
});
}
/**
* Check scope
*
* @param {Function} done
* @this OAuth
*/
function checkScope (done) {
if (!this.model.authoriseScope) return done();
this.model.authoriseScope(this.req.oauth.bearerToken, this.options.scope,
function (err, invalid) {
if (err) return done(error('server_error', false, err));
if (invalid) return done(error('invalid_scope', invalid));
done();
});
}<๏ฝfimโend๏ฝ> | var headerToken = this.req.get('Authorization'),
getToken = this.req.query.access_token,
postToken = this.req.body ? this.req.body.access_token : undefined; |
<|file_name|>FinalizePhase.java<|end_file_name|><๏ฝfimโbegin๏ฝ>package org.jboss.windup.config.phase;
import org.jboss.windup.config.AbstractRuleProvider;
import org.ocpsoft.rewrite.config.Rule;
/**
* Previous: {@link PostReportRenderingPhase}<br/>
* Next: {@link PostFinalizePhase}<๏ฝfimโhole๏ฝ> * may have been opened during {@link Rule}s from earlier {@link AbstractRuleProvider}s.
* </p>
*
* @author <a href="mailto:[email protected]">Jesse Sightler</a>
*
*/
public class FinalizePhase extends RulePhase
{
public FinalizePhase()
{
super(FinalizePhase.class);
}
@Override
public Class<? extends RulePhase> getExecuteAfter()
{
return PostReportRenderingPhase.class;
}
@Override
public Class<? extends RulePhase> getExecuteBefore()
{
return null;
}
}<๏ฝfimโend๏ฝ> | *
* <p>
* This occurs at the end of execution. {@link Rule}s in this phase are responsible for any cleanup of resources that |
<|file_name|>ConfusionMatrixViewer.java<|end_file_name|><๏ฝfimโbegin๏ฝ>/**
* Copyright (C) 2001-2019 by RapidMiner and the contributors
*
* Complete list of developers available at our web site:
*
* http://rapidminer.com
*
* This program is free software: you can redistribute it and/or modify it under the terms of the
* GNU Affero General Public License as published by the Free Software Foundation, either version 3
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License along with this program.
* If not, see http://www.gnu.org/licenses/.
*/
package com.rapidminer.gui.viewer;
import java.awt.BorderLayout;
import java.awt.CardLayout;
import java.awt.Component;
import java.awt.FlowLayout;
import java.awt.Font;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.BorderFactory;
import javax.swing.ButtonGroup;
import javax.swing.JComponent;
import javax.swing.JPanel;
import javax.swing.JRadioButton;
import javax.swing.JScrollPane;
import javax.swing.JTextPane;
import com.rapidminer.datatable.SimpleDataTable;
import com.rapidminer.datatable.SimpleDataTableRow;
import com.rapidminer.gui.actions.export.PrintableComponent;
import com.rapidminer.gui.look.Colors;
import com.rapidminer.gui.plotter.PlotterConfigurationModel;
import com.rapidminer.gui.tools.ExtendedJScrollPane;
import com.rapidminer.report.Tableable;
import com.rapidminer.tools.I18N;
/**
 * This viewer class can be used to display performance criteria based on a multi-class confusion
 * matrix. The viewer consists of two parts: first, a part containing the general performance info
 * string, and second, a table with the complete confusion matrix.
*
* @author Ingo Mierswa
*/
public class ConfusionMatrixViewer extends JPanel implements Tableable, PrintableComponent {
private static final long serialVersionUID = 3448880915145528006L;
private ConfusionMatrixViewerTable table;
private JComponent plotter;
private String performanceName;
public ConfusionMatrixViewer(String performanceName, String performanceString, String[] classNames, double[][] counter) {
this.performanceName = performanceName;
setLayout(new BorderLayout());
final JPanel mainPanel = new JPanel();
mainPanel.setOpaque(true);
mainPanel.setBackground(Colors.WHITE);
final CardLayout cardLayout = new CardLayout();
mainPanel.setLayout(cardLayout);
add(mainPanel, BorderLayout.CENTER);
// *** table panel ***
JPanel tablePanel = new JPanel(new BorderLayout());
tablePanel.setOpaque(true);
tablePanel.setBackground(Colors.WHITE);
tablePanel.setBorder(BorderFactory.createEmptyBorder(5, 10, 5, 10));
// info string
JPanel infoPanel = new JPanel(new FlowLayout(FlowLayout.LEFT));
infoPanel.setOpaque(true);
infoPanel.setBackground(Colors.WHITE);
JTextPane infoText = new JTextPane();
infoText.setEditable(false);
infoText.setBackground(infoPanel.getBackground());
infoText.setFont(infoText.getFont().deriveFont(Font.BOLD));
infoText.setText(performanceString);
infoPanel.add(infoText);
tablePanel.add(infoPanel, BorderLayout.NORTH);
// table
table = new ConfusionMatrixViewerTable(classNames, counter);
table.setBorder(BorderFactory.createLineBorder(Colors.TABLE_CELL_BORDER));
JScrollPane scrollPane = new ExtendedJScrollPane(table);
scrollPane.setBorder(null);
scrollPane.setBackground(Colors.WHITE);
scrollPane.getViewport().setBackground(Colors.WHITE);
tablePanel.add(scrollPane, BorderLayout.CENTER);
table.setTableHeader(null);
// *** plot panel ***
SimpleDataTable dataTable = new SimpleDataTable("Confusion Matrix", new String[] { "True Class", "Predicted Class",
"Confusion Matrix (x: true class, y: pred. class, z: counters)" });
for (int row = 0; row < classNames.length; row++) {
for (int column = 0; column < classNames.length; column++) {
dataTable.add(new SimpleDataTableRow(new double[] { row, column, counter[row][column] }));
}
}
PlotterConfigurationModel settings = new PlotterConfigurationModel(PlotterConfigurationModel.STICK_CHART_3D,
dataTable);
settings.setAxis(0, 0);
settings.setAxis(1, 1);
settings.enablePlotColumn(2);
mainPanel.add(tablePanel, "table");
plotter = settings.getPlotter().getPlotter();
mainPanel.add(plotter, "plot");
// toggle radio button for views
final JRadioButton metaDataButton = new JRadioButton("Table View", true);
metaDataButton.setToolTipText("Changes to a table showing the confusion matrix.");
metaDataButton.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
if (metaDataButton.isSelected()) {
cardLayout.show(mainPanel, "table");
}
}
});
final JRadioButton plotButton = new JRadioButton("Plot View", false);
plotButton.setToolTipText("Changes to a plot view of the confusion matrix.");
plotButton.addActionListener(new ActionListener() {
<๏ฝfimโhole๏ฝ> }
}
});
ButtonGroup group = new ButtonGroup();
group.add(metaDataButton);
group.add(plotButton);
JPanel togglePanel = new JPanel(new FlowLayout(FlowLayout.LEFT));
togglePanel.setOpaque(true);
togglePanel.setBackground(Colors.WHITE);
togglePanel.setBorder(BorderFactory.createEmptyBorder(0, 0, 20, 0));
togglePanel.add(metaDataButton);
togglePanel.add(plotButton);
add(togglePanel, BorderLayout.NORTH);
}
@Override
public void prepareReporting() {
table.prepareReporting();
}
@Override
public void finishReporting() {
table.finishReporting();
}
@Override
public boolean isFirstLineHeader() {
return true;
}
@Override
public boolean isFirstColumnHeader() {
return true;
}
@Override
public String getColumnName(int columnIndex) {
return table.getColumnName(columnIndex);
}
@Override
public String getCell(int row, int column) {
return table.getCell(row, column);
}
@Override
public int getColumnNumber() {
return table.getColumnNumber();
}
@Override
public int getRowNumber() {
return table.getRowNumber();
}
@Override
public Component getExportComponent() {
return plotter;
}
@Override
public String getExportName() {
return I18N.getGUIMessage("gui.cards.result_view.confusion_matrix.title");
}
@Override
public String getIdentifier() {
return performanceName;
}
@Override
public String getExportIconName() {
return I18N.getGUIMessage("gui.cards.result_view.confusion_matrix.icon");
}
}<๏ฝfimโend๏ฝ> | @Override
public void actionPerformed(ActionEvent e) {
if (plotButton.isSelected()) {
cardLayout.show(mainPanel, "plot");
|
<|file_name|>buttons.py<|end_file_name|><๏ฝfimโbegin๏ฝ>import logging
from datetime import datetime
from .actions import actions
from .system_menu.system_menu import system_menu
from .library.buttons import library_buttons
from .book.buttons import book_buttons
from .go_to_page.buttons import go_to_page_buttons
from .bookmarks.buttons import bookmarks_buttons
from .language.buttons import language_buttons
log = logging.getLogger(__name__)
bindings = {
'library': library_buttons,
'book': book_buttons,
'go_to_page': go_to_page_buttons,
'bookmarks_menu': bookmarks_buttons,
'language': language_buttons,
'help_menu': {
'single': {
'L': actions.close_menu(),
'>': actions.next_page(),
'<': actions.previous_page(),
'R': actions.toggle_help_menu(),
},
'long': {
'L': actions.close_menu(),
'>': actions.next_page(),
'<': actions.previous_page(),
'R': actions.toggle_help_menu(),
'X': actions.reset_display('start'),
},
},
'system_menu': {
'single': {
'R': actions.toggle_help_menu(),
'>': actions.next_page(),
'<': actions.previous_page(),
'L': actions.close_menu(),
},
'long': {
'R': actions.toggle_help_menu(),
'>': actions.next_page(),
'<': actions.previous_page(),
'L': actions.close_menu(),
'X': actions.reset_display('start'),
},
}
}
sys_menu = system_menu()
for i, item in enumerate(sys_menu):
action = sys_menu[item]
bindings['system_menu']['single'][str(i + 2)] = action<๏ฝfimโhole๏ฝ>
async def dispatch_button(key, press_type, state, dispatch):
if state['help_menu']['visible']:
location = 'help_menu'
else:
location = state['location']
try:
action = bindings[location][press_type][key]
except KeyError:
log.debug('no binding for key {}, {} press'.format(key, press_type))
else:
await dispatch(action)
prev_buttons = {}
long_buttons = {}
async def check(driver, state, dispatch):
    # This is a hack for now, until we change the protocol: we read the
    # buttons twice so we don't miss the release of short presses.
for _ in range(2):
buttons = driver.get_buttons()
for key in buttons:
up_or_down = buttons[key]
if up_or_down == 'down':
prev_buttons[key] = datetime.now()
elif up_or_down == 'up':
if key in long_buttons:
del long_buttons[key]
del prev_buttons[key]
else:
if key in prev_buttons:
del prev_buttons[key]
await dispatch_button(key, 'single', state, dispatch)
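    # Held keys: once a key has been down for more than 0.5s, dispatch a
    # 'long' press and reset its timestamp, so a held key repeats the long
    # action roughly every half second until it is released.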
for key in prev_buttons:
diff = (datetime.now() - prev_buttons[key]).total_seconds()
if diff > 0.5:
prev_buttons[key] = datetime.now()
long_buttons[key] = True
await dispatch_button(key, 'long', state, dispatch)<๏ฝfimโend๏ฝ> | |
<|file_name|>env.ts<|end_file_name|><๏ฝfimโbegin๏ฝ>import * as moment from 'moment';<๏ฝfimโhole๏ฝ>
export const dateFormat = 'DD/MM/YYYY';
export const datePickerFormat = 'dd/mm/yy';
export const minDate = moment('1900-01-01').toDate();<๏ฝfimโend๏ฝ> | |
<|file_name|>test_unit.py<|end_file_name|><๏ฝfimโbegin๏ฝ># This doesn't work- not updated with eventmaster.py updates
# TODO: Fix This :)
# Import Libraries
import eventmaster
import time
import random
import sys
import unittest
class InputsTestCase(unittest.TestCase):
def setUp(self):
self.s3 = E2S3.E2S3Switcher()
self.s3.set_verbose(0)
self.s3.set_CommsXML_IP("127.0.0.1")
self.s3.set_CommsXML_Port(9876)
if not self.s3.connect(): return -1
while self.s3.is_ready() != 1: time.sleep(1)
def test_set_valid_name_on_invalid_input(self):
test_str = "PYTEST-{0!s}".format(random.randint(1,10))
self.assertRaises(ValueError, lambda: self.s3.get_input(99).set_Name(test_str))
def test_set_valid_name_on_valid_input(self):
test_str = "PYTEST-{0!s}".format(random.randint(1,10))
while(self.s3.has_been_processed(self.s3.get_input(0).set_Name(test_str))==0): time.sleep(1)
time.sleep(1)
self.assertEqual(test_str, self.s3.get_input(0).get_Name())
def test_set_invalid_name_on_valid_input(self):<๏ฝfimโhole๏ฝ>
print unittest.main()
sys.exit()<๏ฝfimโend๏ฝ> | MyObject = type('MyObject', (object,), {})
self.assertEqual(self.s3.get_input(0).set_Name(MyObject), None) |
<|file_name|>test_obs_block.py<|end_file_name|><๏ฝfimโbegin๏ฝ>import datetime
from ecl.util.util import BoolVector
from ecl.util.test import TestAreaContext
from tests import ResTest
from res.enkf import ObsBlock
class ObsBlockTest(ResTest):
def test_create(self):
block = ObsBlock("OBS" , 1000)
self.assertTrue( isinstance( block , ObsBlock ))
self.assertEqual( 1000 , block.totalSize())
self.assertEqual( 0 , block.activeSize())
def test_access(self):
obs_size = 10
block = ObsBlock("OBS" , obs_size)
with self.assertRaises(IndexError):
block[100] = (1,1)
with self.assertRaises(IndexError):
block[-100] = (1,1)
with self.assertRaises(TypeError):
block[4] = 10
with self.assertRaises(TypeError):
block[4] = (1,1,9)
#------
<๏ฝfimโhole๏ฝ> with self.assertRaises(IndexError):
v = block[100]
with self.assertRaises(IndexError):
v = block[-100]
block[0] = (10,1)
v = block[0]
self.assertEqual( v , (10,1))
self.assertEqual( 1 , block.activeSize())
block[-1] = (17,19)
self.assertEqual( block[-1], (17,19))<๏ฝfimโend๏ฝ> | |
<|file_name|>checkReport.sub.js<|end_file_name|><๏ฝfimโbegin๏ฝ>(function () {
// Get values from the substitution engine.
// We can't just pull these from the document context
// because this script is intended to be transcluded into
// another document, and we want the GET values used to request it,
// not the values for the including document
// XXX these are unencoded, so there's an unavoidable
// injection vulnerability in constructing this file...
// need to upgrade the template engine.
var reportField = "{{GET[reportField]}}";
var reportValue = "{{GET[reportValue]}}";
var reportExists = "{{GET[reportExists]}}";
var noCookies = "{{GET[noCookies]}}";
  var reportCookieName = "{{GET[reportCookieName]}}";
  var testName = "{{GET[testName]}}";
  var cookiePresent = "{{GET[cookiePresent]}}";
  var reportCount = "{{GET[reportCount]}}";
var location = window.location;
if (reportCookieName == "") {
// fallback on test file name if cookie name not specified
reportCookieName = location.pathname.split('/')[location.pathname.split('/').length - 1].split('.')[0];
}
var reportID = "{{GET[reportID]}}";
if (reportID == "") {
var cookies = document.cookie.split(';');
for (var i = 0; i < cookies.length; i++) {
var cookieName = cookies[i].split('=')[0].trim();
var cookieValue = cookies[i].split('=')[1].trim();
if (cookieName == reportCookieName) {
reportID = cookieValue;
var cookieToDelete = cookieName + "=; expires=Thu, 01 Jan 1970 00:00:00 GMT; path=" + document.location.pathname.substring(0, document.location.pathname.lastIndexOf('/') + 1);
document.cookie = cookieToDelete;
break;
}
}
}
// There is no real way to test (in this particular layer) that a CSP report
// has *not* been sent, at least not without some major reworks and
// involvement from all the platform participants. So the current "solution"
// is to wait for some reasonable amount of time and if no report has been
// received to conclude that no report has been generated. These timeouts must
// not exceed the test timeouts set by vendors otherwise the test would fail.
var timeout = document.querySelector("meta[name=timeout][content=long]") ? 20 : 3;
var reportLocation = location.protocol + "//" + location.host + "/content-security-policy/support/report.py?op=retrieve_report&timeout=" + timeout + "&reportID=" + reportID;
if (testName == "") testName = "Violation report status OK.";
var reportTest = async_test(testName);
function assert_field_value(field, value, field_name) {
assert_true(field.indexOf(value.split(" ")[0]) != -1,
field_name + " value of \"" + field + "\" did not match " +
value.split(" ")[0] + ".");
}
reportTest.step(function () {
var report = new XMLHttpRequest();
report.onload = reportTest.step_func(function () {
var data = JSON.parse(report.responseText);
if (data.error) {
assert_equals("false", reportExists, data.error);
} else {
if(reportExists != "" && reportExists == "false" && data["csp-report"]) {
assert_unreached("CSP report sent, but not expecting one: " + JSON.stringify(data["csp-report"]));
}
// Firefox expands 'self' or origins in a policy to the actual origin value
// so "www.example.com" becomes "http://www.example.com:80".
        // Accommodate this by just testing that the correct directive name
// is reported, not the details...
if(data["csp-report"] != undefined && data["csp-report"][reportField] != undefined) {
assert_field_value(data["csp-report"][reportField], reportValue, reportField);
} else if (data[0] != undefined && data[0]["body"] != undefined && data[0]["body"][reportField] != undefined) {
assert_field_value(data[0]["body"][reportField], reportValue, reportField);
} else {
assert_equals("", reportField, "Expected report field could not be found in report");
}
}
reportTest.done();
});
report.open("GET", reportLocation, true);
report.send();
});
if (noCookies || cookiePresent) {
var cookieTest = async_test("Test report cookies.");
var cookieReport = new XMLHttpRequest();
cookieReport.onload = cookieTest.step_func(function () {
var data = JSON.parse(cookieReport.responseText);
if (noCookies) {
assert_equals(data.reportCookies, "None", "Report should not contain any cookies");
}
if (cookiePresent) {
assert_true(data.reportCookies.hasOwnProperty(cookiePresent), "Report should contain cookie: " + cookiePresent);
}
cookieTest.done();
});
var cReportLocation = location.protocol + "//" + location.host + "/content-security-policy/support/report.py?op=retrieve_cookies&timeout=" + timeout + "&reportID=" + reportID;
cookieReport.open("GET", cReportLocation, true);
cookieReport.send();
}
<๏ฝfimโhole๏ฝ> reportCountReport.onload = reportCountTest.step_func(function () {
var data = JSON.parse(reportCountReport.responseText);
assert_equals(data.report_count, reportCount, "Report count was not what was expected.");
reportCountTest.done();
});
var cReportLocation = location.protocol + "//" + location.host + "/content-security-policy/support/report.py?op=retrieve_count&timeout=" + timeout + "&reportID=" + reportID;
reportCountReport.open("GET", cReportLocation, true);
reportCountReport.send();
}
})();<๏ฝfimโend๏ฝ> | if (reportCount != "") {
var reportCountTest = async_test("Test number of sent reports.");
var reportCountReport = new XMLHttpRequest(); |
<|file_name|>FileStorage.java<|end_file_name|><๏ฝfimโbegin๏ฝ>package io.github.notsyncing.cowherd.files;
import io.github.notsyncing.cowherd.Cowherd;
import io.github.notsyncing.cowherd.commons.CowherdConfiguration;
import io.github.notsyncing.cowherd.commons.RouteType;
import io.github.notsyncing.cowherd.models.ActionMethodInfo;
import io.github.notsyncing.cowherd.models.RouteInfo;
import io.github.notsyncing.cowherd.models.UploadFileInfo;
import io.github.notsyncing.cowherd.routing.RouteManager;
import io.github.notsyncing.cowherd.server.CowherdLogger;
import io.github.notsyncing.cowherd.utils.StringUtils;
import io.vertx.core.Vertx;
import io.vertx.core.file.FileSystem;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.LocalDate;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Predicate;
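
// Usage sketch (the StorageTag enum, the path, and the logger shown here are
// hypothetical, for illustration only):
//
//   FileStorage storage = new FileStorage(vertx);
//   storage.registerStoragePath(StorageTag.AVATAR, "upload/avatars");
//   storage.storeFileWithRandomName(uploadInfo, StorageTag.AVATAR)
//          .thenAccept(relPath -> log.info("Stored as " + relPath));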
/**
 * A file storage object.
 * Makes it convenient to store files grouped into categories.
 */
public class FileStorage
{
private Map<Enum, Path> storagePaths = new ConcurrentHashMap<>();
private FileSystem fs;
private CowherdLogger log = CowherdLogger.getInstance(this);
public FileStorage(Vertx vertx)
{
init(vertx);
}
public FileStorage() throws IllegalAccessException, InvocationTargetException, InstantiationException {
this(Cowherd.dependencyInjector.getComponent(Vertx.class));
}
protected void init(Vertx vertx) {
try {
fs = vertx.fileSystem();
} catch (Exception e) {
log.e("Failed to create file storage", e);
}
}
    /**
     * Registers a file storage directory.
     * @param tag enum value identifying this storage category
     * @param path the directory to register
     * @throws IOException
     */
public void registerStoragePath(Enum tag, String path) throws IOException
{
registerStoragePath(tag, Paths.get(path));
}
    /**
     * Registers a file storage directory.
     * @param tag enum value identifying this storage category
     * @param path the directory to register
     * @throws IOException
     */
public void registerStoragePath(Enum tag, Path path) throws IOException
{
if (storagePaths.containsKey(tag)) {
log.w("Tag " + tag + " already registered to path " + storagePaths.get(tag) +
", will be overwritten to " + path);
}
storagePaths.put(tag, path);
if (!Files.exists(path)) {
Path p = Files.createDirectories(path);
log.i("Created storage path " + p + " for tag " + tag);
} else {
log.i("Registered storage path " + path + " to tag " + tag);
}
}
    /**
     * Gets the directory associated with a storage category tag.
     * @param tag storage category enum value
     * @return the directory registered for that category tag
     */
public Path getStoragePath(Enum tag)
{
return storagePaths.get(tag);
}
    /**
     * Asynchronously stores a file into the given storage category.
     * @param file the file to store
     * @param tag storage category enum value
     * @param newFileName new file name; if null, the original name is kept
     * @param noRemoveOld if true, the source file is not deleted
     * @return a CompletableFuture signalling completion of the store,
     *         holding the file's path relative to that category's storage directory
     */
public CompletableFuture<Path> storeFile(Path file, Enum tag, String newFileName, boolean noRemoveOld) {
CompletableFuture<Path> f = new CompletableFuture<>();
String fileName = newFileName == null ? file.getFileName().toString() : newFileName;
Path store = storagePaths.get(tag);
Path to;
if (store == null) {
f.completeExceptionally(new Exception("Storage tag " + tag + " not registered!"));
return f;
}
if (CowherdConfiguration.isStoreFilesByDate()) {
LocalDate date = LocalDate.now();
to = store.resolve(String.valueOf(date.getYear())).resolve(String.valueOf(date.getMonthValue()))
.resolve(String.valueOf(date.getDayOfMonth()));
try {
Files.createDirectories(to);
} catch (Exception e) {
f.completeExceptionally(e);
return f;
}
<๏ฝfimโhole๏ฝ> }
final Path finalTo = to;
fs.copy(file.toString(), to.toString(), r -> {
if (r.succeeded()) {
if (noRemoveOld) {
f.complete(store.relativize(finalTo));
} else {
fs.delete(file.toString(), r2 -> {
if (r2.succeeded()) {
                            f.complete(store.relativize(finalTo));
} else {
f.completeExceptionally(r2.cause());
}
});
}
} else {
f.completeExceptionally(r.cause());
}
});
return f;
}
    /**
     * Asynchronously stores a file into the given storage category.
     * @param file the file to store
     * @param tag storage category enum value
     * @param newFileName new file name; if null, the original name is kept
     * @param noRemoveOld if true, the source file is not deleted
     * @return a CompletableFuture signalling completion of the store,
     *         holding the file's path relative to that category's storage directory
     */
public CompletableFuture<Path> storeFile(File file, Enum tag, String newFileName, boolean noRemoveOld)
{
return storeFile(file.toPath(), tag, newFileName, noRemoveOld);
}
    /**
     * Asynchronously stores a file into the given storage category.
     * @param file the file to store
     * @param tag storage category enum value
     * @param newFileName new file name; if null, the original name is kept
     * @param noRemoveOld if true, the source file is not deleted
     * @return a CompletableFuture signalling completion of the store,
     *         holding the file's path relative to that category's storage directory
     */
public CompletableFuture<Path> storeFile(String file, Enum tag, String newFileName, boolean noRemoveOld)
{
return storeFile(Paths.get(file), tag, newFileName, noRemoveOld);
}
    /**
     * Asynchronously stores an uploaded file into the given storage category.
     * @param file info object of the uploaded file to store
     * @param tag storage category enum value
     * @param newFileName new file name; if null, the original name is kept
     * @param noRemoveOld if true, the source file is not deleted
     * @return a CompletableFuture signalling completion of the store,
     *         holding the file's path relative to that category's storage directory
     */
public CompletableFuture<Path> storeFile(UploadFileInfo file, Enum tag, String newFileName, boolean noRemoveOld)
{
if (file == null) {
return CompletableFuture.completedFuture(null);
}
return storeFile(file.getFile(), tag, newFileName, noRemoveOld);
}
    /**
     * Asynchronously stores an uploaded file under its original name into the
     * given storage category, deleting the source file.
     * @param file info object of the uploaded file to store
     * @param tag storage category enum value
     * @return a CompletableFuture signalling completion of the store,
     *         holding the file's path relative to that category's storage directory
     */
public CompletableFuture<Path> storeFile(UploadFileInfo file, Enum tag)
{
if (file == null) {
return CompletableFuture.completedFuture(null);
}
if ((StringUtils.isEmpty(file.getFilename())) && ((file.getFile() == null) || (file.getFile().length() <= 0))) {
return CompletableFuture.completedFuture(null);
}
return storeFile(file.getFile(), tag, file.getFilename(), false);
}
    /**
     * Asynchronously stores an uploaded file under a random name (keeping its
     * extension) into the given storage category, deleting the source file.
     * @param file info object of the uploaded file to store
     * @param tag storage category enum value
     * @return a CompletableFuture signalling completion of the store,
     *         holding the file's path relative to that category's storage directory
     */
public CompletableFuture<Path> storeFileWithRandomName(UploadFileInfo file, Enum tag)
{
if (file == null) {
return CompletableFuture.completedFuture(null);
}
if ((StringUtils.isEmpty(file.getFilename())) && ((file.getFile() == null) || (file.getFile().length() <= 0))) {
return CompletableFuture.completedFuture(null);
}
String fn = file.getFilename();
int e = fn.lastIndexOf('.');
String ext = e > 0 ? fn.substring(e) : "";
String filename = UUID.randomUUID().toString() + ext;
return storeFile(file.getFile(), tag, filename, false);
}
    /**
     * Gets the full path of a file within a given storage category.
     * @param tag storage category enum value
     * @param file the file whose full path is wanted
     * @return the file's full path
     */
public Path resolveFile(Enum tag, Path file)
{
return storagePaths.get(tag).resolve(file);
}
    /**
     * Gets the path of a file relative to a given storage category's directory.
     * @param tag storage category enum value
     * @param file the file whose relative path is wanted
     * @return the file's relative path
     */
public Path relativize(Enum tag, Path file)
{
return getStoragePath(tag).relativize(file);
}
private void addServerRoute(RouteInfo route)
{
Method m;
try {
m = CowherdFileStorageService.class.getMethod("getFile", Enum.class, String.class);
} catch (NoSuchMethodException e) {
log.e("No action for file storage!", e);
return;
}
RouteManager.addRoute(route, new ActionMethodInfo(m));
}
    /**
     * Registers a route for direct access to the given file storage category.
     * @param tag storage category enum value
     * @param routeRegex the route pattern; it must contain a named capture
     *        group called "path" matching the relative path of the file to serve
     */
public void registerServerRoute(Enum tag, String routeRegex)
{
RouteInfo info = new RouteInfo();
info.setPath(routeRegex);
info.setType(RouteType.Http);
info.setOtherParameters(new Object[] { tag });
addServerRoute(info);
}
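    // Illustrative only: a pattern like "/avatars/(?<path>.+)" satisfies the
    // named-group requirement documented above, so /avatars/a/b.png maps to
    // the file a/b.png under the directory registered for the given tag.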
public void registerServerSimpleRoute(Enum tag, String route)
{
RouteInfo info = new RouteInfo();
info.setPath(route);
info.setType(RouteType.Http);
info.setOtherParameters(new Object[] { tag });
info.setFastRoute(true);
addServerRoute(info);
}
public void removeStoragePathIf(Predicate<Enum> predicate) {
storagePaths.entrySet().removeIf(e -> predicate.test(e.getKey()));
}
}<๏ฝfimโend๏ฝ> | to = to.resolve(fileName);
} else {
to = store.resolve(fileName); |
<|file_name|>borrowck-assign-to-andmut-in-aliasable-loc.rs<|end_file_name|><๏ฝfimโbegin๏ฝ>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that assignments to an `&mut` pointer which is found in a
// borrowed (but otherwise non-aliasable) location is illegal.
struct S<'self> {
pointer: &'self mut int
}
fn a(s: &S) {
*s.pointer += 1; //~ ERROR cannot assign
}
fn b(s: &mut S) {
*s.pointer += 1;
}<๏ฝfimโhole๏ฝ> *s.pointer += 1; //~ ERROR cannot assign
}
fn main() {}<๏ฝfimโend๏ฝ> |
fn c(s: & &mut S) { |
<|file_name|>api_backend.go<|end_file_name|><๏ฝfimโbegin๏ฝ>// Copyright 2016 The go-trustmachine Authors
// This file is part of the go-trustmachine library.
//
// The go-trustmachine library is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.<๏ฝfimโhole๏ฝ>// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with the go-trustmachine library. If not, see <http://www.gnu.org/licenses/>.
package les
import (
"context"
"math/big"
"github.com/trust-tech/go-trustmachine/accounts"
"github.com/trust-tech/go-trustmachine/common"
"github.com/trust-tech/go-trustmachine/common/math"
"github.com/trust-tech/go-trustmachine/core"
"github.com/trust-tech/go-trustmachine/core/state"
"github.com/trust-tech/go-trustmachine/core/types"
"github.com/trust-tech/go-trustmachine/core/vm"
"github.com/trust-tech/go-trustmachine/entrust/downloader"
"github.com/trust-tech/go-trustmachine/entrust/gasprice"
"github.com/trust-tech/go-trustmachine/entrustdb"
"github.com/trust-tech/go-trustmachine/event"
"github.com/trust-tech/go-trustmachine/light"
"github.com/trust-tech/go-trustmachine/params"
"github.com/trust-tech/go-trustmachine/rpc"
)
type LesApiBackend struct {
entrust *LightTrustmachine
gpo *gasprice.Oracle
}
func (b *LesApiBackend) ChainConfig() *params.ChainConfig {
return b.entrust.chainConfig
}
func (b *LesApiBackend) CurrentBlock() *types.Block {
return types.NewBlockWithHeader(b.entrust.BlockChain().CurrentHeader())
}
func (b *LesApiBackend) SetHead(number uint64) {
b.entrust.protocolManager.downloader.Cancel()
b.entrust.blockchain.SetHead(number)
}
func (b *LesApiBackend) HeaderByNumber(ctx context.Context, blockNr rpc.BlockNumber) (*types.Header, error) {
if blockNr == rpc.LatestBlockNumber || blockNr == rpc.PendingBlockNumber {
return b.entrust.blockchain.CurrentHeader(), nil
}
return b.entrust.blockchain.GetHeaderByNumberOdr(ctx, uint64(blockNr))
}
func (b *LesApiBackend) BlockByNumber(ctx context.Context, blockNr rpc.BlockNumber) (*types.Block, error) {
header, err := b.HeaderByNumber(ctx, blockNr)
if header == nil || err != nil {
return nil, err
}
return b.GetBlock(ctx, header.Hash())
}
func (b *LesApiBackend) StateAndHeaderByNumber(ctx context.Context, blockNr rpc.BlockNumber) (*state.StateDB, *types.Header, error) {
header, err := b.HeaderByNumber(ctx, blockNr)
if header == nil || err != nil {
return nil, nil, err
}
return light.NewState(ctx, header, b.entrust.odr), header, nil
}
func (b *LesApiBackend) GetBlock(ctx context.Context, blockHash common.Hash) (*types.Block, error) {
return b.entrust.blockchain.GetBlockByHash(ctx, blockHash)
}
func (b *LesApiBackend) GetReceipts(ctx context.Context, blockHash common.Hash) (types.Receipts, error) {
return light.GetBlockReceipts(ctx, b.entrust.odr, blockHash, core.GetBlockNumber(b.entrust.chainDb, blockHash))
}
func (b *LesApiBackend) GetTd(blockHash common.Hash) *big.Int {
return b.entrust.blockchain.GetTdByHash(blockHash)
}
func (b *LesApiBackend) GetEVM(ctx context.Context, msg core.Message, state *state.StateDB, header *types.Header, vmCfg vm.Config) (*vm.EVM, func() error, error) {
state.SetBalance(msg.From(), math.MaxBig256)
context := core.NewEVMContext(msg, header, b.entrust.blockchain, nil)
return vm.NewEVM(context, state, b.entrust.chainConfig, vmCfg), state.Error, nil
}
func (b *LesApiBackend) SendTx(ctx context.Context, signedTx *types.Transaction) error {
return b.entrust.txPool.Add(ctx, signedTx)
}
func (b *LesApiBackend) RemoveTx(txHash common.Hash) {
b.entrust.txPool.RemoveTx(txHash)
}
func (b *LesApiBackend) GetPoolTransactions() (types.Transactions, error) {
return b.entrust.txPool.GetTransactions()
}
func (b *LesApiBackend) GetPoolTransaction(txHash common.Hash) *types.Transaction {
return b.entrust.txPool.GetTransaction(txHash)
}
func (b *LesApiBackend) GetPoolNonce(ctx context.Context, addr common.Address) (uint64, error) {
return b.entrust.txPool.GetNonce(ctx, addr)
}
func (b *LesApiBackend) Stats() (pending int, queued int) {
return b.entrust.txPool.Stats(), 0
}
func (b *LesApiBackend) TxPoolContent() (map[common.Address]types.Transactions, map[common.Address]types.Transactions) {
return b.entrust.txPool.Content()
}
func (b *LesApiBackend) Downloader() *downloader.Downloader {
return b.entrust.Downloader()
}
func (b *LesApiBackend) ProtocolVersion() int {
return b.entrust.LesVersion() + 10000
}
func (b *LesApiBackend) SuggestPrice(ctx context.Context) (*big.Int, error) {
return b.gpo.SuggestPrice(ctx)
}
func (b *LesApiBackend) ChainDb() entrustdb.Database {
return b.entrust.chainDb
}
func (b *LesApiBackend) EventMux() *event.TypeMux {
return b.entrust.eventMux
}
func (b *LesApiBackend) AccountManager() *accounts.Manager {
return b.entrust.accountManager
}<๏ฝfimโend๏ฝ> | //
// The go-trustmachine library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
<|file_name|>router.go<|end_file_name|><๏ฝfimโbegin๏ฝ>package routers
import (
"github.com/astaxie/beego"
"github.com/royburns/goTestLinkReport/apis"
"github.com/royburns/goTestLinkReport/controllers"
)
func init() {
// main
beego.Router("/", &controllers.IndexController{})
beego.Router("/plan", &controllers.PlanController{})
// beego.Router("/report", &controllers.ReportController{})
beego.Router("/getlastexecution", &controllers.GetLastExecutionController{})
beego.Router("/getsprintexecution", &controllers.GetSprintExecutionController{})
beego.Router("/getreleasereport", &controllers.ReleaseController{}, "get:GetReleaseReport")
beego.Router("/api/getreleasereport", &apis.ApiController{}, "get:GetReleaseReport")
beego.Router("/getreleaseoverview", &controllers.ReleaseController{}, "get:GetReleaseOverview")
beego.Router("/api/getreleaseoverview", &apis.ApiController{}, "get:GetReleaseOverview")
// Statistics
// beego.Router("/statistics", &controllers.StatisticsController{})
beego.Router("/statistics/sprint", &controllers.StatisticsController{})
beego.Router("/api/statistics/sprint", &controllers.StatisticsController{}, "get:Sprint")
// Test
beego.Router("/test", &controllers.TestController{})
// About
beego.Router("/about", &controllers.AboutController{})
// cmd
// beego.Router("/cmd", &controllers.CmdController{})
// api
beego.Router("/api/getplan", &apis.ApiController{}, "get:GetPlan")
beego.Router("/api/getlastexecution", &apis.ApiController{}, "get:GetLastExecution")
beego.Router("/api/getsprintexecution", &apis.ApiController{}, "get:GetSprintExecution")
beego.Router("/api/runcmd", &apis.ApiController{}, "get:RunCmd")
// static file
beego.SetStaticPath("/data", "data")<๏ฝfimโhole๏ฝ><๏ฝfimโend๏ฝ> | } |
<|file_name|>render_context.rs<|end_file_name|><๏ฝfimโbegin๏ฝ>use std::collections::{HashMap};
use std::path::{Path};
use std::sync::{Arc};
use crossbeam::sync::{MsQueue};
use glium::backend::{Facade};
use debug::{gnomon, indicator};
use inverse_kinematics::{Chain};
use model::{Model};
use unlit_model::{UnlitModel};
use render::render_frame::{RenderFrame};
pub const DEPTH_DIMENSION: u32 = 2048;
#[derive(Eq, PartialEq, Hash, Copy, Clone)]
pub enum ModelId {
Player,
Scene,
IKModel, // TODO: we are going to need more of these / a dynamic way to generate ids and load at a later time
Tree,
// DEBUG
Gnomon,
Indicator,
}
pub struct RenderContext {
pub q: Arc<MsQueue<RenderFrame>>, // TODO: make private and provide minimal decent api
window_size: (u32, u32), // TODO: maybe this should be a per RenderFrame parameter
pub models: HashMap<ModelId, Arc<Model>>,
// DEBUG
pub unlit_models: HashMap<ModelId, Arc<UnlitModel>>,
}
impl RenderContext {
pub fn new<F: Facade>(facade: &F, q: Arc<MsQueue<RenderFrame>>, window_size: (u32, u32), ik_chains: &[Chain]) -> RenderContext {
let model_map = load_initial_models(facade, ik_chains);
// DEBUG
let mut unlit_models = HashMap::new();
unlit_models.insert(ModelId::Gnomon, Arc::new(gnomon::model(facade)));
unlit_models.insert(ModelId::Indicator, Arc::new(indicator::model(facade)));
<๏ฝfimโhole๏ฝ> window_size: window_size,
models: model_map,
// DEBUG
unlit_models: unlit_models,
}
}
pub fn aspect_ratio(&self) -> f32 {
(self.window_size.0 as f32) / (self.window_size.1 as f32)
}
}
// TODO: don't pass in chains but make something like IntoModel
//
fn load_initial_models<F: Facade>(facade: &F, ik_chains: &[Chain]) -> HashMap<ModelId, Arc<Model>> {
let mut map = HashMap::new();
const MODEL_PATH_STRINGS: [(ModelId, &'static str); 3] = [
(ModelId::Player, "./data/player.obj"),
(ModelId::Scene, "./data/level.obj"),
(ModelId::Tree, "./data/tree.obj")
];
for &(model_id, path) in &MODEL_PATH_STRINGS {
let model = Arc::new(Model::new(facade, &Path::new(path)));
map.insert(model_id, model);
}
for chain in ik_chains {
map.insert(ModelId::IKModel, Arc::new(chain.model(facade)));
}
map
}
unsafe impl Send for RenderContext {}
unsafe impl Sync for RenderContext {}<๏ฝfimโend๏ฝ> | RenderContext {
q: q, |
<|file_name|>hdrs.py<|end_file_name|><๏ฝfimโbegin๏ฝ>"""HTTP Headers constants."""
from .multidict import upstr
METH_ANY = upstr('*')
METH_CONNECT = upstr('CONNECT')
METH_HEAD = upstr('HEAD')
METH_GET = upstr('GET')
METH_DELETE = upstr('DELETE')
METH_OPTIONS = upstr('OPTIONS')
METH_PATCH = upstr('PATCH')
METH_POST = upstr('POST')
METH_PUT = upstr('PUT')
METH_TRACE = upstr('TRACE')
ACCEPT = upstr('ACCEPT')
ACCEPT_CHARSET = upstr('ACCEPT-CHARSET')
ACCEPT_ENCODING = upstr('ACCEPT-ENCODING')
ACCEPT_LANGUAGE = upstr('ACCEPT-LANGUAGE')
ACCEPT_RANGES = upstr('ACCEPT-RANGES')
ACCESS_CONTROL_MAX_AGE = upstr('ACCESS-CONTROL-MAX-AGE')
ACCESS_CONTROL_ALLOW_CREDENTIALS = upstr('ACCESS-CONTROL-ALLOW-CREDENTIALS')
ACCESS_CONTROL_ALLOW_HEADERS = upstr('ACCESS-CONTROL-ALLOW-HEADERS')
ACCESS_CONTROL_ALLOW_METHODS = upstr('ACCESS-CONTROL-ALLOW-METHODS')
ACCESS_CONTROL_ALLOW_ORIGIN = upstr('ACCESS-CONTROL-ALLOW-ORIGIN')
ACCESS_CONTROL_EXPOSE_HEADERS = upstr('ACCESS-CONTROL-EXPOSE-HEADERS')
ACCESS_CONTROL_REQUEST_HEADERS = upstr('ACCESS-CONTROL-REQUEST-HEADERS')
ACCESS_CONTROL_REQUEST_METHOD = upstr('ACCESS-CONTROL-REQUEST-METHOD')
AGE = upstr('AGE')
ALLOW = upstr('ALLOW')
AUTHORIZATION = upstr('AUTHORIZATION')
CACHE_CONTROL = upstr('CACHE-CONTROL')
CONNECTION = upstr('CONNECTION')
CONTENT_DISPOSITION = upstr('CONTENT-DISPOSITION')
CONTENT_ENCODING = upstr('CONTENT-ENCODING')
CONTENT_LANGUAGE = upstr('CONTENT-LANGUAGE')
CONTENT_LENGTH = upstr('CONTENT-LENGTH')
CONTENT_LOCATION = upstr('CONTENT-LOCATION')
CONTENT_MD5 = upstr('CONTENT-MD5')<๏ฝfimโhole๏ฝ>CONTENT_TRANSFER_ENCODING = upstr('CONTENT-TRANSFER-ENCODING')
CONTENT_TYPE = upstr('CONTENT-TYPE')
COOKIE = upstr('COOKIE')
DATE = upstr('DATE')
DESTINATION = upstr('DESTINATION')
DIGEST = upstr('DIGEST')
ETAG = upstr('ETAG')
EXPECT = upstr('EXPECT')
EXPIRES = upstr('EXPIRES')
FROM = upstr('FROM')
HOST = upstr('HOST')
IF_MATCH = upstr('IF-MATCH')
IF_MODIFIED_SINCE = upstr('IF-MODIFIED-SINCE')
IF_NONE_MATCH = upstr('IF-NONE-MATCH')
IF_RANGE = upstr('IF-RANGE')
IF_UNMODIFIED_SINCE = upstr('IF-UNMODIFIED-SINCE')
KEEP_ALIVE = upstr('KEEP-ALIVE')
LAST_EVENT_ID = upstr('LAST-EVENT-ID')
LAST_MODIFIED = upstr('LAST-MODIFIED')
LINK = upstr('LINK')
LOCATION = upstr('LOCATION')
MAX_FORWARDS = upstr('MAX-FORWARDS')
ORIGIN = upstr('ORIGIN')
PRAGMA = upstr('PRAGMA')
PROXY_AUTHENTICATE = upstr('PROXY-AUTHENTICATE')
PROXY_AUTHORIZATION = upstr('PROXY-AUTHORIZATION')
RANGE = upstr('RANGE')
REFERER = upstr('REFERER')
RETRY_AFTER = upstr('RETRY-AFTER')
SEC_WEBSOCKET_ACCEPT = upstr('SEC-WEBSOCKET-ACCEPT')
SEC_WEBSOCKET_VERSION = upstr('SEC-WEBSOCKET-VERSION')
SEC_WEBSOCKET_PROTOCOL = upstr('SEC-WEBSOCKET-PROTOCOL')
SEC_WEBSOCKET_KEY = upstr('SEC-WEBSOCKET-KEY')
SEC_WEBSOCKET_KEY1 = upstr('SEC-WEBSOCKET-KEY1')
SERVER = upstr('SERVER')
SET_COOKIE = upstr('SET-COOKIE')
TE = upstr('TE')
TRAILER = upstr('TRAILER')
TRANSFER_ENCODING = upstr('TRANSFER-ENCODING')
UPGRADE = upstr('UPGRADE')
WEBSOCKET = upstr('WEBSOCKET')
URI = upstr('URI')
USER_AGENT = upstr('USER-AGENT')
VARY = upstr('VARY')
VIA = upstr('VIA')
WANT_DIGEST = upstr('WANT-DIGEST')
WARNING = upstr('WARNING')
WWW_AUTHENTICATE = upstr('WWW-AUTHENTICATE')<๏ฝfimโend๏ฝ> | CONTENT_RANGE = upstr('CONTENT-RANGE') |
<|file_name|>4.py<|end_file_name|><๏ฝfimโbegin๏ฝ>#Python 2.7<๏ฝfimโhole๏ฝ><๏ฝfimโend๏ฝ> |
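# Project Euler problem 4: find the largest palindrome that is a product of
# two 3-digit numbers.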
candidates = set([str(a * b) for a in range(100, 1000) for b in range(100, 1000)])
candidates = filter(lambda x: x == x[::-1], candidates)
print max([int(x) for x in candidates]) |
<|file_name|>app.py<|end_file_name|><๏ฝfimโbegin๏ฝ><๏ฝfimโhole๏ฝ>
app = Flask(__name__)
app.config.from_object("configs.appconfig.DevelopmentConfig")<๏ฝfimโend๏ฝ> | from flask import Flask |
<|file_name|>LmsApiUserService.java<|end_file_name|><๏ฝfimโbegin๏ฝ>package com.softech.ls360.lms.api.proxy.service;
import com.softech.vu360.lms.webservice.message.lmsapi.serviceoperations.user.AddUserResponse;
import com.softech.vu360.lms.webservice.message.lmsapi.serviceoperations.user.UpdateUserResponse;
import com.softech.vu360.lms.webservice.message.lmsapi.types.user.UpdateableUser;
import com.softech.vu360.lms.webservice.message.lmsapi.types.user.User;<๏ฝfimโhole๏ฝ> AddUserResponse createUser(User user, Long customerId, String customerCode, String apiKey) throws Exception;
UpdateUserResponse updateUser(UpdateableUser updateableUser, Long customerId, String customerCode, String apiKey) throws Exception;
}<๏ฝfimโend๏ฝ> |
public interface LmsApiUserService {
|
<|file_name|>delayed_gcode.py<|end_file_name|><๏ฝfimโbegin๏ฝ># A simple timer for executing gcode templates
#
# Copyright (C) 2019 Eric Callahan <[email protected]>
#
# This file may be distributed under the terms of the GNU GPLv3 license.
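#
# Minimal example of a config section this module handles (the section and
# option names follow what the code below reads; the gcode body is just
# illustrative):
#
#   [delayed_gcode clear_display]
#   initial_duration: 10.
#   gcode:
#     M117
#
# Re-issuing UPDATE_DELAYED_GCODE ID=clear_display DURATION=... from within
# the timer's own gcode re-arms it, making the timer repeat.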
import logging
class DelayedGcode:
def __init__(self, config):
self.printer = config.get_printer()
self.reactor = self.printer.get_reactor()
self.name = config.get_name().split()[1]
self.gcode = self.printer.lookup_object('gcode')
gcode_macro = self.printer.load_object(config, 'gcode_macro')
self.timer_gcode = gcode_macro.load_template(config, 'gcode')
self.duration = config.getfloat('initial_duration', 0., minval=0.)
self.timer_handler = None
self.inside_timer = self.repeat = False
self.printer.register_event_handler("klippy:ready", self._handle_ready)
self.gcode.register_mux_command(
"UPDATE_DELAYED_GCODE", "ID", self.name,
self.cmd_UPDATE_DELAYED_GCODE,
desc=self.cmd_UPDATE_DELAYED_GCODE_help)
def _handle_ready(self):
waketime = self.reactor.NEVER
if self.duration:
waketime = self.reactor.monotonic() + self.duration
self.timer_handler = self.reactor.register_timer(
self._gcode_timer_event, waketime)
def _gcode_timer_event(self, eventtime):
self.inside_timer = True
try:
self.gcode.run_script(self.timer_gcode.render())
except Exception:
logging.exception("Script running error")
nextwake = self.reactor.NEVER
if self.repeat:
nextwake = eventtime + self.duration
self.inside_timer = self.repeat = False
return nextwake
cmd_UPDATE_DELAYED_GCODE_help = "Update the duration of a delayed_gcode"
def cmd_UPDATE_DELAYED_GCODE(self, gcmd):
self.duration = gcmd.get_float('DURATION', minval=0.)<๏ฝfimโhole๏ฝ> if self.inside_timer:
self.repeat = (self.duration != 0.)
else:
waketime = self.reactor.NEVER
if self.duration:
waketime = self.reactor.monotonic() + self.duration
self.reactor.update_timer(self.timer_handler, waketime)
def load_config_prefix(config):
return DelayedGcode(config)<๏ฝfimโend๏ฝ> | |
<|file_name|>saveFile.js<|end_file_name|><๏ฝfimโbegin๏ฝ>Meteor.methods({
uploadFile: function (file) {
console.log('saving '+file.name+' on server');
        // appends to an existing file with the same name
file.save('/tmp/uploads', {});<๏ฝfimโhole๏ฝ><๏ฝfimโend๏ฝ> | }
}); |
<|file_name|>services.js<|end_file_name|><๏ฝfimโbegin๏ฝ>var sbModule = angular.module('sbServices', ['ngResource']);
sbModule.factory('App', function($resource) {
return $resource('/api/v1/app/:name', { q: '' }, {
get: { method: 'GET' }, //isArray: false },<๏ฝfimโhole๏ฝ> query: { method: 'GET'} //, params: { q: '' }//, isArray: false }
});
});<๏ฝfimโend๏ฝ> | |
<|file_name|>filters.py<|end_file_name|><๏ฝfimโbegin๏ฝ>#
# Copyright (c) 2015 Red Hat
# Licensed under The MIT License (MIT)<๏ฝfimโhole๏ฝ># http://opensource.org/licenses/MIT
#
from django.conf import settings
from django.forms import SelectMultiple
import django_filters
from pdc.apps.common.filters import MultiValueFilter, NullableCharFilter
from . import models
class RPMFilter(django_filters.FilterSet):
name = MultiValueFilter()
version = MultiValueFilter()
epoch = MultiValueFilter()
release = MultiValueFilter()
arch = MultiValueFilter()
srpm_name = MultiValueFilter()
srpm_nevra = NullableCharFilter()
filename = MultiValueFilter()
compose = MultiValueFilter(name='composerpm__variant_arch__variant__compose__compose_id',
distinct=True)
linked_release = MultiValueFilter(name='linked_releases__release_id', distinct=True)
class Meta:
model = models.RPM
fields = ('name', 'version', 'epoch', 'release', 'arch', 'srpm_name',
'srpm_nevra', 'compose', 'filename', 'linked_release')
class ImageFilter(django_filters.FilterSet):
file_name = MultiValueFilter()
image_format = MultiValueFilter(name='image_format__name')
image_type = MultiValueFilter(name='image_type__name')
disc_number = MultiValueFilter()
disc_count = MultiValueFilter()
arch = MultiValueFilter()
mtime = MultiValueFilter()
size = MultiValueFilter()
implant_md5 = MultiValueFilter()
volume_id = MultiValueFilter()
md5 = MultiValueFilter()
sha1 = MultiValueFilter()
sha256 = MultiValueFilter()
compose = MultiValueFilter(name='composeimage__variant_arch__variant__compose__compose_id',
distinct=True)
class Meta:
model = models.Image
fields = ('file_name', 'image_format', 'image_type', 'disc_number',
'disc_count', 'arch', 'mtime', 'size', 'bootable',
'implant_md5', 'volume_id', 'md5', 'sha1', 'sha256')
class BuildImageFilter(django_filters.FilterSet):
if settings.WITH_BINDINGS:
component_name = django_filters.MethodFilter(action='filter_by_component_name',
widget=SelectMultiple)
else:
component_name = MultiValueFilter(name='rpms__srpm_name', distinct=True)
rpm_version = MultiValueFilter(name='rpms__version', distinct=True)
rpm_release = MultiValueFilter(name='rpms__release', distinct=True)
image_id = MultiValueFilter()
image_format = MultiValueFilter(name='image_format__name')
md5 = MultiValueFilter()
archive_build_nvr = MultiValueFilter(name='archives__build_nvr', distinct=True)
archive_name = MultiValueFilter(name='archives__name', distinct=True)
archive_size = MultiValueFilter(name='archives__size', distinct=True)
archive_md5 = MultiValueFilter(name='archives__md5', distinct=True)
release_id = MultiValueFilter(name='releases__release_id', distinct=True)
def filter_by_component_name(self, queryset, value):
from pdc.apps.bindings import models as binding_models
srpm_names = binding_models.ReleaseComponentSRPMNameMapping.objects.filter(
release_component__name__in=value).distinct().values_list('srpm_name')
if value:
if srpm_names:
return queryset.filter(rpms__srpm_name__in=srpm_names).distinct()
else:
return queryset.filter(rpms__srpm_name__in=value).distinct()
else:
return queryset
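    # Illustrative request (the endpoint path here is hypothetical):
    #   GET /rest_api/v1/build-images/?component_name=bash
    # tries the release-component -> SRPM-name mapping above first, then falls
    # back to matching rpms__srpm_name directly.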
class Meta:
model = models.BuildImage
fields = ('component_name', 'rpm_version', 'rpm_release', 'image_id', 'image_format', 'md5',
'archive_build_nvr', 'archive_name', 'archive_size', 'archive_md5', 'release_id')<๏ฝfimโend๏ฝ> | |
<|file_name|>mastodon.js<|end_file_name|><๏ฝfimโbegin๏ฝ>import React from 'react';
import { Provider } from 'react-redux';
import PropTypes from 'prop-types';
import configureStore from '../store/configureStore';
import { BrowserRouter, Route } from 'react-router-dom';
import { ScrollContext } from 'react-router-scroll-4';
import UI from '../features/ui';
import { fetchCustomEmojis } from '../actions/custom_emojis';
import { hydrateStore } from '../actions/store';
import { connectUserStream } from '../actions/streaming';
import { IntlProvider, addLocaleData } from 'react-intl';
import { getLocale } from '../locales';
import initialState from '../initial_state';
import ErrorBoundary from '../components/error_boundary';
const { localeData, messages } = getLocale();<๏ฝfimโhole๏ฝ>addLocaleData(localeData);
export const store = configureStore();
const hydrateAction = hydrateStore(initialState);
store.dispatch(hydrateAction);
store.dispatch(fetchCustomEmojis());
const createIdentityContext = state => ({
signedIn: !!state.meta.me,
accountId: state.meta.me,
accessToken: state.meta.access_token,
});
export default class Mastodon extends React.PureComponent {
static propTypes = {
locale: PropTypes.string.isRequired,
};
static childContextTypes = {
identity: PropTypes.shape({
signedIn: PropTypes.bool.isRequired,
accountId: PropTypes.string,
accessToken: PropTypes.string,
}).isRequired,
};
identity = createIdentityContext(initialState);
getChildContext() {
return {
identity: this.identity,
};
}
componentDidMount() {
if (this.identity.signedIn) {
this.disconnect = store.dispatch(connectUserStream());
}
}
componentWillUnmount () {
if (this.disconnect) {
this.disconnect();
this.disconnect = null;
}
}
shouldUpdateScroll (prevRouterProps, { location }) {
return !(location.state?.mastodonModalKey && location.state?.mastodonModalKey !== prevRouterProps?.location?.state?.mastodonModalKey);
}
render () {
const { locale } = this.props;
return (
<IntlProvider locale={locale} messages={messages}>
<Provider store={store}>
<ErrorBoundary>
<BrowserRouter basename='/web'>
<ScrollContext shouldUpdateScroll={this.shouldUpdateScroll}>
<Route path='/' component={UI} />
</ScrollContext>
</BrowserRouter>
</ErrorBoundary>
</Provider>
</IntlProvider>
);
}
}<๏ฝfimโend๏ฝ> | |
<|file_name|>paint_context.rs<|end_file_name|><๏ฝfimโbegin๏ฝ>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Painting of display lists using Moz2D/Azure.
use app_units::Au;
use azure::azure::AzIntSize;
use azure::azure_hl::{AntialiasMode, Color, ColorPattern, CompositionOp};
use azure::azure_hl::{CapStyle, JoinStyle};
use azure::azure_hl::{DrawOptions, DrawSurfaceOptions, DrawTarget, ExtendMode, FilterType};
use azure::azure_hl::{Filter, FilterNode, GaussianBlurInput, GradientStop, LinearGradientPattern};
use azure::azure_hl::{GaussianBlurAttribute, StrokeOptions, SurfaceFormat};
use azure::azure_hl::{Path, PathBuilder, Pattern, PatternRef, SurfacePattern};
use azure::scaled_font::ScaledFont;
use azure::{AzDrawTargetFillGlyphs, struct__AzGlyphBuffer, struct__AzPoint};
use azure::{AzFloat, struct__AzDrawOptions, struct__AzGlyph};
use display_list::TextOrientation::{SidewaysLeft, SidewaysRight, Upright};
use display_list::{BLUR_INFLATION_FACTOR, BorderRadii, BoxShadowClipMode, ClippingRegion};
use display_list::{TextDisplayItem, WebRenderImageInfo};
use euclid::matrix2d::Matrix2D;
use euclid::point::Point2D;
use euclid::rect::{Rect, TypedRect};
use euclid::scale_factor::ScaleFactor;
use euclid::side_offsets::SideOffsets2D;
use euclid::size::Size2D;
use filters;
use font_context::FontContext;
use gfx_traits::{color, LayerKind};
use net_traits::image::base::PixelFormat;
use range::Range;
use std::default::Default;
use std::{f32, mem, ptr};
use style::computed_values::{border_style, filter, image_rendering, mix_blend_mode};
use style_traits::PagePx;
use text::TextRun;
use text::glyph::ByteIndex;
use util::geometry::{self, max_rect, ScreenPx};
use util::opts;
pub struct PaintContext<'a> {
pub draw_target: DrawTarget,
pub font_context: &'a mut Box<FontContext>,
/// The rectangle that this context encompasses in page coordinates.
pub page_rect: TypedRect<f32, PagePx>,
/// The rectangle that this context encompasses in screen coordinates (pixels).
pub screen_rect: TypedRect<usize, ScreenPx>,
/// The clipping rect for the stacking context as a whole.
pub clip_rect: Option<Rect<Au>>,
/// The current transient clipping region, if any. A "transient clipping region" is the
/// clipping region used by the last display item. We cache the last value so that we avoid
/// pushing and popping clipping regions unnecessarily.
pub transient_clip: Option<ClippingRegion>,
/// A temporary hack to disable clipping optimizations on 3d layers.
pub layer_kind: LayerKind,
/// The current subpixel offset, used to make pixel snapping aware of accumulated subpixels
/// from the StackingContext.
/// TODO: Eventually this should be added to all points handled by the PaintContext.
pub subpixel_offset: Point2D<Au>,
}
#[derive(Copy, Clone)]
enum Direction {
Top,
Left,
Right,
Bottom
}
#[derive(Copy, Clone)]
enum BorderCorner {
TopLeft,
TopRight,
BottomRight,
BottomLeft,
}
#[derive(Copy, Clone)]
enum DashSize {
DottedBorder = 1,
DashedBorder = 3
}
#[derive(Copy, Clone, Debug)]
struct Ellipse {
origin: Point2D<f32>,
width: f32,
height: f32,
}
/// When `Line::new` creates a new `Line` it ensures `start.x <= end.x` for that line.
#[derive(Copy, Clone, Debug)]
struct Line {
start: Point2D<f32>,
end: Point2D<f32>,
}
impl Line {
/// Guarantees that `start.x <= end.x` for the returned `Line`.
fn new(start: Point2D<f32>, end: Point2D<f32>) -> Line {
let line = if start.x <= end.x {
Line { start: start, end: end }
} else {
Line { start: end, end: start }
};
debug_assert!(line.length_squared() > f32::EPSILON);
line
}
fn length_squared(&self) -> f32 {
let width = (self.end.x - self.start.x).abs();
let height = (self.end.y - self.start.y).abs();
width * width + height * height
}
}
struct CornerOrigin {
top_left: Point2D<f32>,
top_right: Point2D<f32>,
bottom_right: Point2D<f32>,
bottom_left: Point2D<f32>,
}
impl<'a> PaintContext<'a> {
pub fn to_nearest_azure_rect(&self, rect: &Rect<Au>) -> Rect<AzFloat> {
rect.translate(&self.subpixel_offset).to_nearest_azure_rect(self.screen_pixels_per_px())
}
pub fn screen_pixels_per_px(&self) -> ScaleFactor<f32, PagePx, ScreenPx> {
ScaleFactor::new(self.screen_rect.as_f32().size.width / self.page_rect.size.width)
}
pub fn draw_target(&self) -> &DrawTarget {
&self.draw_target
}
pub fn draw_solid_color(&self, bounds: &Rect<Au>, color: Color) {
self.draw_target.make_current();
self.draw_target.fill_rect(&self.to_nearest_azure_rect(&bounds),
PatternRef::Color(&ColorPattern::new(color)),
None);
}
pub fn draw_border(&self,
bounds: &Rect<Au>,
border: &SideOffsets2D<Au>,
radius: &BorderRadii<Au>,
color: &SideOffsets2D<Color>,
style: &SideOffsets2D<border_style::T>) {
let scale = self.screen_pixels_per_px();
let border = border.to_float_pixels(scale);
let radius = radius.to_radii_pixels(scale);
self.draw_border_segment(Direction::Top, bounds, &border, &radius, color, style);
self.draw_border_segment(Direction::Right, bounds, &border, &radius, color, style);
self.draw_border_segment(Direction::Bottom, bounds, &border, &radius, color, style);
self.draw_border_segment(Direction::Left, bounds, &border, &radius, color, style);
}
pub fn draw_line(&self, bounds: &Rect<Au>, color: Color, style: border_style::T) {
self.draw_target.make_current();
self.draw_line_segment(bounds, &Default::default(), color, style);
}
pub fn draw_push_clip(&self, bounds: &Rect<Au>) {
let rect = self.to_nearest_azure_rect(bounds);
let path_builder = self.draw_target.create_path_builder();
let left_top = Point2D::new(rect.origin.x, rect.origin.y);
let right_top = Point2D::new(rect.origin.x + rect.size.width, rect.origin.y);
let left_bottom = Point2D::new(rect.origin.x, rect.origin.y + rect.size.height);
let right_bottom = Point2D::new(rect.origin.x + rect.size.width,
rect.origin.y + rect.size.height);
path_builder.move_to(left_top);
path_builder.line_to(right_top);
path_builder.line_to(right_bottom);
path_builder.line_to(left_bottom);
let path = path_builder.finish();
self.draw_target.push_clip(&path);
}
pub fn draw_pop_clip(&self) {
self.draw_target.pop_clip();
}
pub fn draw_image(&self,
bounds: &Rect<Au>,
stretch_size: &Size2D<Au>,
image_info: &WebRenderImageInfo,
image_data: &[u8],
image_rendering: image_rendering::T) {
let size = Size2D::new(image_info.width as i32, image_info.height as i32);
let (pixel_width, source_format) = match image_info.format {
PixelFormat::RGBA8 => (4, SurfaceFormat::B8G8R8A8),
PixelFormat::K8 => (1, SurfaceFormat::A8),
PixelFormat::RGB8 => panic!("RGB8 color type not supported"),
PixelFormat::KA8 => panic!("KA8 color type not supported"),
};
let stride = image_info.width * pixel_width;
let scale = self.screen_pixels_per_px();
self.draw_target.make_current();
let draw_target_ref = &self.draw_target;
let azure_surface = match draw_target_ref.create_source_surface_from_data(image_data,
size,
stride as i32,
source_format) {
Some(azure_surface) => azure_surface,
None => return,
};
let source_rect = Rect::new(Point2D::new(0.0, 0.0),
Size2D::new(image_info.width as AzFloat,
image_info.height as AzFloat));
let dest_rect = self.to_nearest_azure_rect(bounds);
        // TODO(pcwalton): According to CSS-IMAGES-3 § 5.3, nearest-neighbor interpolation is a
// conforming implementation of `crisp-edges`, but it is not the best we could do.
// Something like Scale2x would be ideal.
let draw_surface_filter = match image_rendering {
image_rendering::T::Auto => Filter::Linear,
image_rendering::T::CrispEdges | image_rendering::T::Pixelated => Filter::Point,
};
let draw_surface_options = DrawSurfaceOptions::new(draw_surface_filter, true);
let draw_options = DrawOptions::new(1.0, CompositionOp::Over, AntialiasMode::None);
// Fast path: No need to create a pattern.
if bounds.size == *stretch_size {
draw_target_ref.draw_surface(azure_surface,
dest_rect,
source_rect,
draw_surface_options,
draw_options);
return
}
// Slightly slower path: No need to stretch.
//
// Annoyingly, surface patterns in Azure/Skia are relative to the top left of the *canvas*,
// not the rectangle we're drawing to. So we need to translate it explicitly.
let matrix = Matrix2D::identity().translate(dest_rect.origin.x, dest_rect.origin.y);
let stretch_size = stretch_size.to_nearest_azure_size(scale);
if source_rect.size == stretch_size {
let pattern = SurfacePattern::new(azure_surface.azure_source_surface,
true,
true,
&matrix);
draw_target_ref.fill_rect(&dest_rect,
PatternRef::Surface(&pattern),
Some(&draw_options));
return
}
// Slow path: Both stretch and a pattern are needed.
let draw_surface_options = DrawSurfaceOptions::new(draw_surface_filter, true);
let draw_options = DrawOptions::new(1.0, CompositionOp::Over, AntialiasMode::None);
let temporary_draw_target =
self.draw_target.create_similar_draw_target(&stretch_size.to_azure_int_size(),
self.draw_target.get_format());
let temporary_dest_rect = Rect::new(Point2D::new(0.0, 0.0), stretch_size);
temporary_draw_target.draw_surface(azure_surface,
temporary_dest_rect,
source_rect,
draw_surface_options,
draw_options);
let temporary_surface = temporary_draw_target.snapshot();
let pattern = SurfacePattern::new(temporary_surface.azure_source_surface,
true,
true,
&matrix);
draw_target_ref.fill_rect(&dest_rect, PatternRef::Surface(&pattern), None);
}
pub fn clear(&self) {
let pattern = ColorPattern::new(color::transparent());
let page_rect = self.page_rect.to_untyped();
let screen_rect = self.screen_rect.to_untyped();
let rect = Rect::new(Point2D::new(page_rect.origin.x as AzFloat,
page_rect.origin.y as AzFloat),
Size2D::new(screen_rect.size.width as AzFloat,
screen_rect.size.height as AzFloat));
let mut draw_options = DrawOptions::new(1.0, CompositionOp::Over, AntialiasMode::None);
draw_options.set_composition_op(CompositionOp::Source);
self.draw_target.make_current();
self.draw_target.fill_rect(&rect, PatternRef::Color(&pattern), Some(&draw_options));
}
fn draw_border_segment(&self,
direction: Direction,
bounds: &Rect<Au>,
border: &SideOffsets2D<f32>,
radius: &BorderRadii<AzFloat>,
color: &SideOffsets2D<Color>,
style: &SideOffsets2D<border_style::T>) {
let (style_select, color_select) = match direction {
Direction::Top => (style.top, color.top),
Direction::Left => (style.left, color.left),
Direction::Right => (style.right, color.right),
Direction::Bottom => (style.bottom, color.bottom)
};
match style_select {
border_style::T::none | border_style::T::hidden => {}
border_style::T::dotted => {
// FIXME(sammykim): This doesn't work well with dash_pattern and cap_style.
self.draw_dashed_border_segment(direction,
bounds,
border,
radius,
color_select,
DashSize::DottedBorder);
}
border_style::T::dashed => {
self.draw_dashed_border_segment(direction,
bounds,
border,
radius,
color_select,
DashSize::DashedBorder);
}
border_style::T::solid => {
self.draw_solid_border_segment(direction, bounds, border, radius, color_select);
}
border_style::T::double => {
self.draw_double_border_segment(direction, bounds, border, radius, color_select);
}
border_style::T::groove | border_style::T::ridge => {
self.draw_groove_ridge_border_segment(direction,
bounds,
border,
radius,
color_select,
style_select);
}
border_style::T::inset | border_style::T::outset => {
self.draw_inset_outset_border_segment(direction,
bounds,
border,
radius,
color_select,
style_select);
}
}
}
fn draw_line_segment(&self,
bounds: &Rect<Au>,
radius: &BorderRadii<AzFloat>,
color: Color,
style: border_style::T) {
let scale = self.screen_pixels_per_px();
let border = SideOffsets2D::new_all_same(bounds.size.width).to_float_pixels(scale);
match style {
border_style::T::none | border_style::T::hidden => {}
border_style::T::dotted => {
self.draw_dashed_border_segment(Direction::Right,
bounds,
&border,
radius,
color,
DashSize::DottedBorder);
}
border_style::T::dashed => {
self.draw_dashed_border_segment(Direction::Right,
bounds,
&border,
radius,
color,
DashSize::DashedBorder);
}
border_style::T::solid => {
self.draw_solid_border_segment(Direction::Right, bounds, &border, radius, color)
}
border_style::T::double => {
self.draw_double_border_segment(Direction::Right, bounds, &border, radius, color)
}
border_style::T::groove | border_style::T::ridge => {
self.draw_groove_ridge_border_segment(Direction::Right,
bounds,
&border,
radius,
color,
style);
}
border_style::T::inset | border_style::T::outset => {
self.draw_inset_outset_border_segment(Direction::Right,
bounds,
&border,
radius,
color,
style);
}
}
}
fn draw_border_path(&self,
bounds: &Rect<f32>,
direction: Direction,
border: &SideOffsets2D<f32>,
radii: &BorderRadii<AzFloat>,
color: Color) {
let mut path_builder = self.draw_target.create_path_builder();
self.create_border_path_segment(&mut path_builder,
bounds,
direction,
border,
radii,
BorderPathDrawingMode::EntireBorder);
let draw_options = DrawOptions::new(1.0, CompositionOp::Over, AntialiasMode::None);
self.draw_target.fill(&path_builder.finish(),
Pattern::Color(ColorPattern::new(color)).to_pattern_ref(),
&draw_options);
}
fn push_rounded_rect_clip(&self, bounds: &Rect<f32>, radii: &BorderRadii<AzFloat>) {
let mut path_builder = self.draw_target.create_path_builder();
self.create_rounded_rect_path(&mut path_builder, bounds, radii);
self.draw_target.push_clip(&path_builder.finish());
}
fn solve_quadratic(a: f32, b: f32, c: f32) -> (Option<f32>, Option<f32>) {
let discriminant = b * b - 4. * a * c;
if discriminant < 0. {
return (None, None);
}
let x1 = (-b + discriminant.sqrt())/(2. * a);
let x2 = (-b - discriminant.sqrt())/(2. * a);
if discriminant == 0. {
return (Some(x1), None);
}
(Some(x1), Some(x2))
}
fn intersect_ellipse_line(mut e: Ellipse, mut line: Line) -> (Option<Point2D<f32>>,
Option<Point2D<f32>>) {
let mut rotated_axes = false;
fn rotate_axes(point: Point2D<f32>, clockwise: bool) -> Point2D<f32> {
if clockwise {
// rotate clockwise by 90 degrees
Point2D::new(point.y, -point.x)
} else {
// rotate counter clockwise by 90 degrees
Point2D::new(-point.y, point.x)
}
}
// if line height is greater than its width then rotate the axes by 90 degrees,
// i.e. (x, y) -> (y, -x).
if (line.end.x - line.start.x).abs() < (line.end.y - line.start.y).abs() {
rotated_axes = true;
line = Line::new(rotate_axes(line.start, true), rotate_axes(line.end, true));
e = Ellipse { origin: rotate_axes(e.origin, true),
width: e.height, height: e.width };
}
debug_assert!(line.end.x - line.start.x > f32::EPSILON,
"Error line segment end.x ({}) <= start.x ({})!", line.end.x, line.start.x);
// shift the origin to center of the ellipse.
line = Line::new(line.start - e.origin, line.end - e.origin);
let a = (line.end.y - line.start.y)/(line.end.x - line.start.x);
let b = line.start.y - (a * line.start.x);
// given the equation of a line,
// y = a * x + b,
// and the equation of an ellipse,
// x^2/w^2 + y^2/h^2 = 1,
// substitute y = a * x + b, giving
// x^2/w^2 + (a^2x^2 + 2abx + b^2)/h^2 = 1
// then simplify to
// (h^2 + w^2a^2)x^2 + 2abw^2x + (b^2w^2 - w^2h^2) = 0
        // finally solve for x using the quadratic formula.
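        // Quick check of that algebra: for a unit circle (w = h = 1) and the
        // line y = 0 (a = 0, b = 0) the quadratic reduces to x^2 - 1 = 0,
        // giving x = +-1 as expected.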
let w = e.width;
let h = e.height;
let quad_a = h * h + w * w * a * a;
let quad_b = 2. * a * b * w * w;
let quad_c = b * b * w * w - w * w * h * h;
let intersections = PaintContext::solve_quadratic(quad_a, quad_b, quad_c);
match intersections {
(Some(x0), Some(x1)) => {
let mut p0 = Point2D::new(x0, a * x0 + b) + e.origin;
let mut p1 = Point2D::new(x1, a * x1 + b) + e.origin;
if x0 > x1 {
mem::swap(&mut p0, &mut p1);
}
if rotated_axes {
p0 = rotate_axes(p0, false);
p1 = rotate_axes(p1, false);
}
(Some(p0), Some(p1))
},
(Some(x0), None) | (None, Some(x0)) => {
let mut p = Point2D::new(x0, a * x0 + b) + e.origin;
if rotated_axes {
p = rotate_axes(p, false);
}
(Some(p), None)
},
(None, None) => (None, None),
}
}
// Given an ellipse and line segment, the line segment may intersect the
// ellipse at 0, 1, or 2 points. We compute those intersection points.
    // For each intersection point we compute the angle of that point on the
    // ellipse, measured from the horizontal axis through its center (this is
    // the asin-based angle computed by `point_angle` below).
    // Examples:
    // - intersection at ellipse.center + (0, ellipse.height), the angle is pi/2.
    // - intersection at ellipse.center + (0, -ellipse.height), the angle is pi/2.
    // - intersection at ellipse.center + (+-ellipse.width, 0), the angle is 0 rad.
fn ellipse_line_intersection_angles(e: Ellipse, l: Line)
-> (Option<(Point2D<f32>, f32)>, Option<(Point2D<f32>, f32)>) {
fn point_angle(e: Ellipse, intersect_point: Point2D<f32>) -> f32 {
((intersect_point.y - e.origin.y).abs() / e.height).asin()
}
let intersection = PaintContext::intersect_ellipse_line(e, l);
match intersection {
(Some(p0), Some(p1)) => (Some((p0, point_angle(e, p0))), Some((p1, point_angle(e, p1)))),
(Some(p0), None) => (Some((p0, point_angle(e, p0))), None),
(None, Some(p1)) => (None, Some((p1, point_angle(e, p1)))),
(None, None) => (None, None),
}
}
fn ellipse_rightmost_intersection(e: Ellipse, l: Line) -> Option<f32> {
match PaintContext::ellipse_line_intersection_angles(e, l) {
(Some((p0, angle0)), Some((p1, _))) if p0.x > p1.x => Some(angle0),
(_, Some((_, angle1))) => Some(angle1),
(Some((_, angle0)), None) => Some(angle0),
(None, None) => None,
}
}
fn ellipse_leftmost_intersection(e: Ellipse, l: Line) -> Option<f32> {
match PaintContext::ellipse_line_intersection_angles(e, l) {
(Some((p0, angle0)), Some((p1, _))) if p0.x < p1.x => Some(angle0),
(_, Some((_, angle1))) => Some(angle1),
(Some((_, angle0)), None) => Some(angle0),
(None, None) => None,
}
}
fn is_zero_radius(radius: &Size2D<AzFloat>) -> bool {
radius.width <= 0. || radius.height <= 0.
}
// The following comment is wonderful, and stolen from
// gecko:gfx/thebes/gfxContext.cpp:RoundedRectangle for reference.
// ---------------------------------------------------------------
//
// For CW drawing, this looks like:
//
// ...******0** 1 C
// ****
// *** 2
// **
// *
// *
// 3
// *
// *
//
// Where 0, 1, 2, 3 are the control points of the Bezier curve for
// the corner, and C is the actual corner point.
//
// At the start of the loop, the current point is assumed to be
// the point adjacent to the top left corner on the top
// horizontal. Note that corner indices start at the top left and
// continue clockwise, whereas in our loop i = 0 refers to the top
// right corner.
//
// When going CCW, the control points are swapped, and the first
// corner that's drawn is the top left (along with the top segment).
//
// There is considerable latitude in how one chooses the four
// control points for a Bezier curve approximation to an ellipse.
// For the overall path to be continuous and show no corner at the
// endpoints of the arc, points 0 and 3 must be at the ends of the
// straight segments of the rectangle; points 0, 1, and C must be
// collinear; and points 3, 2, and C must also be collinear. This
// leaves only two free parameters: the ratio of the line segments
// 01 and 0C, and the ratio of the line segments 32 and 3C. See
// the following papers for extensive discussion of how to choose
// these ratios:
//
// Dokken, Tor, et al. "Good approximation of circles by
// curvature-continuous Bezier curves." Computer-Aided
// Geometric Design 7(1990) 33--41.
// Goldapp, Michael. "Approximation of circular arcs by cubic
// polynomials." Computer-Aided Geometric Design 8(1991) 227--238.
// Maisonobe, Luc. "Drawing an elliptical arc using polylines,
// quadratic, or cubic Bezier curves."
// http://www.spaceroots.org/documents/ellipse/elliptical-arc.pdf
//
// We follow the approach in section 2 of Goldapp (least-error,
// Hermite-type approximation) and make both ratios equal to
//
// 2 2 + n - sqrt(2n + 28)
// alpha = - * ---------------------
// 3 n - 4
//
// where n = 3( cbrt(sqrt(2)+1) - cbrt(sqrt(2)-1) ).
//
// This is the result of Goldapp's equation (10b) when the angle
// swept out by the arc is pi/2, and the parameter "a-bar" is the
// expression given immediately below equation (21).
//
// Using this value, the maximum radial error for a circle, as a
// fraction of the radius, is on the order of 0.2 x 10^-3.
// Neither Dokken nor Goldapp discusses error for a general
// ellipse; Maisonobe does, but his choice of control points
// follows different constraints, and Goldapp's expression for
// 'alpha' gives much smaller radial error, even for very flat
// ellipses, than Maisonobe's equivalent.
//
// For the various corners and for each axis, the sign of this
// constant changes, or it might be 0 -- it's multiplied by the
// appropriate multiplier from the list before using.
// ---------------------------------------------------------------
//
// Code adapted from gecko:gfx/2d/PathHelpers.h:EllipseToBezier
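    //
    // Numeric check: for a quarter arc (end_angle - start_angle = pi/2) the
    // kappa_factor computed below is (4/3) * tan(pi/8), roughly 0.5523, the
    // classic circle constant and close to Goldapp's least-error alpha of
    // roughly 0.5519 discussed above.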
fn ellipse_to_bezier(path_builder: &mut PathBuilder,
origin: Point2D<AzFloat>,
radius: Size2D<AzFloat>,
start_angle: f32,
end_angle: f32) {
if PaintContext::is_zero_radius(&radius) {
return;
}
// Calculate kappa constant for partial curve. The sign of angle in the
// tangent will actually ensure this is negative for a counter clockwise
// sweep, so changing signs later isn't needed.
let kappa_factor: f32 = (4.0f32 / 3.0f32) * ((end_angle - start_angle) / 4.).tan();
let kappa_x: f32 = kappa_factor * radius.width;
let kappa_y: f32 = kappa_factor * radius.height;
// We guarantee here the current point is the start point of the next
// curve segment.
let start_point = Point2D::new(origin.x + start_angle.cos() * radius.width,
origin.y + start_angle.sin() * radius.height);
path_builder.line_to(start_point);
let end_point = Point2D::new(origin.x + end_angle.cos() * radius.width,
origin.y + end_angle.sin() * radius.height);
let tangent_start = Point2D::new(-start_angle.sin(), start_angle.cos());
let cp1 = Point2D::new(start_point.x + tangent_start.x * kappa_x,
start_point.y + tangent_start.y * kappa_y);
let rev_tangent_end = Point2D::new(end_angle.sin(), -end_angle.cos());
let cp2 = Point2D::new(end_point.x + rev_tangent_end.x * kappa_x,
end_point.y + rev_tangent_end.y * kappa_y);
path_builder.bezier_curve_to(&cp1, &cp2, &end_point);
}
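// A quick numeric check of the factor above, added for illustration (it is
// not part of the original Gecko port): for a full quarter-circle sweep the
// expression reduces to the classic circle kappa, 4/3 * tan(pi/8) ~= 0.5523,
// close to Goldapp's least-error alpha of ~0.5519.
#[allow(dead_code)]
fn quarter_circle_kappa() -> AzFloat {
(4.0f32 / 3.0f32) * f32::consts::FRAC_PI_8.tan()
}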
#[allow(non_snake_case)]
fn inner_border_bounds(bounds: &Rect<f32>, border: &SideOffsets2D<f32>) -> Rect<f32> {
// T = top, B = bottom, L = left, R = right
let inner_TL = bounds.origin + Point2D::new(border.left, border.top);
let inner_BR = bounds.bottom_right() + Point2D::new(-border.right, -border.bottom);
Rect::new(inner_TL, Size2D::new(inner_BR.x - inner_TL.x, inner_BR.y - inner_TL.y))
}
#[allow(non_snake_case)]
fn corner_bounds(bounds: &Rect<f32>,
border: &SideOffsets2D<f32>,
radii: &BorderRadii<AzFloat>) -> (CornerOrigin, SideOffsets2D<Size2D<f32>>) {
fn distance_to_elbow(radius: &Size2D<AzFloat>,
corner_width: f32,
corner_height: f32) -> Size2D<f32> {
if corner_width >= radius.width || corner_height >= radius.height {
Size2D::zero()
} else {
Size2D::new(radius.width - corner_width, radius.height - corner_height)
}
}
// T = top, B = bottom, L = left, R = right
let origin_TL = bounds.origin + Point2D::new(radii.top_left.width, radii.top_left.height);
let origin_TR = bounds.top_right() + Point2D::new(-radii.top_right.width,
radii.top_right.height);
let origin_BR = bounds.bottom_right() + Point2D::new(-radii.bottom_right.width,
-radii.bottom_right.height);
let origin_BL = bounds.bottom_left() + Point2D::new(radii.bottom_left.width,
-radii.bottom_left.height);
let elbow_TL = distance_to_elbow(&radii.top_left, border.left, border.top);
let elbow_TR = distance_to_elbow(&radii.top_right, border.right, border.top);
let elbow_BR = distance_to_elbow(&radii.bottom_right, border.right, border.bottom);
let elbow_BL = distance_to_elbow(&radii.bottom_left, border.left, border.bottom);
(CornerOrigin { top_left: origin_TL,
top_right: origin_TR,
bottom_right: origin_BR,
bottom_left: origin_BL },
SideOffsets2D::new(elbow_TL, elbow_TR, elbow_BR, elbow_BL))
}
/// `origin` is the origin point when drawing the corner e.g. it's the circle center
/// when drawing radial borders.
///
/// `corner` indicates which corner to draw e.g. top left or top right etc.
///
/// `radius` is the border-radius width and height. If `radius.width == radius.height` then
/// an arc from a circle is drawn instead of an arc from an ellipse.
///
/// `inner_border` & `outer_border` are the inner and outer points on the border corner
/// respectively. ASCII diagram:
/// ---------------* =====> ("*" is the `outer_border` point)
/// |
/// |
/// |
/// --------* ============> ("*" is the `inner_border` point)
/// | |
/// | |
///
///
/// `dist_elbow` is the distance from `origin` to the inner part of the border corner.
/// `clockwise` indicates direction to draw the border curve.
#[allow(non_snake_case)]
fn draw_corner(path_builder: &mut PathBuilder,
corner: BorderCorner,
origin: &Point2D<AzFloat>,
radius: &Size2D<AzFloat>,
inner_border: &Point2D<AzFloat>,
outer_border: &Point2D<AzFloat>,
dist_elbow: &Size2D<AzFloat>,
clockwise: bool) {
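// Angles here are measured from the positive x-axis and increase clockwise,
// because rasterization uses a y-down coordinate system: 0 is the rightmost
// point of the ellipse, PI/2 the bottommost, PI the leftmost, and so on.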
let rad_R: AzFloat = 0.;
let rad_BR = rad_R + f32::consts::FRAC_PI_4;
let rad_B = rad_BR + f32::consts::FRAC_PI_4;
let rad_BL = rad_B + f32::consts::FRAC_PI_4;
let rad_L = rad_BL + f32::consts::FRAC_PI_4;
let rad_TL = rad_L + f32::consts::FRAC_PI_4;
let rad_T = rad_TL + f32::consts::FRAC_PI_4;
// Returns true if the angular size for this border corner
// is PI/4.
fn simple_border_corner(border_corner_radius: &Size2D<f32>,
border1_width: f32,
border2_width: f32) -> bool {
(border_corner_radius.width - border_corner_radius.height).abs() <= f32::EPSILON &&
(border1_width - border2_width).abs() <= f32::EPSILON
}
if PaintContext::is_zero_radius(radius) {
return;
}
let ellipse = Ellipse { origin: *origin, width: radius.width, height: radius.height };
let simple_border = simple_border_corner(&radius,
(outer_border.x - inner_border.x).abs(),
(outer_border.y - inner_border.y).abs());
let corner_angle = if simple_border {
f32::consts::FRAC_PI_4
} else {
let corner_line = Line::new(*inner_border, *outer_border);
match corner {
BorderCorner::TopLeft | BorderCorner::BottomLeft =>
PaintContext::ellipse_leftmost_intersection(ellipse, corner_line).unwrap(),
BorderCorner::TopRight | BorderCorner::BottomRight =>
PaintContext::ellipse_rightmost_intersection(ellipse, corner_line).unwrap(),
}
};
let (start_angle, end_angle) = match corner {
// TR corner - top border & right border
BorderCorner::TopRight =>
if clockwise { (-rad_B, rad_R - corner_angle) } else { (rad_R - corner_angle, rad_R) },
// BR corner - right border & bottom border
BorderCorner::BottomRight =>
if clockwise { (rad_R, rad_R + corner_angle) } else { (rad_R + corner_angle, rad_B) },
// TL corner - left border & top border
BorderCorner::TopLeft =>
if clockwise { (rad_L, rad_L + corner_angle) } else { (rad_L + corner_angle, rad_T) },
// BL corner - bottom border & left border
BorderCorner::BottomLeft =>
if clockwise { (rad_B, rad_L - corner_angle) } else { (rad_L - corner_angle, rad_L) },
};
if clockwise {
PaintContext::ellipse_to_bezier(path_builder, *origin, *radius, start_angle, end_angle);
PaintContext::ellipse_to_bezier(path_builder, *origin, *dist_elbow, end_angle, start_angle);
} else {
PaintContext::ellipse_to_bezier(path_builder, *origin, *dist_elbow, end_angle, start_angle);
PaintContext::ellipse_to_bezier(path_builder, *origin, *radius, start_angle, end_angle);
}
}
#[allow(non_snake_case)]
fn create_border_path_segment(&self,
path_builder: &mut PathBuilder,
bounds: &Rect<f32>,
direction: Direction,
border: &SideOffsets2D<f32>,
radii: &BorderRadii<AzFloat>,
mode: BorderPathDrawingMode) {
// T = top, B = bottom, L = left, R = right
let inner = PaintContext::inner_border_bounds(bounds, &border);
let (box_TL, inner_TL,
box_TR, inner_TR,
box_BR, inner_BR,
box_BL, inner_BL) = (bounds.origin, inner.origin,
bounds.top_right(), inner.top_right(),
bounds.bottom_right(), inner.bottom_right(),
bounds.bottom_left(), inner.bottom_left());
fn dx(x: AzFloat) -> Point2D<AzFloat> {
Point2D::new(x, 0.)
}
fn dy(y: AzFloat) -> Point2D<AzFloat> {
Point2D::new(0., y)
}
fn dx_if(cond: bool, dx: AzFloat) -> Point2D<AzFloat> {
Point2D::new(if cond { dx } else { 0. }, 0.)
}
fn dy_if(cond: bool, dy: AzFloat) -> Point2D<AzFloat> {
Point2D::new(0., if cond { dy } else { 0. })
}
let (corner_origin, elbow) =
PaintContext::corner_bounds(bounds, border, radii);
let (elbow_TL, elbow_TR, elbow_BR, elbow_BL) =
(elbow.top, elbow.right, elbow.bottom, elbow.left);
match direction {
Direction::Top => {
let edge_TL = box_TL + dx(radii.top_left.width.max(border.left));
let edge_TR = box_TR + dx(-radii.top_right.width.max(border.right));
let edge_BR = box_TR + dx(-border.right - elbow_TR.width) + dy(border.top);
let edge_BL = box_TL + dx(border.left + elbow_TL.width) + dy(border.top);
let corner_TL = edge_TL + dx_if(PaintContext::is_zero_radius(&radii.top_left),
-border.left);
let corner_TR = edge_TR + dx_if(PaintContext::is_zero_radius(&radii.top_right),
border.right);
match mode {
BorderPathDrawingMode::EntireBorder => {
path_builder.move_to(corner_TL);
path_builder.line_to(corner_TR);
}
BorderPathDrawingMode::CornersOnly => path_builder.move_to(corner_TR),
}
PaintContext::draw_corner(path_builder,
BorderCorner::TopRight,
&corner_origin.top_right,
&radii.top_right,
&inner_TR,
&box_TR,
&elbow_TR,
true);
match mode {
BorderPathDrawingMode::EntireBorder => {
path_builder.line_to(edge_BR);
path_builder.line_to(edge_BL);
}
BorderPathDrawingMode::CornersOnly => path_builder.move_to(edge_BL),
}
PaintContext::draw_corner(path_builder,
BorderCorner::TopLeft,
&corner_origin.top_left,
&radii.top_left,
&inner_TL,
&box_TL,
&elbow_TL,
false);
}
Direction::Left => {
let edge_TL = box_TL + dy(radii.top_left.height.max(border.top));
let edge_BL = box_BL + dy(-radii.bottom_left.height.max(border.bottom));
let edge_TR = box_TL + dx(border.left) + dy(border.top + elbow_TL.height);
let edge_BR = box_BL + dx(border.left) + dy(-border.bottom -
elbow_BL.height);
let corner_TL = edge_TL + dy_if(PaintContext::is_zero_radius(&radii.top_left),
-border.top);
let corner_BL = edge_BL + dy_if(PaintContext::is_zero_radius(&radii.bottom_left),
border.bottom);
match mode {
BorderPathDrawingMode::EntireBorder => {
path_builder.move_to(corner_BL);
path_builder.line_to(corner_TL);
}
BorderPathDrawingMode::CornersOnly => path_builder.move_to(corner_TL),
}
PaintContext::draw_corner(path_builder,
BorderCorner::TopLeft,
&corner_origin.top_left,
&radii.top_left,
&inner_TL,
&box_TL,
&elbow_TL,
true);
match mode {
BorderPathDrawingMode::EntireBorder => {
path_builder.line_to(edge_TR);
path_builder.line_to(edge_BR);
}
BorderPathDrawingMode::CornersOnly => path_builder.move_to(edge_BR),
}
PaintContext::draw_corner(path_builder,
BorderCorner::BottomLeft,
&corner_origin.bottom_left,
&radii.bottom_left,
&inner_BL,
&box_BL,
&elbow_BL,
false);
}
Direction::Right => {
let edge_TR = box_TR + dy(radii.top_right.height.max(border.top));
let edge_BR = box_BR + dy(-radii.bottom_right.height.max(border.bottom));
let edge_TL = box_TR + dx(-border.right) + dy(border.top + elbow_TR.height);
let edge_BL = box_BR + dx(-border.right) + dy(-border.bottom -
elbow_BR.height);
let corner_TR = edge_TR + dy_if(PaintContext::is_zero_radius(&radii.top_right),
-border.top);
let corner_BR = edge_BR + dy_if(PaintContext::is_zero_radius(&radii.bottom_right),
border.bottom);
match mode {
BorderPathDrawingMode::EntireBorder => {
path_builder.move_to(edge_BL);
path_builder.line_to(edge_TL);
}
BorderPathDrawingMode::CornersOnly => path_builder.move_to(edge_TL),
}
PaintContext::draw_corner(path_builder,
BorderCorner::TopRight,
&corner_origin.top_right,
&radii.top_right,
&inner_TR,
&box_TR,
&elbow_TR,
false);
match mode {
BorderPathDrawingMode::EntireBorder => {
path_builder.line_to(corner_TR);
path_builder.line_to(corner_BR);
}
BorderPathDrawingMode::CornersOnly => path_builder.move_to(corner_BR),
}
PaintContext::draw_corner(path_builder,
BorderCorner::BottomRight,
&corner_origin.bottom_right,
&radii.bottom_right,
&inner_BR,
&box_BR,
&elbow_BR,
true);
}
Direction::Bottom => {
let edge_BL = box_BL + dx(radii.bottom_left.width.max(border.left));
let edge_BR = box_BR + dx(-radii.bottom_right.width.max(border.right));
let edge_TL = box_BL + dy(-border.bottom) + dx(border.left +
elbow_BL.width);
let edge_TR = box_BR + dy(-border.bottom) + dx(-border.right -
elbow_BR.width);
let corner_BR = edge_BR + dx_if(PaintContext::is_zero_radius(&radii.bottom_right),
border.right);
let corner_BL = edge_BL + dx_if(PaintContext::is_zero_radius(&radii.bottom_left),
-border.left);
match mode {
BorderPathDrawingMode::EntireBorder => {
path_builder.move_to(edge_TL);
path_builder.line_to(edge_TR);
}
BorderPathDrawingMode::CornersOnly => path_builder.move_to(edge_TR),
}
PaintContext::draw_corner(path_builder,
BorderCorner::BottomRight,
&corner_origin.bottom_right,
&radii.bottom_right,
&inner_BR,
&box_BR,
&elbow_BR,
false);
match mode {
BorderPathDrawingMode::EntireBorder => {
path_builder.line_to(corner_BR);
path_builder.line_to(corner_BL);
}
BorderPathDrawingMode::CornersOnly => path_builder.move_to(corner_BL),
}
PaintContext::draw_corner(path_builder,
BorderCorner::BottomLeft,
&corner_origin.bottom_left,
&radii.bottom_left,
&inner_BL,
&box_BL,
&elbow_BL,
true);
}
}
}
/// Creates a path representing the given rounded rectangle.
///
/// TODO(pcwalton): Should we unify with the code above? It doesn't seem immediately obvious
/// how to do that (especially without regressing performance) unless we have some way to
/// efficiently intersect or union paths, since different border styles/colors can force us to
/// slice through the rounded corners. My first attempt to unify with the above code resulted
/// in making a mess of it, and the simplicity of this code path is appealing, so it may not
/// be worth itโฆ In any case, revisit this decision when we support elliptical radii.
#[allow(non_snake_case)]
fn create_rounded_rect_path(&self,
path_builder: &mut PathBuilder,
bounds: &Rect<f32>,
radii: &BorderRadii<AzFloat>) {
// +----------+
// / 1 2 \
// + 8 3 +
// | |
// + 7 4 +
// \ 6 5 /
// +----------+
let border = SideOffsets2D::new(radii.top_left.height.max(radii.top_right.height),
radii.bottom_right.width.max(radii.top_right.width),
radii.bottom_right.height.max(radii.bottom_left.height),
radii.top_left.width.max(radii.bottom_left.width));
// T = top, B = bottom, L = left, R = right
let inner = PaintContext::inner_border_bounds(bounds, &border);
let (outer_TL, inner_TL,
outer_TR, inner_TR,
outer_BR, inner_BR,
outer_BL, inner_BL) = (bounds.origin, inner.origin,
bounds.top_right(), inner.top_right(),
bounds.bottom_right(), inner.bottom_right(),
bounds.bottom_left(), inner.bottom_left());
let (corner_origin, _) =
PaintContext::corner_bounds(bounds, &border, radii);
let (origin_TL, origin_TR, origin_BR, origin_BL) = (corner_origin.top_left,
corner_origin.top_right,
corner_origin.bottom_right,
corner_origin.bottom_left);
let zero_elbow = Size2D::new(0., 0.);
path_builder.move_to(Point2D::new(bounds.origin.x + radii.top_left.width, bounds.origin.y)); // 1
path_builder.line_to(Point2D::new(bounds.max_x() - radii.top_right.width, bounds.origin.y)); // 2
PaintContext::draw_corner(path_builder, // 3
BorderCorner::TopRight,
&origin_TR,
&radii.top_right,
&inner_TR,
&outer_TR,
&zero_elbow,
true);
PaintContext::draw_corner(path_builder, // 3
BorderCorner::TopRight,
&origin_TR,
&radii.top_right,
&inner_TR,
&outer_TR,
&zero_elbow,
false);
path_builder.line_to(Point2D::new(bounds.max_x(), bounds.max_y() - radii.bottom_right.height)); // 4
PaintContext::draw_corner(path_builder, // 5
BorderCorner::BottomRight,
&origin_BR,
&radii.bottom_right,
&inner_BR,
&outer_BR,
&zero_elbow,
true);
PaintContext::draw_corner(path_builder, // 5
BorderCorner::BottomRight,
&origin_BR,
&radii.bottom_right,
&inner_BR,
&outer_BR,
&zero_elbow,
false);
path_builder.line_to(Point2D::new(bounds.origin.x + radii.bottom_left.width,
bounds.max_y())); // 6
PaintContext::draw_corner(path_builder, // 7
BorderCorner::BottomLeft,
&origin_BL,
&radii.bottom_left,
&inner_BL,
&outer_BL,
&zero_elbow,
true);
PaintContext::draw_corner(path_builder, // 7
BorderCorner::BottomLeft,
&origin_BL,
&radii.bottom_left,
&inner_BL,
&outer_BL,
&zero_elbow,
false);
path_builder.line_to(Point2D::new(bounds.origin.x,
bounds.origin.y + radii.top_left.height)); // 8
PaintContext::draw_corner(path_builder, // 9
BorderCorner::TopLeft,
&origin_TL,
&radii.top_left,
&inner_TL,
&outer_TL,
&zero_elbow,
true);
PaintContext::draw_corner(path_builder, // 9
BorderCorner::TopLeft,
&origin_TL,
&radii.top_left,
&inner_TL,
&outer_TL,
&zero_elbow,
false);
}
fn draw_dashed_border_segment(&self,
direction: Direction,
bounds: &Rect<Au>,
border: &SideOffsets2D<f32>,
radius: &BorderRadii<AzFloat>,
color: Color,
dash_size: DashSize) {
let rect = self.to_nearest_azure_rect(bounds);
let draw_opts = DrawOptions::new(1.0, CompositionOp::Over, AntialiasMode::None);
let border_width = match direction {
Direction::Top => border.top,
Direction::Left => border.left,
Direction::Right => border.right,
Direction::Bottom => border.bottom
};
let dash_pattern = [border_width * (dash_size as i32) as AzFloat,
border_width * (dash_size as i32) as AzFloat];
let stroke_opts = StrokeOptions::new(border_width as AzFloat,
JoinStyle::MiterOrBevel,
CapStyle::Butt,
10 as AzFloat,
&dash_pattern);
let (start, end) = match direction {
Direction::Top => {
let y = rect.origin.y + border.top * 0.5;
let start = Point2D::new(rect.origin.x + radius.top_left.width, y);
let end = Point2D::new(rect.origin.x + rect.size.width - radius.top_right.width, y);
(start, end)
}
Direction::Left => {
let x = rect.origin.x + border.left * 0.5;
let start = Point2D::new(x, rect.origin.y + rect.size.height - radius.bottom_left.height);
let end = Point2D::new(x, rect.origin.y + border.top.max(radius.top_left.height));
(start, end)
}
Direction::Right => {
let x = rect.origin.x + rect.size.width - border.right * 0.5;
let start = Point2D::new(x, rect.origin.y + radius.top_right.height);
let end = Point2D::new(x, rect.origin.y + rect.size.height - radius.bottom_right.height);
(start, end)
}
Direction::Bottom => {
let y = rect.origin.y + rect.size.height - border.bottom * 0.5;
let start = Point2D::new(rect.origin.x + rect.size.width - radius.bottom_right.width, y);
let end = Point2D::new(rect.origin.x + border.left.max(radius.bottom_left.width), y);
(start, end)
}
};
self.draw_target.stroke_line(start,
end,
PatternRef::Color(&ColorPattern::new(color)),
&stroke_opts,
&draw_opts);
if radii_apply_to_border_direction(direction, radius) {
let mut path_builder = self.draw_target.create_path_builder();
self.create_border_path_segment(&mut path_builder,
&rect,
direction,
border,
radius,
BorderPathDrawingMode::CornersOnly);
self.draw_target.fill(&path_builder.finish(),
Pattern::Color(ColorPattern::new(color)).to_pattern_ref(),
&draw_opts);
}
}
fn draw_solid_border_segment(&self,
direction: Direction,
bounds: &Rect<Au>,
border: &SideOffsets2D<f32>,
radius: &BorderRadii<AzFloat>,
color: Color) {
let rect = self.to_nearest_azure_rect(bounds);
self.draw_border_path(&rect, direction, border, radius, color);
}
fn compute_scaled_bounds(&self,
bounds: &Rect<Au>,
border: &SideOffsets2D<f32>,
shrink_factor: f32) -> Rect<f32> {
let rect = self.to_nearest_azure_rect(bounds);
let scaled_border = SideOffsets2D::new(shrink_factor * border.top,
shrink_factor * border.right,
shrink_factor * border.bottom,
shrink_factor * border.left);
let left_top = Point2D::new(rect.origin.x, rect.origin.y);
let scaled_left_top = left_top + Point2D::new(scaled_border.left,
scaled_border.top);
Rect::new(scaled_left_top,
Size2D::new(rect.size.width - (scaled_border.left + scaled_border.right),
rect.size.height - (scaled_border.top + scaled_border.bottom)))
}
fn scale_color(&self, color: Color, scale_factor: f32) -> Color {
color::new(color.r * scale_factor,
color.g * scale_factor,
color.b * scale_factor,
color.a)
}
fn draw_double_border_segment(&self,
direction: Direction,
bounds: &Rect<Au>,
border: &SideOffsets2D<f32>,
radius: &BorderRadii<AzFloat>,
color: Color) {
let scaled_border = SideOffsets2D::new((1.0 / 3.0) * border.top,
(1.0 / 3.0) * border.right,
(1.0 / 3.0) * border.bottom,
(1.0 / 3.0) * border.left);
let inner_scaled_bounds = self.compute_scaled_bounds(bounds, border, 2.0 / 3.0);
// draw the outer portion of the double border.
self.draw_solid_border_segment(direction, bounds, &scaled_border, radius, color);
// draw the inner portion of the double border.
self.draw_border_path(&inner_scaled_bounds, direction, &scaled_border, radius, color);
}
fn draw_groove_ridge_border_segment(&self,
direction: Direction,
bounds: &Rect<Au>,
border: &SideOffsets2D<f32>,
radius: &BorderRadii<AzFloat>,
color: Color,
style: border_style::T) {
// original bounds as a Rect<f32>, with no scaling.
let original_bounds = self.compute_scaled_bounds(bounds, border, 0.0);
// shrink the bounds by 1/2 of the border, leaving the innermost 1/2 of the border
let inner_scaled_bounds = self.compute_scaled_bounds(bounds, border, 0.5);
let scaled_border = SideOffsets2D::new(0.5 * border.top,
0.5 * border.right,
0.5 * border.bottom,
0.5 * border.left);
let is_groove = match style {
border_style::T::groove => true,
border_style::T::ridge => false,
_ => panic!("invalid border style")
};
let lighter_color;
let mut darker_color = color::black();
if color != darker_color {
darker_color = self.scale_color(color, if is_groove { 1.0 / 3.0 } else { 2.0 / 3.0 });
lighter_color = color;
} else {
// Black can't be scaled ('0 * scale' is still black), so fall back to fixed grays.
darker_color = color::new(0.3, 0.3, 0.3, color.a);
lighter_color = color::new(0.7, 0.7, 0.7, color.a);
}
let (outer_color, inner_color) = match (direction, is_groove) {
(Direction::Top, true) | (Direction::Left, true) |
(Direction::Right, false) | (Direction::Bottom, false) => {
(darker_color, lighter_color)
}
(Direction::Top, false) | (Direction::Left, false) |
(Direction::Right, true) | (Direction::Bottom, true) => (lighter_color, darker_color),
};
// outer portion of the border
self.draw_border_path(&original_bounds, direction, &scaled_border, radius, outer_color);
// inner portion of the border
self.draw_border_path(&inner_scaled_bounds,
direction,
&scaled_border,
radius,
inner_color);
}
fn draw_inset_outset_border_segment(&self,
direction: Direction,
bounds: &Rect<Au>,
border: &SideOffsets2D<f32>,
radius: &BorderRadii<AzFloat>,
color: Color,
style: border_style::T) {
let is_inset = match style {
border_style::T::inset => true,
border_style::T::outset => false,
_ => panic!("invalid border style")
};
// original bounds as a Rect<f32>
let original_bounds = self.compute_scaled_bounds(bounds, border, 0.0);
// Black can't be scaled ('0 * scale' is still black), so fall back to fixed grays.
let mut scaled_color = color::black();
if color != scaled_color {
scaled_color = match direction {
Direction::Top | Direction::Left => {
self.scale_color(color, if is_inset { 2.0 / 3.0 } else { 1.0 })
}
Direction::Right | Direction::Bottom => {
self.scale_color(color, if is_inset { 1.0 } else { 2.0 / 3.0 })
}
};
} else {
scaled_color = match direction {
Direction::Top | Direction::Left => {
if is_inset {
color::new(0.3, 0.3, 0.3, color.a)
} else {
color::new(0.7, 0.7, 0.7, color.a)
}
}
Direction::Right | Direction::Bottom => {
if is_inset {
color::new(0.7, 0.7, 0.7, color.a)
} else {
color::new(0.3, 0.3, 0.3, color.a)
}
}
};
}
self.draw_border_path(&original_bounds, direction, border, radius, scaled_color);
}
/// Draws the given text display item into the current context.
pub fn draw_text(&mut self, text: &TextDisplayItem) {
let draw_target_transform = self.draw_target.get_transform();
let origin = text.baseline_origin + self.subpixel_offset;
// Optimization: Donโt set a transform matrix for upright text, and pass a start point to
// `draw_text_into_context`.
//
// For sideways text, itโs easier to do the rotation such that its center (the baselineโs
// start point) is at (0, 0) coordinates.
let baseline_origin = match text.orientation {
Upright => origin,
SidewaysLeft => {
let x = origin.x.to_f32_px();
let y = origin.y.to_f32_px();
self.draw_target.set_transform(&draw_target_transform.mul(&Matrix2D::new(0., -1.,
1., 0.,
x, y)));
Point2D::zero()
}
SidewaysRight => {
let x = origin.x.to_f32_px();
let y = origin.y.to_f32_px();
self.draw_target.set_transform(&draw_target_transform.mul(&Matrix2D::new(0., 1.,
-1., 0.,
x, y)));
Point2D::zero()
}
};
// Draw the text.
let temporary_draw_target =
self.create_draw_target_for_blur_if_necessary(&text.base.bounds, text.blur_radius);
{
// FIXME(https://github.com/rust-lang/rust/issues/23338)
let font = self.font_context.paint_font_from_template(
&text.text_run.font_template, text.text_run.actual_pt_size);
font.borrow()
.draw_text(&temporary_draw_target.draw_target,
&*text.text_run,
&text.range,
baseline_origin,
text.text_color,
opts::get().enable_text_antialiasing);
}
// Blur, if necessary.
self.blur_if_necessary(temporary_draw_target, text.blur_radius);
self.draw_target.set_transform(&draw_target_transform)
}
/// Draws a linear gradient in the given boundaries from the given start point to the given end
/// point with the given stops.
pub fn draw_linear_gradient(&self,
bounds: &Rect<Au>,
start_point: &Point2D<Au>,
end_point: &Point2D<Au>,
stops: &[GradientStop]) {
self.draw_target.make_current();
let stops = self.draw_target.create_gradient_stops(stops, ExtendMode::Clamp);
let scale = self.screen_pixels_per_px();
let pattern = LinearGradientPattern::new(&start_point.to_nearest_azure_point(scale),
&end_point.to_nearest_azure_point(scale),
stops,
&Matrix2D::identity());
self.draw_target.fill_rect(&self.to_nearest_azure_rect(&bounds),
PatternRef::LinearGradient(&pattern),
None);
}
pub fn get_or_create_temporary_draw_target(&mut self,
filters: &filter::T,
blend_mode: mix_blend_mode::T)
-> DrawTarget {
// Determine if we need a temporary draw target.
if !filters::temporary_draw_target_needed_for_style_filters(filters) &&
blend_mode == mix_blend_mode::T::normal {
// Reuse the draw target, but remove the transient clip. If we don't do the latter,
// we'll be in a state whereby the paint subcontext thinks it has no transient clip
// (see `StackingContext::optimize_and_draw_into_context`) but it actually does,
// resulting in a situation whereby display items are seemingly randomly clipped out.
self.remove_transient_clip_if_applicable();
return self.draw_target.clone()
}
// FIXME(pcwalton): This surface might be bigger than necessary and waste memory.
let size: AzIntSize = self.draw_target.get_size();
let mut size = Size2D::new(size.width, size.height);
// Pre-calculate if there is a blur expansion need.
let accum_blur = filters::calculate_accumulated_blur(filters);
let mut matrix = self.draw_target.get_transform();
if accum_blur > Au(0) {
// Set the correct size.
let side_inflation = accum_blur * BLUR_INFLATION_FACTOR;
size = Size2D::new(size.width + (side_inflation.to_nearest_px() * 2) as i32,
size.height + (side_inflation.to_nearest_px() * 2) as i32);
// Calculate the transform matrix.
let old_transform = self.draw_target.get_transform();
let inflated_size = Rect::new(Point2D::new(0.0, 0.0),
Size2D::new(size.width as AzFloat,
size.height as AzFloat));
let temporary_draw_target_bounds = old_transform.transform_rect(&inflated_size);
matrix = Matrix2D::identity().translate(
-temporary_draw_target_bounds.origin.x as AzFloat,
-temporary_draw_target_bounds.origin.y as AzFloat).mul(&old_transform);
}
let temporary_draw_target =
self.draw_target.create_similar_draw_target(&size, self.draw_target.get_format());
temporary_draw_target.set_transform(&matrix);
temporary_draw_target
}
/// If we created a temporary draw target, then draw it to the main draw target. This is called
/// after doing all the painting, and the temporary draw target must not be used afterward.
pub fn draw_temporary_draw_target_if_necessary(&mut self,
temporary_draw_target: &DrawTarget,
filters: &filter::T,
blend_mode: mix_blend_mode::T) {
if (*temporary_draw_target) == self.draw_target {
// We're directly painting to the surface; nothing to do.
return
}
// Set up transforms.
let old_transform = self.draw_target.get_transform();
self.draw_target.set_transform(&Matrix2D::identity());
let rect = Rect::new(Point2D::new(0.0, 0.0), self.draw_target.get_size().to_azure_size());
let rect_temporary = Rect::new(Point2D::new(0.0, 0.0),
temporary_draw_target.get_size().to_azure_size());
// Create the Azure filter pipeline.
let mut accum_blur = Au(0);
let (filter_node, opacity) = filters::create_filters(&self.draw_target,
temporary_draw_target,
filters,
&mut accum_blur);
// Perform the blit operation.
let mut draw_options = DrawOptions::new(opacity, CompositionOp::Over, AntialiasMode::None);
draw_options.set_composition_op(blend_mode.to_azure_composition_op());
// If there is a blur expansion, shift the transform and update the size.
if accum_blur > Au(0) {
// Remove both the transient clip and the stacking context clip, because we may need to
// draw outside the stacking context's clip.
self.remove_transient_clip_if_applicable();
self.pop_clip_if_applicable();
debug!("######### use expanded Rect.");
self.draw_target.draw_filter(&filter_node,
&rect_temporary,
&rect_temporary.origin,
draw_options);
self.push_clip_if_applicable();
} else {
debug!("######### use regular Rect.");
self.draw_target.draw_filter(&filter_node, &rect, &rect.origin, draw_options);
}
self.draw_target.set_transform(&old_transform);
}
/// Draws a box shadow with the given boundaries, color, offset, blur radius, and spread
/// radius. `box_bounds` represents the boundaries of the box.
pub fn draw_box_shadow(&mut self,
box_bounds: &Rect<Au>,
offset: &Point2D<Au>,
color: Color,
blur_radius: Au,
spread_radius: Au,
clip_mode: BoxShadowClipMode) {
// Remove both the transient clip and the stacking context clip, because we may need to
// draw outside the stacking context's clip.
self.remove_transient_clip_if_applicable();
self.pop_clip_if_applicable();
// If we have blur, create a new draw target.
let pixels_per_px = self.screen_pixels_per_px();
let shadow_bounds = box_bounds.translate(offset).inflate(spread_radius, spread_radius);
let side_inflation = blur_radius * BLUR_INFLATION_FACTOR;
let inflated_shadow_bounds = shadow_bounds.inflate(side_inflation, side_inflation);
let temporary_draw_target =
self.create_draw_target_for_blur_if_necessary(&inflated_shadow_bounds, blur_radius);
let path;
match clip_mode {
BoxShadowClipMode::Inset => {
path = temporary_draw_target.draw_target
.create_rectangular_border_path(&max_rect(),
&shadow_bounds,
pixels_per_px);
self.draw_target.push_clip(
&self.draw_target.create_rectangular_path(box_bounds, pixels_per_px))
}
BoxShadowClipMode::Outset => {
path = temporary_draw_target.draw_target.create_rectangular_path(&shadow_bounds,
pixels_per_px);
self.draw_target.push_clip(
&self.draw_target.create_rectangular_border_path(&max_rect(), box_bounds,
pixels_per_px))
}
BoxShadowClipMode::None => {
path = temporary_draw_target.draw_target.create_rectangular_path(&shadow_bounds,
pixels_per_px)
}
}
// Draw the shadow, and blur if we need to.
temporary_draw_target.draw_target.fill(
&path,
Pattern::Color(ColorPattern::new(color)).to_pattern_ref(),
&DrawOptions::new(1.0, CompositionOp::Over, AntialiasMode::None));
self.blur_if_necessary(temporary_draw_target, blur_radius);
// Undo the draw target's clip if we need to, and push back the stacking context clip.
if clip_mode != BoxShadowClipMode::None {
self.draw_target.pop_clip()
}
self.push_clip_if_applicable();
}
/// If we have blur, create a new draw target that's the same size as this tile, but with
/// enough space around the edges to hold the entire blur. (If we don't do the latter, then
/// there will be seams between tiles.)
fn create_draw_target_for_blur_if_necessary(&self, box_bounds: &Rect<Au>, blur_radius: Au)
-> TemporaryDrawTarget {
if blur_radius == Au(0) {
return TemporaryDrawTarget::from_main_draw_target(&self.draw_target)
}
// Intersect display item bounds with the tile bounds inflated by blur radius to get the
// smallest possible rectangle that encompasses all the paint.
let side_inflation = blur_radius * BLUR_INFLATION_FACTOR;
let tile_box_bounds =
geometry::f32_rect_to_au_rect(self.page_rect.to_untyped()).intersection(box_bounds)
.unwrap_or(Rect::zero())
.inflate(side_inflation, side_inflation);
TemporaryDrawTarget::from_bounds(&self.draw_target, &tile_box_bounds)
}
/// Performs a blur using the draw target created in
/// `create_draw_target_for_blur_if_necessary`.
fn blur_if_necessary(&self, temporary_draw_target: TemporaryDrawTarget, blur_radius: Au) {
if blur_radius == Au(0) {
return
}
let blur_filter = self.draw_target.create_filter(FilterType::GaussianBlur);
blur_filter.set_attribute(GaussianBlurAttribute::StdDeviation(blur_radius.to_f64_px() as
AzFloat));
blur_filter.set_input(GaussianBlurInput, &temporary_draw_target.draw_target.snapshot());
temporary_draw_target.draw_filter(&self.draw_target, blur_filter);
}
pub fn push_clip_if_applicable(&self) {
if let Some(ref clip_rect) = self.clip_rect {
self.draw_push_clip(clip_rect)
}
}
pub fn pop_clip_if_applicable(&self) {
if self.clip_rect.is_some() {
self.draw_pop_clip()
}
}
pub fn remove_transient_clip_if_applicable(&mut self) {
if let Some(old_transient_clip) = mem::replace(&mut self.transient_clip, None) {
for _ in &old_transient_clip.complex {
self.draw_pop_clip()
}
self.draw_pop_clip()
}
}
/// Sets a new transient clipping region. Automatically calls
/// `remove_transient_clip_if_applicable()` first.
pub fn push_transient_clip(&mut self, clip_region: ClippingRegion) {
let scale = self.screen_pixels_per_px();
self.remove_transient_clip_if_applicable();
self.draw_push_clip(&clip_region.main);
for complex_region in &clip_region.complex {
// FIXME(pcwalton): Actually draw a rounded rect.
self.push_rounded_rect_clip(&self.to_nearest_azure_rect(&complex_region.rect),
&complex_region.radii.to_radii_pixels(scale))
}
self.transient_clip = Some(clip_region)
}
}
pub trait ToAzurePoint {
fn to_nearest_azure_point(&self, pixels_per_px: ScaleFactor<f32, PagePx, ScreenPx>) -> Point2D<AzFloat>;
fn to_azure_point(&self) -> Point2D<AzFloat>;
}
impl ToAzurePoint for Point2D<Au> {
fn to_nearest_azure_point(&self, pixels_per_px: ScaleFactor<f32, PagePx, ScreenPx>) -> Point2D<AzFloat> {
Point2D::new(self.x.to_nearest_pixel(pixels_per_px.get()) as AzFloat,
self.y.to_nearest_pixel(pixels_per_px.get()) as AzFloat)
}
fn to_azure_point(&self) -> Point2D<AzFloat> {
Point2D::new(self.x.to_f32_px(), self.y.to_f32_px())
}
}
pub trait ToAzureRect {
fn to_nearest_azure_rect(&self, pixels_per_px: ScaleFactor<f32, PagePx, ScreenPx>) -> Rect<AzFloat>;
fn to_nearest_non_empty_azure_rect(&self, pixels_per_px: ScaleFactor<f32, PagePx, ScreenPx>) -> Rect<AzFloat>;
fn to_azure_rect(&self) -> Rect<AzFloat>;
}
impl ToAzureRect for Rect<Au> {
/// Round rects to pixel coordinates, maintaining the invariant of non-overlap,
/// assuming that before rounding rects don't overlap.
fn to_nearest_azure_rect(&self, pixels_per_px: ScaleFactor<f32, PagePx, ScreenPx>) -> Rect<AzFloat> {
// Rounding the top left corner to the nearest pixel with the size rounded
// to the nearest pixel multiple would violate the non-overlap condition,
// e.g.
// 10pxร9.60px at (0px,6.6px) & 10pxร9.60px at (0px,16.2px)
// would round to
// 10pxร10.0px at (0px,7.0px) & 10pxร10.0px at (0px,16.0px), which overlap.
//
// Instead round each corner to the nearest pixel.
let top_left = self.origin.to_nearest_azure_point(pixels_per_px);
let bottom_right = self.bottom_right().to_nearest_azure_point(pixels_per_px);
Rect::new(top_left, Size2D::new((bottom_right.x - top_left.x) as AzFloat,
(bottom_right.y - top_left.y) as AzFloat))
}
/// For rects of width or height between 0.5px and 1px, rounding each rect corner to the
/// nearest pixel can yield an empty rect e.g.
/// 10pxร0.6px at 0px,28.56px -> 10pxร0px at 0px,29px
/// Instead round the top left to the nearest pixel and the size to the nearest pixel
/// multiple. Note that rects which were non-overlapping before this rounding may
/// overlap afterwards.
fn to_nearest_non_empty_azure_rect(&self, pixels_per_px: ScaleFactor<f32, PagePx, ScreenPx>) -> Rect<AzFloat> {
Rect::new(self.origin.to_nearest_azure_point(pixels_per_px),
self.size.to_nearest_azure_size(pixels_per_px))
}
fn to_azure_rect(&self) -> Rect<AzFloat> {
Rect::new(self.origin.to_azure_point(), self.size.to_azure_size())
}
}
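// A small numeric illustration of the rounding argument above, added for
// exposition; the values mirror the doc comment's example rather than any
// real layout data.
#[cfg(test)]
mod rounding_invariant_demo {
#[test]
fn corner_rounding_keeps_stacked_rects_adjacent() {
// Two stacked 9.6px-tall rects with tops at y = 6.6 and y = 16.2.
let bottom_of_first = (6.6f32 + 9.6).round();
let top_of_second = 16.2f32.round();
assert_eq!(bottom_of_first, top_of_second); // both 16.0: adjacent, no overlap
}
}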
pub trait ToNearestAzureSize {
fn to_nearest_azure_size(&self, pixels_per_px: ScaleFactor<f32, PagePx, ScreenPx>) -> Size2D<AzFloat>;
}
impl ToNearestAzureSize for Size2D<Au> {
fn to_nearest_azure_size(&self, pixels_per_px: ScaleFactor<f32, PagePx, ScreenPx>) -> Size2D<AzFloat> {
Size2D::new(self.width.to_nearest_pixel(pixels_per_px.get()) as AzFloat,
self.height.to_nearest_pixel(pixels_per_px.get()) as AzFloat)
}
}
pub trait ToAzureSize {
fn to_azure_size(&self) -> Size2D<AzFloat>;
}
impl ToAzureSize for Size2D<Au> {
fn to_azure_size(&self) -> Size2D<AzFloat> {
Size2D::new(self.width.to_f32_px(), self.height.to_f32_px())
}
}
impl ToAzureSize for AzIntSize {
fn to_azure_size(&self) -> Size2D<AzFloat> {
Size2D::new(self.width as AzFloat, self.height as AzFloat)
}
}
trait ToAzureIntSize {
fn to_azure_int_size(&self) -> Size2D<i32>;
}
impl ToAzureIntSize for Size2D<AzFloat> {
fn to_azure_int_size(&self) -> Size2D<i32> {
Size2D::new(self.width as i32, self.height as i32)
}
}
trait ToSideOffsetsPixels {
fn to_float_pixels(&self, pixels_per_px: ScaleFactor<f32, PagePx, ScreenPx>) -> SideOffsets2D<AzFloat>;
}
impl ToSideOffsetsPixels for SideOffsets2D<Au> {
fn to_float_pixels(&self, pixels_per_px: ScaleFactor<f32, PagePx, ScreenPx>) -> SideOffsets2D<AzFloat> {
SideOffsets2D::new(self.top.to_nearest_pixel(pixels_per_px.get()) as AzFloat,
self.right.to_nearest_pixel(pixels_per_px.get()) as AzFloat,
self.bottom.to_nearest_pixel(pixels_per_px.get()) as AzFloat,
self.left.to_nearest_pixel(pixels_per_px.get()) as AzFloat)
}
}
trait ToRadiiPixels {
fn to_radii_pixels(&self, pixels_per_px: ScaleFactor<f32, PagePx, ScreenPx>) -> BorderRadii<AzFloat>;
}
impl ToRadiiPixels for BorderRadii<Au> {
fn to_radii_pixels(&self, pixels_per_px: ScaleFactor<f32, PagePx, ScreenPx>) -> BorderRadii<AzFloat> {
let to_nearest_px = |x: Au| -> AzFloat {
x.to_nearest_pixel(pixels_per_px.get()) as AzFloat
};
BorderRadii {
top_left: Size2D::new(to_nearest_px(self.top_left.width),
to_nearest_px(self.top_left.height)),
top_right: Size2D::new(to_nearest_px(self.top_right.width),
to_nearest_px(self.top_right.height)),
bottom_left: Size2D::new(to_nearest_px(self.bottom_left.width),
to_nearest_px(self.bottom_left.height)),
bottom_right: Size2D::new(to_nearest_px(self.bottom_right.width),
to_nearest_px(self.bottom_right.height)),
}
}
}
trait ScaledFontExtensionMethods {
fn draw_text(&self,
draw_target: &DrawTarget,
run: &TextRun,
range: &Range<ByteIndex>,
baseline_origin: Point2D<Au>,
color: Color,
antialias: bool);
}
impl ScaledFontExtensionMethods for ScaledFont {
#[allow(unsafe_code)]
fn draw_text(&self,
draw_target: &DrawTarget,
run: &TextRun,
range: &Range<ByteIndex>,
baseline_origin: Point2D<Au>,
color: Color,
antialias: bool) {
let pattern = ColorPattern::new(color);
let azure_pattern = pattern.azure_color_pattern;
assert!(!azure_pattern.is_null());
let mut options = struct__AzDrawOptions {
mAlpha: 1f64 as AzFloat,
mCompositionOp: CompositionOp::Over as u8,
mAntialiasMode: if antialias {
AntialiasMode::Subpixel as u8
} else {
AntialiasMode::None as u8
}
};
let mut origin = baseline_origin.clone();
let mut azglyphs = Vec::with_capacity(range.length().to_usize());
for slice in run.natural_word_slices_in_visual_order(range) {
for glyph in slice.glyphs.iter_glyphs_for_byte_range(&slice.range) {
let glyph_advance = if glyph.char_is_space() {
glyph.advance() + run.extra_word_spacing
} else {
glyph.advance()
};
if !slice.glyphs.is_whitespace() {
let glyph_offset = glyph.offset().unwrap_or(Point2D::zero());
let azglyph = struct__AzGlyph {
mIndex: glyph.id() as u32,
mPosition: struct__AzPoint {
x: (origin.x + glyph_offset.x).to_f32_px(),
y: (origin.y + glyph_offset.y).to_f32_px(),
}
};
azglyphs.push(azglyph)
}<๏ฝfimโhole๏ฝ>
let azglyph_buf_len = azglyphs.len();
if azglyph_buf_len == 0 { return; } // Otherwise the Quartz backend will assert.
let mut glyphbuf = struct__AzGlyphBuffer {
mGlyphs: azglyphs.as_mut_ptr(),
mNumGlyphs: azglyph_buf_len as u32
};
unsafe {
// TODO(Issue #64): this call needs to move into azure_hl.rs
AzDrawTargetFillGlyphs(draw_target.azure_draw_target,
self.get_ref(),
&mut glyphbuf,
azure_pattern,
&mut options,
ptr::null_mut());
}
}
}
trait DrawTargetExtensions {
/// Creates and returns a path that represents a rectangular border. Like this:
///
/// ```text
/// +--------------------------------+
/// |################################|
/// |#######+---------------------+##|
/// |#######| |##|
/// |#######+---------------------+##|
/// |################################|
/// +--------------------------------+
/// ```
fn create_rectangular_border_path(&self,
outer_rect: &Rect<Au>,
inner_rect: &Rect<Au>,
pixels_per_px: ScaleFactor<f32, PagePx, ScreenPx>) -> Path;
/// Creates and returns a path that represents a rectangle.
fn create_rectangular_path(&self, rect: &Rect<Au>, pixels_per_px: ScaleFactor<f32, PagePx, ScreenPx>) -> Path;
}
impl DrawTargetExtensions for DrawTarget {
fn create_rectangular_border_path(&self,
outer_rect: &Rect<Au>,
inner_rect: &Rect<Au>,
pixels_per_px: ScaleFactor<f32, PagePx, ScreenPx>) -> Path {
// +-----------+
// |2 |1
// | |
// | +---+---+
// | |9 |6 |5, 10
// | | | |
// | +---+ |
// | 8 7 |
// | |
// +-----------+
// 3 4
let outer_rect = outer_rect.to_nearest_azure_rect(pixels_per_px);
let inner_rect = inner_rect.to_nearest_azure_rect(pixels_per_px);
let path_builder = self.create_path_builder();
path_builder.move_to(Point2D::new(outer_rect.max_x(), outer_rect.origin.y)); // 1
path_builder.line_to(Point2D::new(outer_rect.origin.x, outer_rect.origin.y)); // 2
path_builder.line_to(Point2D::new(outer_rect.origin.x, outer_rect.max_y())); // 3
path_builder.line_to(Point2D::new(outer_rect.max_x(), outer_rect.max_y())); // 4
path_builder.line_to(Point2D::new(outer_rect.max_x(), inner_rect.origin.y)); // 5
path_builder.line_to(Point2D::new(inner_rect.max_x(), inner_rect.origin.y)); // 6
path_builder.line_to(Point2D::new(inner_rect.max_x(), inner_rect.max_y())); // 7
path_builder.line_to(Point2D::new(inner_rect.origin.x, inner_rect.max_y())); // 8
path_builder.line_to(inner_rect.origin); // 9
path_builder.line_to(Point2D::new(outer_rect.max_x(), inner_rect.origin.y)); // 10
path_builder.finish()
}
fn create_rectangular_path(&self, rect: &Rect<Au>, pixels_per_px: ScaleFactor<f32, PagePx, ScreenPx>) -> Path {
// Explicitly round to the nearest non-empty rect because when drawing
// box-shadow the rect height can be between 0.5px & 1px and could
// otherwise round to an empty rect.
let rect = rect.to_nearest_non_empty_azure_rect(pixels_per_px);
let path_builder = self.create_path_builder();
path_builder.move_to(rect.origin);
path_builder.line_to(Point2D::new(rect.max_x(), rect.origin.y));
path_builder.line_to(Point2D::new(rect.max_x(), rect.max_y()));
path_builder.line_to(Point2D::new(rect.origin.x, rect.max_y()));
path_builder.finish()
}
}
/// Converts a CSS blend mode (per CSS-COMPOSITING) to an Azure `CompositionOp`.
trait ToAzureCompositionOp {
/// Converts a CSS blend mode (per CSS-COMPOSITING) to an Azure `CompositionOp`.
fn to_azure_composition_op(&self) -> CompositionOp;
}
impl ToAzureCompositionOp for mix_blend_mode::T {
fn to_azure_composition_op(&self) -> CompositionOp {
match *self {
mix_blend_mode::T::normal => CompositionOp::Over,
mix_blend_mode::T::multiply => CompositionOp::Multiply,
mix_blend_mode::T::screen => CompositionOp::Screen,
mix_blend_mode::T::overlay => CompositionOp::Overlay,
mix_blend_mode::T::darken => CompositionOp::Darken,
mix_blend_mode::T::lighten => CompositionOp::Lighten,
mix_blend_mode::T::color_dodge => CompositionOp::ColorDodge,
mix_blend_mode::T::color_burn => CompositionOp::ColorBurn,
mix_blend_mode::T::hard_light => CompositionOp::HardLight,
mix_blend_mode::T::soft_light => CompositionOp::SoftLight,
mix_blend_mode::T::difference => CompositionOp::Difference,
mix_blend_mode::T::exclusion => CompositionOp::Exclusion,
mix_blend_mode::T::hue => CompositionOp::Hue,
mix_blend_mode::T::saturation => CompositionOp::Saturation,
mix_blend_mode::T::color => CompositionOp::Color,
mix_blend_mode::T::luminosity => CompositionOp::Luminosity,
}
}
}
/// Represents a temporary drawing surface. Some operations that perform complex compositing
/// operations need this.
struct TemporaryDrawTarget {
/// The draw target.
draw_target: DrawTarget,
/// The distance from the top left of the main draw target to the top left of this temporary
/// draw target.
offset: Point2D<AzFloat>,
}
impl TemporaryDrawTarget {
/// Creates a temporary draw target that simply draws to the main draw target.
fn from_main_draw_target(main_draw_target: &DrawTarget) -> TemporaryDrawTarget {
TemporaryDrawTarget {
draw_target: main_draw_target.clone(),
offset: Point2D::new(0.0, 0.0),
}
}
/// Creates a temporary draw target large enough to encompass the given bounding rect in page
/// coordinates. The temporary draw target will have the same transform as the tile we're
/// drawing to.
fn from_bounds(main_draw_target: &DrawTarget, bounds: &Rect<Au>) -> TemporaryDrawTarget {
let draw_target_transform = main_draw_target.get_transform();
let temporary_draw_target_bounds =
draw_target_transform.transform_rect(&bounds.to_azure_rect());
let temporary_draw_target_size =
Size2D::new(temporary_draw_target_bounds.size.width.ceil() as i32,
temporary_draw_target_bounds.size.height.ceil() as i32);
let temporary_draw_target =
main_draw_target.create_similar_draw_target(&temporary_draw_target_size,
main_draw_target.get_format());
let matrix =
Matrix2D::identity().translate(-temporary_draw_target_bounds.origin.x as AzFloat,
-temporary_draw_target_bounds.origin.y as AzFloat)
.mul(&draw_target_transform);
temporary_draw_target.set_transform(&matrix);
TemporaryDrawTarget {
draw_target: temporary_draw_target,
offset: temporary_draw_target_bounds.origin,
}
}
/// Composites this temporary draw target onto the main surface, with the given Azure filter.
fn draw_filter(self, main_draw_target: &DrawTarget, filter: FilterNode) {
let main_draw_target_transform = main_draw_target.get_transform();
let temporary_draw_target_size = self.draw_target.get_size();
let temporary_draw_target_size = Size2D::new(temporary_draw_target_size.width as AzFloat,
temporary_draw_target_size.height as AzFloat);
// Blit the blur onto the tile. We undo the transforms here because we want to directly
// stack the temporary draw target onto the tile.
main_draw_target.set_transform(&Matrix2D::identity());
main_draw_target.draw_filter(&filter,
&Rect::new(Point2D::new(0.0, 0.0), temporary_draw_target_size),
&self.offset,
DrawOptions::new(1.0, CompositionOp::Over, AntialiasMode::None));
main_draw_target.set_transform(&main_draw_target_transform);
}
}
#[derive(Copy, Clone, PartialEq)]
enum BorderPathDrawingMode {
EntireBorder,
CornersOnly,
}
fn radii_apply_to_border_direction(direction: Direction, radius: &BorderRadii<AzFloat>) -> bool {
match (direction,
radius.top_left.width,
radius.top_right.width,
radius.bottom_left.width,
radius.bottom_right.width) {
(Direction::Top, a, b, _, _) |
(Direction::Right, _, a, _, b) |
(Direction::Bottom, _, _, a, b) |
(Direction::Left, a, _, b, _) => a != 0.0 || b != 0.0,
}
}<๏ฝfimโend๏ฝ> | origin.x = origin.x + glyph_advance;
};
} |
<|file_name|>cuki.js<|end_file_name|><๏ฝfimโbegin๏ฝ><๏ฝfimโhole๏ฝ> result = 0,
i = 0;
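// Walk the digits: the first value seeds `current`; even values are added
// (advancing two positions), odd values are multiplied in (advancing one),
// and the running result is reduced mod 1024 after every operation.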
while (true) {
if (i === 0) {
current = numbers[i];
if (numbers.length === 1) {
result = current;
break;
}
i++;
}
if (numbers[i] % 2 === 0) {
current += numbers[i];
current = current % module;
i += 2;
if (i > numbers.length - 1) {
result = current;
break;
}
} else { // numbers[i] is odd
current *= numbers[i];
current = current % module;
i++;
if (i > numbers.length - 1) {
result = current;
break;
}
}
}
console.log(result);
}
// tests
solve([
'10',
'1',
'2',
'3',
'4',
'5',
'6',
'7',
'8',
'9',
'0'
]);
console.log('------------');
solve([
'9',
'9',
'9',
'9',
'9',
'9',
'9',
'9',
'9',
'9'
]);
solve(['2', '2', '2', '2', '2', '2', '2', '2', '2']);<๏ฝfimโend๏ฝ> | function solve(args) {
const module = 1024;
let numbers = args.slice(1).map(Number),
current, |
<|file_name|>urls.py<|end_file_name|><๏ฝfimโbegin๏ฝ>"""template_III URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:<๏ฝfimโhole๏ฝ>Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from app import views as app_views # new
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^$', app_views.home, name='home'), # new
url(r'^add/(\d+)/(\d+)/$', app_views.add, name='add'), # new
# if the route above is replaced with the 'sum' route below, the app still works fine
# url(r'^sum/(\d+)/(\d+)/$', app_views.add, name='add'), # new
]<๏ฝfimโend๏ฝ> |  https://docs.djangoproject.com/en/1.10/topics/http/urls/
<|file_name|>LibraryRuntimeClasspathScope.java<|end_file_name|><๏ฝfimโbegin๏ฝ>/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.module.impl.scopes;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.roots.*;
import com.intellij.openapi.roots.libraries.Library;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.util.containers.ContainerUtil;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import java.util.*;
/**
* @author max
*/
public class LibraryRuntimeClasspathScope extends GlobalSearchScope {
private final ProjectFileIndex myIndex;
private final LinkedHashSet<VirtualFile> myEntries = new LinkedHashSet<VirtualFile>();
private int myCachedHashCode = 0;
public LibraryRuntimeClasspathScope(final Project project, final List<Module> modules) {
super(project);
myIndex = ProjectRootManager.getInstance(project).getFileIndex();
final Set<Sdk> processedSdk = new THashSet<Sdk>();
final Set<Library> processedLibraries = new THashSet<Library>();<๏ฝfimโhole๏ฝ> final Condition<OrderEntry> condition = new Condition<OrderEntry>() {
@Override
public boolean value(OrderEntry orderEntry) {
if (orderEntry instanceof ModuleOrderEntry) {
final Module module = ((ModuleOrderEntry)orderEntry).getModule();
return module != null && !processedModules.contains(module);
}
return true;
}
};
for (Module module : modules) {
buildEntries(module, processedModules, processedLibraries, processedSdk, condition);
}
}
public LibraryRuntimeClasspathScope(Project project, LibraryOrderEntry entry) {
super(project);
myIndex = ProjectRootManager.getInstance(project).getFileIndex();
Collections.addAll(myEntries, entry.getRootFiles(OrderRootType.CLASSES));
}
public int hashCode() {
if (myCachedHashCode == 0) {
myCachedHashCode = myEntries.hashCode();
}
return myCachedHashCode;
}
public boolean equals(Object object) {
if (object == this) return true;
if (object == null || object.getClass() != LibraryRuntimeClasspathScope.class) return false;
final LibraryRuntimeClasspathScope that = (LibraryRuntimeClasspathScope)object;
return that.myEntries.equals(myEntries);
}
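// Walks the module's dependency graph and gathers runtime classpath roots:
// library class roots, SDK class roots, and the source roots of the module
// itself and of the modules it depends on.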
private void buildEntries(@NotNull final Module module,
@NotNull final Set<Module> processedModules,
@NotNull final Set<Library> processedLibraries,
@NotNull final Set<Sdk> processedSdk,
Condition<OrderEntry> condition) {
if (!processedModules.add(module)) return;
ModuleRootManager.getInstance(module).orderEntries().recursively().satisfying(condition).process(new RootPolicy<LinkedHashSet<VirtualFile>>() {
public LinkedHashSet<VirtualFile> visitLibraryOrderEntry(final LibraryOrderEntry libraryOrderEntry,
final LinkedHashSet<VirtualFile> value) {
final Library library = libraryOrderEntry.getLibrary();
if (library != null && processedLibraries.add(library)) {
ContainerUtil.addAll(value, libraryOrderEntry.getRootFiles(OrderRootType.CLASSES));
}
return value;
}
public LinkedHashSet<VirtualFile> visitModuleSourceOrderEntry(final ModuleSourceOrderEntry moduleSourceOrderEntry,
final LinkedHashSet<VirtualFile> value) {
processedModules.add(moduleSourceOrderEntry.getOwnerModule());
ContainerUtil.addAll(value, moduleSourceOrderEntry.getRootModel().getSourceRoots());
return value;
}
@Override
public LinkedHashSet<VirtualFile> visitModuleOrderEntry(ModuleOrderEntry moduleOrderEntry, LinkedHashSet<VirtualFile> value) {
final Module depModule = moduleOrderEntry.getModule();
if (depModule != null) {
ContainerUtil.addAll(value, ModuleRootManager.getInstance(depModule).getSourceRoots());
}
return value;
}
public LinkedHashSet<VirtualFile> visitJdkOrderEntry(final JdkOrderEntry jdkOrderEntry, final LinkedHashSet<VirtualFile> value) {
final Sdk jdk = jdkOrderEntry.getJdk();
if (jdk != null && processedSdk.add(jdk)) {
ContainerUtil.addAll(value, jdkOrderEntry.getRootFiles(OrderRootType.CLASSES));
}
return value;
}
}, myEntries);
}
public boolean contains(VirtualFile file) {
return myEntries.contains(getFileRoot(file));
}
@Nullable
private VirtualFile getFileRoot(VirtualFile file) {
if (myIndex.isLibraryClassFile(file)) {
return myIndex.getClassRootForFile(file);
}
if (myIndex.isInContent(file)) {
return myIndex.getSourceRootForFile(file);
}
if (myIndex.isInLibraryClasses(file)) {
return myIndex.getClassRootForFile(file);
}
return null;
}
public int compare(VirtualFile file1, VirtualFile file2) {
final VirtualFile r1 = getFileRoot(file1);
final VirtualFile r2 = getFileRoot(file2);
for (VirtualFile root : myEntries) {
if (Comparing.equal(r1, root)) return 1;
if (Comparing.equal(r2, root)) return -1;
}
return 0;
}
@TestOnly
public List<VirtualFile> getRoots() {
return new ArrayList<VirtualFile>(myEntries);
}
public boolean isSearchInModuleContent(@NotNull Module aModule) {
return false;
}
public boolean isSearchInLibraries() {
return true;
}
}<๏ฝfimโend๏ฝ> | final Set<Module> processedModules = new THashSet<Module>(); |
<|file_name|>TestEnumStates.java<|end_file_name|><๏ฝfimโbegin๏ฝ>package Init.Enum;
import Generics.Generator;
import java.util.EnumMap;
import java.util.Iterator;
import static Init.Enum.Input.*;
import static Init.Print.print;
/**
* Created by Defias on 2020/07.<๏ฝfimโhole๏ฝ>
 * Description: a state machine implemented with enum
*
*/
public class TestEnumStates {
public static void main(String[] args) {
Generator<Input> gen = new RandomInputGenerator();
VendingMachine.run(gen);
}
}
class VendingMachine {
private static State state = State.RESTING;
private static int amount = 0;
private static Input selection = null;
enum StateDuration { TRANSIENT } // Tagging enum
enum State {
RESTING {
void next(Input input) {
switch(Category.categorize(input)) {
case MONEY:
amount += input.amount();
state = ADDING_MONEY;
break;
case SHUT_DOWN:
state = TERMINAL;
default:
}
}
},
ADDING_MONEY {
void next(Input input) {
switch(Category.categorize(input)) {
case MONEY:
amount += input.amount();
break;
case ITEM_SELECTION:
selection = input;
if(amount < selection.amount())
print("Insufficient money for " + selection);
else state = DISPENSING;
break;
case QUIT_TRANSACTION:
state = GIVING_CHANGE;
break;
case SHUT_DOWN:
state = TERMINAL;
default:
}
}
},
DISPENSING(StateDuration.TRANSIENT) {
void next() {
print("here is your " + selection);
amount -= selection.amount();
state = GIVING_CHANGE;
}
},
GIVING_CHANGE(StateDuration.TRANSIENT) {
void next() {
if(amount > 0) {
print("Your change: " + amount);
amount = 0;
}
state = RESTING;
}
},
TERMINAL { void output() { print("Halted"); }};
private boolean isTransient = false;
State() {}
State(StateDuration trans) {
isTransient = true;
}
void next(Input input) {
throw new RuntimeException("Only call " +
"next(Input input) for non-transient states");
}
void next() {
throw new RuntimeException("Only call next() for " +
"StateDuration.TRANSIENT states");
}
void output() { print(amount); }
}
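// Drive the machine: non-transient states consume one input each; transient
// states (DISPENSING, GIVING_CHANGE) advance themselves until a stable
// state is reached, and each iteration reports the current amount.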
static void run(Generator<Input> gen) {
while(state != State.TERMINAL) {
state.next(gen.next());
while(state.isTransient)
state.next();
state.output();
}
}
}
// For a basic sanity check:
class RandomInputGenerator implements Generator<Input> {
public Input next() {
return Input.randomSelection();
}
}
enum Category {
MONEY(NICKEL, DIME, QUARTER, DOLLAR),
ITEM_SELECTION(TOOTHPASTE, CHIPS, SODA, SOAP),
QUIT_TRANSACTION(ABORT_TRANSACTION),
SHUT_DOWN(STOP);
private Input[] values;
Category(Input... types) {
values = types;
}
private static EnumMap<Input,Category> categories =
new EnumMap<Input,Category>(Input.class);
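// Reverse lookup built once at class-load time: maps each Input constant
// back to the Category that declared it.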
static {
for(Category c : Category.class.getEnumConstants())
for(Input type : c.values)
categories.put(type, c);
}
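// The static block above builds the reverse Input -> Category index once,
// so categorize() below is a single EnumMap lookup.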
public static Category categorize(Input input) {
return categories.get(input);
}
}<๏ฝfimโend๏ฝ> | |
<|file_name|>lib.rs<|end_file_name|><๏ฝfimโbegin๏ฝ>pub fn bottle_or_bottles(n: i32) -> &'static str {
match n {
1 => "bottle",
_ => "bottles",
}
}
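// E.g. bottle_or_bottles(1) == "bottle" and bottle_or_bottles(3) == "bottles".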
pub fn sing(n: i32) {
for i in (1..n + 1).rev() {
println!(
"{0} {1} of beer on the wall, {0} {1} of beer.",
i,
bottle_or_bottles(i)
);
println!(
"Take one down and pass it around, {0} {1} of beer on the wall!",<๏ฝfimโhole๏ฝ> println!();
}
println!("No more bottles of beer on the wall, no more bottles of beer.");
println!(
"Go to the store and buy some more, {0} bottles of beer on the wall.",
n
);
}<๏ฝfimโend๏ฝ> | i - 1,
bottle_or_bottles(i - 1)
); |
<|file_name|>metadata.rs<|end_file_name|><๏ฝfimโbegin๏ฝ>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use self::RecursiveTypeDescription::*;
use self::MemberOffset::*;
use self::MemberDescriptionFactory::*;
use self::EnumDiscriminantInfo::*;
use super::utils::{debug_context, DIB, span_start, bytes_to_bits, size_and_align_of,
get_namespace_and_span_for_item, create_DIArray,
fn_should_be_ignored, is_node_local_to_unit};
use super::namespace::namespace_for_item;
use super::type_names::{compute_debuginfo_type_name, push_debuginfo_type_name};
use super::{declare_local, VariableKind, VariableAccess};
use llvm::{self, ValueRef};
use llvm::debuginfo::{DIType, DIFile, DIScope, DIDescriptor, DICompositeType};
use metadata::csearch;
use middle::pat_util;
use middle::subst::{self, Substs};
use rustc::ast_map;
use trans::{type_of, adt, machine, monomorphize};
use trans::common::{self, CrateContext, FunctionContext, NormalizingClosureTyper, Block};
use trans::_match::{BindingInfo, TrByCopy, TrByMove, TrByRef};
use trans::type_::Type;
use middle::ty::{self, Ty, ClosureTyper};
use session::config::{self, FullDebugInfo};
use util::nodemap::FnvHashMap;
use util::common::path2cstr;
use libc::{c_uint, c_longlong};
use std::ffi::CString;
use std::path::Path;
use std::ptr;
use std::rc::Rc;
use syntax::util::interner::Interner;
use syntax::codemap::Span;
use syntax::{ast, codemap, ast_util};
use syntax::parse::token::{self, special_idents};
const DW_LANG_RUST: c_uint = 0x9000;
#[allow(non_upper_case_globals)]
const DW_ATE_boolean: c_uint = 0x02;
#[allow(non_upper_case_globals)]
const DW_ATE_float: c_uint = 0x04;
#[allow(non_upper_case_globals)]
const DW_ATE_signed: c_uint = 0x05;
#[allow(non_upper_case_globals)]
const DW_ATE_unsigned: c_uint = 0x07;
#[allow(non_upper_case_globals)]
const DW_ATE_unsigned_char: c_uint = 0x08;
pub const UNKNOWN_LINE_NUMBER: c_uint = 0;
pub const UNKNOWN_COLUMN_NUMBER: c_uint = 0;
// ptr::null() doesn't work :(
const UNKNOWN_FILE_METADATA: DIFile = (0 as DIFile);
const UNKNOWN_SCOPE_METADATA: DIScope = (0 as DIScope);
const FLAGS_NONE: c_uint = 0;
#[derive(Copy, Debug, Hash, Eq, PartialEq, Clone)]
pub struct UniqueTypeId(ast::Name);
// The TypeMap is where the CrateDebugContext holds the type metadata nodes
// created so far. The metadata nodes are indexed by UniqueTypeId, and, for
// faster lookup, also by Ty. The TypeMap is responsible for creating
// UniqueTypeIds.
pub struct TypeMap<'tcx> {
// The UniqueTypeIds created so far
unique_id_interner: Interner<Rc<String>>,
// A map from UniqueTypeId to debuginfo metadata for that type. This is a 1:1 mapping.
unique_id_to_metadata: FnvHashMap<UniqueTypeId, DIType>,
// A map from types to debuginfo metadata. This is a N:1 mapping.
type_to_metadata: FnvHashMap<Ty<'tcx>, DIType>,
// A map from types to UniqueTypeId. This is a N:1 mapping.
type_to_unique_id: FnvHashMap<Ty<'tcx>, UniqueTypeId>
}
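// For example (illustrative): `&'a u32` and `&'static u32` are distinct Ty
// values, but unique type ids ignore lifetimes, so both map to the same
// UniqueTypeId and therefore to a single DIType node.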
impl<'tcx> TypeMap<'tcx> {
pub fn new() -> TypeMap<'tcx> {
TypeMap {
unique_id_interner: Interner::new(),
type_to_metadata: FnvHashMap(),
unique_id_to_metadata: FnvHashMap(),
type_to_unique_id: FnvHashMap(),
}
}
// Adds a Ty-to-metadata mapping to the TypeMap. The method will fail if
// the mapping already exists.
fn register_type_with_metadata<'a>(&mut self,
cx: &CrateContext<'a, 'tcx>,
type_: Ty<'tcx>,
metadata: DIType) {
if self.type_to_metadata.insert(type_, metadata).is_some() {
cx.sess().bug(&format!("Type metadata for Ty '{}' is already in the TypeMap!",
type_));
}
}
// Adds a UniqueTypeId-to-metadata mapping to the TypeMap. The method will
// fail if the mapping already exists.
fn register_unique_id_with_metadata(&mut self,
cx: &CrateContext,
unique_type_id: UniqueTypeId,
metadata: DIType) {
if self.unique_id_to_metadata.insert(unique_type_id, metadata).is_some() {
let unique_type_id_str = self.get_unique_type_id_as_string(unique_type_id);
cx.sess().bug(&format!("Type metadata for unique id '{}' is already in the TypeMap!",
&unique_type_id_str[..]));
}
}
fn find_metadata_for_type(&self, type_: Ty<'tcx>) -> Option<DIType> {
self.type_to_metadata.get(&type_).cloned()
}
fn find_metadata_for_unique_id(&self, unique_type_id: UniqueTypeId) -> Option<DIType> {
self.unique_id_to_metadata.get(&unique_type_id).cloned()
}
// Get the string representation of a UniqueTypeId. This method will fail if
// the id is unknown.
fn get_unique_type_id_as_string(&self, unique_type_id: UniqueTypeId) -> Rc<String> {
let UniqueTypeId(interner_key) = unique_type_id;
self.unique_id_interner.get(interner_key)
}
// Get the UniqueTypeId for the given type. If the UniqueTypeId for the given
// type has been requested before, this is just a table lookup. Otherwise an
// ID will be generated and stored for later lookup.
fn get_unique_type_id_of_type<'a>(&mut self, cx: &CrateContext<'a, 'tcx>,
type_: Ty<'tcx>) -> UniqueTypeId {
// basic type -> {:name of the type:}
// tuple -> {tuple_(:param-uid:)*}
// struct -> {struct_:svh: / :node-id:_<(:param-uid:),*> }
// enum -> {enum_:svh: / :node-id:_<(:param-uid:),*> }
// enum variant -> {variant_:variant-name:_:enum-uid:}
// reference (&) -> {& :pointee-uid:}
// mut reference (&mut) -> {&mut :pointee-uid:}
// ptr (*) -> {* :pointee-uid:}
// mut ptr (*mut) -> {*mut :pointee-uid:}
// unique ptr (box) -> {box :pointee-uid:}
// @-ptr (@) -> {@ :pointee-uid:}
// sized vec ([T; x]) -> {[:size:] :element-uid:}
// unsized vec ([T]) -> {[] :element-uid:}
// trait (T) -> {trait_:svh: / :node-id:_<(:param-uid:),*> }
// closure -> {<unsafe_> <once_> :store-sigil: |(:param-uid:),* <,_...>| -> \
// :return-type-uid: : (:bounds:)*}
// function -> {<unsafe_> <abi_> fn( (:param-uid:)* <,_...> ) -> \
// :return-type-uid:}
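// For example (hypothetical ids, for illustration only): `&mut [u32]`
// nests as {&mut{[]{u32}}}, while a monomorphized `Foo<u32>` comes out
// roughly as {struct <crate-hash>/<node-id><{u32},>}.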
match self.type_to_unique_id.get(&type_).cloned() {
Some(unique_type_id) => return unique_type_id,
None => { /* generate one */}
};
let mut unique_type_id = String::with_capacity(256);
unique_type_id.push('{');
match type_.sty {
ty::TyBool |
ty::TyChar |
ty::TyStr |
ty::TyInt(_) |
ty::TyUint(_) |
ty::TyFloat(_) => {
push_debuginfo_type_name(cx, type_, false, &mut unique_type_id);
},
ty::TyEnum(def_id, substs) => {
unique_type_id.push_str("enum ");
from_def_id_and_substs(self, cx, def_id, substs, &mut unique_type_id);
},
ty::TyStruct(def_id, substs) => {
unique_type_id.push_str("struct ");
from_def_id_and_substs(self, cx, def_id, substs, &mut unique_type_id);
},
ty::TyTuple(ref component_types) if component_types.is_empty() => {
push_debuginfo_type_name(cx, type_, false, &mut unique_type_id);
},
ty::TyTuple(ref component_types) => {
unique_type_id.push_str("tuple ");
for &component_type in component_types {
let component_type_id =
self.get_unique_type_id_of_type(cx, component_type);
let component_type_id =
self.get_unique_type_id_as_string(component_type_id);
unique_type_id.push_str(&component_type_id[..]);
}
},
ty::TyBox(inner_type) => {
unique_type_id.push_str("box ");
let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type);
let inner_type_id = self.get_unique_type_id_as_string(inner_type_id);
unique_type_id.push_str(&inner_type_id[..]);
},
ty::TyRawPtr(ty::mt { ty: inner_type, mutbl } ) => {
unique_type_id.push('*');
if mutbl == ast::MutMutable {
unique_type_id.push_str("mut");
}
let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type);
let inner_type_id = self.get_unique_type_id_as_string(inner_type_id);
unique_type_id.push_str(&inner_type_id[..]);
},
ty::TyRef(_, ty::mt { ty: inner_type, mutbl }) => {
unique_type_id.push('&');
if mutbl == ast::MutMutable {
unique_type_id.push_str("mut");
}
let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type);
let inner_type_id = self.get_unique_type_id_as_string(inner_type_id);
unique_type_id.push_str(&inner_type_id[..]);
},
ty::TyArray(inner_type, len) => {
unique_type_id.push_str(&format!("[{}]", len));
let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type);
let inner_type_id = self.get_unique_type_id_as_string(inner_type_id);
unique_type_id.push_str(&inner_type_id[..]);
},
ty::TySlice(inner_type) => {
unique_type_id.push_str("[]");
let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type);
let inner_type_id = self.get_unique_type_id_as_string(inner_type_id);
unique_type_id.push_str(&inner_type_id[..]);
},
ty::TyTrait(ref trait_data) => {
unique_type_id.push_str("trait ");
let principal = cx.tcx().erase_late_bound_regions(&trait_data.principal);
from_def_id_and_substs(self,
cx,
principal.def_id,
principal.substs,
&mut unique_type_id);
},
ty::TyBareFn(_, &ty::BareFnTy{ unsafety, abi, ref sig } ) => {
if unsafety == ast::Unsafety::Unsafe {
unique_type_id.push_str("unsafe ");
}
unique_type_id.push_str(abi.name());
unique_type_id.push_str(" fn(");
let sig = cx.tcx().erase_late_bound_regions(sig);
for ¶meter_type in &sig.inputs {
let parameter_type_id =
self.get_unique_type_id_of_type(cx, parameter_type);
let parameter_type_id =
self.get_unique_type_id_as_string(parameter_type_id);
unique_type_id.push_str(¶meter_type_id[..]);
unique_type_id.push(',');
}
if sig.variadic {
unique_type_id.push_str("...");
}
unique_type_id.push_str(")->");
match sig.output {
ty::FnConverging(ret_ty) => {
let return_type_id = self.get_unique_type_id_of_type(cx, ret_ty);
let return_type_id = self.get_unique_type_id_as_string(return_type_id);
unique_type_id.push_str(&return_type_id[..]);
}
ty::FnDiverging => {
unique_type_id.push_str("!");
}
}
},
ty::TyClosure(def_id, substs) => {
let typer = NormalizingClosureTyper::new(cx.tcx());
let closure_ty = typer.closure_type(def_id, substs);
self.get_unique_type_id_of_closure_type(cx,
closure_ty,
&mut unique_type_id);
},
_ => {
cx.sess().bug(&format!("get_unique_type_id_of_type() - unexpected type: {:?}",
type_))
}
};
unique_type_id.push('}');
// Trim to size before storing permanently
unique_type_id.shrink_to_fit();
let key = self.unique_id_interner.intern(Rc::new(unique_type_id));
self.type_to_unique_id.insert(type_, UniqueTypeId(key));
return UniqueTypeId(key);
fn from_def_id_and_substs<'a, 'tcx>(type_map: &mut TypeMap<'tcx>,
cx: &CrateContext<'a, 'tcx>,
def_id: ast::DefId,
substs: &subst::Substs<'tcx>,
output: &mut String) {
// First, find out the 'real' def_id of the type. Items inlined from
// other crates have to be mapped back to their source.
let source_def_id = if def_id.krate == ast::LOCAL_CRATE {
match cx.external_srcs().borrow().get(&def_id.node).cloned() {
Some(source_def_id) => {
// The given def_id identifies the inlined copy of a
// type definition, let's take the source of the copy.
source_def_id
}
None => def_id
}
} else {
def_id
};
// Get the crate hash as first part of the identifier.
let crate_hash = if source_def_id.krate == ast::LOCAL_CRATE {
cx.link_meta().crate_hash.clone()
} else {
cx.sess().cstore.get_crate_hash(source_def_id.krate)
};
output.push_str(crate_hash.as_str());
output.push_str("/");
output.push_str(&format!("{:x}", def_id.node));
// Maybe check that there is no self type here.
let tps = substs.types.get_slice(subst::TypeSpace);
if !tps.is_empty() {
output.push('<');
for &type_parameter in tps {
let param_type_id =
type_map.get_unique_type_id_of_type(cx, type_parameter);
let param_type_id =
type_map.get_unique_type_id_as_string(param_type_id);
output.push_str(¶m_type_id[..]);
output.push(',');
}
output.push('>');
}
}
}
fn get_unique_type_id_of_closure_type<'a>(&mut self,
cx: &CrateContext<'a, 'tcx>,
closure_ty: ty::ClosureTy<'tcx>,
unique_type_id: &mut String) {
let ty::ClosureTy { unsafety,
ref sig,
abi: _ } = closure_ty;
if unsafety == ast::Unsafety::Unsafe {
unique_type_id.push_str("unsafe ");
}
unique_type_id.push_str("|");
let sig = cx.tcx().erase_late_bound_regions(sig);
for ¶meter_type in &sig.inputs {
let parameter_type_id =
self.get_unique_type_id_of_type(cx, parameter_type);
let parameter_type_id =
self.get_unique_type_id_as_string(parameter_type_id);
unique_type_id.push_str(¶meter_type_id[..]);
unique_type_id.push(',');
}
if sig.variadic {
unique_type_id.push_str("...");
}
unique_type_id.push_str("|->");
match sig.output {
ty::FnConverging(ret_ty) => {
let return_type_id = self.get_unique_type_id_of_type(cx, ret_ty);
let return_type_id = self.get_unique_type_id_as_string(return_type_id);
unique_type_id.push_str(&return_type_id[..]);
}
ty::FnDiverging => {
unique_type_id.push_str("!");
}
}
}
// Get the UniqueTypeId for an enum variant. Enum variants are not really
// types of their own, so they need special handling. We still need a
// UniqueTypeId for them, since to debuginfo they *are* real types.
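// E.g. (illustrative): the `Some` variant of an enum whose unique id is
// {enum X/y} gets the variant id "{enum X/y}::Some".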
fn get_unique_type_id_of_enum_variant<'a>(&mut self,
cx: &CrateContext<'a, 'tcx>,
enum_type: Ty<'tcx>,
variant_name: &str)
-> UniqueTypeId {
let enum_type_id = self.get_unique_type_id_of_type(cx, enum_type);
let enum_variant_type_id = format!("{}::{}",
&self.get_unique_type_id_as_string(enum_type_id),
variant_name);
let interner_key = self.unique_id_interner.intern(Rc::new(enum_variant_type_id));
UniqueTypeId(interner_key)
}
}
// A description of some recursive type. It can either be already finished (as
// with FinalMetadata) or it is not yet finished, but contains all information
// needed to generate the missing parts of the description. See the
// documentation section on Recursive Types at the top of this file for more
// information.
enum RecursiveTypeDescription<'tcx> {
UnfinishedMetadata {
unfinished_type: Ty<'tcx>,
unique_type_id: UniqueTypeId,
metadata_stub: DICompositeType,
llvm_type: Type,
member_description_factory: MemberDescriptionFactory<'tcx>,
},
FinalMetadata(DICompositeType)
}
fn create_and_register_recursive_type_forward_declaration<'a, 'tcx>(
cx: &CrateContext<'a, 'tcx>,
unfinished_type: Ty<'tcx>,
unique_type_id: UniqueTypeId,
metadata_stub: DICompositeType,
llvm_type: Type,
member_description_factory: MemberDescriptionFactory<'tcx>)
-> RecursiveTypeDescription<'tcx> {
// Insert the stub into the TypeMap in order to allow for recursive references
let mut type_map = debug_context(cx).type_map.borrow_mut();
type_map.register_unique_id_with_metadata(cx, unique_type_id, metadata_stub);
type_map.register_type_with_metadata(cx, unfinished_type, metadata_stub);
UnfinishedMetadata {
unfinished_type: unfinished_type,
unique_type_id: unique_type_id,
metadata_stub: metadata_stub,
llvm_type: llvm_type,
member_description_factory: member_description_factory,
}
}
impl<'tcx> RecursiveTypeDescription<'tcx> {
// Finishes up the description of the type in question (mostly by providing
// descriptions of the fields of the given type) and returns the final type
// metadata.
fn finalize<'a>(&self, cx: &CrateContext<'a, 'tcx>) -> MetadataCreationResult {
match *self {
FinalMetadata(metadata) => MetadataCreationResult::new(metadata, false),
UnfinishedMetadata {
unfinished_type,
unique_type_id,
metadata_stub,
llvm_type,
ref member_description_factory,
..
} => {
// Make sure that we have a forward declaration of the type in
// the TypeMap so that recursive references are possible. This
// will always be the case if the RecursiveTypeDescription has
// been properly created through the
// create_and_register_recursive_type_forward_declaration()
// function.
{
let type_map = debug_context(cx).type_map.borrow();
if type_map.find_metadata_for_unique_id(unique_type_id).is_none() ||
type_map.find_metadata_for_type(unfinished_type).is_none() {
cx.sess().bug(&format!("Forward declaration of potentially recursive type \
'{:?}' was not found in TypeMap!",
unfinished_type)
);
}
}
// ... then create the member descriptions ...
let member_descriptions =
member_description_factory.create_member_descriptions(cx);
// ... and attach them to the stub to complete it.
set_members_of_composite_type(cx,
metadata_stub,
llvm_type,
&member_descriptions[..]);
return MetadataCreationResult::new(metadata_stub, true);
}
}
}
}
// Returns from the enclosing function if the type metadata with the given
// unique id can be found in the type map
macro_rules! return_if_metadata_created_in_meantime {
($cx: expr, $unique_type_id: expr) => (
match debug_context($cx).type_map
.borrow()
.find_metadata_for_unique_id($unique_type_id) {
Some(metadata) => return MetadataCreationResult::new(metadata, true),
None => { /* proceed normally */ }
};
)
}
fn fixed_vec_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
unique_type_id: UniqueTypeId,
element_type: Ty<'tcx>,
len: Option<u64>,
span: Span)
-> MetadataCreationResult {
let element_type_metadata = type_metadata(cx, element_type, span);
return_if_metadata_created_in_meantime!(cx, unique_type_id);
let element_llvm_type = type_of::type_of(cx, element_type);
let (element_type_size, element_type_align) = size_and_align_of(cx, element_llvm_type);
let (array_size_in_bytes, upper_bound) = match len {
Some(len) => (element_type_size * len, len as c_longlong),
None => (0, -1)
};
let subrange = unsafe {
llvm::LLVMDIBuilderGetOrCreateSubrange(DIB(cx), 0, upper_bound)
};
let subscripts = create_DIArray(DIB(cx), &[subrange]);
let metadata = unsafe {
llvm::LLVMDIBuilderCreateArrayType(
DIB(cx),
bytes_to_bits(array_size_in_bytes),
bytes_to_bits(element_type_align),
element_type_metadata,
subscripts)
};
return MetadataCreationResult::new(metadata, false);
}
fn vec_slice_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
vec_type: Ty<'tcx>,
element_type: Ty<'tcx>,
unique_type_id: UniqueTypeId,
span: Span)
-> MetadataCreationResult {
let data_ptr_type = cx.tcx().mk_ptr(ty::mt {
ty: element_type,
mutbl: ast::MutImmutable
});
let element_type_metadata = type_metadata(cx, data_ptr_type, span);
return_if_metadata_created_in_meantime!(cx, unique_type_id);
let slice_llvm_type = type_of::type_of(cx, vec_type);
let slice_type_name = compute_debuginfo_type_name(cx, vec_type, true);
let member_llvm_types = slice_llvm_type.field_types();
assert!(slice_layout_is_correct(cx,
&member_llvm_types[..],
element_type));
let member_descriptions = [
MemberDescription {
name: "data_ptr".to_string(),
llvm_type: member_llvm_types[0],
type_metadata: element_type_metadata,
offset: ComputedMemberOffset,
flags: FLAGS_NONE
},
MemberDescription {
name: "length".to_string(),
llvm_type: member_llvm_types[1],
type_metadata: type_metadata(cx, cx.tcx().types.usize, span),
offset: ComputedMemberOffset,
flags: FLAGS_NONE
},
];
assert!(member_descriptions.len() == member_llvm_types.len());
let loc = span_start(cx, span);
let file_metadata = file_metadata(cx, &loc.file.name);
let metadata = composite_type_metadata(cx,
slice_llvm_type,
&slice_type_name[..],
unique_type_id,
&member_descriptions,
UNKNOWN_SCOPE_METADATA,
file_metadata,
span);
return MetadataCreationResult::new(metadata, false);
fn slice_layout_is_correct<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
member_llvm_types: &[Type],
element_type: Ty<'tcx>)
-> bool {
member_llvm_types.len() == 2 &&
member_llvm_types[0] == type_of::type_of(cx, element_type).ptr_to() &&
member_llvm_types[1] == cx.int_type()
}
}
fn subroutine_type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
unique_type_id: UniqueTypeId,
signature: &ty::PolyFnSig<'tcx>,
span: Span)
-> MetadataCreationResult
{
let signature = cx.tcx().erase_late_bound_regions(signature);
let mut signature_metadata: Vec<DIType> = Vec::with_capacity(signature.inputs.len() + 1);
// return type
signature_metadata.push(match signature.output {
ty::FnConverging(ret_ty) => match ret_ty.sty {
ty::TyTuple(ref tys) if tys.is_empty() => ptr::null_mut(),
_ => type_metadata(cx, ret_ty, span)
},
ty::FnDiverging => diverging_type_metadata(cx)
});
// regular arguments
for &argument_type in &signature.inputs {
signature_metadata.push(type_metadata(cx, argument_type, span));
}
return_if_metadata_created_in_meantime!(cx, unique_type_id);
return MetadataCreationResult::new(
unsafe {
llvm::LLVMDIBuilderCreateSubroutineType(
DIB(cx),
UNKNOWN_FILE_METADATA,
create_DIArray(DIB(cx), &signature_metadata[..]))
},
false);
}
// FIXME(1563) This is all a bit of a hack because 'trait pointer' is an
// ill-defined concept. For the case of an actual trait pointer (i.e., Box<Trait>,
// &Trait), trait_object_type should be the whole thing (e.g., Box<Trait>) and
// trait_type should be the actual trait (e.g., Trait). Where the trait is part
// of a DST struct, there is no trait_object_type and the results of this
// function will be a little bit weird.
fn trait_pointer_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
trait_type: Ty<'tcx>,
trait_object_type: Option<Ty<'tcx>>,
unique_type_id: UniqueTypeId)
-> DIType {
// The implementation provided here is a stub. It makes sure that the trait
// type is assigned the correct name, size, namespace, and source location.
// But it does not describe the trait's methods.
let def_id = match trait_type.sty {
ty::TyTrait(ref data) => data.principal_def_id(),
_ => {
cx.sess().bug(&format!("debuginfo: Unexpected trait-object type in \
trait_pointer_metadata(): {:?}",
trait_type));
}
};
let trait_object_type = trait_object_type.unwrap_or(trait_type);
let trait_type_name =
compute_debuginfo_type_name(cx, trait_object_type, false);
let (containing_scope, _) = get_namespace_and_span_for_item(cx, def_id);
let trait_llvm_type = type_of::type_of(cx, trait_object_type);
composite_type_metadata(cx,
trait_llvm_type,
&trait_type_name[..],
unique_type_id,
&[],
containing_scope,
UNKNOWN_FILE_METADATA,
codemap::DUMMY_SP)
}
pub fn type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
t: Ty<'tcx>,
usage_site_span: Span)
-> DIType {
// Get the unique type id of this type.
let unique_type_id = {
let mut type_map = debug_context(cx).type_map.borrow_mut();
// First, try to find the type in TypeMap. If we have seen it before, we
// can exit early here.
match type_map.find_metadata_for_type(t) {
Some(metadata) => {
return metadata;
},
None => {
// The Ty is not in the TypeMap but maybe we have already seen
// an equivalent type (e.g. only differing in region arguments).
// In order to find out, generate the unique type id and look
// that up.
let unique_type_id = type_map.get_unique_type_id_of_type(cx, t);
match type_map.find_metadata_for_unique_id(unique_type_id) {
Some(metadata) => {
// There is already an equivalent type in the TypeMap.
// Register this Ty as an alias in the cache and
// return the cached metadata.
type_map.register_type_with_metadata(cx, t, metadata);
return metadata;
},
None => {
// There really is no type metadata for this type, so
// proceed by creating it.
unique_type_id
}
}
}
}
};
debug!("type_metadata: {:?}", t);
let sty = &t.sty;
let MetadataCreationResult { metadata, already_stored_in_typemap } = match *sty {
ty::TyBool |
ty::TyChar |
ty::TyInt(_) |
ty::TyUint(_) |
ty::TyFloat(_) => {
MetadataCreationResult::new(basic_type_metadata(cx, t), false)
}
ty::TyTuple(ref elements) if elements.is_empty() => {
MetadataCreationResult::new(basic_type_metadata(cx, t), false)
}
ty::TyEnum(def_id, _) => {
prepare_enum_metadata(cx, t, def_id, unique_type_id, usage_site_span).finalize(cx)
}
ty::TyArray(typ, len) => {
fixed_vec_metadata(cx, unique_type_id, typ, Some(len as u64), usage_site_span)
}
ty::TySlice(typ) => {
fixed_vec_metadata(cx, unique_type_id, typ, None, usage_site_span)
}
ty::TyStr => {
fixed_vec_metadata(cx, unique_type_id, cx.tcx().types.i8, None, usage_site_span)
}
ty::TyTrait(..) => {
MetadataCreationResult::new(
trait_pointer_metadata(cx, t, None, unique_type_id),
false)
}
ty::TyBox(ty) | ty::TyRawPtr(ty::mt{ty, ..}) | ty::TyRef(_, ty::mt{ty, ..}) => {
match ty.sty {
ty::TySlice(typ) => {
vec_slice_metadata(cx, t, typ, unique_type_id, usage_site_span)
}
ty::TyStr => {
vec_slice_metadata(cx, t, cx.tcx().types.u8, unique_type_id, usage_site_span)
}
ty::TyTrait(..) => {
MetadataCreationResult::new(
trait_pointer_metadata(cx, ty, Some(t), unique_type_id),
false)
}
_ => {
let pointee_metadata = type_metadata(cx, ty, usage_site_span);
match debug_context(cx).type_map
.borrow()
.find_metadata_for_unique_id(unique_type_id) {
Some(metadata) => return metadata,
None => { /* proceed normally */ }
};
MetadataCreationResult::new(pointer_type_metadata(cx, t, pointee_metadata),
false)
}
}
}
ty::TyBareFn(_, ref barefnty) => {
subroutine_type_metadata(cx, unique_type_id, &barefnty.sig, usage_site_span)
}
ty::TyClosure(def_id, substs) => {
let typer = NormalizingClosureTyper::new(cx.tcx());
let sig = typer.closure_type(def_id, substs).sig;
subroutine_type_metadata(cx, unique_type_id, &sig, usage_site_span)
}
ty::TyStruct(def_id, substs) => {
prepare_struct_metadata(cx,
t,
def_id,
substs,
unique_type_id,
usage_site_span).finalize(cx)
}
ty::TyTuple(ref elements) => {
prepare_tuple_metadata(cx,
t,
&elements[..],
unique_type_id,
usage_site_span).finalize(cx)
}
_ => {
cx.sess().bug(&format!("debuginfo: unexpected type in type_metadata: {:?}",
sty))
}
};
{
let mut type_map = debug_context(cx).type_map.borrow_mut();
if already_stored_in_typemap {
// Also make sure that we already have a TypeMap entry for the unique type id.
let metadata_for_uid = match type_map.find_metadata_for_unique_id(unique_type_id) {
Some(metadata) => metadata,
None => {
let unique_type_id_str =
type_map.get_unique_type_id_as_string(unique_type_id);
let error_message = format!("Expected type metadata for unique \
type id '{}' to already be in \
the debuginfo::TypeMap but it \
was not. (Ty = {})",
&unique_type_id_str[..],
t);
cx.sess().span_bug(usage_site_span, &error_message[..]);
}
};
match type_map.find_metadata_for_type(t) {
Some(metadata) => {
if metadata != metadata_for_uid {
let unique_type_id_str =
type_map.get_unique_type_id_as_string(unique_type_id);<๏ฝfimโhole๏ฝ> UniqueTypeId maps in \
debuginfo::TypeMap. \
UniqueTypeId={}, Ty={}",
&unique_type_id_str[..],
t);
cx.sess().span_bug(usage_site_span, &error_message[..]);
}
}
None => {
type_map.register_type_with_metadata(cx, t, metadata);
}
}
} else {
type_map.register_type_with_metadata(cx, t, metadata);
type_map.register_unique_id_with_metadata(cx, unique_type_id, metadata);
}
}
metadata
}
pub fn file_metadata(cx: &CrateContext, full_path: &str) -> DIFile {
match debug_context(cx).created_files.borrow().get(full_path) {
Some(file_metadata) => return *file_metadata,
None => ()
}
debug!("file_metadata: {}", full_path);
// FIXME (#9639): This needs to handle non-utf8 paths
let work_dir = cx.sess().working_dir.to_str().unwrap();
let file_name =
if full_path.starts_with(work_dir) {
&full_path[work_dir.len() + 1..full_path.len()]
} else {
full_path
};
let file_name = CString::new(file_name).unwrap();
let work_dir = CString::new(work_dir).unwrap();
let file_metadata = unsafe {
llvm::LLVMDIBuilderCreateFile(DIB(cx), file_name.as_ptr(),
work_dir.as_ptr())
};
let mut created_files = debug_context(cx).created_files.borrow_mut();
created_files.insert(full_path.to_string(), file_metadata);
return file_metadata;
}
/// Finds the scope metadata node for the given AST node.
pub fn scope_metadata(fcx: &FunctionContext,
node_id: ast::NodeId,
error_reporting_span: Span)
-> DIScope {
let scope_map = &fcx.debug_context
.get_ref(fcx.ccx, error_reporting_span)
.scope_map;
match scope_map.borrow().get(&node_id).cloned() {
Some(scope_metadata) => scope_metadata,
None => {
let node = fcx.ccx.tcx().map.get(node_id);
fcx.ccx.sess().span_bug(error_reporting_span,
&format!("debuginfo: Could not find scope info for node {:?}",
node));
}
}
}
fn diverging_type_metadata(cx: &CrateContext) -> DIType {
unsafe {
llvm::LLVMDIBuilderCreateBasicType(
DIB(cx),
"!\0".as_ptr() as *const _,
bytes_to_bits(0),
bytes_to_bits(0),
DW_ATE_unsigned)
}
}
fn basic_type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
t: Ty<'tcx>) -> DIType {
debug!("basic_type_metadata: {:?}", t);
let (name, encoding) = match t.sty {
ty::TyTuple(ref elements) if elements.is_empty() =>
("()".to_string(), DW_ATE_unsigned),
ty::TyBool => ("bool".to_string(), DW_ATE_boolean),
ty::TyChar => ("char".to_string(), DW_ATE_unsigned_char),
ty::TyInt(int_ty) => match int_ty {
ast::TyIs => ("isize".to_string(), DW_ATE_signed),
ast::TyI8 => ("i8".to_string(), DW_ATE_signed),
ast::TyI16 => ("i16".to_string(), DW_ATE_signed),
ast::TyI32 => ("i32".to_string(), DW_ATE_signed),
ast::TyI64 => ("i64".to_string(), DW_ATE_signed)
},
ty::TyUint(uint_ty) => match uint_ty {
ast::TyUs => ("usize".to_string(), DW_ATE_unsigned),
ast::TyU8 => ("u8".to_string(), DW_ATE_unsigned),
ast::TyU16 => ("u16".to_string(), DW_ATE_unsigned),
ast::TyU32 => ("u32".to_string(), DW_ATE_unsigned),
ast::TyU64 => ("u64".to_string(), DW_ATE_unsigned)
},
ty::TyFloat(float_ty) => match float_ty {
ast::TyF32 => ("f32".to_string(), DW_ATE_float),
ast::TyF64 => ("f64".to_string(), DW_ATE_float),
},
_ => cx.sess().bug("debuginfo::basic_type_metadata - t is invalid type")
};
let llvm_type = type_of::type_of(cx, t);
let (size, align) = size_and_align_of(cx, llvm_type);
let name = CString::new(name).unwrap();
let ty_metadata = unsafe {
llvm::LLVMDIBuilderCreateBasicType(
DIB(cx),
name.as_ptr(),
bytes_to_bits(size),
bytes_to_bits(align),
encoding)
};
return ty_metadata;
}
fn pointer_type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
pointer_type: Ty<'tcx>,
pointee_type_metadata: DIType)
-> DIType {
let pointer_llvm_type = type_of::type_of(cx, pointer_type);
let (pointer_size, pointer_align) = size_and_align_of(cx, pointer_llvm_type);
let name = compute_debuginfo_type_name(cx, pointer_type, false);
let name = CString::new(name).unwrap();
let ptr_metadata = unsafe {
llvm::LLVMDIBuilderCreatePointerType(
DIB(cx),
pointee_type_metadata,
bytes_to_bits(pointer_size),
bytes_to_bits(pointer_align),
name.as_ptr())
};
return ptr_metadata;
}
pub fn compile_unit_metadata(cx: &CrateContext) -> DIDescriptor {
let work_dir = &cx.sess().working_dir;
let compile_unit_name = match cx.sess().local_crate_source_file {
None => fallback_path(cx),
Some(ref abs_path) => {
if abs_path.is_relative() {
cx.sess().warn("debuginfo: Invalid path to crate's local root source file!");
fallback_path(cx)
} else {
match abs_path.relative_from(work_dir) {
Some(ref p) if p.is_relative() => {
if p.starts_with(Path::new("./")) {
path2cstr(p)
} else {
path2cstr(&Path::new(".").join(p))
}
}
_ => fallback_path(cx)
}
}
}
};
debug!("compile_unit_metadata: {:?}", compile_unit_name);
let producer = format!("rustc version {}",
(option_env!("CFG_VERSION")).expect("CFG_VERSION"));
let compile_unit_name = compile_unit_name.as_ptr();
let work_dir = path2cstr(&work_dir);
let producer = CString::new(producer).unwrap();
let flags = "\0";
let split_name = "\0";
return unsafe {
llvm::LLVMDIBuilderCreateCompileUnit(
debug_context(cx).builder,
DW_LANG_RUST,
compile_unit_name,
work_dir.as_ptr(),
producer.as_ptr(),
cx.sess().opts.optimize != config::No,
flags.as_ptr() as *const _,
0,
split_name.as_ptr() as *const _)
};
fn fallback_path(cx: &CrateContext) -> CString {
CString::new(cx.link_meta().crate_name.clone()).unwrap()
}
}
struct MetadataCreationResult {
metadata: DIType,
already_stored_in_typemap: bool
}
impl MetadataCreationResult {
fn new(metadata: DIType, already_stored_in_typemap: bool) -> MetadataCreationResult {
MetadataCreationResult {
metadata: metadata,
already_stored_in_typemap: already_stored_in_typemap
}
}
}
#[derive(Debug)]
enum MemberOffset {
FixedMemberOffset { bytes: usize },
// For ComputedMemberOffset, the offset is read from the llvm type definition.
ComputedMemberOffset
}
// Description of a type member, which can either be a regular field (as in
// structs or tuples) or an enum variant.
#[derive(Debug)]
struct MemberDescription {
name: String,
llvm_type: Type,
type_metadata: DIType,
offset: MemberOffset,
flags: c_uint
}
// A factory for MemberDescriptions. It produces a list of member descriptions
// for some record-like type. MemberDescriptionFactories are used to defer the
// creation of type member descriptions in order to break cycles arising from
// recursive type definitions.
enum MemberDescriptionFactory<'tcx> {
StructMDF(StructMemberDescriptionFactory<'tcx>),
TupleMDF(TupleMemberDescriptionFactory<'tcx>),
EnumMDF(EnumMemberDescriptionFactory<'tcx>),
VariantMDF(VariantMemberDescriptionFactory<'tcx>)
}
impl<'tcx> MemberDescriptionFactory<'tcx> {
fn create_member_descriptions<'a>(&self, cx: &CrateContext<'a, 'tcx>)
-> Vec<MemberDescription> {
match *self {
StructMDF(ref this) => {
this.create_member_descriptions(cx)
}
TupleMDF(ref this) => {
this.create_member_descriptions(cx)
}
EnumMDF(ref this) => {
this.create_member_descriptions(cx)
}
VariantMDF(ref this) => {
this.create_member_descriptions(cx)
}
}
}
}
//=-----------------------------------------------------------------------------
// Structs
//=-----------------------------------------------------------------------------
// Creates MemberDescriptions for the fields of a struct
struct StructMemberDescriptionFactory<'tcx> {
fields: Vec<ty::field<'tcx>>,
is_simd: bool,
span: Span,
}
impl<'tcx> StructMemberDescriptionFactory<'tcx> {
fn create_member_descriptions<'a>(&self, cx: &CrateContext<'a, 'tcx>)
-> Vec<MemberDescription> {
if self.fields.is_empty() {
return Vec::new();
}
let field_size = if self.is_simd {
machine::llsize_of_alloc(cx, type_of::type_of(cx, self.fields[0].mt.ty)) as usize
} else {
0xdeadbeef
};
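// 0xdeadbeef is only a sentinel for the non-SIMD case; it is never used
// as an offset, and the assert below guards against that.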
self.fields.iter().enumerate().map(|(i, field)| {
let name = if field.name == special_idents::unnamed_field.name {
format!("__{}", i)
} else {
token::get_name(field.name).to_string()
};
let offset = if self.is_simd {
assert!(field_size != 0xdeadbeef);
FixedMemberOffset { bytes: i * field_size }
} else {
ComputedMemberOffset
};
MemberDescription {
name: name,
llvm_type: type_of::type_of(cx, field.mt.ty),
type_metadata: type_metadata(cx, field.mt.ty, self.span),
offset: offset,
flags: FLAGS_NONE,
}
}).collect()
}
}
fn prepare_struct_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
struct_type: Ty<'tcx>,
def_id: ast::DefId,
substs: &subst::Substs<'tcx>,
unique_type_id: UniqueTypeId,
span: Span)
-> RecursiveTypeDescription<'tcx> {
let struct_name = compute_debuginfo_type_name(cx, struct_type, false);
let struct_llvm_type = type_of::in_memory_type_of(cx, struct_type);
let (containing_scope, _) = get_namespace_and_span_for_item(cx, def_id);
let struct_metadata_stub = create_struct_stub(cx,
struct_llvm_type,
&struct_name,
unique_type_id,
containing_scope);
let mut fields = cx.tcx().struct_fields(def_id, substs);
// The `Ty` values returned by `ty::struct_fields` can still contain
// `TyProjection` variants, so normalize those away.
for field in &mut fields {
field.mt.ty = monomorphize::normalize_associated_type(cx.tcx(), &field.mt.ty);
}
create_and_register_recursive_type_forward_declaration(
cx,
struct_type,
unique_type_id,
struct_metadata_stub,
struct_llvm_type,
StructMDF(StructMemberDescriptionFactory {
fields: fields,
is_simd: struct_type.is_simd(cx.tcx()),
span: span,
})
)
}
//=-----------------------------------------------------------------------------
// Tuples
//=-----------------------------------------------------------------------------
// Creates MemberDescriptions for the fields of a tuple
struct TupleMemberDescriptionFactory<'tcx> {
component_types: Vec<Ty<'tcx>>,
span: Span,
}
impl<'tcx> TupleMemberDescriptionFactory<'tcx> {
fn create_member_descriptions<'a>(&self, cx: &CrateContext<'a, 'tcx>)
-> Vec<MemberDescription> {
self.component_types
.iter()
.enumerate()
.map(|(i, &component_type)| {
MemberDescription {
name: format!("__{}", i),
llvm_type: type_of::type_of(cx, component_type),
type_metadata: type_metadata(cx, component_type, self.span),
offset: ComputedMemberOffset,
flags: FLAGS_NONE,
}
}).collect()
}
}
fn prepare_tuple_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
tuple_type: Ty<'tcx>,
component_types: &[Ty<'tcx>],
unique_type_id: UniqueTypeId,
span: Span)
-> RecursiveTypeDescription<'tcx> {
let tuple_name = compute_debuginfo_type_name(cx, tuple_type, false);
let tuple_llvm_type = type_of::type_of(cx, tuple_type);
create_and_register_recursive_type_forward_declaration(
cx,
tuple_type,
unique_type_id,
create_struct_stub(cx,
tuple_llvm_type,
&tuple_name[..],
unique_type_id,
UNKNOWN_SCOPE_METADATA),
tuple_llvm_type,
TupleMDF(TupleMemberDescriptionFactory {
component_types: component_types.to_vec(),
span: span,
})
)
}
//=-----------------------------------------------------------------------------
// Enums
//=-----------------------------------------------------------------------------
// Describes the members of an enum value: An enum is described as a union of
// structs in DWARF. This MemberDescriptionFactory provides the description for
// the members of this union; so for every variant of the given enum, this
// factory will produce one MemberDescription (all with no name and a fixed
// offset of zero bytes).
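// Illustrative sketch of the shape this produces (field names as emitted
// further down in this file):
//
//     enum Shape { Square(u32), Circle(u32, u32) }
//
// is described as a union of one struct per variant, each starting with
// the discriminant:
//
//     union { struct Square { RUST$ENUM$DISR, __0 },
//             struct Circle { RUST$ENUM$DISR, __0, __1 } }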
struct EnumMemberDescriptionFactory<'tcx> {
enum_type: Ty<'tcx>,
type_rep: Rc<adt::Repr<'tcx>>,
variants: Rc<Vec<Rc<ty::VariantInfo<'tcx>>>>,
discriminant_type_metadata: Option<DIType>,
containing_scope: DIScope,
file_metadata: DIFile,
span: Span,
}
impl<'tcx> EnumMemberDescriptionFactory<'tcx> {
fn create_member_descriptions<'a>(&self, cx: &CrateContext<'a, 'tcx>)
-> Vec<MemberDescription> {
match *self.type_rep {
adt::General(_, ref struct_defs, _) => {
let discriminant_info = RegularDiscriminant(self.discriminant_type_metadata
.expect(""));
struct_defs
.iter()
.enumerate()
.map(|(i, struct_def)| {
let (variant_type_metadata,
variant_llvm_type,
member_desc_factory) =
describe_enum_variant(cx,
self.enum_type,
struct_def,
&*(*self.variants)[i],
discriminant_info,
self.containing_scope,
self.span);
let member_descriptions = member_desc_factory
.create_member_descriptions(cx);
set_members_of_composite_type(cx,
variant_type_metadata,
variant_llvm_type,
&member_descriptions);
MemberDescription {
name: "".to_string(),
llvm_type: variant_llvm_type,
type_metadata: variant_type_metadata,
offset: FixedMemberOffset { bytes: 0 },
flags: FLAGS_NONE
}
}).collect()
},
adt::Univariant(ref struct_def, _) => {
assert!(self.variants.len() <= 1);
if self.variants.is_empty() {
vec![]
} else {
let (variant_type_metadata,
variant_llvm_type,
member_description_factory) =
describe_enum_variant(cx,
self.enum_type,
struct_def,
&*(*self.variants)[0],
NoDiscriminant,
self.containing_scope,
self.span);
let member_descriptions =
member_description_factory.create_member_descriptions(cx);
set_members_of_composite_type(cx,
variant_type_metadata,
variant_llvm_type,
&member_descriptions[..]);
vec![
MemberDescription {
name: "".to_string(),
llvm_type: variant_llvm_type,
type_metadata: variant_type_metadata,
offset: FixedMemberOffset { bytes: 0 },
flags: FLAGS_NONE
}
]
}
}
adt::RawNullablePointer { nndiscr: non_null_variant_index, nnty, .. } => {
// As far as debuginfo is concerned, the pointer this enum
// represents is still wrapped in a struct. This is to make the
// DWARF representation of enums uniform.
// First create a description of the artificial wrapper struct:
let non_null_variant = &(*self.variants)[non_null_variant_index as usize];
let non_null_variant_name = token::get_name(non_null_variant.name);
// The llvm type and metadata of the pointer
let non_null_llvm_type = type_of::type_of(cx, nnty);
let non_null_type_metadata = type_metadata(cx, nnty, self.span);
// The type of the artificial struct wrapping the pointer
let artificial_struct_llvm_type = Type::struct_(cx,
&[non_null_llvm_type],
false);
// For the metadata of the wrapper struct, we need to create a
// MemberDescription of the struct's single field.
let sole_struct_member_description = MemberDescription {
name: match non_null_variant.arg_names {
Some(ref names) => token::get_name(names[0]).to_string(),
None => "__0".to_string()
},
llvm_type: non_null_llvm_type,
type_metadata: non_null_type_metadata,
offset: FixedMemberOffset { bytes: 0 },
flags: FLAGS_NONE
};
let unique_type_id = debug_context(cx).type_map
.borrow_mut()
.get_unique_type_id_of_enum_variant(
cx,
self.enum_type,
&non_null_variant_name);
// Now we can create the metadata of the artificial struct
let artificial_struct_metadata =
composite_type_metadata(cx,
artificial_struct_llvm_type,
&non_null_variant_name,
unique_type_id,
&[sole_struct_member_description],
self.containing_scope,
self.file_metadata,
codemap::DUMMY_SP);
// Encode the information about the null variant in the union
// member's name.
let null_variant_index = (1 - non_null_variant_index) as usize;
let null_variant_name = token::get_name((*self.variants)[null_variant_index].name);
let union_member_name = format!("RUST$ENCODED$ENUM${}${}",
0,
null_variant_name);
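// E.g. (illustrative): for Option<&u32> this produces a single union
// member named "RUST$ENCODED$ENUM$0$None".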
// Finally create the (singleton) list of descriptions of union
// members.
vec![
MemberDescription {
name: union_member_name,
llvm_type: artificial_struct_llvm_type,
type_metadata: artificial_struct_metadata,
offset: FixedMemberOffset { bytes: 0 },
flags: FLAGS_NONE
}
]
},
adt::StructWrappedNullablePointer { nonnull: ref struct_def,
nndiscr,
ref discrfield, ..} => {
// Create a description of the non-null variant
let (variant_type_metadata, variant_llvm_type, member_description_factory) =
describe_enum_variant(cx,
self.enum_type,
struct_def,
&*(*self.variants)[nndiscr as usize],
OptimizedDiscriminant,
self.containing_scope,
self.span);
let variant_member_descriptions =
member_description_factory.create_member_descriptions(cx);
set_members_of_composite_type(cx,
variant_type_metadata,
variant_llvm_type,
&variant_member_descriptions[..]);
// Encode the information about the null variant in the union
// member's name.
let null_variant_index = (1 - nndiscr) as usize;
let null_variant_name = token::get_name((*self.variants)[null_variant_index].name);
let discrfield = discrfield.iter()
.skip(1)
.map(|x| x.to_string())
.collect::<Vec<_>>().connect("$");
let union_member_name = format!("RUST$ENCODED$ENUM${}${}",
discrfield,
null_variant_name);
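// E.g. (illustrative): a discriminant found at field path [0, 1, 2]
// (the leading element is skipped) yields "RUST$ENCODED$ENUM$1$2$None".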
// Create the (singleton) list of descriptions of union members.
vec![
MemberDescription {
name: union_member_name,
llvm_type: variant_llvm_type,
type_metadata: variant_type_metadata,
offset: FixedMemberOffset { bytes: 0 },
flags: FLAGS_NONE
}
]
},
adt::CEnum(..) => cx.sess().span_bug(self.span, "This should be unreachable.")
}
}
}
// Creates MemberDescriptions for the fields of a single enum variant.
struct VariantMemberDescriptionFactory<'tcx> {
args: Vec<(String, Ty<'tcx>)>,
discriminant_type_metadata: Option<DIType>,
span: Span,
}
impl<'tcx> VariantMemberDescriptionFactory<'tcx> {
fn create_member_descriptions<'a>(&self, cx: &CrateContext<'a, 'tcx>)
-> Vec<MemberDescription> {
self.args.iter().enumerate().map(|(i, &(ref name, ty))| {
MemberDescription {
name: name.to_string(),
llvm_type: type_of::type_of(cx, ty),
type_metadata: match self.discriminant_type_metadata {
Some(metadata) if i == 0 => metadata,
_ => type_metadata(cx, ty, self.span)
},
offset: ComputedMemberOffset,
flags: FLAGS_NONE
}
}).collect()
}
}
#[derive(Copy, Clone)]
enum EnumDiscriminantInfo {
RegularDiscriminant(DIType),
OptimizedDiscriminant,
NoDiscriminant
}
// Returns a tuple of (1) type_metadata_stub of the variant, (2) the llvm_type
// of the variant, and (3) a MemberDescriptionFactory for producing the
// descriptions of the fields of the variant. This is a rudimentary version of a
// full RecursiveTypeDescription.
fn describe_enum_variant<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
enum_type: Ty<'tcx>,
struct_def: &adt::Struct<'tcx>,
variant_info: &ty::VariantInfo<'tcx>,
discriminant_info: EnumDiscriminantInfo,
containing_scope: DIScope,
span: Span)
-> (DICompositeType, Type, MemberDescriptionFactory<'tcx>) {
let variant_llvm_type =
Type::struct_(cx, &struct_def.fields
.iter()
.map(|&t| type_of::type_of(cx, t))
.collect::<Vec<_>>()
,
struct_def.packed);
// Could do some consistency checks here: size, align, field count, discr type
let variant_name = token::get_name(variant_info.name);
let variant_name = &variant_name;
let unique_type_id = debug_context(cx).type_map
.borrow_mut()
.get_unique_type_id_of_enum_variant(
cx,
enum_type,
variant_name);
let metadata_stub = create_struct_stub(cx,
variant_llvm_type,
variant_name,
unique_type_id,
containing_scope);
// Get the argument names from the enum variant info
let mut arg_names: Vec<_> = match variant_info.arg_names {
Some(ref names) => {
names.iter()
.map(|&name| token::get_name(name).to_string())
.collect()
}
None => {
variant_info.args
.iter()
.enumerate()
.map(|(i, _)| format!("__{}", i))
.collect()
}
};
// If this is not a univariant enum, there is also the discriminant field.
match discriminant_info {
RegularDiscriminant(_) => arg_names.insert(0, "RUST$ENUM$DISR".to_string()),
_ => { /* do nothing */ }
};
// Build an array of (field name, field type) pairs to be captured in the factory closure.
let args: Vec<(String, Ty)> = arg_names.iter()
.zip(&struct_def.fields)
.map(|(s, &t)| (s.to_string(), t))
.collect();
let member_description_factory =
VariantMDF(VariantMemberDescriptionFactory {
args: args,
discriminant_type_metadata: match discriminant_info {
RegularDiscriminant(discriminant_type_metadata) => {
Some(discriminant_type_metadata)
}
_ => None
},
span: span,
});
(metadata_stub, variant_llvm_type, member_description_factory)
}
fn prepare_enum_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
enum_type: Ty<'tcx>,
enum_def_id: ast::DefId,
unique_type_id: UniqueTypeId,
span: Span)
-> RecursiveTypeDescription<'tcx> {
let enum_name = compute_debuginfo_type_name(cx, enum_type, false);
let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, enum_def_id);
let loc = span_start(cx, definition_span);
let file_metadata = file_metadata(cx, &loc.file.name);
let variants = cx.tcx().enum_variants(enum_def_id);
let enumerators_metadata: Vec<DIDescriptor> = variants
.iter()
.map(|v| {
let token = token::get_name(v.name);
let name = CString::new(token.as_bytes()).unwrap();
unsafe {
llvm::LLVMDIBuilderCreateEnumerator(
DIB(cx),
name.as_ptr(),
v.disr_val as u64)
}
})
.collect();
let discriminant_type_metadata = |inttype| {
// We can reuse the type of the discriminant for all monomorphized
// instances of an enum because it doesn't depend on any type
// parameters. The def_id, which uniquely identifies the enum's polytype,
// acts as the key in this cache.
let cached_discriminant_type_metadata = debug_context(cx).created_enum_disr_types
.borrow()
.get(&enum_def_id).cloned();
match cached_discriminant_type_metadata {
Some(discriminant_type_metadata) => discriminant_type_metadata,
None => {
let discriminant_llvm_type = adt::ll_inttype(cx, inttype);
let (discriminant_size, discriminant_align) =
size_and_align_of(cx, discriminant_llvm_type);
let discriminant_base_type_metadata =
type_metadata(cx,
adt::ty_of_inttype(cx.tcx(), inttype),
codemap::DUMMY_SP);
let discriminant_name = get_enum_discriminant_name(cx, enum_def_id);
let name = CString::new(discriminant_name.as_bytes()).unwrap();
let discriminant_type_metadata = unsafe {
llvm::LLVMDIBuilderCreateEnumerationType(
DIB(cx),
containing_scope,
name.as_ptr(),
UNKNOWN_FILE_METADATA,
UNKNOWN_LINE_NUMBER,
bytes_to_bits(discriminant_size),
bytes_to_bits(discriminant_align),
create_DIArray(DIB(cx), &enumerators_metadata),
discriminant_base_type_metadata)
};
debug_context(cx).created_enum_disr_types
.borrow_mut()
.insert(enum_def_id, discriminant_type_metadata);
discriminant_type_metadata
}
}
};
let type_rep = adt::represent_type(cx, enum_type);
let discriminant_type_metadata = match *type_rep {
adt::CEnum(inttype, _, _) => {
return FinalMetadata(discriminant_type_metadata(inttype))
},
adt::RawNullablePointer { .. } |
adt::StructWrappedNullablePointer { .. } |
adt::Univariant(..) => None,
adt::General(inttype, _, _) => Some(discriminant_type_metadata(inttype)),
};
let enum_llvm_type = type_of::type_of(cx, enum_type);
let (enum_type_size, enum_type_align) = size_and_align_of(cx, enum_llvm_type);
let unique_type_id_str = debug_context(cx)
.type_map
.borrow()
.get_unique_type_id_as_string(unique_type_id);
let enum_name = CString::new(enum_name).unwrap();
let unique_type_id_str = CString::new(unique_type_id_str.as_bytes()).unwrap();
let enum_metadata = unsafe {
llvm::LLVMDIBuilderCreateUnionType(
DIB(cx),
containing_scope,
enum_name.as_ptr(),
file_metadata,
UNKNOWN_LINE_NUMBER,
bytes_to_bits(enum_type_size),
bytes_to_bits(enum_type_align),
0, // Flags
ptr::null_mut(),
0, // RuntimeLang
unique_type_id_str.as_ptr())
};
return create_and_register_recursive_type_forward_declaration(
cx,
enum_type,
unique_type_id,
enum_metadata,
enum_llvm_type,
EnumMDF(EnumMemberDescriptionFactory {
enum_type: enum_type,
type_rep: type_rep.clone(),
variants: variants,
discriminant_type_metadata: discriminant_type_metadata,
containing_scope: containing_scope,
file_metadata: file_metadata,
span: span,
}),
);
fn get_enum_discriminant_name(cx: &CrateContext,
def_id: ast::DefId)
-> token::InternedString {
let name = if def_id.krate == ast::LOCAL_CRATE {
cx.tcx().map.get_path_elem(def_id.node).name()
} else {
csearch::get_item_path(cx.tcx(), def_id).last().unwrap().name()
};
token::get_name(name)
}
}
/// Creates debug information for a composite type, that is, anything that
/// results in an LLVM struct.
///
/// Examples of Rust types this is used for: structs, tuples, boxes, vecs, and enums.
fn composite_type_metadata(cx: &CrateContext,
composite_llvm_type: Type,
composite_type_name: &str,
composite_type_unique_id: UniqueTypeId,
member_descriptions: &[MemberDescription],
containing_scope: DIScope,
// Ignore source location information as long as it
// can't be reconstructed for non-local crates.
_file_metadata: DIFile,
_definition_span: Span)
-> DICompositeType {
// Create the (empty) struct metadata node ...
let composite_type_metadata = create_struct_stub(cx,
composite_llvm_type,
composite_type_name,
composite_type_unique_id,
containing_scope);
// ... and immediately create and add the member descriptions.
set_members_of_composite_type(cx,
composite_type_metadata,
composite_llvm_type,
member_descriptions);
return composite_type_metadata;
}
fn set_members_of_composite_type(cx: &CrateContext,
composite_type_metadata: DICompositeType,
composite_llvm_type: Type,
member_descriptions: &[MemberDescription]) {
// In some rare cases LLVM metadata uniquing would lead to an existing type
// description being used instead of a new one created in
// create_struct_stub. This would cause a hard-to-trace assertion in
// DICompositeType::SetTypeArray(). The following check makes sure that we
// get a better error message if this should happen again due to some
// regression.
{
let mut composite_types_completed =
debug_context(cx).composite_types_completed.borrow_mut();
if composite_types_completed.contains(&composite_type_metadata) {
cx.sess().bug("debuginfo::set_members_of_composite_type() - \
Already completed forward declaration re-encountered.");
} else {
composite_types_completed.insert(composite_type_metadata);
}
}
let member_metadata: Vec<DIDescriptor> = member_descriptions
.iter()
.enumerate()
.map(|(i, member_description)| {
let (member_size, member_align) = size_and_align_of(cx, member_description.llvm_type);
let member_offset = match member_description.offset {
FixedMemberOffset { bytes } => bytes as u64,
ComputedMemberOffset => machine::llelement_offset(cx, composite_llvm_type, i)
};
let member_name = member_description.name.as_bytes();
let member_name = CString::new(member_name).unwrap();
unsafe {
llvm::LLVMDIBuilderCreateMemberType(
DIB(cx),
composite_type_metadata,
member_name.as_ptr(),
UNKNOWN_FILE_METADATA,
UNKNOWN_LINE_NUMBER,
bytes_to_bits(member_size),
bytes_to_bits(member_align),
bytes_to_bits(member_offset),
member_description.flags,
member_description.type_metadata)
}
})
.collect();
unsafe {
let type_array = create_DIArray(DIB(cx), &member_metadata[..]);
llvm::LLVMDICompositeTypeSetTypeArray(DIB(cx), composite_type_metadata, type_array);
}
}
// A convenience wrapper around LLVMDIBuilderCreateStructType(). Does not do any
// caching, does not add any fields to the struct. This can be done later with
// set_members_of_composite_type().
fn create_struct_stub(cx: &CrateContext,
struct_llvm_type: Type,
struct_type_name: &str,
unique_type_id: UniqueTypeId,
containing_scope: DIScope)
-> DICompositeType {
let (struct_size, struct_align) = size_and_align_of(cx, struct_llvm_type);
let unique_type_id_str = debug_context(cx).type_map
.borrow()
.get_unique_type_id_as_string(unique_type_id);
let name = CString::new(struct_type_name).unwrap();
let unique_type_id = CString::new(unique_type_id_str.as_bytes()).unwrap();
let metadata_stub = unsafe {
// LLVMDIBuilderCreateStructType() wants an empty array. A null
// pointer will lead to hard-to-trace and hard-to-debug LLVM assertions
// later on in llvm/lib/IR/Value.cpp.
let empty_array = create_DIArray(DIB(cx), &[]);
llvm::LLVMDIBuilderCreateStructType(
DIB(cx),
containing_scope,
name.as_ptr(),
UNKNOWN_FILE_METADATA,
UNKNOWN_LINE_NUMBER,
bytes_to_bits(struct_size),
bytes_to_bits(struct_align),
0,
ptr::null_mut(),
empty_array,
0,
ptr::null_mut(),
unique_type_id.as_ptr())
};
return metadata_stub;
}
/// Creates debug information for the given global variable.
///
/// Adds the created metadata nodes directly to the crate's IR.
pub fn create_global_var_metadata(cx: &CrateContext,
node_id: ast::NodeId,
global: ValueRef) {
if cx.dbg_cx().is_none() {
return;
}
// Don't create debuginfo for globals inlined from other crates. The other
// crate should already contain debuginfo for it. More importantly, the
// global might not even exist in un-inlined form anywhere which would lead
// to linker errors.
if cx.external_srcs().borrow().contains_key(&node_id) {
return;
}
let var_item = cx.tcx().map.get(node_id);
let (name, span) = match var_item {
ast_map::NodeItem(item) => {
match item.node {
ast::ItemStatic(..) => (item.ident.name, item.span),
ast::ItemConst(..) => (item.ident.name, item.span),
_ => {
cx.sess()
.span_bug(item.span,
&format!("debuginfo::\
create_global_var_metadata() -
Captured var-id refers to \
unexpected ast_item variant: {:?}",
var_item))
}
}
},
_ => cx.sess().bug(&format!("debuginfo::create_global_var_metadata() \
- Captured var-id refers to unexpected \
ast_map variant: {:?}",
var_item))
};
let (file_metadata, line_number) = if span != codemap::DUMMY_SP {
let loc = span_start(cx, span);
(file_metadata(cx, &loc.file.name), loc.line as c_uint)
} else {
(UNKNOWN_FILE_METADATA, UNKNOWN_LINE_NUMBER)
};
let is_local_to_unit = is_node_local_to_unit(cx, node_id);
let variable_type = cx.tcx().node_id_to_type(node_id);
let type_metadata = type_metadata(cx, variable_type, span);
let namespace_node = namespace_for_item(cx, ast_util::local_def(node_id));
let var_name = token::get_name(name).to_string();
let linkage_name =
namespace_node.mangled_name_of_contained_item(&var_name[..]);
let var_scope = namespace_node.scope;
let var_name = CString::new(var_name).unwrap();
let linkage_name = CString::new(linkage_name).unwrap();
unsafe {
llvm::LLVMDIBuilderCreateStaticVariable(DIB(cx),
var_scope,
var_name.as_ptr(),
linkage_name.as_ptr(),
file_metadata,
line_number,
type_metadata,
is_local_to_unit,
global,
ptr::null_mut());
}
}
/// Creates debug information for the given local variable.
///
/// This function assumes that there's a datum for each pattern component of the
/// local in `bcx.fcx.lllocals`.
/// Adds the created metadata nodes directly to the crate's IR.
pub fn create_local_var_metadata(bcx: Block, local: &ast::Local) {
if bcx.unreachable.get() ||
fn_should_be_ignored(bcx.fcx) ||
bcx.sess().opts.debuginfo != FullDebugInfo {
return;
}
let cx = bcx.ccx();
let def_map = &cx.tcx().def_map;
let locals = bcx.fcx.lllocals.borrow();
pat_util::pat_bindings(def_map, &*local.pat, |_, node_id, span, var_ident| {
let datum = match locals.get(&node_id) {
Some(datum) => datum,
None => {
bcx.sess().span_bug(span,
&format!("no entry in lllocals table for {}",
node_id));
}
};
if unsafe { llvm::LLVMIsAAllocaInst(datum.val) } == ptr::null_mut() {
cx.sess().span_bug(span, "debuginfo::create_local_var_metadata() - \
Referenced variable location is not an alloca!");
}
let scope_metadata = scope_metadata(bcx.fcx, node_id, span);
declare_local(bcx,
var_ident.node.name,
datum.ty,
scope_metadata,
VariableAccess::DirectVariable { alloca: datum.val },
VariableKind::LocalVariable,
span);
})
}
/// Creates debug information for a variable captured in a closure.
///
/// Adds the created metadata nodes directly to the crate's IR.
pub fn create_captured_var_metadata<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
node_id: ast::NodeId,
env_pointer: ValueRef,
env_index: usize,
captured_by_ref: bool,
span: Span) {
if bcx.unreachable.get() ||
fn_should_be_ignored(bcx.fcx) ||
bcx.sess().opts.debuginfo != FullDebugInfo {
return;
}
let cx = bcx.ccx();
let ast_item = cx.tcx().map.find(node_id);
let variable_name = match ast_item {
None => {
cx.sess().span_bug(span, "debuginfo::create_captured_var_metadata: node not found");
}
Some(ast_map::NodeLocal(pat)) | Some(ast_map::NodeArg(pat)) => {
match pat.node {
ast::PatIdent(_, ref path1, _) => {
path1.node.name
}
_ => {
cx.sess()
.span_bug(span,
&format!(
"debuginfo::create_captured_var_metadata() - \
Captured var-id refers to unexpected \
ast_map variant: {:?}",
ast_item));
}
}
}
_ => {
cx.sess()
.span_bug(span,
&format!("debuginfo::create_captured_var_metadata() - \
Captured var-id refers to unexpected \
ast_map variant: {:?}",
ast_item));
}
};
let variable_type = common::node_id_type(bcx, node_id);
let scope_metadata = bcx.fcx.debug_context.get_ref(cx, span).fn_metadata;
// env_pointer is the alloca containing the pointer to the environment,
// so its type is **EnvironmentType. In order to find out the type of
// the environment we have to "dereference" two times.
let llvm_env_data_type = common::val_ty(env_pointer).element_type()
.element_type();
let byte_offset_of_var_in_env = machine::llelement_offset(cx,
llvm_env_data_type,
env_index);
let address_operations = unsafe {
[llvm::LLVMDIBuilderCreateOpDeref(),
llvm::LLVMDIBuilderCreateOpPlus(),
byte_offset_of_var_in_env as i64,
llvm::LLVMDIBuilderCreateOpDeref()]
};
let address_op_count = if captured_by_ref {
address_operations.len()
} else {
address_operations.len() - 1
};
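    // Worked example (editor's addition, not in the original source): with a
    // field offset of 8 bytes, a by-ref capture keeps the whole expression
    // [deref, plus 8, deref] -- follow the env pointer, step to the field,
    // then read through the `&T` stored there -- while a by-value capture
    // drops the trailing deref so the expression denotes the field itself.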
let variable_access = VariableAccess::IndirectVariable {
alloca: env_pointer,
address_operations: &address_operations[..address_op_count]
};
declare_local(bcx,
variable_name,
variable_type,
scope_metadata,
variable_access,
VariableKind::CapturedVariable,
span);
}
/// Creates debug information for a local variable introduced in the head of a
/// match-statement arm.
///
/// Adds the created metadata nodes directly to the crate's IR.
pub fn create_match_binding_metadata<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
variable_name: ast::Name,
binding: BindingInfo<'tcx>) {
if bcx.unreachable.get() ||
fn_should_be_ignored(bcx.fcx) ||
bcx.sess().opts.debuginfo != FullDebugInfo {
return;
}
let scope_metadata = scope_metadata(bcx.fcx, binding.id, binding.span);
let aops = unsafe {
[llvm::LLVMDIBuilderCreateOpDeref()]
};
// Regardless of the actual type (`T`) we're always passed the stack slot
// (alloca) for the binding. For ByRef bindings that's a `T*` but for ByMove
// bindings we actually have `T**`. So to get the actual variable we need to
// dereference once more. For ByCopy we just use the stack slot we created
// for the binding.
let var_access = match binding.trmode {
TrByCopy(llbinding) => VariableAccess::DirectVariable {
alloca: llbinding
},
TrByMove => VariableAccess::IndirectVariable {
alloca: binding.llmatch,
address_operations: &aops
},
TrByRef => VariableAccess::DirectVariable {
alloca: binding.llmatch
}
};
declare_local(bcx,
variable_name,
binding.ty,
scope_metadata,
var_access,
VariableKind::LocalVariable,
binding.span);
}
/// Creates debug information for the given function argument.
///
/// This function assumes that there's a datum for each pattern component of the
/// argument in `bcx.fcx.lllocals`.
/// Adds the created metadata nodes directly to the crate's IR.
pub fn create_argument_metadata(bcx: Block, arg: &ast::Arg) {
if bcx.unreachable.get() ||
fn_should_be_ignored(bcx.fcx) ||
bcx.sess().opts.debuginfo != FullDebugInfo {
return;
}
let def_map = &bcx.tcx().def_map;
let scope_metadata = bcx
.fcx
.debug_context
.get_ref(bcx.ccx(), arg.pat.span)
.fn_metadata;
let locals = bcx.fcx.lllocals.borrow();
pat_util::pat_bindings(def_map, &*arg.pat, |_, node_id, span, var_ident| {
let datum = match locals.get(&node_id) {
Some(v) => v,
None => {
bcx.sess().span_bug(span,
&format!("no entry in lllocals table for {}",
node_id));
}
};
if unsafe { llvm::LLVMIsAAllocaInst(datum.val) } == ptr::null_mut() {
bcx.sess().span_bug(span, "debuginfo::create_argument_metadata() - \
Referenced variable location is not an alloca!");
}
let argument_index = {
let counter = &bcx
.fcx
.debug_context
.get_ref(bcx.ccx(), span)
.argument_counter;
let argument_index = counter.get();
counter.set(argument_index + 1);
argument_index
};
declare_local(bcx,
var_ident.node.name,
datum.ty,
scope_metadata,
VariableAccess::DirectVariable { alloca: datum.val },
VariableKind::ArgumentVariable(argument_index),
span);
})
}<๏ฝfimโend๏ฝ> | let error_message = format!("Mismatch between Ty and \ |
<|file_name|>test_person_misc_api.py<|end_file_name|><๏ฝfimโbegin๏ฝ>__author__ = 'sweemeng'
from rest_framework import status
from popit.signals.handlers import *
from popit.models import *
from popit.tests.base_testcase import BasePopitAPITestCase
class PersonLinkAPITestCase(BasePopitAPITestCase):
def test_view_person_link_list_unauthorized(self):
response = self.client.get("/en/persons/ab1a5788e5bae955c048748fa6af0e97/links/")
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_view_person_link_list_authorized(self):
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.get("/en/persons/ab1a5788e5bae955c048748fa6af0e97/links/")
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_view_person_link_details_unauthorized(self):
response = self.client.get("/en/persons/ab1a5788e5bae955c048748fa6af0e97/links/a4ffa24a9ef3cbcb8cfaa178c9329367/")
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_view_person_link_details_authorized(self):
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.get("/en/persons/ab1a5788e5bae955c048748fa6af0e97/links/a4ffa24a9ef3cbcb8cfaa178c9329367/")
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_view_person_link_details_not_exist_unauthorized(self):
response = self.client.get("/en/persons/ab1a5788e5bae955c048748fa6af0e97/links/not_exist/")
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_view_person_link_details_not_exist_authorized(self):
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.get("/en/persons/ab1a5788e5bae955c048748fa6af0e97/links/not_exist/")
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_create_person_links_unauthorized(self):
data = {
"url": "http://twitter.com/sweemeng",
}
response = self.client.post("/en/persons/ab1a5788e5bae955c048748fa6af0e97/links/", data)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_create_person_links_authorized(self):
data = {
"url": "http://twitter.com/sweemeng",
}
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.post("/en/persons/ab1a5788e5bae955c048748fa6af0e97/links/", data)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
person_ = Person.objects.language('en').get(id='ab1a5788e5bae955c048748fa6af0e97')
url = person_.links.language("en").get(url="http://twitter.com/sweemeng")
self.assertEqual(url.url, "http://twitter.com/sweemeng")
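    # Editor's note: the token handshake above repeats in every "authorized"
    # test; a small helper like this hypothetical sketch would remove the
    # duplication:
    #
    #     def authenticate(self, username="admin"):
    #         token = Token.objects.get(user__username=username)
    #         self.client.credentials(HTTP_AUTHORIZATION="Token " + token.key)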
def test_update_person_links_unauthorized(self):
data = {
"note": "just a random repo"
}
response = self.client.put(
"/en/persons/ab1a5788e5bae955c048748fa6af0e97/links/a4ffa24a9ef3cbcb8cfaa178c9329367/",
data
)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_update_person_links_not_exist_unauthorized(self):
data = {
"note": "just a random repo"
}
response = self.client.put(
"/en/persons/ab1a5788e5bae955c048748fa6af0e97/links/not_exist/",
data
)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_update_person_links_authorized(self):
data = {
"note": "just a random repo"
}
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.put(
"/en/persons/ab1a5788e5bae955c048748fa6af0e97/links/a4ffa24a9ef3cbcb8cfaa178c9329367/",
data
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
person = Person.objects.language('en').get(id='ab1a5788e5bae955c048748fa6af0e97')
url = person.links.language("en").get(id="a4ffa24a9ef3cbcb8cfaa178c9329367")
self.assertEqual(url.note, "just a random repo")
def test_update_person_links_not_exist_authorized(self):
data = {
"note": "just a random repo"
}
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.put(
"/en/persons/ab1a5788e5bae955c048748fa6af0e97/links/not_exist/",
data
)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_delete_person_links_unauthorized(self):
response = self.client.delete("/en/persons/ab1a5788e5bae955c048748fa6af0e97/links/a4ffa24a9ef3cbcb8cfaa178c9329367/")
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_delete_person_links_not_exist_unauthorized(self):
response = self.client.delete("/en/persons/ab1a5788e5bae955c048748fa6af0e97/links/not_exist/")
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_delete_person_links_authorized(self):
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.delete("/en/persons/ab1a5788e5bae955c048748fa6af0e97/links/a4ffa24a9ef3cbcb8cfaa178c9329367/")
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
def test_delete_person_links_not_exist_authorized(self):
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.delete("/en/persons/ab1a5788e5bae955c048748fa6af0e97/links/not_exist/")
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
class PersonOtherNameAPITestCase(BasePopitAPITestCase):
def test_view_person_othername_list_unauthorized(self):
response = self.client.get("/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/")
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_view_person_othername_list_authorized(self):
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.get("/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/")
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_view_person_othername_details_unauthorized(self):
response = self.client.get(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/cf93e73f-91b6-4fad-bf76-0782c80297a8/"
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_view_person_othername_details_not_exist_unauthorized(self):
response = self.client.get(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/not_exist/"
)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_view_person_othername_details_authorized(self):
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.get(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/cf93e73f-91b6-4fad-bf76-0782c80297a8/"
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_view_person_othername_details_not_exist_authorized(self):
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.get(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/not_exist/"
)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_create_person_othername_unauthorized(self):
data = {
"name": "jane",
"family_name": "jambul",
"given_name": "test person",
"start_date": "1950-01-01",
"end_date": "2010-01-01",
}
response = self.client.post(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/", data
)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_create_person_othername_authorized(self):
data = {
"name": "jane",
"family_name": "jambul",
"given_name": "test person",
"start_date": "1950-01-01",
"end_date": "2010-01-01",
}
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.post(
"/en/persons/ab1a5788e5bae955c048748fa6af0e97/othernames/", data
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
person_ = Person.objects.language('en').get(id='ab1a5788e5bae955c048748fa6af0e97')
other_name = person_.other_names.language('en').get(name="jane")
self.assertEqual(other_name.given_name, "test person")
def test_update_person_othername_unauthorized(self):
data = {
"family_name": "jambul",
}
person = Person.objects.language('en').get(id='8497ba86-7485-42d2-9596-2ab14520f1f4')
other_name = person.other_names.language('en').get(id="cf93e73f-91b6-4fad-bf76-0782c80297a8")
response = self.client.put(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/cf93e73f-91b6-4fad-bf76-0782c80297a8/",
data
)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_update_person_othername_not_exist_unauthorized(self):
data = {
"family_name": "jambul",
}
response = self.client.put(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/not_exist/",
data
)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_update_person_othername_authorized(self):
data = {
"family_name": "jambul",
}
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.put(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/cf93e73f-91b6-4fad-bf76-0782c80297a8/",
data
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
person = Person.objects.language('en').get(id='8497ba86-7485-42d2-9596-2ab14520f1f4')
other_name = person.other_names.language('en').get(id="cf93e73f-91b6-4fad-bf76-0782c80297a8")
self.assertEqual(other_name.family_name, "jambul")
def test_update_person_othername_not_exist_authorized(self):
data = {
"family_name": "jambul",
}
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.put(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/not_exist/",
data
)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_delete_person_othername_unauthorized(self):
response = self.client.delete(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/cf93e73f-91b6-4fad-bf76-0782c80297a8/"
)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_delete_person_othername_not_exist_unauthorized(self):
response = self.client.delete(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/not_exist/"
)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_delete_person_othername_authorized(self):
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.delete(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/cf93e73f-91b6-4fad-bf76-0782c80297a8/"
)
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
def test_delete_person_othername_not_exist_authorized(self):
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.delete(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/not_exist/"
)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
class PersonIdentifierLinkAPITestCase(BasePopitAPITestCase):
def test_get_person_identifier_link_list_unauthorized(self):
# identifier af7c01b5-1c4f-4c08-9174-3de5ff270bdb
# link 9c9a2093-c3eb-4b51-b869-0d3b4ab281fd
# person 8497ba86-7485-42d2-9596-2ab14520f1f4
response = self.client.get(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/identifiers/af7c01b5-1c4f-4c08-9174-3de5ff270bdb/links/"
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
data = response.data["results"][0]
self.assertEqual(data["url"], "http://github.com/sinarproject/")
def test_get_person_identifier_link_list_authorized(self):
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.get(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/identifiers/af7c01b5-1c4f-4c08-9174-3de5ff270bdb/links/"
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
data = response.data["results"][0]
self.assertEqual(data["url"], "http://github.com/sinarproject/")
def test_get_person_identifier_link_detail_unauthorized(self):
response = self.client.get(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/identifiers/af7c01b5-1c4f-4c08-9174-3de5ff270bdb/links/9c9a2093-c3eb-4b51-b869-0d3b4ab281fd/"
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data["results"]["url"], "http://github.com/sinarproject/")
def test_get_person_identifier_link_detail_not_exist_unauthorized(self):
response = self.client.get(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/identifiers/af7c01b5-1c4f-4c08-9174-3de5ff270bdb/links/not_exist/"
)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_get_person_identifier_link_detail_authorized(self):
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.get(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/identifiers/af7c01b5-1c4f-4c08-9174-3de5ff270bdb/links/9c9a2093-c3eb-4b51-b869-0d3b4ab281fd/"
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data["results"]["url"], "http://github.com/sinarproject/")
def test_get_person_identifier_link_detail_not_exist_authorized(self):
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.get(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/identifiers/af7c01b5-1c4f-4c08-9174-3de5ff270bdb/links/not_exist/"
)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_create_person_identifier_link_unauthorized(self):
data = {
"url": "http://twitter.com/sinarproject"
}
response = self.client.post(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/identifiers/af7c01b5-1c4f-4c08-9174-3de5ff270bdb/links/",
data
)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_create_person_identifier_link_authorized(self):
data = {
"url": "http://twitter.com/sinarproject"
}
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.post(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/identifiers/af7c01b5-1c4f-4c08-9174-3de5ff270bdb/links/",
data
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
person = Person.objects.language("en").get(id="8497ba86-7485-42d2-9596-2ab14520f1f4")
identifier = person.identifiers.language("en").get(id="af7c01b5-1c4f-4c08-9174-3de5ff270bdb")
link = identifier.links.language("en").get(url="http://twitter.com/sinarproject")
self.assertEqual(link.url, "http://twitter.com/sinarproject")
def test_update_person_identifier_link_unauthorized(self):
data = {
"note":"This is a nested link"
}
response = self.client.put(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/identifiers/af7c01b5-1c4f-4c08-9174-3de5ff270bdb/links/9c9a2093-c3eb-4b51-b869-0d3b4ab281fd/",
data
)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_update_person_identifier_link_not_exist_unauthorized(self):
data = {
"note":"This is a nested link"
}
response = self.client.put(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/identifiers/af7c01b5-1c4f-4c08-9174-3de5ff270bdb/links/not_exist/",
data
)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_update_person_identifier_link_authorized(self):
data = {
"note":"This is a nested link"
}
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.put(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/identifiers/af7c01b5-1c4f-4c08-9174-3de5ff270bdb/links/9c9a2093-c3eb-4b51-b869-0d3b4ab281fd/",
data
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
# 9c9a2093-c3eb-4b51-b869-0d3b4ab281fd
person = Person.objects.language("en").get(id="8497ba86-7485-42d2-9596-2ab14520f1f4")
identifier = person.identifiers.language("en").get(id="af7c01b5-1c4f-4c08-9174-3de5ff270bdb")
link = identifier.links.language("en").get(id="9c9a2093-c3eb-4b51-b869-0d3b4ab281fd")
self.assertEqual(link.note, "This is a nested link")
def test_update_person_identifier_link_not_exist_authorized(self):
data = {
"note":"This is a nested link"
}
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.put(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/identifiers/af7c01b5-1c4f-4c08-9174-3de5ff270bdb/links/not_exist/",
data
)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_delete_person_identifier_link_unauthorized(self):
response = self.client.delete(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/identifiers/af7c01b5-1c4f-4c08-9174-3de5ff270bdb/links/9c9a2093-c3eb-4b51-b869-0d3b4ab281fd/"
)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_delete_person_identifier_link_not_exist_unauthorized(self):
response = self.client.delete(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/identifiers/af7c01b5-1c4f-4c08-9174-3de5ff270bdb/links/not_exist/"
)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_delete_person_identifier_link_authorized(self):
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.delete(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/identifiers/af7c01b5-1c4f-4c08-9174-3de5ff270bdb/links/9c9a2093-c3eb-4b51-b869-0d3b4ab281fd/"
)
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
def test_delete_person_identifier_link_not_exist_authorized(self):
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.delete(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/identifiers/af7c01b5-1c4f-4c08-9174-3de5ff270bdb/links/not_exist/"
)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
# We are going to reuse the existing serializer.
class PersonOtherNameLinkAPITestCase(BasePopitAPITestCase):
def test_list_person_othername_link(self):
response = self.client.get(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/cf93e73f-91b6-4fad-bf76-0782c80297a8/links/"
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_show_person_othername_link_detail_not_exist(self):
response = self.client.get(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/cf93e73f-91b6-4fad-bf76-0782c80297a8/links/not_exist/"
)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_show_person_othername_link_detail(self):
response = self.client.get(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/cf93e73f-91b6-4fad-bf76-0782c80297a8/links/4d8d71c4-20ea-4ed1-ae38-4b7d7550cdf6/"
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_create_person_othername_link_unauthorized(self):
data = {<๏ฝfimโhole๏ฝ> }
response = self.client.post(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/cf93e73f-91b6-4fad-bf76-0782c80297a8/links/",
data
)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_create_person_othername_link_authorized(self):
data = {
"url": "http://github.com/sinar"
}
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.post(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/cf93e73f-91b6-4fad-bf76-0782c80297a8/links/",
data
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
def test_update_person_othername_link_not_exist_unauthorized(self):
data = {
"note": "Just a link"
}
response = self.client.put(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/cf93e73f-91b6-4fad-bf76-0782c80297a8/links/not_exist/",
data
)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_update_person_othername_link_not_exist_authorized(self):
data = {
"note": "Just a link"
}
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.put(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/cf93e73f-91b6-4fad-bf76-0782c80297a8/links/not_exist/",
data
)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_update_person_othername_link_unauthorized(self):
data = {
"note": "Just a link"
}
response = self.client.put(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/cf93e73f-91b6-4fad-bf76-0782c80297a8/links/4d8d71c4-20ea-4ed1-ae38-4b7d7550cdf6/",
data
)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_update_person_othername_link_authorized(self):
data = {
"note": "Just a link"
}
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.put(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/cf93e73f-91b6-4fad-bf76-0782c80297a8/links/4d8d71c4-20ea-4ed1-ae38-4b7d7550cdf6/",
data
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_delete_person_othername_link_not_exist_unauthorized(self):
response = self.client.delete(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/cf93e73f-91b6-4fad-bf76-0782c80297a8/links/not_exist/"
)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_delete_person_othername_link_not_exist_authorized(self):
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.delete(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/cf93e73f-91b6-4fad-bf76-0782c80297a8/links/not_exist/"
)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_delete_person_othername_link_unauthorized(self):
response = self.client.delete(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/cf93e73f-91b6-4fad-bf76-0782c80297a8/links/4d8d71c4-20ea-4ed1-ae38-4b7d7550cdf6/"
)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_delete_person_othername_link_authorized(self):
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.delete(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/othernames/cf93e73f-91b6-4fad-bf76-0782c80297a8/links/4d8d71c4-20ea-4ed1-ae38-4b7d7550cdf6/"
)
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
class PersonContactLinkAPITestCase(BasePopitAPITestCase):
def test_list_person_contact_link(self):
response = self.client.get(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/contact_details/2256ec04-2d1d-4994-b1f1-16d3f5245441/links/"
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_show_person_contact_link_not_exist(self):
response = self.client.get(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/contact_details/2256ec04-2d1d-4994-b1f1-16d3f5245441/links/not_exist/"
)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_show_person_contact_link(self):
response = self.client.get(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/contact_details/2256ec04-2d1d-4994-b1f1-16d3f5245441/links/6d0afb46-67d4-4708-87c4-4d51ce99767e/"
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_create_person_contact_link_unauthorized(self):
data = {
"url": "http://github.com/sinar"
}
response = self.client.post(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/contact_details/2256ec04-2d1d-4994-b1f1-16d3f5245441/links/",
data
)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_create_person_contact_link_authorized(self):
data = {
"url": "http://github.com/sinar"
}
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.post(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/contact_details/2256ec04-2d1d-4994-b1f1-16d3f5245441/links/",
data
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
def test_update_person_contact_link_not_exist_unauthorized(self):
data = {
"note": "Just a link"
}
response = self.client.put(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/contact_details/2256ec04-2d1d-4994-b1f1-16d3f5245441/links/not_exist/",
data
)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_update_person_contact_link_not_exist_authorized(self):
data = {
"note": "Just a link"
}
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.put(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/contact_details/2256ec04-2d1d-4994-b1f1-16d3f5245441/links/not_exist/",
data
)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_update_person_contact_link_unauthorized(self):
data = {
"note": "Just a link"
}
response = self.client.put(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/contact_details/2256ec04-2d1d-4994-b1f1-16d3f5245441/links/6d0afb46-67d4-4708-87c4-4d51ce99767e/",
data
)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_update_person_contact_link_authorized(self):
data = {
"note": "Just a link"
}
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.put(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/contact_details/2256ec04-2d1d-4994-b1f1-16d3f5245441/links/6d0afb46-67d4-4708-87c4-4d51ce99767e/",
data
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_delete_person_contact_link_not_exist_unauthorized(self):
response = self.client.delete(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/contact_details/2256ec04-2d1d-4994-b1f1-16d3f5245441/links/not_exist/"
)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_delete_person_contact_link_not_exist_authorized(self):
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.delete(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/contact_details/2256ec04-2d1d-4994-b1f1-16d3f5245441/links/not_exist/"
)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_delete_person_contact_link_unauthorized(self):
response = self.client.delete(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/contact_details/2256ec04-2d1d-4994-b1f1-16d3f5245441/links/6d0afb46-67d4-4708-87c4-4d51ce99767e/"
)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_delete_person_contact_link_authorized(self):
token = Token.objects.get(user__username="admin")
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.delete(
"/en/persons/8497ba86-7485-42d2-9596-2ab14520f1f4/contact_details/2256ec04-2d1d-4994-b1f1-16d3f5245441/links/6d0afb46-67d4-4708-87c4-4d51ce99767e/"
)
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)<๏ฝfimโend๏ฝ> | "url": "http://github.com/sinar" |
<|file_name|>pipeline.py<|end_file_name|><๏ฝfimโbegin๏ฝ>from accounts.models import Practice
def create_practice(request, strategy, backend, uid, response={}, details={}, user=None, social=None, *args, **kwargs):
"""
    If the user already has a practice, skip; otherwise create a new one.<๏ฝfimโhole๏ฝ>    """
practice, created = Practice.objects.update_or_create(user=user)
return None<๏ฝfimโend๏ฝ> | |
<|file_name|>ease.js<|end_file_name|><๏ฝfimโbegin๏ฝ>import "../arrays/map";
import "../core/array";
import "../core/identity";
import "../math/trigonometry";
var d3_ease_default = function() { return d3_identity; };
var d3_ease = d3.map({
linear: d3_ease_default,
poly: d3_ease_poly,
quad: function() { return d3_ease_quad; },
cubic: function() { return d3_ease_cubic; },
sin: function() { return d3_ease_sin; },
exp: function() { return d3_ease_exp; },
circle: function() { return d3_ease_circle; },
elastic: d3_ease_elastic,
back: d3_ease_back,
bounce: function() { return d3_ease_bounce; }
});
var d3_ease_mode = d3.map({
"in": d3_identity,
"out": d3_ease_reverse,
"in-out": d3_ease_reflect,
"out-in": function(f) { return d3_ease_reflect(d3_ease_reverse(f)); }
});
d3.ease = function(name) {
var i = name.indexOf("-"),
t = i >= 0 ? name.slice(0, i) : name,
m = i >= 0 ? name.slice(i + 1) : "in";
t = d3_ease.get(t) || d3_ease_default;
m = d3_ease_mode.get(m) || d3_identity;
return d3_ease_clamp(m(t.apply(null, d3_arraySlice.call(arguments, 1))));
};
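// Illustrative usage (an editor's addition, not part of the original source):
// the name splits on the first "-" into an easing type and a mode, and any
// extra arguments are forwarded to the type factory, e.g.
//   var e = d3.ease("elastic-out", 1.2, 0.3); // elastic type, "out" mode
//   e(0.5); // the eased value at the halfway point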
function d3_ease_clamp(f) {
return function(t) {
return t <= 0 ? 0 : t >= 1 ? 1 : f(t);
};
}
function d3_ease_reverse(f) {
return function(t) {
return 1 - f(1 - t);
};
}
function d3_ease_reflect(f) {
return function(t) {
return 0.5 * (t < 0.5 ? f(2 * t) : (2 - f(2 - 2 * t)));
};
}
function d3_ease_quad(t) {
return t * t;
}
function d3_ease_cubic(t) {
return t * t * t;
}
// Optimized clamp(reflect(poly(3))).
function d3_ease_cubicInOut(t) {
if (t <= 0) return 0;
if (t >= 1) return 1;
var t2 = t * t, t3 = t2 * t;
return 4 * (t < 0.5 ? t3 : 3 * (t - t2) + t3 - 0.75);
}
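// Sanity sketch (editor's addition): the hand-optimized version above should
// agree with the generic composition it replaces at every t in [0, 1].
function d3_ease_cubicInOutReference(t) {
  return d3_ease_clamp(d3_ease_reflect(d3_ease_poly(3)))(t);
}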
function d3_ease_poly(e) {
return function(t) {
return Math.pow(t, e);
};
}
function d3_ease_sin(t) {
return 1 - Math.cos(t * halfฯ);
}
function d3_ease_exp(t) {
return Math.pow(2, 10 * (t - 1));
}
function d3_ease_circle(t) {
return 1 - Math.sqrt(1 - t * t);
}<๏ฝfimโhole๏ฝ> var s;
if (arguments.length < 2) p = 0.45;
if (arguments.length) s = p / ฯ * Math.asin(1 / a);
else a = 1, s = p / 4;
return function(t) {
return 1 + a * Math.pow(2, -10 * t) * Math.sin((t - s) * ฯ / p);
};
}
function d3_ease_back(s) {
if (!s) s = 1.70158;
return function(t) {
return t * t * ((s + 1) * t - s);
};
}
function d3_ease_bounce(t) {
return t < 1 / 2.75 ? 7.5625 * t * t
: t < 2 / 2.75 ? 7.5625 * (t -= 1.5 / 2.75) * t + 0.75
: t < 2.5 / 2.75 ? 7.5625 * (t -= 2.25 / 2.75) * t + 0.9375
: 7.5625 * (t -= 2.625 / 2.75) * t + 0.984375;
}<๏ฝfimโend๏ฝ> |
function d3_ease_elastic(a, p) { |
<|file_name|>post_remote_api_j_account_fetch_payment_methods_id_responses.go<|end_file_name|><๏ฝfimโbegin๏ฝ>package j_account
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"fmt"
"io"<๏ฝfimโhole๏ฝ>
strfmt "github.com/go-openapi/strfmt"
"koding/remoteapi/models"
)
// PostRemoteAPIJAccountFetchPaymentMethodsIDReader is a Reader for the PostRemoteAPIJAccountFetchPaymentMethodsID structure.
type PostRemoteAPIJAccountFetchPaymentMethodsIDReader struct {
formats strfmt.Registry
}
// ReadResponse reads a server response into the received o.
func (o *PostRemoteAPIJAccountFetchPaymentMethodsIDReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {
switch response.Code() {
case 200:
result := NewPostRemoteAPIJAccountFetchPaymentMethodsIDOK()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return result, nil
default:
return nil, runtime.NewAPIError("unknown error", response, response.Code())
}
}
// NewPostRemoteAPIJAccountFetchPaymentMethodsIDOK creates a PostRemoteAPIJAccountFetchPaymentMethodsIDOK with default headers values
func NewPostRemoteAPIJAccountFetchPaymentMethodsIDOK() *PostRemoteAPIJAccountFetchPaymentMethodsIDOK {
return &PostRemoteAPIJAccountFetchPaymentMethodsIDOK{}
}
/*PostRemoteAPIJAccountFetchPaymentMethodsIDOK handles this case with default header values.
OK
*/
type PostRemoteAPIJAccountFetchPaymentMethodsIDOK struct {
Payload PostRemoteAPIJAccountFetchPaymentMethodsIDOKBody
}
func (o *PostRemoteAPIJAccountFetchPaymentMethodsIDOK) Error() string {
return fmt.Sprintf("[POST /remote.api/JAccount.fetchPaymentMethods/{id}][%d] postRemoteApiJAccountFetchPaymentMethodsIdOK %+v", 200, o.Payload)
}
func (o *PostRemoteAPIJAccountFetchPaymentMethodsIDOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
// response payload
if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
/*PostRemoteAPIJAccountFetchPaymentMethodsIDOKBody post remote API j account fetch payment methods ID o k body
swagger:model PostRemoteAPIJAccountFetchPaymentMethodsIDOKBody
*/
type PostRemoteAPIJAccountFetchPaymentMethodsIDOKBody struct {
models.JAccount
models.DefaultResponse
}
// UnmarshalJSON unmarshals this object from a JSON structure
func (o *PostRemoteAPIJAccountFetchPaymentMethodsIDOKBody) UnmarshalJSON(raw []byte) error {
var postRemoteAPIJAccountFetchPaymentMethodsIDOKBodyAO0 models.JAccount
if err := swag.ReadJSON(raw, &postRemoteAPIJAccountFetchPaymentMethodsIDOKBodyAO0); err != nil {
return err
}
o.JAccount = postRemoteAPIJAccountFetchPaymentMethodsIDOKBodyAO0
var postRemoteAPIJAccountFetchPaymentMethodsIDOKBodyAO1 models.DefaultResponse
if err := swag.ReadJSON(raw, &postRemoteAPIJAccountFetchPaymentMethodsIDOKBodyAO1); err != nil {
return err
}
o.DefaultResponse = postRemoteAPIJAccountFetchPaymentMethodsIDOKBodyAO1
return nil
}
// MarshalJSON marshals this object to a JSON structure
func (o PostRemoteAPIJAccountFetchPaymentMethodsIDOKBody) MarshalJSON() ([]byte, error) {
var _parts [][]byte
postRemoteAPIJAccountFetchPaymentMethodsIDOKBodyAO0, err := swag.WriteJSON(o.JAccount)
if err != nil {
return nil, err
}
_parts = append(_parts, postRemoteAPIJAccountFetchPaymentMethodsIDOKBodyAO0)
postRemoteAPIJAccountFetchPaymentMethodsIDOKBodyAO1, err := swag.WriteJSON(o.DefaultResponse)
if err != nil {
return nil, err
}
_parts = append(_parts, postRemoteAPIJAccountFetchPaymentMethodsIDOKBodyAO1)
return swag.ConcatJSON(_parts...), nil
}
// Validate validates this post remote API j account fetch payment methods ID o k body
func (o *PostRemoteAPIJAccountFetchPaymentMethodsIDOKBody) Validate(formats strfmt.Registry) error {
var res []error
if err := o.JAccount.Validate(formats); err != nil {
res = append(res, err)
}
if err := o.DefaultResponse.Validate(formats); err != nil {
res = append(res, err)
}
if len(res) > 0 {
return errors.CompositeValidationError(res...)
}
return nil
}<๏ฝfimโend๏ฝ> |
"github.com/go-openapi/errors"
"github.com/go-openapi/runtime"
"github.com/go-openapi/swag" |
<|file_name|>SpecialCells.cpp<|end_file_name|><๏ฝfimโbegin๏ฝ>// BEGIN CUT HERE
// END CUT HERE
#include <sstream>
#include <cstdio>
#include <cstdlib>
#include <iostream>
#include <cstring>
#include <algorithm>
#include <cmath>
#include <vector>
#include <map>
#include <string>
#include <set>
#include <algorithm>
using namespace std;
const int V = 64;
const int E = V * V * 2;
const int INF = 1 << 29;
int d[V], how[V], eCapacity[E], eU[E], eV[E], eCost[E];
int eIndex = 0;
void addEdge(int u, int v, int capacity, int cost) {
eU[eIndex] = u, eV[eIndex] = v, eCapacity[eIndex] = capacity, eCost[eIndex++] = cost;<๏ฝfimโhole๏ฝ> int flow = 0, cost = 0;
for (;;) {
for (int i = 0; i < n; i++) {
d[i] = INF;
}
d[s] = 0;
for(;;) {
bool done = true;
for (int e = 0; e < eIndex; e++) {
if (eCapacity[e] > 0) {
int u = eU[e], v = eV[e], cost = eCost[e];
if (d[v] > d[u] + cost) {
d[v] = d[u] + cost;
how[v] = e;
done = false;
}
}
}
if (done) {
break;
}
}
if (d[t] >= INF / 2) {
break;
}
int augment = INF;
for (int v = t; v != s; v = eU[how[v]]) {
augment = min(augment, eCapacity[how[v]]);
}
for (int v = t; v != s; v = eU[how[v]]) {
int e = how[v];
eCapacity[e] -= augment;
eCapacity[e ^ 1] += augment;
}
flow += augment;
cost += d[t] * augment;
}
pair <int, int> ret = make_pair(cost, flow);
return ret;
}
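// Usage sketch (editor's addition, not part of the original solution): with
// vertices {0, s = 1, t = 2} and unit-capacity arcs
//   addEdge(1, 0, 1, 5); addEdge(0, 2, 1, 5);
// minCostMaxFlow(3, 1, 2) yields make_pair(10, 1): one unit of flow at total
// cost 10. addEdge also records the reverse residual arc, which the e ^ 1
// pairing above depends on.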
class SpecialCells
{
public:
int guess(vector <int> x, vector <int> y) {
eIndex = 0;
map <int, int> xMap, yMap;
set <pair <int, int> > pairSet;
int n = x.size();
for (int i = 0; i < n; i++) {
xMap[x[i]]++;
yMap[y[i]]++;
pairSet.insert(make_pair(x[i], y[i]));
}
        int graphVertexNumber = xMap.size() + yMap.size() + 2;
        int s = graphVertexNumber - 2, t = graphVertexNumber - 1, xIndex = 0, yIndex = xMap.size();
for (map <int, int> :: iterator it = xMap.begin(); it != xMap.end(); it++, xIndex++) {
addEdge(s, xIndex, it->second, 0);
}
for (map <int, int> :: iterator it = yMap.begin(); it != yMap.end(); it++, yIndex++) {
addEdge(yIndex, t, it->second, 0);
}
xIndex = 0;
for (map <int, int> :: iterator it = xMap.begin(); it != xMap.end(); it++, xIndex++) {
yIndex = xMap.size();
for (map <int, int> :: iterator jt = yMap.begin(); jt != yMap.end(); jt++, yIndex++) {
int cost = pairSet.find(make_pair(it->first, jt->first)) == pairSet.end() ? 0 : 1;
addEdge(xIndex, yIndex, 1, cost);
}
}
        pair <int, int> mcmf = minCostMaxFlow(graphVertexNumber, s, t);
int ret = mcmf.first;
return ret;
}
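    // Modeling note (editor's addition): distinct x and y values form the two
    // sides of a bipartite graph; the source feeds each x its multiplicity,
    // each y drains its own, and a unit (x, y) edge costs 1 exactly when
    // (x, y) is one of the given cells, so the min-cost max-flow counts the
    // fewest given cells any consistent arrangement must reuse.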
// BEGIN CUT HERE
public:
void run_test(int Case) { if ((Case == -1) || (Case == 0)) test_case_0(); if ((Case == -1) || (Case == 1)) test_case_1(); if ((Case == -1) || (Case == 2)) test_case_2(); if ((Case == -1) || (Case == 3)) test_case_3(); if ((Case == -1) || (Case == 4)) test_case_4(); }
private:
template <typename T> string print_array(const vector<T> &V) { ostringstream os; os << "{ "; for (typename vector<T>::const_iterator iter = V.begin(); iter != V.end(); ++iter) os << '\"' << *iter << "\","; os << " }"; return os.str(); }
void verify_case(int Case, const int &Expected, const int &Received) { cerr << "Test Case #" << Case << "..."; if (Expected == Received) cerr << "PASSED" << endl; else { cerr << "FAILED" << endl; cerr << "\tExpected: \"" << Expected << '\"' << endl; cerr << "\tReceived: \"" << Received << '\"' << endl; } }
void test_case_0() { int Arr0[] = {1,2}; vector <int> Arg0(Arr0, Arr0 + (sizeof(Arr0) / sizeof(Arr0[0]))); int Arr1[] = {1,2}; vector <int> Arg1(Arr1, Arr1 + (sizeof(Arr1) / sizeof(Arr1[0]))); int Arg2 = 0; verify_case(0, Arg2, guess(Arg0, Arg1)); }
void test_case_1() { int Arr0[] = {1,1,2}; vector <int> Arg0(Arr0, Arr0 + (sizeof(Arr0) / sizeof(Arr0[0]))); int Arr1[] = {1,2,1}; vector <int> Arg1(Arr1, Arr1 + (sizeof(Arr1) / sizeof(Arr1[0]))); int Arg2 = 3; verify_case(1, Arg2, guess(Arg0, Arg1)); }
void test_case_2() { int Arr0[] = {1,2,1,2,1,2}; vector <int> Arg0(Arr0, Arr0 + (sizeof(Arr0) / sizeof(Arr0[0]))); int Arr1[] = {1,2,3,1,2,3}; vector <int> Arg1(Arr1, Arr1 + (sizeof(Arr1) / sizeof(Arr1[0]))); int Arg2 = 6; verify_case(2, Arg2, guess(Arg0, Arg1)); }
void test_case_3() { int Arr0[] = {1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9}; vector <int> Arg0(Arr0, Arr0 + (sizeof(Arr0) / sizeof(Arr0[0]))); int Arr1[] = {1,2,3,1,2,3,1,2,3,1,2,3,1,2,3,1,2,3}; vector <int> Arg1(Arr1, Arr1 + (sizeof(Arr1) / sizeof(Arr1[0]))); int Arg2 = 9; verify_case(3, Arg2, guess(Arg0, Arg1)); }
void test_case_4() { int Arr0[] = {1,100000}; vector <int> Arg0(Arr0, Arr0 + (sizeof(Arr0) / sizeof(Arr0[0]))); int Arr1[] = {1,100000}; vector <int> Arg1(Arr1, Arr1 + (sizeof(Arr1) / sizeof(Arr1[0]))); int Arg2 = 0; verify_case(4, Arg2, guess(Arg0, Arg1)); }
// END CUT HERE
};
// BEGIN CUT HERE
int main(){
SpecialCells ___test;
___test.run_test(-1);
return 0;
}
// END CUT HERE<๏ฝfimโend๏ฝ> | eU[eIndex] = v, eV[eIndex] = u, eCapacity[eIndex] = 0, eCost[eIndex++] = -cost;
}
pair <int, int> minCostMaxFlow(int n, int s, int t) { |
<|file_name|>config.go<|end_file_name|><๏ฝfimโbegin๏ฝ>package config
import (
log "github.com/Sirupsen/logrus"
_ "github.com/confur-me/confur-api/lib/logrus"
cfg "github.com/olebedev/config"
)
var c *cfg.Config
func init() {<๏ฝfimโhole๏ฝ>func Read(path string) error {
log.Info("Reading configuration from ", path)
var err error
c, err = cfg.ParseYamlFile(path)
if err != nil {
log.Error(err)
}
return err
}
func Config() *cfg.Config {
return c
}<๏ฝfimโend๏ฝ> | c = new(cfg.Config)
}
|
<|file_name|>__init__.py<|end_file_name|><๏ฝfimโbegin๏ฝ>"""Multidict implementation.
HTTP Headers and URL query string require specific data structure:
multidict. It behaves mostly like a dict but it can have
several values for the same key.
"""
import os
__all__ = ('MultiDictProxy', 'CIMultiDictProxy',
'MultiDict', 'CIMultiDict', 'upstr', 'istr')
__version__ = '2.1.5'
if bool(os.environ.get('MULTIDICT_NO_EXTENSIONS')):<๏ฝfimโhole๏ฝ> CIMultiDictProxy,
MultiDict,
CIMultiDict,
upstr, istr)
else:
try:
from ._multidict import (MultiDictProxy,
CIMultiDictProxy,
MultiDict,
CIMultiDict,
upstr, istr)
except ImportError: # pragma: no cover
from ._multidict_py import (MultiDictProxy,
CIMultiDictProxy,
MultiDict,
CIMultiDict,
upstr, istr)<๏ฝfimโend๏ฝ> | from ._multidict_py import (MultiDictProxy, |
<|file_name|>base.py<|end_file_name|><๏ฝfimโbegin๏ฝ>import logging
import uuid
from datetime import timedelta
from typing import List, Optional
import stripe
from django.apps import apps
from django.db import IntegrityError, models, transaction
from django.utils import dateformat, timezone
from django.utils.encoding import smart_str
from stripe.api_resources.abstract.api_resource import APIResource
from stripe.error import InvalidRequestError
from djstripe.utils import get_friendly_currency_amount
from ..fields import JSONField, StripeDateTimeField, StripeForeignKey, StripeIdField
from ..managers import StripeModelManager
from ..settings import djstripe_settings
logger = logging.getLogger(__name__)
class StripeBaseModel(models.Model):
stripe_class: Optional[APIResource] = None
djstripe_created = models.DateTimeField(auto_now_add=True, editable=False)
djstripe_updated = models.DateTimeField(auto_now=True, editable=False)
class Meta:
abstract = True
@classmethod
def api_list(cls, api_key=djstripe_settings.STRIPE_SECRET_KEY, **kwargs):
"""
Call the stripe API's list operation for this model.
:param api_key: The api key to use for this request. \
Defaults to djstripe_settings.STRIPE_SECRET_KEY.
:type api_key: string
See Stripe documentation for accepted kwargs for each object.
:returns: an iterator over all items in the query
"""
return cls.stripe_class.list(api_key=api_key, **kwargs).auto_paging_iter()
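    # Illustrative usage (editor's addition, not upstream code): any concrete
    # subclass that sets ``stripe_class`` can stream results without manual
    # pagination, e.g.
    #
    #     for charge in Charge.api_list(limit=100):
    #         print(charge["id"])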
class StripeModel(StripeBaseModel):
# This must be defined in descendants of this model/mixin
# e.g. Event, Charge, Customer, etc.
expand_fields: List[str] = []
stripe_dashboard_item_name = ""
objects = models.Manager()
stripe_objects = StripeModelManager()
djstripe_id = models.BigAutoField(
verbose_name="ID", serialize=False, primary_key=True
)
id = StripeIdField(unique=True)
djstripe_owner_account: Optional[StripeForeignKey] = StripeForeignKey(
"djstripe.Account",
on_delete=models.CASCADE,
to_field="id",
null=True,
blank=True,
help_text="The Stripe Account this object belongs to.",
)
livemode = models.BooleanField(
null=True,
default=None,
blank=True,
help_text="Null here indicates that the livemode status is unknown or was "
"previously unrecorded. Otherwise, this field indicates whether this record "
"comes from Stripe test mode or live mode operation.",
)
created = StripeDateTimeField(
null=True,
blank=True,
help_text="The datetime this object was created in stripe.",
)
metadata = JSONField(
null=True,
blank=True,
help_text="A set of key/value pairs that you can attach to an object. "
"It can be useful for storing additional information about an object in "
"a structured format.",
)
description = models.TextField(
null=True, blank=True, help_text="A description of this object."
)
class Meta:
abstract = True
get_latest_by = "created"
def _get_base_stripe_dashboard_url(self):
owner_path_prefix = (
(self.djstripe_owner_account.id + "/")
if self.djstripe_owner_account
else ""
)
return "https://dashboard.stripe.com/{}{}".format(
owner_path_prefix, "test/" if not self.livemode else ""
)
def get_stripe_dashboard_url(self) -> str:
"""Get the stripe dashboard url for this object."""
if not self.stripe_dashboard_item_name or not self.id:
return ""
else:
return "{base_url}{item}/{id}".format(
base_url=self._get_base_stripe_dashboard_url(),
item=self.stripe_dashboard_item_name,
id=self.id,
)
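    # Example shape (editor's illustration): a test-mode object with
    # stripe_dashboard_item_name = "payments" and id "ch_123" resolves to
    # "https://dashboard.stripe.com/test/payments/ch_123".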
@property
def human_readable_amount(self) -> str:
return get_friendly_currency_amount(self.amount, self.currency)
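    # e.g. (editor's illustration) amount=Decimal("10.50"), currency="usd"
    # renders along the lines of "$10.50 USD" via get_friendly_currency_amount.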
@property
def default_api_key(self) -> str:
# If the class is abstract (StripeModel), fall back to default key.
if not self._meta.abstract:
if self.djstripe_owner_account:
return self.djstripe_owner_account.get_default_api_key()
return djstripe_settings.get_default_api_key(self.livemode)
def _get_stripe_account_id(self, api_key=None) -> Optional[str]:
"""
        Return the Stripe account ID that owns this object, if it can be
        determined; look at ``djstripe_owner_account`` first, then at any
        reverse relation back to ``djstripe.Account``.
        :param api_key: The api key to use for this request. \
            Defaults to djstripe_settings.STRIPE_SECRET_KEY.
        :type api_key: string
"""
api_key = api_key or self.default_api_key
try:
djstripe_owner_account = self.djstripe_owner_account
if djstripe_owner_account is not None:
return djstripe_owner_account.id
except (AttributeError, KeyError, ValueError):
pass
# Get reverse foreign key relations to Account in case we need to
# retrieve ourselves using that Account ID.
reverse_account_relations = (
field
for field in self._meta.get_fields(include_parents=True)
if field.is_relation and field.one_to_many
# Avoid circular import problems by using the app registry to
# get the model class rather than a direct import.
and field.related_model
is apps.get_model(app_label="djstripe", model_name="account")
)
# Handle case where we have a reverse relation to Account and should pass
# that account ID to the retrieve call.
for field in reverse_account_relations:
# Grab the related object, using the first one we find.
reverse_lookup_attr = field.get_accessor_name()
account = getattr(self, reverse_lookup_attr).first()
if account is not None:
return account.id
return None
def api_retrieve(self, api_key=None, stripe_account=None):
"""
Call the stripe API's retrieve operation for this model.
:param api_key: The api key to use for this request. \
Defaults to djstripe_settings.STRIPE_SECRET_KEY.
:type api_key: string
:param stripe_account: The optional connected account \
for which this request is being made.
:type stripe_account: string
"""
# Prefer passed in stripe_account if set.
if not stripe_account:
stripe_account = self._get_stripe_account_id(api_key)
return self.stripe_class.retrieve(
id=self.id,
api_key=api_key or self.default_api_key,
expand=self.expand_fields,
stripe_account=stripe_account,
)
@classmethod
def _api_create(cls, api_key=djstripe_settings.STRIPE_SECRET_KEY, **kwargs):
"""
Call the stripe API's create operation for this model.
:param api_key: The api key to use for this request. \
Defaults to djstripe_settings.STRIPE_SECRET_KEY.
:type api_key: string
"""
return cls.stripe_class.create(api_key=api_key, **kwargs)
def _api_delete(self, api_key=None, stripe_account=None, **kwargs):
"""
Call the stripe API's delete operation for this model
:param api_key: The api key to use for this request. \
Defaults to djstripe_settings.STRIPE_SECRET_KEY.
:type api_key: string
:param stripe_account: The optional connected account \
for which this request is being made.
:type stripe_account: string
"""
api_key = api_key or self.default_api_key
# Prefer passed in stripe_account if set.
if not stripe_account:
stripe_account = self._get_stripe_account_id(api_key)
return self.api_retrieve(api_key=api_key, stripe_account=stripe_account).delete(
**kwargs
)
def _api_update(self, api_key=None, stripe_account=None, **kwargs):
"""
Call the stripe API's modify operation for this model
:param api_key: The api key to use for this request.
Defaults to djstripe_settings.STRIPE_SECRET_KEY.
:type api_key: string
:param stripe_account: The optional connected account \
for which this request is being made.
:type stripe_account: string
"""
api_key = api_key or self.default_api_key
# Prefer passed in stripe_account if set.
if not stripe_account:
stripe_account = self._get_stripe_account_id(api_key)
instance = self.api_retrieve(api_key=api_key, stripe_account=stripe_account)
return instance.request("post", instance.instance_url(), params=kwargs)
def str_parts(self) -> List[str]:
"""
Extend this to add information to the string representation of the object
"""
return ["id={id}".format(id=self.id)]
@classmethod
def _manipulate_stripe_object_hook(cls, data):
"""
Gets called by this object's stripe object conversion method just before
conversion.
Use this to populate custom fields in a StripeModel from stripe data.
"""
return data
@classmethod
def _find_owner_account(cls, data):
"""
Fetches the Stripe Account (djstripe_owner_account model field)
linked to the class, cls.
        Tries to retrieve it using the stripe_account if given.
Otherwise uses the api_key.
"""
from .account import Account
stripe_account = cls._id_from_data(data.get("account"))
if stripe_account:
return Account._get_or_retrieve(id=stripe_account)
api_key = data.get("api_key", "")
if api_key:
return Account.get_or_retrieve_for_api_key(api_key)
@classmethod
def _stripe_object_to_record(
cls,
data: dict,
current_ids=None,
pending_relations: list = None,
stripe_account: str = None,
) -> dict:
"""
This takes an object, as it is formatted in Stripe's current API for our object
type. In return, it provides a dict. The dict can be used to create a record or
to update a record
This function takes care of mapping from one field name to another, converting
from cents to dollars, converting timestamps, and eliminating unused fields
(so that an objects.create() call would not fail).
        :param data: the object, as sent by Stripe, parsed from JSON into a dict
:param current_ids: stripe ids of objects that are currently being processed
:type current_ids: set
:param pending_relations: list of tuples of relations to be attached post-save
:param stripe_account: The optional connected account \
for which this request is being made.
:return: All the members from the input, translated, mutated, etc
"""
manipulated_data = cls._manipulate_stripe_object_hook(data)
if not cls.is_valid_object(data):
raise ValueError(
"Trying to fit a %r into %r. Aborting."
% (data.get("object", ""), cls.__name__)
)
result = {}
if current_ids is None:
current_ids = set()
# Iterate over all the fields that we know are related to Stripe,
# let each field work its own magic
ignore_fields = ["date_purged", "subscriber"] # XXX: Customer hack
for field in cls._meta.fields:
if field.name.startswith("djstripe_") or field.name in ignore_fields:
continue
if isinstance(field, models.ForeignKey):
field_data, skip = cls._stripe_object_field_to_foreign_key(
field=field,
manipulated_data=manipulated_data,
current_ids=current_ids,
pending_relations=pending_relations,
stripe_account=stripe_account,
)
if skip:
continue
else:
if hasattr(field, "stripe_to_db"):
field_data = field.stripe_to_db(manipulated_data)
else:
field_data = manipulated_data.get(field.name)
if (
isinstance(field, (models.CharField, models.TextField))
and field_data is None
):
# TODO - this applies to StripeEnumField as well, since it
# sub-classes CharField, is that intentional?
field_data = ""
result[field.name] = field_data
# For all objects other than the account object itself, get the API key
# attached to the request, and get the matching Account for that key.
owner_account = cls._find_owner_account(data)
if owner_account:
result["djstripe_owner_account"] = owner_account
return result
@classmethod
def _id_from_data(cls, data):
"""
Extract stripe id from stripe field data
:param data:
:return:
"""
if isinstance(data, str):
# data like "sub_6lsC8pt7IcFpjA"
id_ = data
elif data:
# data like {"id": sub_6lsC8pt7IcFpjA", ...}
id_ = data.get("id")
else:
id_ = None
return id_
@classmethod
def _stripe_object_field_to_foreign_key(
cls,
field,
manipulated_data,
current_ids=None,
pending_relations=None,
stripe_account=None,
):
"""
This converts a stripe API field to the dj stripe object it references,
so that foreign keys can be connected up automatically.
:param field:
:type field: models.ForeignKey
:param manipulated_data:
:type manipulated_data: dict
:param current_ids: stripe ids of objects that are currently being processed
:type current_ids: set
:param pending_relations: list of tuples of relations to be attached post-save
:type pending_relations: list
:param stripe_account: The optional connected account \
for which this request is being made.
:type stripe_account: string
:return:
"""
field_data = None
field_name = field.name
raw_field_data = manipulated_data.get(field_name)
refetch = False
skip = False
if issubclass(field.related_model, StripeModel):
id_ = cls._id_from_data(raw_field_data)
if not raw_field_data:
skip = True
elif id_ == raw_field_data:
# A field like {"subscription": "sub_6lsC8pt7IcFpjA", ...}
refetch = True
else:
# A field like {"subscription": {"id": sub_6lsC8pt7IcFpjA", ...}}
pass
if id_ in current_ids:
                # this object is currently being fetched, don't try to fetch again
                # to avoid recursion. Instead, record the relation that should be
                # created once the "object_id" object exists
if pending_relations is not None:
object_id = manipulated_data["id"]
pending_relations.append((object_id, field, id_))
skip = True
if not skip:
# add the id of the current object to the list
# of ids being processed.
                # This will avoid infinite recursive syncs in case a related model
# requests the same object
current_ids.add(id_)
field_data, _ = field.related_model._get_or_create_from_stripe_object(
manipulated_data,
field_name,
refetch=refetch,
current_ids=current_ids,
pending_relations=pending_relations,
stripe_account=stripe_account,
)
# Remove the id of the current object from the list
# after it has been created or retrieved
current_ids.remove(id_)
else:
# eg PaymentMethod, handled in hooks
skip = True
return field_data, skip
@classmethod
def is_valid_object(cls, data):
"""
Returns whether the data is a valid object for the class
"""
return "object" in data and data["object"] == cls.stripe_class.OBJECT_NAME
def _attach_objects_hook(self, cls, data, current_ids=None):
"""
Gets called by this object's create and sync methods just before save.
Use this to populate fields before the model is saved.
:param cls: The target class for the instantiated object.
:param data: The data dictionary received from the Stripe API.
:type data: dict
:param current_ids: stripe ids of objects that are currently being processed
:type current_ids: set
"""
pass
def _attach_objects_post_save_hook(self, cls, data, pending_relations=None):
"""
Gets called by this object's create and sync methods just after save.
Use this to populate fields after the model is saved.
:param cls: The target class for the instantiated object.
:param data: The data dictionary received from the Stripe API.
:type data: dict
"""
unprocessed_pending_relations = []
if pending_relations is not None:
for post_save_relation in pending_relations:
object_id, field, id_ = post_save_relation
if self.id == id_:
# the target instance now exists
target = field.model.objects.get(id=object_id)
setattr(target, field.name, self)
target.save()
# reload so that indirect relations back to this object
# eg self.charge.invoice = self are set
# TODO - reverse the field reference here to avoid hitting the DB?
self.refresh_from_db()
else:
unprocessed_pending_relations.append(post_save_relation)
if len(pending_relations) != len(unprocessed_pending_relations):
# replace in place so passed in list is updated in calling method
pending_relations[:] = unprocessed_pending_relations
@classmethod
def _create_from_stripe_object(
cls,
data,
current_ids=None,
pending_relations=None,
save=True,
stripe_account=None,
):
"""
Instantiates a model instance using the provided data object received
from Stripe, and saves it to the database if specified.
:param data: The data dictionary received from the Stripe API.
:type data: dict<๏ฝfimโhole๏ฝ> :type current_ids: set
:param pending_relations: list of tuples of relations to be attached post-save
:type pending_relations: list
:param save: If True, the object is saved after instantiation.
:type save: bool
:param stripe_account: The optional connected account \
for which this request is being made.
:type stripe_account: string
:returns: The instantiated object.
"""
instance = cls(
**cls._stripe_object_to_record(
data,
current_ids=current_ids,
pending_relations=pending_relations,
stripe_account=stripe_account,
)
)
instance._attach_objects_hook(cls, data, current_ids=current_ids)
if save:
instance.save(force_insert=True)
instance._attach_objects_post_save_hook(
cls, data, pending_relations=pending_relations
)
return instance
# flake8: noqa (C901)
@classmethod
def _get_or_create_from_stripe_object(
cls,
data,
field_name="id",
refetch=True,
current_ids=None,
pending_relations=None,
save=True,
stripe_account=None,
):
"""
:param data:
:param field_name:
:param refetch:
:param current_ids: stripe ids of objects that are currently being processed
:type current_ids: set
:param pending_relations: list of tuples of relations to be attached post-save
:type pending_relations: list
:param save:
:param stripe_account: The optional connected account \
for which this request is being made.
:type stripe_account: string
:return:
:rtype: cls, bool
"""
field = data.get(field_name)
is_nested_data = field_name != "id"
should_expand = False
if pending_relations is None:
pending_relations = []
id_ = cls._id_from_data(field)
if not field:
# An empty field - We need to return nothing here because there is
# no way of knowing what needs to be fetched!
logger.warning(
"empty field %s.%s = %r - this is a bug, "
"please report it to dj-stripe!",
cls.__name__,
field_name,
field,
)
return None, False
elif id_ == field:
# A field like {"subscription": "sub_6lsC8pt7IcFpjA", ...}
# We'll have to expand if the field is not "id" (= is nested)
should_expand = is_nested_data
else:
# A field like {"subscription": {"id": sub_6lsC8pt7IcFpjA", ...}}
data = field
try:
return cls.stripe_objects.get(id=id_), False
except cls.DoesNotExist:
if is_nested_data and refetch:
# This is what `data` usually looks like:
# {"id": "cus_XXXX", "default_source": "card_XXXX"}
# Leaving the default field_name ("id") will get_or_create the customer.
# If field_name="default_source", we get_or_create the card instead.
cls_instance = cls(id=id_)
try:
data = cls_instance.api_retrieve(stripe_account=stripe_account)
except InvalidRequestError as e:
if "a similar object exists in" in str(e):
# HACK around a Stripe bug.
# When a File is retrieved from the Account object,
# a mismatch between live and test mode is possible depending
# on whether the file (usually the logo) was uploaded in live
# or test. Reported to Stripe in August 2020.
# Context: https://github.com/dj-stripe/dj-stripe/issues/830
pass
elif "No such PaymentMethod:" in str(e):
# payment methods (card_โฆ etc) can be irretrievably deleted,
# but still present during sync. For example, if a refund is
# issued on a charge whose payment method has been deleted.
return None, False
else:
raise
should_expand = False
# The next thing to happen will be the "create from stripe object" call.
# At this point, if we don't have data to start with (field is a str),
# *and* we didn't refetch by id, then `should_expand` is True and we
# don't have the data to actually create the object.
# If this happens when syncing Stripe data, it's a djstripe bug. Report it!
assert not should_expand, "No data to create {} from {}".format(
cls.__name__, field_name
)
try:
# We wrap the `_create_from_stripe_object` in a transaction to
# avoid TransactionManagementError on subsequent queries in case
# of the IntegrityError catch below. See PR #903
with transaction.atomic():
return (
cls._create_from_stripe_object(
data,
current_ids=current_ids,
pending_relations=pending_relations,
save=save,
stripe_account=stripe_account,
),
True,
)
except IntegrityError:
# Handle the race condition that something else created the object
# after the `get` and before `_create_from_stripe_object`.
# This is common during webhook handling, since Stripe sends
# multiple webhook events simultaneously,
# each of which will cause recursive syncs. See issue #429
return cls.stripe_objects.get(id=id_), False
@classmethod
def _stripe_object_to_customer(cls, target_cls, data, current_ids=None):
"""
Search the given manager for the Customer matching this object's
``customer`` field.
:param target_cls: The target class
:type target_cls: Customer
:param data: stripe object
:type data: dict
:param current_ids: stripe ids of objects that are currently being processed
:type current_ids: set
"""
if "customer" in data and data["customer"]:
return target_cls._get_or_create_from_stripe_object(
data, "customer", current_ids=current_ids
)[0]
@classmethod
def _stripe_object_to_default_tax_rates(cls, target_cls, data):
"""
Retrieves TaxRates for a Subscription or Invoice
:param target_cls:
:param data:
:return:
"""
tax_rates = []
for tax_rate_data in data.get("default_tax_rates", []):
tax_rate, _ = target_cls._get_or_create_from_stripe_object(
tax_rate_data, refetch=False
)
tax_rates.append(tax_rate)
return tax_rates
@classmethod
def _stripe_object_to_tax_rates(cls, target_cls, data):
"""
Retrieves TaxRates for a SubscriptionItem or InvoiceItem
:param target_cls:
:param data:
:return:
"""
tax_rates = []
for tax_rate_data in data.get("tax_rates", []):
tax_rate, _ = target_cls._get_or_create_from_stripe_object(
tax_rate_data, refetch=False
)
tax_rates.append(tax_rate)
return tax_rates
@classmethod
def _stripe_object_set_total_tax_amounts(cls, target_cls, data, instance):
"""
Set total tax amounts on Invoice instance
:param target_cls:
:param data:
:param instance:
:type instance: djstripe.models.Invoice
:return:
"""
from .billing import TaxRate
pks = []
for tax_amount_data in data.get("total_tax_amounts", []):
tax_rate_data = tax_amount_data["tax_rate"]
if isinstance(tax_rate_data, str):
tax_rate_data = {"tax_rate": tax_rate_data}
tax_rate, _ = TaxRate._get_or_create_from_stripe_object(
tax_rate_data, field_name="tax_rate", refetch=True
)
tax_amount, _ = target_cls.objects.update_or_create(
invoice=instance,
tax_rate=tax_rate,
defaults={
"amount": tax_amount_data["amount"],
"inclusive": tax_amount_data["inclusive"],
},
)
pks.append(tax_amount.pk)
instance.total_tax_amounts.exclude(pk__in=pks).delete()
@classmethod
def _stripe_object_to_invoice_items(cls, target_cls, data, invoice):
"""
Retrieves InvoiceItems for an invoice.
If the invoice item doesn't exist already then it is created.
If the invoice is an upcoming invoice that doesn't persist to the
database (i.e. ephemeral) then the invoice items are also not saved.
:param target_cls: The target class to instantiate per invoice item.
:type target_cls: Type[djstripe.models.InvoiceItem]
:param data: The data dictionary received from the Stripe API.
:type data: dict
:param invoice: The invoice object that should hold the invoice items.
:type invoice: ``djstripe.models.Invoice``
"""
lines = data.get("lines")
if not lines:
return []
invoiceitems = []
for line in lines.auto_paging_iter():
if invoice.id:
save = True
line.setdefault("invoice", invoice.id)
if line.get("type") == "subscription":
# Lines for subscriptions need to be keyed based on invoice and
# subscription, because their id is *just* the subscription
# when received from Stripe. This means that future updates to
# a subscription will change previously saved invoices - Doing
# the composite key avoids this.
if not line["id"].startswith(invoice.id):
line["id"] = "{invoice_id}-{subscription_id}".format(
invoice_id=invoice.id, subscription_id=line["id"]
)
else:
# Don't save invoice items for ephemeral invoices
save = False
line.setdefault("customer", invoice.customer.id)
line.setdefault("date", int(dateformat.format(invoice.created, "U")))
item, _ = target_cls._get_or_create_from_stripe_object(
line, refetch=False, save=save
)
invoiceitems.append(item)
return invoiceitems
@classmethod
def _stripe_object_to_subscription_items(cls, target_cls, data, subscription):
"""
Retrieves SubscriptionItems for a subscription.
If the subscription item doesn't exist already then it is created.
:param target_cls: The target class to instantiate per invoice item.
:type target_cls: Type[djstripe.models.SubscriptionItem]
:param data: The data dictionary received from the Stripe API.
:type data: dict
:param subscription: The subscription object that should hold the items.
:type subscription: djstripe.models.Subscription
"""
items = data.get("items")
if not items:
            subscription.items.all().delete()
return []
pks = []
subscriptionitems = []
for item_data in items.auto_paging_iter():
item, _ = target_cls._get_or_create_from_stripe_object(
item_data, refetch=False
)
# sync the SubscriptionItem
target_cls.sync_from_stripe_data(item_data)
pks.append(item.pk)
subscriptionitems.append(item)
subscription.items.exclude(pk__in=pks).delete()
return subscriptionitems
@classmethod
def _stripe_object_to_refunds(cls, target_cls, data, charge):
"""
Retrieves Refunds for a charge
:param target_cls: The target class to instantiate per refund
:type target_cls: Type[djstripe.models.Refund]
:param data: The data dictionary received from the Stripe API.
:type data: dict
:param charge: The charge object that refunds are for.
:type charge: djstripe.models.Refund
:return:
"""
refunds = data.get("refunds")
if not refunds:
return []
refund_objs = []
for refund_data in refunds.auto_paging_iter():
item, _ = target_cls._get_or_create_from_stripe_object(
refund_data, refetch=False
)
refund_objs.append(item)
return refund_objs
@classmethod
def sync_from_stripe_data(cls, data):
"""
Syncs this object from the stripe data provided.
Foreign keys will also be retrieved and synced recursively.
:param data: stripe object
:type data: dict
:rtype: cls
"""
current_ids = set()
data_id = data.get("id")
stripe_account = getattr(data, "stripe_account", None)
if data_id:
# stop nested objects from trying to retrieve this object before
# initial sync is complete
current_ids.add(data_id)
instance, created = cls._get_or_create_from_stripe_object(
data,
current_ids=current_ids,
stripe_account=stripe_account,
)
if not created:
record_data = cls._stripe_object_to_record(data)
for attr, value in record_data.items():
setattr(instance, attr, value)
instance._attach_objects_hook(cls, data, current_ids=current_ids)
instance.save()
instance._attach_objects_post_save_hook(cls, data)
return instance
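    # Usage sketch (hypothetical caller; Customer stands in for any concrete
    # StripeModel subclass defined elsewhere in dj-stripe):
    #   stripe_customer = stripe.Customer.retrieve("cus_XXXX")
    #   customer = Customer.sync_from_stripe_data(stripe_customer)
    # Related objects referenced by the payload are fetched and synced too.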
@classmethod
def _get_or_retrieve(cls, id, stripe_account=None, **kwargs):
"""
Retrieve object from the db, if it exists. If it doesn't, query Stripe to fetch
the object and sync with the db.
"""
try:
return cls.objects.get(id=id)
except cls.DoesNotExist:
pass
if stripe_account:
kwargs["stripe_account"] = str(stripe_account)
# If no API key is specified, use the default one for the specified livemode
# (or if no livemode is specified, the default one altogether)
kwargs.setdefault(
"api_key",
djstripe_settings.get_default_api_key(livemode=kwargs.get("livemode")),
)
data = cls.stripe_class.retrieve(id=id, **kwargs)
instance = cls.sync_from_stripe_data(data)
return instance
def __str__(self):
return smart_str("<{list}>".format(list=", ".join(self.str_parts())))
class IdempotencyKey(models.Model):
uuid = models.UUIDField(
max_length=36, primary_key=True, editable=False, default=uuid.uuid4
)
action = models.CharField(max_length=100)
livemode = models.BooleanField(
help_text="Whether the key was used in live or test mode."
)
created = models.DateTimeField(auto_now_add=True)
class Meta:
unique_together = ("action", "livemode")
def __str__(self):
return str(self.uuid)
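    # Usage sketch (an assumption about intended use, not an API defined here):
    # callers can reuse one key per logical action and mode within 24 hours:
    #   key, _ = IdempotencyKey.objects.get_or_create(
    #       action="customer:create:42", livemode=False
    #   )
    #   if key.is_expired:
    #       ...  # delete it and mint a fresh key instead of reusing this one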
@property
def is_expired(self) -> bool:
return timezone.now() > self.created + timedelta(hours=24)<๏ฝfimโend๏ฝ> | :param current_ids: stripe ids of objects that are currently being processed |
<|file_name|>ml_input_utils.py<|end_file_name|><๏ฝfimโbegin๏ฝ>#########################################################################################################
# ml_input_utils.py
# One of the Python modules written as part of the genericQSARpyUtils project (see below).
#
# ################################################
# #ml_input_utils.py: Key documentation :Contents#
# ################################################
# #1. Overview of this project.
# #2. IMPORTANT LEGAL ISSUES
# #<N.B.: Check this section ("IMPORTANT LEGAL ISSUES") to see whether - and how - you ARE ALLOWED TO use this code!>
# #<N.B.: Includes contact details.>
# ##############################
# #1. Overview of this project.#
# ##############################
# #Project name: genericQSARpyUtils
# #Purpose of this project: To provide a set of Python functions
# #(or classes with associated methods) that can be used to perform a variety of tasks
# #which are relevant to generating input files, from cheminformatics datasets, which can be used to build and
# #validate QSAR models (generated using Machine Learning methods implemented in other software packages)
# #on such datasets.
# #To this end, two Python modules are currently provided.
# #(1) ml_input_utils.py
# #Defines the following class:
# #descriptorsFilesProcessor: This contains methods which can be used to prepare datasets in either CSV or svmlight format, including converting between these formats, based upon previously calculated fingerprints (expressed as a set of tab separated text strings for each instance) or numeric descriptors.
# #(2) ml_functions.py
# #Defines a set of functions which can be used to carry out univariate feature selection,cross-validation etc. for Machine Learning model input files in svmlight format.
# ###########################
# #2. IMPORTANT LEGAL ISSUES#
# ###########################
# Copyright Syngenta Limited 2013
#Copyright (c) 2013-2015 Liverpool John Moores University
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
# THIS PROGRAM IS MADE AVAILABLE FOR DISTRIBUTION WITHOUT ANY FORM OF WARRANTY TO THE
# EXTENT PERMITTED BY APPLICABLE LAW. THE COPYRIGHT HOLDER PROVIDES THE PROGRAM \"AS IS\"
# WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM LIES
# WITH THE USER. SHOULD THE PROGRAM PROVE DEFECTIVE IN ANY WAY, THE USER ASSUMES THE
# COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. THE COPYRIGHT HOLDER IS NOT
# RESPONSIBLE FOR ANY AMENDMENT, MODIFICATION OR OTHER ENHANCEMENT MADE TO THE PROGRAM
# BY ANY USER WHO REDISTRIBUTES THE PROGRAM SO AMENDED, MODIFIED OR ENHANCED.
# IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL THE
# COPYRIGHT HOLDER BE LIABLE TO ANY USER FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL,
# INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
# PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE
# OR LOSSES SUSTAINED BY THE USER OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO
# OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER HAS BEEN ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGES.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
# ####################
# See also: http://www.gnu.org/licenses/ (last accessed 14/01/2013)
# Contact:
# 1. [email protected]
# or if this fails
# 2. [email protected]
# #####################
#########################################################################################################
#<N.B.: All file name manipulation supposes this code is running under Windows!>
import re,os,itertools,sys,csv
from collections import defaultdict #Assumption: Python version >= 2.5
import functools
import pybel
class descriptorsFilesProcessor():
def __init__(self):
pass
def match_ids_to_string_fp_features(self,string_fp_file,jCompoundMapperStringFeatures=False):
		id2string_fp_features = {} #N.B.: For now, we will only compute binary descriptors based upon feature occurrence => only the set of unique features per compound is required!
f_in = open(string_fp_file)
try:
lines = [LINE.replace('\n','') for LINE in f_in.readlines()]
assert not 0 == len(lines), " Fingerprints file is empty???"
del LINE
finally:
f_in.close()
del f_in
for LINE in lines:
if jCompoundMapperStringFeatures:
ID = re.sub('(_INDEX=[0-9]+)','',LINE.split('\t')[0])
features = list(set([re.sub('(\:1$)','',raw_feat) for raw_feat in LINE.split('\t')[1:]]))
else:
ID = LINE.split('\t')[0]
features = list(set([raw_feat for raw_feat in LINE.split('\t')[1:]]))
features.sort() #15/01/13:new line inserted
id2string_fp_features[ID] = features
del LINE
#assert len(id2string_fp_features) == len(lines), " Duplicate IDs???" #-Better handled within script body - can call utility function to identify which IDs are duplicated!
return id2string_fp_features
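	# Illustrative example (hypothetical fingerprints line):
	#   "mol1\tFP:B\tFP:A\tFP:B"  ->  id2string_fp_features["mol1"] == ["FP:A", "FP:B"]
	# (duplicate features are collapsed via set() and the survivors sorted)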
def match_all_unique_features_to_indices(self,id2features,feat2IndexFileName='feat2Index.csv'):
feat2Exists = defaultdict(bool) #is this a faster way to get all unique features than simply building up a list and then applying list(set(built_up_list))?
for id in id2features:
for FEATURE in id2features[id]:
feat2Exists[FEATURE] = True
del id
del FEATURE
feat2Index = defaultdict(int) #values should default to zero - a pre-requisite for this function and convert_id2features_to_svm_light_format_descriptors_file(...)!
#for FEATURE in feat2Exists.keys(): ###15/01/13: commented out
features = feat2Exists.keys() #15/01/13:new line inserted
features.sort() #15/01/13:new line inserted
feat_count = 0 #15/01/13:new line inserted
for FEATURE in features: #15/01/13:new line inserted
#feat2Index[FEATURE] += range(1,len(feat2Exists.keys())+1)[feat2Exists.keys().index(FEATURE)] ###15/01/13: commented out
feat_count += 1 #15/01/13:new line inserted
feat2Index[FEATURE] = feat_count #15/01/13:new line inserted
del FEATURE
del feat_count #15/01/13:new line inserted
#############################################################################################
		#Record the correspondence between features and indices for subsequent model interpretation###
#############################################################################################
f_out = open(feat2IndexFileName,'w')
try:
f_out.write('Feature(Quoted),Index\n') #Quoting should make it possible to inspect this file in Excel...
for FEATURE in feat2Index:
f_out.write('"%s",%d\n' % (FEATURE,feat2Index[FEATURE]))
finally:
f_out.close()
del f_out
#############################################################################################
return feat2Index
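	# Illustrative example: if the unique features across all molecules are
	# "FP:A", "FP:C" and "FP:B", the sorted, 1-based assignment yields
	#   feat2Index == {"FP:A": 1, "FP:B": 2, "FP:C": 3}
	# and the same mapping is written to feat2IndexFileName.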
def generate_molId2DescId2DescValue_from_raw_fp_file(self,raw_fp_file,iSjCompoundMapperStringFeatures=False,unique_features_file=None):
'''
generate_molId2DescId2DescValue_from_raw_fp_file(raw_fp_file,iSjCompoundMapperStringFeatures=False,unique_features_file=None)
(1) raw_fp_file :
Must have the following structure to each line:
molId\tFeatureB\tFeatureC\tFeatureA\tFeatureX....
Must - for now! - have a .txt extension!
(2) unique_features_file :
		Must have the same format as feat2IndexFileName (see contents of self.match_all_unique_features_to_indices(...)).
'''
id2string_fp_features = self.match_ids_to_string_fp_features(raw_fp_file,iSjCompoundMapperStringFeatures)
if unique_features_file is None:
feat2IndexFileName = re.sub('(\.txt$)','_fpFeat2InitialIndex.csv',raw_fp_file)#17/03/13: actually, it is useful to write this to the same directory as the fingerprints file! => Hopefully any associated errors can be dealt with!#.split("\\")[-1]) #16/01/2013, 15:25: this line was put back in - since unittests failed when it was replaced with the following line!
#feat2IndexFileName = re.sub('(\.txt$)','_fpFeat2InitialIndex.csv',raw_fp_file)
feat2Index = self.match_all_unique_features_to_indices(id2string_fp_features,feat2IndexFileName)
else:
feat2IndexFileName = unique_features_file
feat2Index = {}
f_in = open(unique_features_file)
try:
data = csv.DictReader(f_in)
for LINE in data:
feat2Index[re.sub('("$|^")','',LINE['Feature(Quoted)'])] = int(LINE['Index'])
del LINE
del data
finally:
f_in.close()
del f_in
molId2DescId2DescValue = defaultdict(functools.partial(defaultdict,int))
for molId in id2string_fp_features:
# ########################
# ########Initialise######
# ########################
# for feat in feat2Index:
# molId2DescId2DescValue[molId][feat2Index[feat]] = 0
# del feat
# ########################
for feat in id2string_fp_features[molId]:
molId2DescId2DescValue[molId][feat2Index[feat]] = 1
return molId2DescId2DescValue, feat2IndexFileName #5/01/13: I think the problem (TypeError) arose because this must have been updated to not just return molId2DescId2DescValue, but forgot to update generate_molId2DescId2DescValue_from_multiple_descriptors_files(...) - see below.
def generate_molId2DescId2DescValue_from_CSV(self,raw_descriptors_csv):
'''
generate_molId2DescId2DescValue_from_CSV(raw_descriptors_csv)
raw_descriptors_csv - must have the following structure:
First line = Header => "molID,<Descriptor1:Name>,<Descriptor2:Name>,..."
Subsequent lines:
molId,<Descriptor1:Value>,<Descriptor2:Value>,....
'''
molId2DescId2DescValue = defaultdict(functools.partial(defaultdict,int))
f_in = open(raw_descriptors_csv)
try:
data = [LINE for LINE in csv.DictReader(f_in)]
del LINE
descriptor_names = [KEY_NAME for KEY_NAME in data[0].keys() if not 'molID'==KEY_NAME]
descName2descId = dict(zip(descriptor_names,range(1,len(descriptor_names)+1)))
del descriptor_names
############################################################
#First record the (current) descriptor name: descId pairing#
############################################################
f_out = open(re.sub('(\.csv$)','_descName2InitialDescId.csv',raw_descriptors_csv.split("\\")[-1]),'w')
try:
f_out.write('DescriptorName,InitId\n')
for descName in descName2descId:
f_out.write('"%s",%s\n' % (descName,descName2descId[descName]))
del descName
finally:
f_out.close()
del f_out
############################################################
for mol_line in data:
for descName in descName2descId:
molId2DescId2DescValue[mol_line['molID']][descName2descId[descName]] = float(mol_line[descName])
del descName
del mol_line
finally:
f_in.close()
del f_in
del data
return molId2DescId2DescValue
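	# Illustrative example (hypothetical CSV input):
	#   molID,logP,MW
	#   mol1,1.2,180.2
	# yields entries such as molId2DescId2DescValue['mol1'][descId] == 1.2, where
	# the descName -> descId assignment is the one recorded in the
	# *_descName2InitialDescId.csv file (dict key order, not necessarily header order).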
def write_svmlight_format_modellingFile_from_multiple_descriptors_files(self,list_of_descriptors_files,corresponding_list_of_whether_descriptors_file_is_actually_a_raw_fp_file,corresponding_list_of_whether_descriptors_file_is_actually_a_jCompoundMapperStringFeatures_file,descriptors_file_name,id2responseVariable=defaultdict(int),corresponding_list_of_unique_features_files=[None]):
#p.t.r.d.i.:DONE
#####################################################################################################
#<N.B.: 09/10/12: Adapted from write_csv_format_modellingFile_from_multiple_descriptors_files(...).>
#<10/10/12: But, unlike the OLD version of write_csv_format_modellingFile_from_multiple_descriptors_files(...), the possibility of defining the descriptors for fingerprint features files ('fp files') based upon an externally specified set of unique features has been introduced via the new argument: corresponding_list_of_unique_features_files!>
#####################################################################################################
assert len(list_of_descriptors_files) == len(set(list_of_descriptors_files)), " %s ???" % list_of_descriptors_files
assert len(list_of_descriptors_files) == len(corresponding_list_of_whether_descriptors_file_is_actually_a_raw_fp_file) , " %d vs. %d ???" % (len(list_of_descriptors_files),len(corresponding_list_of_whether_descriptors_file_is_actually_a_raw_fp_file))
assert len(list_of_descriptors_files) == len(corresponding_list_of_whether_descriptors_file_is_actually_a_jCompoundMapperStringFeatures_file), " %d vs. %d ???" % (len(list_of_descriptors_files),len(corresponding_list_of_whether_descriptors_file_is_actually_a_jCompoundMapperStringFeatures_file))
if [None] == corresponding_list_of_unique_features_files:
corresponding_list_of_unique_features_files = corresponding_list_of_unique_features_files*len(list_of_descriptors_files)
record_of_all_feat2IndexFiles = []
else:
record_of_all_feat2IndexFiles = [None]*len(list_of_descriptors_files)
#Clearly, all descriptors/raw fp files parsed MUST correspond to the same molecule IDs!
combined_molId2DescId2DescValue = defaultdict(functools.partial(defaultdict,int))
current_initial_descriptor_id = 1
for raw_descriptors_file in list_of_descriptors_files:
if corresponding_list_of_whether_descriptors_file_is_actually_a_raw_fp_file[list_of_descriptors_files.index(raw_descriptors_file)]:
iSjCompoundMapperStringFeatures = corresponding_list_of_whether_descriptors_file_is_actually_a_jCompoundMapperStringFeatures_file[list_of_descriptors_files.index(raw_descriptors_file)]
unique_features_file = corresponding_list_of_unique_features_files[list_of_descriptors_files.index(raw_descriptors_file)]
current_molId2DescId2DescValue, feat2IndexFile = self.generate_molId2DescId2DescValue_from_raw_fp_file(raw_descriptors_file,iSjCompoundMapperStringFeatures,unique_features_file)
if unique_features_file is None:
record_of_all_feat2IndexFiles.append(feat2IndexFile)
else:
assert feat2IndexFile == unique_features_file
else:
current_molId2DescId2DescValue = self.generate_molId2DescId2DescValue_from_CSV(raw_descriptors_file)
all_current_original_desc_ids = []
for molId in current_molId2DescId2DescValue:
for descId in current_molId2DescId2DescValue[molId]:
all_current_original_desc_ids.append(descId)
combined_molId2DescId2DescValue[molId][(current_initial_descriptor_id-1)+descId] = float(current_molId2DescId2DescValue[molId][descId])
del descId
del molId
all_current_original_desc_ids = list(set(all_current_original_desc_ids))
current_initial_descriptor_id += len(all_current_original_desc_ids)
del all_current_original_desc_ids
del current_initial_descriptor_id
#########################
all_desc_ids = list(set(list(itertools.chain(*[combined_molId2DescId2DescValue[mol_ID].keys() for mol_ID in combined_molId2DescId2DescValue.keys()])))) ####No keys assigned for zero valued FP descriptors!
del mol_ID
all_desc_ids.sort()
f_out = open(descriptors_file_name,'w')
try:
all_mol_ids = combined_molId2DescId2DescValue.keys()
#####################################################################################################
#N.B.: Should ensure (i.e. to make sure selection of the same rows, e.g. for a train/test partition or when doing bootstrapping) that substances are written to the model input file in the same order irrespective of the descriptors set used for modelling!
#This will be taken care of by sorting the IDs prior to writing the corresponding entries to the output file.
#####################################################################################################
all_mol_ids.sort()
for molID in all_mol_ids:
current_line_list = ['%s' % id2responseVariable[molID]]
################################################################
#Hopefully this will avoid a MemoryError exception!#############
################################################################
current_DescId2DescValue = combined_molId2DescId2DescValue[molID]
del combined_molId2DescId2DescValue[molID]
current_line_list += ['%d:%f' % (descId,current_DescId2DescValue[descId]) for descId in all_desc_ids if not 0.0 == current_DescId2DescValue[descId]]
del descId
del current_DescId2DescValue
#################################################################
f_out.write(' '.join(current_line_list)+'#%s' % molID+'\n') #svmlight format: anything following # should not be read into memory by correct parsers of this format!
del molID
del current_line_list
finally:
f_out.close()
del f_out
#######################
return record_of_all_feat2IndexFiles
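	# Illustrative output line (svmlight format, hypothetical values): a molecule
	# "mol1" with response value 0 and non-zero descriptors 3 and 7 is written as:
	#   0 3:1.000000 7:1.000000#mol1
	# (everything after '#' is the molecule ID, ignored by svmlight parsers)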
def convert_svmlight_to_csv(self,svmlight_file,csv_file=r''):
#d.i.p.t.r.:<DONE>
molID2descID2Value = defaultdict(functools.partial(defaultdict,int))
molID2responseValue = {}
f_in = open(svmlight_file)
try:
all_data_lines = [LINE.replace('\n','') for LINE in f_in.readlines()]
del LINE
finally:
f_in.close()
del f_in
for LINE in all_data_lines:
response_value_THEN_feature_ID_Value_Pairs, molID = LINE.split('#')
response_value = float(response_value_THEN_feature_ID_Value_Pairs.split()[0])
molID2responseValue[molID] = response_value
del response_value
for feature_ID_Value_PAIR in response_value_THEN_feature_ID_Value_Pairs.split()[1:]:
molID2descID2Value[molID][int(feature_ID_Value_PAIR.split(':')[0])] = float(feature_ID_Value_PAIR.split(':')[1])
del response_value_THEN_feature_ID_Value_Pairs
#del feature_ID_Value_PAIR ##Would fail if the current line corresponded to a molecule with no non-zero valued descriptors!
del molID
del LINE
all_desc_ids = list(set(list(itertools.chain(*[molID2descID2Value[molID].keys() for molID in molID2descID2Value]))))
all_desc_ids.sort()
del molID
if '' == csv_file:
csv_file = re.sub('(\.%s$)' % svmlight_file.split('.')[-1] , '.csv',svmlight_file)
f_out = open(csv_file,'w')
try:
#Copied (below) from above:
header = ','.join(['molID','yValue']+['d%d' % descID for descID in all_desc_ids])
del descID
f_out.write(header+'\n')
del header
###########################
all_mol_ids = molID2responseValue.keys() ####<***N.B.: If we select molecule IDs from molID2descID2Value.keys(), we would miss molecules with no non-zero valued descriptors!***><***TO DO: Fix this *possible* problem when generating initial svmlight/csv model input files in the methods of the current class presented above!****>
#Copied (below) from above:
#####################################################################################################
#N.B.: Should ensure (i.e. to make sure selection of the same rows, e.g. for a train/test partition or when doing bootstrapping) that substances are written to the model input file in the same order irrespective of the descriptors set used for modelling!
#This will be taken care of by sorting the IDs prior to writing the corresponding entries to the output file.
#####################################################################################################
all_mol_ids.sort()
############################
for molID in all_mol_ids:
current_descID2Value = molID2descID2Value[molID]
del molID2descID2Value[molID]
for descID in all_desc_ids:
current_descID2Value[descID] += 0.0
del descID
f_out.write(','.join([str(molID),str(molID2responseValue[molID])]+['%f' % current_descID2Value[descID] for descID in all_desc_ids])+'\n')
del current_descID2Value
finally:
f_out.close()
del f_out
return csv_file
def remove_response_values_column(self,ID_responseValue_descriptors_File,ID_descriptors_File='',responseValueColumnPosition=1,columnDelimiter=','):
#d.i.p.t.r.:<DONE>
f_in = open(ID_responseValue_descriptors_File)
try:
input_lines = [LINE.replace('\n','') for LINE in f_in.readlines()]
del LINE
finally:
f_in.close()
del f_in
###
if '' == ID_descriptors_File:
ID_descriptors_File = re.sub('(\.%s$)' % ID_responseValue_descriptors_File.split('.')[-1], '_noY.%s' % ID_responseValue_descriptors_File.split('.')[-1],ID_responseValue_descriptors_File)
###<๏ฝfimโhole๏ฝ> NEW_LINE = columnDelimiter.join([LINE.split(columnDelimiter)[col_pos] for col_pos in range(0,len(LINE.split(columnDelimiter))) if not col_pos == responseValueColumnPosition])
f_out.write(NEW_LINE+'\n')
finally:
f_out.close()
del f_out
return ID_descriptors_File<๏ฝfimโend๏ฝ> |
f_out = open(ID_descriptors_File,'w')
try:
for LINE in input_lines: |
<|file_name|>reset-password.component.ts<|end_file_name|><๏ฝfimโbegin๏ฝ>import { Component } from "@angular/core";
import { emailPattern } from "../../core/utils/utils";
import { FormHandlerService } from "../../services/form-handler.service";
@Component({
selector: "bsc-reset-password",
styleUrls: [ "reset-password.component.scss" ],
templateUrl: "reset-password.component.html"
})
export class ResetPasswordComponent {
public email = "";
public emailSent = false;
public emailSending = false;
public emailPattern = emailPattern;
public isEmpty(data: string): boolean {
return FormHandlerService.isEmpty(data);
}
public sendRecoveryEmail(email: string): void {
console.error("IMPLEMENT RECOVERY EMAIL LOGIC HERE");
this.emailSending = true;
setTimeout(() => {
this.emailSending = false;
this.emailSent = true;
}, 2000);
}<๏ฝfimโhole๏ฝ><๏ฝfimโend๏ฝ> | } |
<|file_name|>bundles.config.js<|end_file_name|><๏ฝfimโbegin๏ฝ>/**
* Copyright 2018 The AMP HTML Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
const argv = require('minimist')(process.argv.slice(2));
const colors = require('ansi-colors');
const log = require('fancy-log');
const wrappers = require('./compile-wrappers');
const {VERSION: internalRuntimeVersion} = require('./internal-version');
/**
* @enum {string}
*/
const TYPES = (exports.TYPES = {
AD: '_base_ad',
MEDIA: '_base_media',
MISC: '_base_misc',
});
/**
* Used to generate top-level JS build targets
*/
exports.jsBundles = {
'polyfills.js': {
srcDir: './src/',
srcFilename: 'polyfills.js',
destDir: './build/',
minifiedDestDir: './build/',
},
'alp.max.js': {
srcDir: './ads/alp/',
srcFilename: 'install-alp.js',
destDir: './dist',
minifiedDestDir: './dist',
options: {
toName: 'alp.max.js',
includePolyfills: true,
minifiedName: 'alp.js',
},
},
'examiner.max.js': {
srcDir: './src/examiner/',
srcFilename: 'examiner.js',
destDir: './dist',
minifiedDestDir: './dist',
options: {
toName: 'examiner.max.js',
includePolyfills: true,
minifiedName: 'examiner.js',
},
},
'ww.max.js': {
srcDir: './src/web-worker/',
srcFilename: 'web-worker.js',
destDir: './dist',
minifiedDestDir: './dist',
options: {
toName: 'ww.max.js',
minifiedName: 'ww.js',
includePolyfills: true,
},
},
'integration.js': {
srcDir: './3p/',
srcFilename: 'integration.js',
destDir: './dist.3p/current',
minifiedDestDir: './dist.3p/' + internalRuntimeVersion,
options: {
minifiedName: 'f.js',
externs: ['./ads/ads.extern.js'],
include3pDirectories: true,
includePolyfills: true,
},
},
'ampcontext-lib.js': {
srcDir: './3p/',
srcFilename: 'ampcontext-lib.js',
destDir: './dist.3p/current',
minifiedDestDir: './dist.3p/' + internalRuntimeVersion,
options: {
minifiedName: 'ampcontext-v0.js',
externs: ['./ads/ads.extern.js'],
include3pDirectories: true,
includePolyfills: false,
},
},
'iframe-transport-client-lib.js': {
srcDir: './3p/',
srcFilename: 'iframe-transport-client-lib.js',
destDir: './dist.3p/current',
minifiedDestDir: './dist.3p/' + internalRuntimeVersion,
options: {
minifiedName: 'iframe-transport-client-v0.js',
externs: ['./ads/ads.extern.js'],
include3pDirectories: true,
includePolyfills: false,
},
},
'recaptcha.js': {
srcDir: './3p/',
srcFilename: 'recaptcha.js',
destDir: './dist.3p/current',
minifiedDestDir: './dist.3p/' + internalRuntimeVersion,
options: {
minifiedName: 'recaptcha.js',
externs: [],
include3pDirectories: true,
includePolyfills: true,
},
},
'amp-viewer-host.max.js': {
srcDir: './extensions/amp-viewer-integration/0.1/examples/',
srcFilename: 'amp-viewer-host.js',
destDir: './dist/v0/examples',
minifiedDestDir: './dist/v0/examples',
options: {
toName: 'amp-viewer-host.max.js',
minifiedName: 'amp-viewer-host.js',
      includePolyfills: true,
extraGlobs: ['extensions/amp-viewer-integration/**/*.js'],
skipUnknownDepsCheck: true,
},
},
'video-iframe-integration.js': {
srcDir: './src/',
srcFilename: 'video-iframe-integration.js',
destDir: './dist',
minifiedDestDir: './dist',
options: {
minifiedName: 'video-iframe-integration-v0.js',
includePolyfills: false,
},
},
'amp-story-entry-point.js': {
srcDir: './src/amp-story-player/amp-story-entry-point/',
srcFilename: 'amp-story-entry-point.js',
destDir: './dist',
minifiedDestDir: './dist',
options: {
minifiedName: 'amp-story-entry-point-v0.js',
includePolyfills: false,
},
},
'amp-story-player.js': {
srcDir: './src/amp-story-player/',
srcFilename: 'amp-story-player.js',
destDir: './dist',
minifiedDestDir: './dist',
options: {
minifiedName: 'amp-story-player-v0.js',
includePolyfills: false,
},
},
'amp-inabox-host.js': {
srcDir: './ads/inabox/',
srcFilename: 'inabox-host.js',
destDir: './dist',
minifiedDestDir: './dist',
options: {
toName: 'amp-inabox-host.js',
minifiedName: 'amp4ads-host-v0.js',
includePolyfills: false,
},
},
'amp.js': {
srcDir: './src/',
srcFilename: 'amp.js',
destDir: './dist',
minifiedDestDir: './dist',
options: {
minifiedName: 'v0.js',
includePolyfills: true,
wrapper: wrappers.mainBinary,
esmPassCompilation: argv.esm,
includeOnlyESMLevelPolyfills: argv.esm,
},
},
'amp-shadow.js': {
srcDir: './src/',
srcFilename: 'amp-shadow.js',
destDir: './dist',
minifiedDestDir: './dist',
options: {
minifiedName: 'shadow-v0.js',
includePolyfills: true,
},
},
'amp-inabox.js': {
srcDir: './src/inabox/',
srcFilename: 'amp-inabox.js',
destDir: './dist',
minifiedDestDir: './dist',
options: {
toName: 'amp-inabox.js',
minifiedName: 'amp4ads-v0.js',
includePolyfills: true,
extraGlobs: ['src/inabox/*.js', '3p/iframe-messaging-client.js'],
},
},
};
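// Illustrative shape of an entry above (hypothetical names, for reference
// only): a minimal bundle maps one source file to an unminified and a
// minified output, e.g.
//   'foo.js': {srcDir: './src/', srcFilename: 'foo.js', destDir: './dist',
//              minifiedDestDir: './dist', options: {minifiedName: 'foo-v0.js'}}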
/**
* Used to generate extension build targets
*/
exports.extensionBundles = [
{
name: 'amp-3d-gltf',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-3q-player',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-access',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-access-laterpay',
version: ['0.1', '0.2'],
latestVersion: '0.2',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-access-scroll',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-access-poool',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-accordion',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-action-macro',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-ad',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.AD,
},
{
name: 'amp-ad-custom',
version: '0.1',
latestVersion: '0.1',
type: TYPES.AD,
},
{
name: 'amp-ad-network-adsense-impl',
version: '0.1',
latestVersion: '0.1',
type: TYPES.AD,
},
{
name: 'amp-ad-network-adzerk-impl',
version: '0.1',
latestVersion: '0.1',
type: TYPES.AD,
},
{
name: 'amp-ad-network-doubleclick-impl',
version: '0.1',
latestVersion: '0.1',
type: TYPES.AD,
},
{
name: 'amp-ad-network-fake-impl',
version: '0.1',
latestVersion: '0.1',
type: TYPES.AD,
},
{
name: 'amp-ad-exit',
version: '0.1',
latestVersion: '0.1',
type: TYPES.AD,
},
{
name: 'amp-addthis',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-analytics',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-anim',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-animation',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-apester-media',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MEDIA,
},
{
name: 'amp-app-banner',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-audio',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-auto-ads',
version: '0.1',
latestVersion: '0.1',
type: TYPES.AD,
},
{
name: 'amp-autocomplete',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-auto-lightbox',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-base-carousel',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-beopinion',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-bind',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-bodymovin-animation',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-brid-player',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-delight-player',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MEDIA,
},
{
name: 'amp-brightcove',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-byside-content',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-kaltura-player',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-call-tracking',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-carousel',
version: ['0.1', '0.2'],
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-consent',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-connatix-player',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-crypto-polyfill',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-dailymotion',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-date-countdown',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-date-display',
version: ['0.1', '1.0'],
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-google-document-embed',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-dynamic-css-classes',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-embedly-card',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-experiment',
version: ['0.1', '1.0'],
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-facebook',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-facebook-comments',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-facebook-like',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-facebook-page',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-fit-text',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-fit-text',
version: '1.0',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-font',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-form',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-fx-collection',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-fx-flying-carpet',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-geo',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-gfycat',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-gist',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-gwd-animation',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-hulu',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-iframe',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-ima-video',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-image-lightbox',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-image-slider',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-imgur',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-inline-gallery',
version: '0.1',
latestVersion: '0.1',
options: {
hasCss: true,
cssBinaries: [
'amp-inline-gallery',
'amp-inline-gallery-pagination',
'amp-inline-gallery-thumbnails',
],
},
type: TYPES.MISC,
},
{
name: 'amp-inputmask',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
postPrepend: ['third_party/inputmask/bundle.js'],
},
{
name: 'amp-instagram',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-install-serviceworker',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-intersection-observer-polyfill',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-izlesene',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-jwplayer',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-lightbox',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-lightbox-gallery',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-list',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-live-list',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-loader',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-mathml',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-mega-menu',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-megaphone',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-mustache',
version: ['0.1', '0.2'],
latestVersion: '0.2',
type: TYPES.MISC,
},
{
name: 'amp-nested-menu',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-next-page',
version: ['0.1', '1.0'],
latestVersion: '1.0',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-nexxtv-player',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-o2-player',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-ooyala-player',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-pinterest',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-playbuzz',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MEDIA,
},
{
name: 'amp-reach-player',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-redbull-player',
version: '0.1',<๏ฝfimโhole๏ฝ> },
{
name: 'amp-reddit',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-riddle-quiz',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-script',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-sidebar',
version: ['0.1', '0.2'],
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-skimlinks',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-smartlinks',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-soundcloud',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-springboard-player',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-standalone',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-sticky-ad',
version: '1.0',
latestVersion: '1.0',
options: {hasCss: true},
type: TYPES.AD,
},
{
name: 'amp-story',
version: '1.0',
latestVersion: '1.0',
options: {
hasCss: true,
cssBinaries: [
'amp-story-bookend',
'amp-story-consent',
'amp-story-draggable-drawer-header',
'amp-story-hint',
'amp-story-info-dialog',
'amp-story-interactive',
'amp-story-interactive-binary-poll',
'amp-story-interactive-quiz',
'amp-story-share',
'amp-story-share-menu',
'amp-story-system-layer',
'amp-story-tooltip',
'amp-story-unsupported-browser-layer',
'amp-story-viewport-warning-layer',
],
},
type: TYPES.MISC,
},
{
name: 'amp-story-auto-ads',
version: '0.1',
latestVersion: '0.1',
options: {
hasCss: true,
cssBinaries: [
'amp-story-auto-ads-ad-badge',
'amp-story-auto-ads-attribution',
],
},
type: TYPES.MISC,
},
{
name: 'amp-story-education',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-stream-gallery',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-selector',
version: ['0.1', '1.0'],
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-web-push',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-wistia-player',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-position-observer',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-orientation-observer',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-date-picker',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
postPrepend: ['third_party/react-dates/bundle.js'],
},
{
name: 'amp-image-viewer',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-subscriptions',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-subscriptions-google',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-pan-zoom',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-recaptcha-input',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
/**
* @deprecated `amp-slides` is deprecated and will be deleted before 1.0.
* Please see {@link AmpCarousel} with `type=slides` attribute instead.
*/
{
name: 'amp-slides',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-social-share',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-social-share',
version: '1.0',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-timeago',
version: ['0.1', '1.0'],
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-truncate-text',
version: '0.1',
latestVersion: '0.1',
options: {
hasCss: true,
cssBinaries: ['amp-truncate-text', 'amp-truncate-text-shadow'],
},
type: TYPES.MISC,
},
{
name: 'amp-twitter',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-user-notification',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-vimeo',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-vine',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-viz-vega',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
postPrepend: [
'third_party/d3/d3.js',
'third_party/d3-geo-projection/d3-geo-projection.js',
'third_party/vega/vega.js',
],
},
{
name: 'amp-google-vrview-image',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-viewer-assistance',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-viewer-integration',
version: '0.1',
latestVersion: '0.1',
options: {
// The viewer integration code needs to run asap, so that viewers
// can influence document state asap. Otherwise the document may take
      // a long time to learn that it should start processing other extensions
// faster.
loadPriority: 'high',
},
type: TYPES.MISC,
},
{
name: 'amp-video',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-video-docking',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MEDIA,
},
{
name: 'amp-video-iframe',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-viqeo-player',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-vk',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-yotpo',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-youtube',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-mowplayer',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-powr-player',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-mraid',
version: '0.1',
latestVersion: '0.1',
type: TYPES.AD,
},
{
name: 'amp-link-rewriter',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-minute-media-player',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
].sort((a, b) => a.name.localeCompare(b.name));
/**
* Used to alias a version of an extension to an older deprecated version.
*/
exports.extensionAliasBundles = {
'amp-sticky-ad': {
version: '1.0',
aliasedVersion: '0.1',
},
'amp-story': {
version: '1.0',
aliasedVersion: '0.1',
},
};
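// Illustrative sketch only: how a build step might resolve an alias using this
// map (resolveAliasedVersion is hypothetical, not exported by this file):
// function resolveAliasedVersion(name, version) {
//   const alias = exports.extensionAliasBundles[name];
//   return alias && alias.version === version ? alias.aliasedVersion : version;
// }
// resolveAliasedVersion('amp-sticky-ad', '1.0'); // => '0.1'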
/**
* @param {boolean} condition
* @param {string} field
* @param {string} message
* @param {string} name
* @param {string} found
*/
function verifyBundle_(condition, field, message, name, found) {
if (!condition) {
log(
colors.red('ERROR:'),
colors.cyan(field),
message,
colors.cyan(name),
'\n' + found
);
process.exit(1);
}
}
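// For example, a bundle declared without a `version` field fails the checks
// below with output along the lines of (illustrative):
//   ERROR: version is missing from amp-foo
// followed by the offending bundle's JSON, then process.exit(1).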
exports.verifyExtensionBundles = function () {
exports.extensionBundles.forEach((bundle) => {
const bundleString = JSON.stringify(bundle, null, 2);
verifyBundle_(
'name' in bundle,
'name',
'is missing from',
'',
bundleString
);
verifyBundle_(
'version' in bundle,
'version',
'is missing from',
bundle.name,
bundleString
);
verifyBundle_(
'latestVersion' in bundle,
'latestVersion',
'is missing from',
bundle.name,
bundleString
);
const duplicates = exports.extensionBundles.filter(
(duplicate) => duplicate.name === bundle.name
);
verifyBundle_(
duplicates.every(
(duplicate) => duplicate.latestVersion === bundle.latestVersion
),
'latestVersion',
'is not the same for all versions of',
bundle.name,
JSON.stringify(duplicates, null, 2)
);
verifyBundle_(
'type' in bundle,
'type',
'is missing from',
bundle.name,
bundleString
);
const validTypes = Object.keys(TYPES).map((x) => TYPES[x]);
verifyBundle_(
validTypes.some((validType) => validType === bundle.type),
'type',
`is not one of ${validTypes.join(',')} in`,
bundle.name,
bundleString
);
});
};<๏ฝfimโend๏ฝ> | latestVersion: '0.1',
type: TYPES.MEDIA, |
<|file_name|>SensorDataManager.java<|end_file_name|><๏ฝfimโbegin๏ฝ>/*****************************************************************
SPINE - Signal Processing In-Node Environment is a framework that
allows dynamic configuration of feature extraction capabilities
of WSN nodes via an OtA protocol
Copyright (C) 2007 Telecom Italia S.p.A.

GNU Lesser General Public License

This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation,
version 2.1 of the License.

This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.

You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the
Free Software Foundation, Inc., 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.
*****************************************************************/
package logic;
import java.util.Arrays;
import java.util.Vector;
import spine.SPINEFunctionConstants;
/**
* SensorDataManager: calculate feature on sensor data.
*
* @author Alessia Salmeri : [email protected]
* @author Raffaele Gravina
*
* @version 1.0
*/
public class SensorDataManager {
int sensorCodeKey;
byte featureCode;
short windowSize;
short shiftSize;
Vector[] sensorRawValue;
Vector ch1RawValue;
Vector ch2RawValue;
Vector ch3RawValue;
Vector ch4RawValue;
/**
* Constructor of a SensorDataManager.
*
* @param sensorCodeKey is a sensor code.
* @param sensorRawValue is the set of sensor raw data.
* @param windowSize is the windows feature setup info.
* @param shiftSize is the shift feature setup info.
*
*/
public SensorDataManager(int sensorCodeKey, Vector[] sensorRawValue, short windowSize, short shiftSize) {
this.sensorCodeKey = sensorCodeKey;
this.sensorRawValue = sensorRawValue;
this.ch1RawValue = sensorRawValue[0];
this.ch2RawValue = sensorRawValue[1];
this.ch3RawValue = sensorRawValue[2];
this.ch4RawValue = sensorRawValue[3];
this.windowSize = windowSize;
this.shiftSize = shiftSize;
}
/**
* Calculate feature (RAW_DATA, MAX, MIN, RANGE, MEAN, AMPLITUDE, RMS,
* ST_DEV, TOTAL_ENERGY, VARIANCE, MODE, MEDIAN).
*
* @param featureCode is a feature code (SPINEFunctionConstants).
*
*/
public Vector[] calculateFeature(byte featureCode) {
this.featureCode = featureCode;
Vector[] sensorFeatureValue = new Vector[4];
Vector ch1FeatureValue = new Vector();
Vector ch2FeatureValue = new Vector();
Vector ch3FeatureValue = new Vector();
Vector ch4FeatureValue = new Vector();
int[] rawData;
if (windowSize <= 0 || shiftSize <= 0 || shiftSize > windowSize) {
System.out.println("WINDOW and/or SHIFT INVALID.");
}
// ch1FeatureValue
rawData = new int[ch1RawValue.size()];
for (int i = 0; i < ch1RawValue.size(); i++) {
rawData[i] = (Integer) ch1RawValue.get(i);
}
if (rawData.length < windowSize) {
System.out.println("WINDOW > rawData.lenght");
} else {
ch1FeatureValue = calculate(rawData, featureCode);
}
// ch2FeatureValue
rawData = new int[ch2RawValue.size()];
for (int i = 0; i < ch2RawValue.size(); i++) {
rawData[i] = (Integer) ch2RawValue.get(i);
}
if (rawData.length < windowSize) {
System.out.println("WINDOW > rawData.lenght");
} else {
ch2FeatureValue = calculate(rawData, featureCode);
}
// ch3FeatureValue
rawData = new int[ch3RawValue.size()];
for (int i = 0; i < ch3RawValue.size(); i++) {
rawData[i] = (Integer) ch3RawValue.get(i);
}
if (rawData.length < windowSize) {
System.out.println("WINDOW > rawData.lenght");
} else {
ch3FeatureValue = calculate(rawData, featureCode);
}
// ch4FeatureValue
rawData = new int[ch4RawValue.size()];
for (int i = 0; i < ch4RawValue.size(); i++) {
rawData[i] = (Integer) ch4RawValue.get(i);
}
if (rawData.length < windowSize) {
System.out.println("WINDOW > rawData.lenght");
} else {
ch4FeatureValue = calculate(rawData, featureCode);
}
sensorFeatureValue[0] = ch1FeatureValue;
sensorFeatureValue[1] = ch2FeatureValue;
sensorFeatureValue[2] = ch3FeatureValue;
sensorFeatureValue[3] = ch4FeatureValue;
return sensorFeatureValue;
}
private Vector calculate(int[] rawData, byte featureCode) {
int[] dataWindow = new int[windowSize];
Vector currInstance = new Vector();
int startIndex = 0;
int j = 0;
// Slide the window across the raw data, advancing by shiftSize per step,
// until a full window no longer fits.
while (startIndex + windowSize <= rawData.length) {
System.arraycopy(rawData, startIndex, dataWindow, 0, windowSize);
currInstance.add(calculate(featureCode, dataWindow));
startIndex = shiftSize * ++j;
}
return currInstance;
}
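// Worked example (illustrative): with windowSize = 4 and shiftSize = 2, a
// channel holding 10 samples produces windows starting at indices 0, 2, 4
// and 6, so the returned Vector contains 4 feature values.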
private static int calculate(byte featurecode, int[] data) {
switch (featurecode) {
case SPINEFunctionConstants.RAW_DATA:
return raw(data);
case SPINEFunctionConstants.MAX:
return max(data);
case SPINEFunctionConstants.MIN:
return min(data);
case SPINEFunctionConstants.RANGE:
return range(data);
case SPINEFunctionConstants.MEAN:
return mean(data);
case SPINEFunctionConstants.AMPLITUDE:
return amplitude(data);
case SPINEFunctionConstants.RMS:
return rms(data);
case SPINEFunctionConstants.ST_DEV:
return stDev(data);
case SPINEFunctionConstants.TOTAL_ENERGY:
return totEnergy(data);
case SPINEFunctionConstants.VARIANCE:
return variance(data);
case SPINEFunctionConstants.MODE:
return mode(data);
case SPINEFunctionConstants.MEDIAN:
return median(data);
default:
return 0;
}
}
// RAW_DATA calculate: the last raw_data in a window
private static int raw(int[] data) {
int indexLastValue = data.length - 1;
int raw = data[indexLastValue];
return raw;
}
private static int max(int[] data) {
int max = data[0];
for (int i = 1; i < data.length; i++)
if (data[i] > max)
max = data[i];
return max;
}
private static int min(int[] data) {
int min = data[0];
for (int i = 1; i < data.length; i++)
if (data[i] < min)
min = data[i];
return min;
}
private static int range(int[] data) {
int min = data[0];
int max = min;
// we don't use the methods 'max' and 'min';
// instead, to boost the alg, we can compute both using one single for
// loop ( O(n) vs O(2n) )
for (int i = 1; i < data.length; i++) {
if (data[i] < min)
min = data[i];
if (data[i] > max)
max = data[i];
}
return (max - min);
}
private static int mean(int[] data) {
double mean = 0;
for (int i = 0; i < data.length; i++)
mean += data[i];
return (int) (Math.round(mean / data.length));
}
private static int amplitude(int[] data) {
return (max(data) - mean(data));
}
private static int rms(int[] data) {
double rms = 0;
for (int i = 0; i < data.length; i++)
rms += (data[i] * data[i]);
rms /= data.length;
return (int) Math.round(Math.sqrt(rms));
}
private static int variance(int[] data) {
double var = 0, mu = 0;
int val = 0;
for (int i = 0; i < data.length; i++) {
val = data[i];
mu += val;
var += (val * val);
}
mu /= data.length;
var /= data.length;
var -= (mu * mu);
return (int) Math.round(var);
}
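// Note: variance() relies on the one-pass identity Var(X) = E[X^2] - (E[X])^2,
// accumulating the sum and the sum of squares together, then subtracting the
// squared mean at the end.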
private static int stDev(int[] data) {
return (int) (Math.round(Math.sqrt(variance(data))));
}
private static int mode(int[] data) {
int iMax = 0;
int[] orderedData = new int[data.length];
System.arraycopy(data, 0, orderedData, 0, data.length);
int[] tmp = new int[data.length];
// to boost the algorithm, we first sort the array (mergeSort takes
// O(nlogn))
Arrays.sort(orderedData);
int i = 0;
// now we look for the max number of occurences per each value
while (i < data.length - 1) {
for (int j = i + 1; j < data.length; j++)
if (orderedData[i] == orderedData[j]) {
tmp[i] = j - i + 1;
if (j == (data.length - 1))
i = data.length - 1; // exit condition
} else {
i = j;
break;
}
}
// we choose the overall max
for (i = 1; i < data.length; i++)
if (tmp[i] > tmp[iMax])
iMax = i;
return orderedData[iMax];
}
private static int median(int[] data) {
int[] sortedData = new int[data.length];
System.arraycopy(data, 0, sortedData, 0, data.length);
Arrays.sort(sortedData);
return (data.length % 2 == 0) ? (sortedData[data.length / 2] + sortedData[(data.length / 2) - 1]) / 2 : sortedData[(data.length - 1) / 2];
}
<๏ฝfimโhole๏ฝ> totEn += (data[i] * data[i]);
return (int) (totEn / data.length);
}
}<๏ฝfimโend๏ฝ> | private static int totEnergy(int[] data) {
double totEn = 0;
for (int i = 0; i < data.length; i++)
|
<|file_name|>index.js<|end_file_name|><๏ฝfimโbegin๏ฝ>const HttpStatus = require('http-status-codes');
const build = status => {
return (ctx, message) => {
ctx.status = status
ctx.body = message || {message: HttpStatus.getStatusText(status)}
return ctx
}
}
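// Minimal usage sketch (Koa-style handler assumed; not part of this module):
// const response = require('./index');
// router.get('/missing', ctx => response.notFound(ctx));
// // -> ctx.status === 404, ctx.body defaults to {message: 'Not Found'}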
module.exports = {
accepted: build(HttpStatus.ACCEPTED), // 202
badGateway: build(HttpStatus.BAD_GATEWAY), // 502
badRequest: build(HttpStatus.BAD_REQUEST), // 400
conflict: build(HttpStatus.CONFLICT), // 409
continue: build(HttpStatus.CONTINUE), // 100
created: build(HttpStatus.CREATED), // 201
expectationFailed: build(HttpStatus.EXPECTATION_FAILED), // 417
failedDependency: build(HttpStatus.FAILED_DEPENDENCY), // 424
forbidden: build(HttpStatus.FORBIDDEN), // 403
gatewayTimeout: build(HttpStatus.GATEWAY_TIMEOUT), // 504
gone: build(HttpStatus.GONE), // 410
httpVersionNotSupported: build(HttpStatus.HTTP_VERSION_NOT_SUPPORTED), // 505
imATeapot: build(HttpStatus.IM_A_TEAPOT), // 418
insufficientSpaceOnResource: build(HttpStatus.INSUFFICIENT_SPACE_ON_RESOURCE), // 419
insufficientStorage: build(HttpStatus.INSUFFICIENT_STORAGE), // 507
internalServerError: build(HttpStatus.INTERNAL_SERVER_ERROR), // 500
lengthRequired: build(HttpStatus.LENGTH_REQUIRED), // 411
locked: build(HttpStatus.LOCKED), // 423
methodFailure: build(HttpStatus.METHOD_FAILURE), // 420
methodNotAllowed: build(HttpStatus.METHOD_NOT_ALLOWED), // 405
movedPermanently: build(HttpStatus.MOVED_PERMANENTLY), // 301
movedTemporarily: build(HttpStatus.MOVED_TEMPORARILY), // 302
multiStatus: build(HttpStatus.MULTI_STATUS), // 207
multipleChoices: build(HttpStatus.MULTIPLE_CHOICES), // 300
networkAuthenticationRequired: build(HttpStatus.NETWORK_AUTHENTICATION_REQUIRED), // 511
noContent: build(HttpStatus.NO_CONTENT), // 204
nonAuthoritativeInformation: build(HttpStatus.NON_AUTHORITATIVE_INFORMATION), // 203
notAcceptable: build(HttpStatus.NOT_ACCEPTABLE), // 406
notFound: build(HttpStatus.NOT_FOUND), // 404
notImplemented: build(HttpStatus.NOT_IMPLEMENTED), // 501
notModified: build(HttpStatus.NOT_MODIFIED), // 304
ok: build(HttpStatus.OK), // 200
partialContent: build(HttpStatus.PARTIAL_CONTENT), // 206
paymentRequired: build(HttpStatus.PAYMENT_REQUIRED), // 402
permanentRedirect: build(HttpStatus.PERMANENT_REDIRECT), // 308
preconditionFailed: build(HttpStatus.PRECONDITION_FAILED), // 412
preconditionRequired: build(HttpStatus.PRECONDITION_REQUIRED), // 428
processing: build(HttpStatus.PROCESSING), // 102
proxyAuthenticationRequired: build(HttpStatus.PROXY_AUTHENTICATION_REQUIRED), // 407
requestHeaderFieldsTooLarge: build(HttpStatus.REQUEST_HEADER_FIELDS_TOO_LARGE), // 431
requestTimeout: build(HttpStatus.REQUEST_TIMEOUT), // 408
requestTooLong: build(HttpStatus.REQUEST_TOO_LONG), // 413
requestUriTooLong: build(HttpStatus.REQUEST_URI_TOO_LONG), // 414
requestedRangeNotSatisfiable: build(HttpStatus.REQUESTED_RANGE_NOT_SATISFIABLE), // 416
resetContent: build(HttpStatus.RESET_CONTENT), // 205
seeOther: build(HttpStatus.SEE_OTHER), // 303
serviceUnavailable: build(HttpStatus.SERVICE_UNAVAILABLE), // 503
switchingProtocols: build(HttpStatus.SWITCHING_PROTOCOLS), // 101
temporaryRedirect: build(HttpStatus.TEMPORARY_REDIRECT), // 307
tooManyRequests: build(HttpStatus.TOO_MANY_REQUESTS), // 429
unauthorized: build(HttpStatus.UNAUTHORIZED), // 401
unprocessableEntity: build(HttpStatus.UNPROCESSABLE_ENTITY), // 422
unsupportedMediaType: build(HttpStatus.UNSUPPORTED_MEDIA_TYPE), // 415
useProxy: build(HttpStatus.USE_PROXY) // 305<๏ฝfimโhole๏ฝ><๏ฝfimโend๏ฝ> | } |
<|file_name|>utils.js<|end_file_name|><๏ฝfimโbegin๏ฝ>"use strict";
const removeDiacritics = require('diacritics').remove;
const request = require('request');
//const pSegCases = require('../test/promiseSwitchCase.js');
var utils = {
/**
* Resolve all promises in Object via for ... in loop
* @param {object} obj - The object containing function properties => Switch cases that resolve
*/
switchCasePromiseResolver: function switchCasePromiseResolver (obj, event) {
//Promise Resolver For...In Loop - returns out to Var as Array
let i = -1;
var promisesArr = [];
//Loop through the segmented Switch Cases (test is an obj with each Switch Case as a property)
for (var ligneFn in obj) {
//console.log(ligneFn);
i++;
//resolve each switch case with the event.message.text and return resolve
promisesArr[i] = Promise.resolve( obj[ligneFn](event) );
}
/**
* Returns newly filled in Arr from loop
* @return {array} - returns array with promise status (resolve, false) in array
*/
return promisesArr;
},
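// Usage sketch (hypothetical handlers object; illustrative only):
// const settled = utils.switchCasePromiseResolver(handlers, event);
// Promise.all(settled).then(function (results) {
//   // one result per handler property, in for...in order
// });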
//////////////////
// Text Cleaners
//////////////////
cleanseText: function cleanseText (text) {
return removeDiacritics(
text.toLowerCase()
.replace(/\s\s+|[.-]/g, function (match) { return (match === "-" || /\s/.test(match)) ? " " : ""; }
).trim())
//([\uD800-\uDBFF][\uDC00-\uDFFF]) to remove emojis
},
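// Example (illustrative): cleanseText("Crème   brûlée-Time.") === "creme brulee time"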
//////////////////////////////////////////
//Format Time before SearchStop Function
/////////////////////////////////////////
timeFormatter (time) {
var timeArr = time.split('T');
var finalTime = timeArr[1].slice(0,2) + ':' + timeArr[1].slice(2,4);
return finalTime;
},
//Random integer between min (inclusive) and max (exclusive)
randNum (min, max) {
min = Math.ceil(min);
max = Math.floor(max);
return Math.floor(Math.random() * (max - min)) + min;
},
//Whitelist them domains bruh
setWhiteList(domains) {
if (!Array.isArray(domains)) {
throw "Error ... domains param MUST be an array. You passed in: " + typeof domains;
} else {
request(
{
method: 'POST',
uri: 'https://graph.facebook.com/v2.6/me/messenger_profile?access_token=' + process.env.PAGE_ACCESS_TOKEN,
headers: {
'content-type': 'application/json'
},
body: {
whitelisted_domains: domains
},
json: true
}, function (error, response, body) {
if (!error) {
request(
{
method: 'GET',
uri: 'https://graph.facebook.com/v2.6/me/messenger_profile?fields=whitelisted_domains&access_token=' + process.env.PAGE_ACCESS_TOKEN
}, function (error, response, body) {
if (!error) {
console.log('Displaying whitelisted sites:');
console.log(body);
} else if (error) {
console.error (error);
}
})
} else if (error) {
console.error(error);
}
}<๏ฝfimโhole๏ฝ> };
}
}
module.exports = utils;<๏ฝfimโend๏ฝ> | ); |
<|file_name|>helper_test.go<|end_file_name|><๏ฝfimโbegin๏ฝ>package elasticsearch_test
import (
"fmt"
"path/filepath"
"reflect"
"runtime"
"testing"
)
//https://github.com/benbjohnson/testing
// Test provides a set of assertion helpers for use in tests
type Test struct{}
// Assert fails the test if the condition is false.
func (t *Test) Assert(tb testing.TB, condition bool, msg string, v ...interface{}) {
if !condition {
_, file, line, _ := runtime.Caller(1)
fmt.Printf("\033[31m%s:%d: "+msg+"\033[39m\n\n", append([]interface{}{filepath.Base(file), line}, v...)...)
tb.FailNow()
}
}<๏ฝfimโhole๏ฝ>func (t *Test) OK(tb testing.TB, err error) {
if err != nil {
_, file, line, _ := runtime.Caller(1)
fmt.Printf("\033[31m%s:%d: unexpected error: %s\033[39m\n\n", filepath.Base(file), line, err.Error())
tb.FailNow()
}
}
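// Usage sketch (illustrative; doSomething is hypothetical):
//	var tt Test
//	got, err := doSomething()
//	tt.OK(t, err)
//	tt.Equals(t, "expected", got)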
// Equals fails the test if exp is not equal to act.
func (t *Test) Equals(tb testing.TB, exp, act interface{}) {
if !reflect.DeepEqual(exp, act) {
_, file, line, _ := runtime.Caller(1)
fmt.Printf("\033[31m%s:%d:\n\n\texp: %#v\n\n\tgot: %#v\033[39m\n\n", filepath.Base(file), line, exp, act)
tb.FailNow()
}
}<๏ฝfimโend๏ฝ> |
// OK fails the test if an err is not nil. |
<|file_name|>cursor.py<|end_file_name|><๏ฝfimโbegin๏ฝ>from .util.deb import deb
from .util.nrange import nrange
from .cell import Cell
#F,e,Cursor
from .grid import spoint
CURSOR_POS=None
def gcp(): #get cursor position
global CURSOR_POS
deb('gcp',CURSOR_POS)
return CURSOR_POS
def scp(x,y):
deb('scp',gcp(),x,y)
cxc=0 #todo, normalize in cursor...
global CURSOR_POS
CURSOR_POS=(x,y)
assert (x,y)==gcp()
#todo cpget and cpset
cpget=gcp
cpset=scp
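#usage sketch (illustrative): scp(3,4) records the position and gcp() then
#returns (3,4); cpget/cpset are the aliases intended for callers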
def cursor(HG,x,y,f,X,Y):
deb('make a cursor in the empty space around point in cell x,y',x,y)
#x,y=x-1,y-1
assert len(f)==4
#HG=_clearcursor(HG)
i=x
j=y
scp(i,j)
cxl=Cell(f[0],0,0)
cyu=Cell(f[1],0,0)
cxr=Cell(f[2],0,0)
cyd=Cell(f[3],0,0,)
HG=spoint(i-1,j,HG,cxl)
HG=spoint(i,j-1,HG,cyu)<๏ฝfimโhole๏ฝ> HG=spoint(i,j+1,HG,cyd)
return HG
def grid_cursor(HG,x,y,f,X,Y):
return cursor(HG,x,y,f,X,Y)
def _clearcursor(HG):
cp=gcp()
r1=r2=r3=r4=Cell('.',0,0)
deb('clear a cursor in the empty space around point in cell x,y',cp)
if not cp:return HG
i,j=cp
HG=spoint(i-1,j,HG,r1)
HG=spoint(i,j-1,HG,r2)
HG=spoint(i+1,j,HG,r3)
HG=spoint(i,j+1,HG,r4)
return HG<๏ฝfimโend๏ฝ> | HG=spoint(i+1,j,HG,cxr) |
<|file_name|>CLIProcessor.hpp<|end_file_name|><๏ฝfimโbegin๏ฝ>/*
* Stellarium
* Copyright (C) 2009 Fabien Chereau
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Suite 500, Boston, MA 02110-1335, USA.
*/
#ifndef CLIPROCESSOR_HPP
#define CLIPROCESSOR_HPP
#include <QStringList>
class QSettings;
class CLIProcessor
{
public:
//! Check if a QStringList has a CLI-style option in it (before the first --).
//! @param argList a list of strings, think argv
//! @param shortOpt a short-form option string, e.g. "-h"
//! @param longOpt a long-form option string, e.g. "--help"
//! @return true if the option exists in args before any element which is "--"
static bool argsGetOption(const QStringList& argList, QString shortOpt, QString longOpt);
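//! Example (illustrative): argsGetOption(args, "-h", "--help") returns true
//! for an argument list such as {"stellarium", "--help"}.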
//! Retrieve the argument to an option from a QStringList.
//! Given a list of strings, this function will extract the argument of
//! type T to an option, where the option is an element which matches
//! either the short or long forms, and the argument to that option<๏ฝfimโhole๏ฝ> //! is the following element in the list, e.g. ("--option", "arg").
//! It is also possible to extract argument to options which are
//! part of the option element, separated by the "=" character, e.g.
//! ( "--option=arg" ).
//! Type conversion is done using the QTextStream class, and as such
//! possible types which this template function may use are restricted
//! to those where there is a value operator<<() defined in the
//! QTextStream class for that type.
//! The argument list is only processed as far as the first value "--".
//! If an argument "--" is to be retrieved, it must be apecified using
//! the "--option=--" form.
//! @param argList a list of strings, think argv.
//! @param shortOpt the short form of the option, e.g. "-n".
//! @param longOpt the long form of the option, e.g. "--number".
//! @param defaultValue the default value to return if the option was
//! not found in args.
//! @exception runtime_error("no_optarg") the expected argument to the
//! option was not found.
//! @exception runtime_error("optarg_type") the expected argument to
//! the option could not be converted.
//! @return The value of the argument to the specified option which
//! occurs before the first element with the value "--". If the option
//! is not found, defaultValue is returned.
static QVariant argsGetOptionWithArg(const QStringList& argList, QString shortOpt, QString longOpt, QVariant defaultValue);
//! Check if a QStringList has a yes/no CLI-style option in it, and
//! find out the argument to that parameter.
//! e.g. option --use-foo can have parameter "yes" or "no"
//! It is also possible for the argument to take values, "1", "0";
//! "true", "false";
//! @param argList a list of strings, think argv
//! @param shortOpt a short-form option string, e.g, "-h"
//! @param longOpt a long-form option string, e.g. "--help"
//! @param defaultValue the default value to return if the option was
//! not found in args.
//! @exception runtime_error("no_optarg") the expected argument to the
//! option was not found. The longOpt value is appended in parenthesis.
//! @exception runtime_error("optarg_type") the expected argument to
//! the option could not be converted. The longOpt value is appended
//! in parentheses.
//! @return 1 if the argument to the specified option is "yes", "y",
//! "true", "on" or 1; 0 if the argument to the specified option is "no",
//! "n", "false", "off" or 0; the value of the defaultValue parameter if
//! the option was not found in the argument list before an element which
//! has the value "--".
static int argsGetYesNoOption(const QStringList& argList, QString shortOpt, QString longOpt, int defaultValue);
//! Processing of command line options which is to be done before config file is read.
//! This includes the chance to set the configuration file name. It is to be done
//! in the sub-class of the StelApp, as the sub-class may want to manage the
//! argument list, as is the case with the StelMainWindow version.
static void parseCLIArgsPreConfig(const QStringList& argList);
//! Processing of command line options which is to be done after the config file is
//! read. This gives us the chance to over-ride settings which are in the configuration
//! file.
static void parseCLIArgsPostConfig(const QStringList& argList, QSettings* conf);
};
#endif // CLIPROCESSOR_HPP<๏ฝfimโend๏ฝ> | |
<|file_name|>flat.py<|end_file_name|><๏ฝfimโbegin๏ฝ>#!/usr/bin/env python
import copy
from cStringIO import StringIO<๏ฝfimโhole๏ฝ>import gzip
import hashlib
import mimetypes
import os
import boto
from boto.s3.key import Key
from boto.s3.connection import OrdinaryCallingFormat
import app_config
GZIP_FILE_TYPES = ['.html', '.js', '.json', '.css', '.xml']
class FakeTime:
def time(self):
return 1261130520.0
# Hack to override gzip's time implementation
# See: http://stackoverflow.com/questions/264224/setting-the-gzip-timestamp-from-python
gzip.time = FakeTime()
def deploy_file(connection, src, dst, headers={}):
"""
Deploy a single file to S3, if the local version is different.
"""
bucket = connection.get_bucket(app_config.S3_BUCKET['bucket_name'])
k = bucket.get_key(dst)
s3_md5 = None
if k:
s3_md5 = k.etag.strip('"')
else:
k = Key(bucket)
k.key = dst
file_headers = copy.copy(headers)
if 'Content-Type' not in headers:
file_headers['Content-Type'] = mimetypes.guess_type(src)[0]
# Gzip file
if os.path.splitext(src)[1].lower() in GZIP_FILE_TYPES:
file_headers['Content-Encoding'] = 'gzip'
with open(src, 'rb') as f_in:
contents = f_in.read()
output = StringIO()
f_out = gzip.GzipFile(filename=dst, mode='wb', fileobj=output)
f_out.write(contents)
f_out.close()
local_md5 = hashlib.md5()
local_md5.update(output.getvalue())
local_md5 = local_md5.hexdigest()
if local_md5 == s3_md5:
print 'Skipping %s (has not changed)' % src
else:
print 'Uploading %s --> %s (gzipped)' % (src, dst)
k.set_contents_from_string(output.getvalue(), file_headers, policy='public-read')
# Non-gzip file
else:
with open(src, 'rb') as f:
local_md5 = hashlib.md5()
local_md5.update(f.read())
local_md5 = local_md5.hexdigest()
if local_md5 == s3_md5:
print 'Skipping %s (has not changed)' % src
else:
print 'Uploading %s --> %s' % (src, dst)
k.set_contents_from_filename(src, file_headers, policy='public-read')
def deploy_folder(src, dst, headers={}, ignore=[]):
"""
Deploy a folder to S3, checking each file to see if it has changed.
"""
to_deploy = []
for local_path, subdirs, filenames in os.walk(src, topdown=True):
rel_path = os.path.relpath(local_path, src)
for name in filenames:
if name.startswith('.'):
continue
src_path = os.path.join(local_path, name)
skip = False
for pattern in ignore:
if fnmatch(src_path, pattern):
skip = True
break
if skip:
continue
if rel_path == '.':
dst_path = os.path.join(dst, name)
else:
dst_path = os.path.join(dst, rel_path, name)
to_deploy.append((src_path, dst_path))
s3 = boto.connect_s3(calling_format=OrdinaryCallingFormat())
for src, dst in to_deploy:
deploy_file(s3, src, dst, headers)
def delete_folder(dst):
"""
Delete a folder from S3.
"""
s3 = boto.connect_s3(calling_format=OrdinaryCallingFormat())
bucket = s3.get_bucket(app_config.S3_BUCKET['bucket_name'])
for key in bucket.list(prefix='%s/' % dst):
print 'Deleting %s' % (key.key)
key.delete()<๏ฝfimโend๏ฝ> | from fnmatch import fnmatch |
<|file_name|>test_service_project_link.py<|end_file_name|><๏ฝfimโbegin๏ฝ>from rest_framework import test, status
from waldur_core.structure.models import CustomerRole, ProjectRole
from waldur_core.structure.tests import factories as structure_factories
from . import factories
class ServiceProjectLinkPermissionTest(test.APITransactionTestCase):
def setUp(self):
self.users = {
'owner': structure_factories.UserFactory(),
'admin': structure_factories.UserFactory(),
'manager': structure_factories.UserFactory(),
'no_role': structure_factories.UserFactory(),
'not_connected': structure_factories.UserFactory(),
}
# a single customer
self.customer = structure_factories.CustomerFactory()
self.customer.add_user(self.users['owner'], CustomerRole.OWNER)
# that has 3 users connected: admin, manager
self.connected_project = structure_factories.ProjectFactory(customer=self.customer)
self.connected_project.add_user(self.users['admin'], ProjectRole.ADMINISTRATOR)
self.connected_project.add_user(self.users['manager'], ProjectRole.MANAGER)
# has defined a service and connected service to a project
self.service = factories.OpenStackServiceFactory(customer=self.customer)
self.service_project_link = factories.OpenStackServiceProjectLinkFactory(
project=self.connected_project,
service=self.service)
# the customer also has another project with users but without a permission link
self.not_connected_project = structure_factories.ProjectFactory(customer=self.customer)
self.not_connected_project.add_user(self.users['not_connected'], ProjectRole.ADMINISTRATOR)
self.not_connected_project.save()
self.url = factories.OpenStackServiceProjectLinkFactory.get_list_url()
def test_anonymous_user_cannot_grant_service_to_project(self):
response = self.client.post(self.url, self._get_valid_payload())
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_user_can_connect_service_and_project_he_owns(self):
user = self.users['owner']
self.client.force_authenticate(user=user)
service = factories.OpenStackServiceFactory(customer=self.customer)
project = structure_factories.ProjectFactory(customer=self.customer)
payload = self._get_valid_payload(service, project)
response = self.client.post(self.url, payload)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
def test_admin_cannot_connect_new_service_and_project_if_he_is_project_admin(self):
user = self.users['admin']
self.client.force_authenticate(user=user)
service = factories.OpenStackServiceFactory(customer=self.customer)
project = self.connected_project
payload = self._get_valid_payload(service, project)
response = self.client.post(self.url, payload)
# the new service should not be visible to the user
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertDictContainsSubset(
{'service': ['Invalid hyperlink - Object does not exist.']}, response.data)
<๏ฝfimโhole๏ฝ> self.client.force_authenticate(user=user)
url = factories.OpenStackServiceProjectLinkFactory.get_url(self.service_project_link)
response = self.client.delete(url)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def _get_valid_payload(self, service=None, project=None):
return {
'service': factories.OpenStackServiceFactory.get_url(service),
'project': structure_factories.ProjectFactory.get_url(project)
}<๏ฝfimโend๏ฝ> | def test_user_cannot_revoke_service_and_project_permission_if_he_is_project_manager(self):
user = self.users['manager'] |
<|file_name|>webdriver.py<|end_file_name|><๏ฝfimโbegin๏ฝ># Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from selenium.common.exceptions import WebDriverException
try:
import http.client as http_client
except ImportError:
import httplib as http_client
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.remote.webdriver import WebDriver as RemoteWebDriver
from .service import Service
from .remote_connection import SafariRemoteConnection
class WebDriver(RemoteWebDriver):
"""
Controls the SafariDriver and allows you to drive the browser.
"""
def __init__(self, port=0, executable_path="/usr/bin/safaridriver", reuse_service=False,
desired_capabilities=DesiredCapabilities.SAFARI, quiet=False,
keep_alive=True):
"""
Creates a new Safari driver instance and launches or finds a running safaridriver service.
:Args:
- port - The port on which the safaridriver service should listen for new connections. If zero, a free port will be found.
- executable_path - Path to a custom safaridriver executable to be used. If absent, /usr/bin/safaridriver is used.
- reuse_service - If True, do not spawn a safaridriver instance; instead, connect to an already-running service that was launched externally.
- desired_capabilities: Dictionary object with desired capabilities (Can be used to provide various Safari switches).
- quiet - If True, the driver's stdout and stderr are suppressed.
- keep_alive - Whether to configure SafariRemoteConnection to use
HTTP keep-alive. Defaults to True.
"""
self._reuse_service = reuse_service
self.service = Service(executable_path, port=port, quiet=quiet)
if not reuse_service:
self.service.start()
executor = SafariRemoteConnection(remote_server_addr=self.service.service_url,
keep_alive=keep_alive)
RemoteWebDriver.__init__(
self,
command_executor=executor,
desired_capabilities=desired_capabilities)
self._is_remote = False
def quit(self):
"""
Closes the browser and shuts down the SafariDriver executable
that is started when starting the SafariDriver
"""<๏ฝfimโhole๏ฝ> finally:
if not self._reuse_service:
self.service.stop()
# safaridriver extension commands. The canonical command support matrix is here:
# https://developer.apple.com/library/content/documentation/NetworkingInternetWeb/Conceptual/WebDriverEndpointDoc/Commands/Commands.html
# First available in Safari 11.1 and Safari Technology Preview 41.
def set_permission(self, permission, value):
if not isinstance(value, bool):
raise WebDriverException("Value of a session permission must be set to True or False.")
payload = {}
payload[permission] = value
self.execute("SET_PERMISSIONS", {"permissions": payload})
# First available in Safari 11.1 and Safari Technology Preview 41.
def get_permission(self, permission):
payload = self.execute("GET_PERMISSIONS")["value"]
permissions = payload["permissions"]
if not permissions:
return None
if permission not in permissions:
return None
value = permissions[permission]
if not isinstance(value, bool):
return None
return value
# First available in Safari 11.1 and Safari Technology Preview 42.
def debug(self):
self.execute("ATTACH_DEBUGGER")
self.execute_script("debugger;")<๏ฝfimโend๏ฝ> | try:
RemoteWebDriver.quit(self)
except http_client.BadStatusLine:
pass |
<|file_name|>0004_auto_20170307_0605.py<|end_file_name|><๏ฝfimโbegin๏ฝ># -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-07 06:05<๏ฝfimโhole๏ฝ>
class Migration(migrations.Migration):
dependencies = [
('news', '0003_auto_20170228_2249'),
]
operations = [
migrations.CreateModel(
name='Location',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('city', models.CharField(default='Testville', max_length=200)),
('state', models.CharField(default='Montigania', max_length=200)),
],
),
migrations.AddField(
model_name='newspaper',
name='next_paper',
field=models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='news.Newspaper'),
),
migrations.AddField(
model_name='newspaper',
name='prev_paper',
field=models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='news.Newspaper'),
),
migrations.AlterField(
model_name='newspaper',
name='date_ended',
field=models.DateField(blank=True, null=True, verbose_name='date ended'),
),
migrations.AlterUniqueTogether(
name='location',
unique_together=set([('city', 'state')]),
),
migrations.AddField(
model_name='newspaper',
name='location',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='news.Location'),
),
]<๏ฝfimโend๏ฝ> | from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion |
<|file_name|>backwards_compatibility.py<|end_file_name|><๏ฝfimโbegin๏ฝ># Copyright 2016 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from types import ModuleType
from distutils.version import StrictVersion
from neutron.plugins.ml2.drivers import type_tunnel
from neutron import version
# Some constants and verifier functions have been deprecated but are still
# used by earlier releases of neutron. In order to maintain
# backwards-compatibility with stable/mitaka this will act as a translator
# that passes constants and functions according to version number.
NEUTRON_VERSION = StrictVersion(str(version.version_info))
NEUTRON_NEWTON_VERSION = StrictVersion('9.0.0')
NEUTRON_OCATA_VERSION = StrictVersion('10.0.0')
NEUTRON_PIKE_VERSION = StrictVersion('11.0.0')
n_c = __import__('neutron.common.constants', fromlist=['common.constants'])
constants = __import__('neutron_lib.constants', fromlist=['constants'])
if NEUTRON_VERSION >= NEUTRON_NEWTON_VERSION:
from neutron.conf import common as base_config
from neutron_lib.api import validators
is_attr_set = validators.is_attr_set
validators = validators.validators
n_c_attr_names = getattr(n_c, "_mg__my_globals", None)
else:
from neutron.api.v2 import attributes
from neutron.common import config as base_config
n_c_attr_names = n_c.my_globals
is_attr_set = attributes.is_attr_set
validators = attributes.validators
setattr(constants, 'ATTR_NOT_SPECIFIED', getattr(attributes,
'ATTR_NOT_SPECIFIED'))
if NEUTRON_VERSION >= NEUTRON_OCATA_VERSION:
from neutron.db.models import agent as agent_model
from neutron.db.models import l3 as l3_models
from neutron_lib.api.definitions import portbindings
from neutron_lib.api.definitions import provider_net as providernet
from neutron_lib.api import extensions
from neutron_lib.db import model_base
from neutron_lib.plugins import directory
from neutron_lib.services import base as service_base
from neutron_lib.utils import helpers as common_utils
try:
from neutron import context
except ImportError:<๏ฝfimโhole๏ฝ> from neutron_lib import context
get_plugin = directory.get_plugin
n_c_attr_names = dir(n_c)
HasProject = model_base.HasProject
VXLAN_TUNNEL_TYPE = type_tunnel.ML2TunnelTypeDriver
Agent = agent_model.Agent
RouterPort = l3_models.RouterPort
Router = l3_models.Router
def get_context():
return context.Context()
def get_db_ref(context):
return context
def get_tunnel_session(context):
return context.session
def get_novaclient_images(nclient):
return nclient.glance
else:
from neutron.api import extensions # noqa
from neutron.common import utils as common_utils # noqa
from neutron import context
from neutron.db import agents_db
from neutron.db import api as db_api
from neutron.db import l3_db
from neutron.db import model_base # noqa
from neutron.db import models_v2
from neutron.extensions import portbindings # noqa
from neutron.extensions import providernet # noqa
from neutron import manager
from neutron.plugins.common import constants as svc_constants
from neutron.services import service_base # noqa
def get_plugin(service=None):
if service is None:
return manager.NeutronManager.get_plugin()
else:
return manager.NeutronManager.get_service_plugins().get(service)
HasProject = models_v2.HasTenant
setattr(constants, 'L3', getattr(svc_constants, 'L3_ROUTER_NAT'))
VXLAN_TUNNEL_TYPE = type_tunnel.TunnelTypeDriver
Agent = agents_db.Agent
RouterPort = l3_db.RouterPort
Router = l3_db.Router
def get_context():
return None
def get_db_ref(context):
return db_api.get_session()
def get_tunnel_session(context):
return context
def get_novaclient_images(nclient):
return nclient.images
if NEUTRON_VERSION >= NEUTRON_PIKE_VERSION:
from neutron.conf.agent import common as config
else:
from neutron.agent.common import config # noqa
core_opts = base_config.core_opts
# Bring in the union of all constants in neutron.common.constants
# and neutron_lib.constants. Handle any duplicates by using the
# values in neutron_lib.
#
# In the plugin code, replace the following imports:
# from neutron.common import constants
# from neutron_lib import constants
# with (something like this):
# from networking_cisco import backward_compatibility as bc
# Then constants are referenced as shown in this example:
# port['device_owner'] = bc.constants.DEVICE_OWNER_ROUTER_INTF
ignore = frozenset(['__builtins__', '__doc__', '__file__', '__name__',
'__package__', '__path__', '__version__'])
for attr_name in n_c_attr_names:
attr = getattr(n_c, attr_name)
if attr_name in ignore or isinstance(attr, ModuleType):
continue
else:
setattr(constants, attr_name, attr)
del n_c, ignore, attr_name, attr<๏ฝfimโend๏ฝ> | |
<|file_name|>ext.js<|end_file_name|><๏ฝfimโbegin๏ฝ>function onLoaded() {
var csInterface = new CSInterface();
var appName = csInterface.hostEnvironment.appName;
if(appName != "FLPR"){
loadJSX();
}
var appNames = ["PHXS"];
for (var i = 0; i < appNames.length; i++) {
var name = appNames[i];
if (appName.indexOf(name) >= 0) {
var btn = document.getElementById("btn_" + name);
if (btn)
btn.disabled = false;
}
}
updateThemeWithAppSkinInfo(csInterface.hostEnvironment.appSkinInfo);
// Update the color of the panel when the theme color of the product changes.
csInterface.addEventListener(CSInterface.THEME_COLOR_CHANGED_EVENT, onAppThemeColorChanged);
}
/**
* Update the theme with the AppSkinInfo retrieved from the host product.
*/
function updateThemeWithAppSkinInfo(appSkinInfo) {
//Update the background color of the panel
var panelBackgroundColor = appSkinInfo.panelBackgroundColor.color;
document.body.bgColor = toHex(panelBackgroundColor);
var styleId = "ppstyle";
var csInterface = new CSInterface();
var appName = csInterface.hostEnvironment.appName;
if(appName == "PHXS"){
addRule(styleId, "button, select, input[type=button], input[type=submit]", "border-radius:3px;");
}
if(appName == "PHXS" || appName == "PPRO" || appName == "PRLD") {
////////////////////////////////////////////////////////////////////////////////////////////////
// NOTE: Below theme related code are only suitable for Photoshop. //
// If you want to achieve same effect on other products please make your own changes here. //
////////////////////////////////////////////////////////////////////////////////////////////////
var gradientBg = "background-image: -webkit-linear-gradient(top, " + toHex(panelBackgroundColor, 40) + " , " + toHex(panelBackgroundColor, 10) + ");";
var gradientDisabledBg = "background-image: -webkit-linear-gradient(top, " + toHex(panelBackgroundColor, 15) + " , " + toHex(panelBackgroundColor, 5) + ");";
var boxShadow = "-webkit-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.4), 0 1px 1px rgba(0, 0, 0, 0.2);";
var boxActiveShadow = "-webkit-box-shadow: inset 0 1px 4px rgba(0, 0, 0, 0.6);";
var isPanelThemeLight = panelBackgroundColor.red > 127;
var fontColor, disabledFontColor;
var borderColor;
var inputBackgroundColor;
var gradientHighlightBg;
if(isPanelThemeLight) {
fontColor = "#000000;";
disabledFontColor = "color:" + toHex(panelBackgroundColor, -70) + ";";
borderColor = "border-color: " + toHex(panelBackgroundColor, -90) + ";";
inputBackgroundColor = toHex(panelBackgroundColor, 54) + ";";
gradientHighlightBg = "background-image: -webkit-linear-gradient(top, " + toHex(panelBackgroundColor, -40) + " , " + toHex(panelBackgroundColor,-50) + ");";
} else {
fontColor = "#ffffff;";
disabledFontColor = "color:" + toHex(panelBackgroundColor, 100) + ";";
borderColor = "border-color: " + toHex(panelBackgroundColor, -45) + ";";
inputBackgroundColor = toHex(panelBackgroundColor, -20) + ";";
gradientHighlightBg = "background-image: -webkit-linear-gradient(top, " + toHex(panelBackgroundColor, -20) + " , " + toHex(panelBackgroundColor, -30) + ");";
}
//Update the default text style with pp values
addRule(styleId, ".default", "font-size:" + appSkinInfo.baseFontSize + "px" + "; color:" + fontColor + "; background-color:" + toHex(panelBackgroundColor) + ";");
addRule(styleId, "button, select, input[type=text], input[type=button], input[type=submit]", borderColor);
addRule(styleId, "button, select, input[type=button], input[type=submit]", gradientBg);
addRule(styleId, "button, select, input[type=button], input[type=submit]", boxShadow);
addRule(styleId, "button:enabled:active, input[type=button]:enabled:active, input[type=submit]:enabled:active", gradientHighlightBg);
addRule(styleId, "button:enabled:active, input[type=button]:enabled:active, input[type=submit]:enabled:active", boxActiveShadow);
addRule(styleId, "[disabled]", gradientDisabledBg);
addRule(styleId, "[disabled]", disabledFontColor);
addRule(styleId, "input[type=text]", "padding:1px 3px;");
addRule(styleId, "input[type=text]", "background-color: " + inputBackgroundColor) + ";";
addRule(styleId, "input[type=text]:focus", "background-color: #ffffff;");
addRule(styleId, "input[type=text]:focus", "color: #000000;");
} else {
// For AI, ID and FL use old implementation
addRule(styleId, ".default", "font-size:" + appSkinInfo.baseFontSize + "px" + "; color:" + reverseColor(panelBackgroundColor) + "; background-color:" + toHex(panelBackgroundColor, 20));
addRule(styleId, "button", "border-color: " + toHex(panelBgColor, -50));
}
}
function addRule(stylesheetId, selector, rule) {
var stylesheet = document.getElementById(stylesheetId);
if (stylesheet) {
stylesheet = stylesheet.sheet;
if( stylesheet.addRule ){
stylesheet.addRule(selector, rule);
} else if( stylesheet.insertRule ){
stylesheet.insertRule(selector + ' { ' + rule + ' }', stylesheet.cssRules.length);
}
}
}
function reverseColor(color, delta) {
return toHex({red:Math.abs(255-color.red), green:Math.abs(255-color.green), blue:Math.abs(255-color.blue)}, delta);
}
/**
* Convert the Color object to string in hexadecimal format;
*/
function toHex(color, delta) {
function computeValue(value, delta) {
var computedValue = !isNaN(delta) ? value + delta : value;
if (computedValue < 0) {
computedValue = 0;
} else if (computedValue > 255) {
computedValue = 255;
}
computedValue = computedValue.toString(16);
return computedValue.length == 1 ? "0" + computedValue : computedValue;
}
var hex = "";
if (color) {
hex = computeValue(color.red, delta) + computeValue(color.green, delta) + computeValue(color.blue, delta);
}
return "#" + hex;
}
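// Example (illustrative): toHex({red:255, green:128, blue:0}) === "#ff8000".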
function onAppThemeColorChanged(event) {
// Get the latest HostEnvironment object from the application.
var skinInfo = JSON.parse(window.__adobe_cep__.getHostEnvironment()).appSkinInfo;
// Gets the style information (such as color info) from the skinInfo,
// and redraws all UI controls of your extension according to the style info.
updateThemeWithAppSkinInfo(skinInfo);
}
<๏ฝfimโhole๏ฝ>/**
* Load JSX file into the scripting context of the product. All the jsx files in
* folder [ExtensionRoot]/jsx will be loaded.
*/
function loadJSX() {
var csInterface = new CSInterface();
var extensionRoot = csInterface.getSystemPath(SystemPath.EXTENSION) + "/jsx/";
csInterface.evalScript('$._ext.evalFiles("' + extensionRoot + '")');
}
function evalScript(script, callback) {
new CSInterface().evalScript(script, callback);
}
function onClickButton(ppid) {
if(ppid == "FLPR"){
var jsfl = 'fl.createDocument(); fl.getDocumentDOM().addNewText({left:100, top:100, right:300, bottom:300} , "Hello Flash!" ); ';
evalScript(jsfl);
} else {
var extScript = "$._ext_" + ppid + ".run()";
evalScript(extScript);
}
}<๏ฝfimโend๏ฝ> | |
<|file_name|>MoverInterpolateComponent.cpp<|end_file_name|><๏ฝfimโbegin๏ฝ>/*
* i6engine
* Copyright (2016) Daniel Bonrath, Michael Baer, All rights reserved.
*
* This file is part of i6engine; i6engine is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "i6engine/api/components/MoverInterpolateComponent.h"
#include "i6engine/utils/Exceptions.h"
#include "i6engine/math/i6eMath.h"
#include "i6engine/core/configs/SubsystemConfig.h"
#include "i6engine/configs/NetworkChannels.h"
#include "i6engine/api/EngineController.h"
#include "i6engine/api/FrontendMessageTypes.h"
#include "i6engine/api/components/PhysicalStateComponent.h"
#include "i6engine/api/configs/ComponentConfig.h"
#include "i6engine/api/facades/NetworkFacade.h"
#include "i6engine/api/facades/ObjectFacade.h"
#include "i6engine/api/objects/GameObject.h"
namespace i6e {
namespace api {
MoverInterpolateComponent::MoverInterpolateComponent(const int64_t id, const attributeMap & params) : MoverComponent(id, params), _keyFrames(), _mode(), _openTime(2), _way(), _totalDistance(0), _currentDist(0), _currentFrame(0), _direction(true), _lock() {
_objComponentID = components::MoverInterpolateComponent;
loadParams(params);
}
MoverInterpolateComponent::~MoverInterpolateComponent() {
}
void MoverInterpolateComponent::addKeyFrame(const Vec3 & position, const Quaternion & rotation) {
boost::mutex::scoped_lock l(_lock);
_keyFrames.push_back(keyFrame(position, rotation));
}
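// Usage sketch (illustrative values): two calls such as
//   addKeyFrame(Vec3(0.0, 0.0, 0.0), Quaternion(1.0, 0.0, 0.0, 0.0));
//   addKeyFrame(Vec3(0.0, 0.0, 10.0), Quaternion(1.0, 0.0, 0.0, 0.0));
// provide the minimal two key frames that start() requires.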
void MoverInterpolateComponent::removeKeyFrame(const uint32_t id) {
boost::mutex::scoped_lock l(_lock);
_keyFrames.erase(_keyFrames.begin() + int(id));
}
void MoverInterpolateComponent::start(Vec3 & startPos) {
boost::mutex::scoped_lock l(_lock);
_started = true;
_moving = true;
_currentFrame = 0;
_totalDistance = 0;<๏ฝfimโhole๏ฝ>
_startTime = EngineController::GetSingleton().getCurrentTime();
if (_keyFrames.size() <= 1) {
ISIXE_THROW_FAILURE("MoverComponent", "You need at least two keyFrames.");
return;
}
if (_keyFrames.size() > 0) {
if (_positioning == Positioning::POSITIONING_ABSOLUTE) {
// for absolute, startPos is the first key frame
_realStartPos = _keyFrames[0].first;
}
if (_direction) {
for (size_t i = 0; i < _keyFrames.size() - 1; ++i) {
_totalDistance += (_keyFrames[i + 1].first - _keyFrames[i].first).length();
}
} else {
for (size_t i = _keyFrames.size() - 1; i > 0; --i) {
_totalDistance += (_keyFrames[i].first - _keyFrames[i - 1].first).length();
}
}
}
auto psc = _psc.get();
if (_way == Way::LINEAR) {
if (_positioning == Positioning::POSITIONING_ABSOLUTE) {
psc->setPosition(_realStartPos, 2);
}
if (_direction) {
_lastPos = _keyFrames[0].first;
} else {
_lastPos = _keyFrames.back().first;
}
} else if (_way == Way::BEZIER) {
if (_positioning == Positioning::POSITIONING_ABSOLUTE) {
psc->setPosition(_realStartPos, 2);
}
if (_direction) {
_lastPos = _keyFrames[0].first;
} else {
_lastPos = _keyFrames.back().first;
}
} else {
ISIXE_THROW_FAILURE("MoverComponent", "Unknown way.");
}
// resync
GOPtr go = getOwnerGO();
if (go != nullptr && go->getGOC(components::NetworkSenderComponent) != nullptr) {
attributeMap am = synchronize();
GameMessage::Ptr msg = boost::make_shared<GameMessage>(messages::ComponentMessageType, components::ComMoverResync, core::Method::Update, new components::Component_MoverResync_Update(go->getID(), _id, am), core::Subsystem::Object);
EngineController::GetSingletonPtr()->getNetworkFacade()->publish(OBJECT_CHANNEL, msg);
}
}
void MoverInterpolateComponent::getNewPosition(const uint64_t t, Vec3 & newPos, Quaternion & newRot) {
uint64_t timeElapsed = t;
boost::mutex::scoped_lock l(_lock);
double tt = 0;
if (_mode == Mode::NSTATE_LOOP) {
timeElapsed %= _duration;
tt = double(timeElapsed) / _duration;
} else if (_mode == Mode::TWOSTATE_TOGGLE) {
timeElapsed %= (2 * _duration);
tt = 1 - double(timeElapsed - _duration) / _duration;
} else if (_mode == Mode::TWOSTATE_OPENTIME) {
timeElapsed %= (2 * _duration + _openTime);
if (timeElapsed < _duration) {
tt = double(timeElapsed) / _duration;
} else if (timeElapsed < _duration + _openTime) {
tt = 1;
} else {
tt = 1 - double(timeElapsed - _duration - _openTime) / _duration;
}
} else if (_mode == Mode::ONCE) {
timeElapsed %= _duration;
tt = double(timeElapsed) / _duration;
if (t > _duration) {
stop();
return;
}
}
switch (_way) {
case Way::LINEAR: {
// TODO (Michael): optimize so that not everything has to be recomputed each call. Maybe some precalc? Or additionally store distance vectors?
// total distance covered after this time
double traveled = _totalDistance * tt;
// the point normally lies between these 2 frames
keyFrame last;
keyFrame next;
if (_direction) {
last = _keyFrames[_currentFrame];
next = _keyFrames[(_currentFrame + 1) % _keyFrames.size()];
} else {
last = _keyFrames[_currentFrame];
if (_currentFrame == 0) {
next = _keyFrames.back();
} else {
next = _keyFrames[(_currentFrame - 1)];
}
}
// this part of the distance between these two frames should be traveled
double part = (traveled - _currentDist) / (next.first - last.first).length();
if (_currentDist > traveled) {
_currentFrame = 0;
_currentDist = 0;
part = (traveled - _currentDist) / (next.first - last.first).length();
if (_direction) {
last = _keyFrames[0];
next = _keyFrames[1];
} else {
last = _keyFrames.back();
next = _keyFrames[_keyFrames.size() - 2];
}
}
// point is further than next frame
while (part >= 1) {
// go to next frames
if (_direction) {
_currentFrame++;
_currentFrame %= _keyFrames.size();
_currentDist += (next.first - last.first).length();
last = next;
next = _keyFrames[(_currentFrame + 1) % _keyFrames.size()];
} else {
_currentFrame--;
if (_currentFrame == UINT32_MAX) {
_currentFrame = _keyFrames.size() - 1;
}
_currentDist += (next.first - last.first).length();
last = next;
if (_currentFrame == 0) {
next = _keyFrames.back();
} else {
next = _keyFrames[(_currentFrame - 1)];
}
}
part = (traveled - _currentDist) / (next.first - last.first).length();
}
// calc actual Position
newPos = last.first + (next.first - last.first) * part;
// calc actual rotation
double omega = acos(dotProduct(last.second, next.second));
newRot = (last.second * sin((1 - part) * omega) + next.second * sin(part * omega)) / sin(omega);
break;
}
case Way::BEZIER: {
double fak1 = 1, fak2 = 1;
uint32_t n = uint32_t(_keyFrames.size());
for (uint32_t i = 0; i < n; i++) {
fak1 *= (1 - tt);
}
for (uint32_t i = 0; i < n + 1; ++i) {
newPos += _keyFrames[i % n].first * fak1 * fak2 * double(math::binom(n, i));
fak1 /= (1 - tt);
fak2 *= tt;
}
break;
}
default: {
ISIXE_THROW_FAILURE("MoverLinearComponent", "Invalid way.");
return;
}
} /* switch _way */
}
void MoverInterpolateComponent::News(const GameMessage::Ptr & msg) {
uint16_t type = msg->getSubtype();
if (type == api::components::ComMoverResync) {
attributeMap am = static_cast<components::Component_MoverResync_Update *>(msg->getContent())->attMap;
stop();
{
boost::mutex::scoped_lock l(_lock);
_keyFrames.clear();
}
loadParams(am);
Vec3 x(am, "realCenterPos");
start(x);
} else {
Component::News(msg);
}
}
void MoverInterpolateComponent::loadParams(const attributeMap & params) {
MoverComponent::loadParams(params);
parseAttribute<true>(params, "mode", _mode);
parseAttribute<true>(params, "way", _way);
parseAttribute<true>(params, "direction", _direction);
if (_mode == Mode::TWOSTATE_OPENTIME) {
parseAttribute<true>(params, "openTime", _openTime);
}
uint32_t frames;
parseAttribute<true>(params, "keyframes", frames);
for (uint32_t i = 0; i < frames; ++i) {
Vec3 pos;
Quaternion rot;
parseAttribute<true>(params, std::string("keyframe_") + std::to_string(i) + "_pos", pos);
parseAttribute<true>(params, std::string("keyframe_") + std::to_string(i) + "_rot", rot);
addKeyFrame(pos, rot);
}
if (_direction) {
_lastPos = _keyFrames[0].first;
} else {
_lastPos = _keyFrames.back().first;
}
}
attributeMap MoverInterpolateComponent::synchronize() const {
attributeMap params = MoverComponent::synchronize();
// general attributes
writeAttribute(params, "mode", _mode);
writeAttribute(params, "way", _way);
writeAttribute(params, "direction", _direction);
writeAttribute(params, "keyframes", _keyFrames.size());
for (size_t i = 0; i < _keyFrames.size(); ++i) {
writeAttribute(params, std::string("keyframe_") + std::to_string(i) + "_pos", _keyFrames[i].first);
writeAttribute(params, std::string("keyframe_") + std::to_string(i) + "_rot", _keyFrames[i].second);
}
return params;
}
void MoverInterpolateComponent::reset() {
boost::mutex::scoped_lock l(_lock);
_currentFrame = 0;
_totalDistance = 0;
if (_keyFrames.size() <= 1) {
ISIXE_THROW_FAILURE("MoverComponent", "You need at least two keyFrames.");
return;
}
auto psc = _psc.get();
if (_way == Way::LINEAR) {
psc->setPosition(_realStartPos, 1);
_lastPos = _keyFrames[0].first;
} else if (_way == Way::BEZIER) {
psc->setPosition(_realStartPos, 1);
_lastPos = _keyFrames[0].first;
} else {
ISIXE_THROW_FAILURE("MoverComponent", "Unknown way.");
}
}
std::vector<componentOptions> MoverInterpolateComponent::getComponentOptions() {
std::vector<componentOptions> result = MoverComponent::getComponentOptions();
result.push_back(std::make_tuple(AccessState::READWRITE, "Mode", [this]() {
return boost::lexical_cast<std::string>(uint16_t(_mode));
}, [this](std::string s) {
_mode = Mode(boost::lexical_cast<uint16_t>(s));
return true;
}, "MoverInterpolateMode"));
result.push_back(std::make_tuple(AccessState::READWRITE, "Way", [this]() {
return boost::lexical_cast<std::string>(uint16_t(_way));
}, [this](std::string s) {
_way = Way(boost::lexical_cast<uint16_t>(s));
return true;
}, "MoverInterpolateWay"));
result.push_back(std::make_tuple(AccessState::READONLY, "Num. Keyframes", [this]() {
return boost::lexical_cast<std::string>(_keyFrames.size());
}, boost::function<bool(std::string)>(), "Integer"));
return result;
}
} /* namespace api */
} /* namespace i6e */<๏ฝfimโend๏ฝ> | if (_positioning == Positioning::POSITIONING_RELATIVE) {
// for absolute positioning, startPos is ignored because it has no meaning there
_realStartPos = startPos;
} |
<|file_name|>nodetool.ts<|end_file_name|><๏ฝfimโbegin๏ฝ>
import * as toolLib from 'vsts-task-tool-lib/tool';
import * as taskLib from 'vsts-task-lib/task';
import * as restm from 'typed-rest-client/RestClient';
import * as os from 'os';
import * as path from 'path';
let osPlat: string = os.platform();
let osArch: string = os.arch();
async function run() {
try {
let versionSpec = taskLib.getInput('versionSpec', true);
let checkLatest: boolean = taskLib.getBoolInput('checkLatest', false);
await getNode(versionSpec, checkLatest);
}
catch (error) {
taskLib.setResult(taskLib.TaskResult.Failed, error.message);
}
}
//
// Node versions interface
// see https://nodejs.org/dist/index.json
//
interface INodeVersion {
version: string,
files: string[]
}
//
// Basic pattern:
// if !checkLatest
// toolPath = check cache
// if !toolPath
// if version is a range
// match = query nodejs.org
// if !match
// fail
// toolPath = check cache
// if !toolPath
// download, extract, and cache
// toolPath = cacheDir
// PATH = cacheDir + PATH
//
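// Example (illustrative): a versionSpec of '8.x' with checkLatest=false first checks the
// tool cache; on a miss it queries nodejs.org, which might resolve to e.g. 'v8.11.3',
// and that version is then downloaded, extracted and cached for later builds.
//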
async function getNode(versionSpec: string, checkLatest: boolean) {
if (toolLib.isExplicitVersion(versionSpec)) {
checkLatest = false; // check latest doesn't make sense when explicit version
}
// check cache
let toolPath: string;
if (!checkLatest) {
toolPath = toolLib.findLocalTool('node', versionSpec);
}
if (!toolPath) {
let version: string;
if (toolLib.isExplicitVersion(versionSpec)) {
// version to download
version = versionSpec;
}
else {
// query nodejs.org for a matching version
version = await queryLatestMatch(versionSpec);
if (!version) {
throw new Error(`Unable to find Node version '${versionSpec}' for platform ${osPlat} and architecture ${osArch}.`);
}
// check cache
toolPath = toolLib.findLocalTool('node', version);
}
if (!toolPath) {
// download, extract, cache
toolPath = await acquireNode(version);
}
}
//
// a tool installer intimately knows details about the layout of that tool
// for example, the node binary is in the bin folder after the extract on Mac/Linux.
// layouts could change by version, by platform etc... but that's the tool installer's job
//
if (osPlat != 'win32') {
toolPath = path.join(toolPath, 'bin');
}
//
// prepend the tools path. instructs the agent to prepend for future tasks
//
toolLib.prependPath(toolPath);
}
async function queryLatestMatch(versionSpec: string): Promise<string> {
// nodejs.org offers a JSON list of versions
let dataFileName: string;
switch (osPlat) {
case "linux": dataFileName = "linux-" + osArch; break;
case "darwin": dataFileName = "osx-" + osArch + '-tar'; break;
case "win32": dataFileName = "win-" + osArch + '-exe'; break;<๏ฝfimโhole๏ฝ> let versions: string[] = [];
let dataUrl = "https://nodejs.org/dist/index.json";
let rest: restm.RestClient = new restm.RestClient('vsts-node-tool');
let nodeVersions: INodeVersion[] = (await rest.get<INodeVersion[]>(dataUrl)).result;
nodeVersions.forEach((nodeVersion:INodeVersion) => {
// ensure this version has a build for this OS and architecture
if (nodeVersion.files.indexOf(dataFileName) >= 0) {
versions.push(nodeVersion.version);
}
});
// get the latest version that matches the version spec
let version: string = toolLib.evaluateVersions(versions, versionSpec);
return version;
}
async function acquireNode(version: string): Promise<string> {
//
// Download - a tool installer intimately knows how to get the tool (and construct urls)
//
version = toolLib.cleanVersion(version);
let fileName: string = osPlat == 'win32' ? 'node-v' + version + '-win-' + os.arch() :
'node-v' + version + '-' + osPlat + '-' + os.arch();
let urlFileName: string = osPlat == 'win32' ? fileName + '.7z' :
fileName + '.tar.gz';
let downloadUrl = 'https://nodejs.org/dist/v' + version + '/' + urlFileName;
let downloadPath: string;
try {
downloadPath = await toolLib.downloadTool(downloadUrl);
} catch (err) {
if (err['httpStatusCode'] && err['httpStatusCode'] == '404') {
return await acquireNodeFromFallbackLocation(version);
}
throw err;
}
//
// Extract
//
let extPath: string;
if (osPlat == 'win32') {
taskLib.assertAgent('2.115.0');
extPath = taskLib.getVariable('Agent.TempDirectory');
if (!extPath) {
throw new Error('Expected Agent.TempDirectory to be set');
}
extPath = path.join(extPath, 'n'); // use as short a path as possible due to nested node_modules folders
extPath = await toolLib.extract7z(downloadPath, extPath);
}
else {
extPath = await toolLib.extractTar(downloadPath);
}
//
// Install into the local tool cache - node extracts with a root folder that matches the fileName downloaded
//
let toolRoot = path.join(extPath, fileName);
return await toolLib.cacheDir(toolRoot, 'node', version);
}
// For non LTS versions of Node, the files we need (for Windows) are sometimes located
// in a different folder than they normally are for other versions.
// Normally the format is similar to: https://nodejs.org/dist/v5.10.1/node-v5.10.1-win-x64.7z
// In this case, there will be two files located at:
// /dist/v5.10.1/win-x64/node.exe
// /dist/v5.10.1/win-x64/node.lib
// This method attempts to download and cache the resources from this alternative location.
// Note also that the files are normally zipped but in this case they are just an exe
// and lib file in a folder, not zipped.
async function acquireNodeFromFallbackLocation(version: string): Promise<string> {
let exeUrl: string = `https://nodejs.org/dist/v${version}/win-${os.arch()}/node.exe`;
let libUrl: string = `https://nodejs.org/dist/v${version}/win-${os.arch()}/node.lib`;
// Create temporary folder to download into
let tempDownloadFolder: string = 'temp_' + Math.floor(Math.random() * 2000000000);
let tempDir: string = path.join(taskLib.getVariable('agent.tempDirectory'), tempDownloadFolder);
taskLib.mkdirP(tempDir);
let exeDownloadPath: string = await toolLib.downloadTool(exeUrl, path.join(tempDir, "node.exe"));
let libDownloadPath: string = await toolLib.downloadTool(libUrl, path.join(tempDir, "node.lib"));
return await toolLib.cacheDir(tempDir, 'node', version);
}
run();<๏ฝfimโend๏ฝ> | default: throw new Error(`Unexpected OS '${osPlat}'`);
}
|
<|file_name|>http_cache.rs<|end_file_name|><๏ฝfimโbegin๏ฝ>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use hyper::header::{Expires, HttpDate};
use hyper::method::Method;
use hyper::status::StatusCode;
use msg::constellation_msg::TEST_PIPELINE_ID;
use net::http_cache::HttpCache;
use net_traits::request::{Destination, Request, RequestInit};
use net_traits::response::{Response, ResponseBody};
use servo_url::ServoUrl;
use std::sync::mpsc::channel;
use time;
#[test]
fn test_refreshing_resource_sets_done_chan_the_appropriate_value() {
let response_bodies = vec![ResponseBody::Receiving(vec![]),
ResponseBody::Empty,
ResponseBody::Done(vec![])];
let url = ServoUrl::parse("https://servo.org").unwrap();
let request = Request::from_init(RequestInit {
url: url.clone(),
method: Method::Get,<๏ฝfimโhole๏ฝ> pipeline_id: Some(TEST_PIPELINE_ID),
.. RequestInit::default()
});
let mut response = Response::new(url.clone());
// Expires header makes the response cacheable.
response.headers.set(Expires(HttpDate(time::now())));
response_bodies.iter().for_each(|body| {
let mut cache = HttpCache::new();
*response.body.lock().unwrap() = body.clone();
// First, store the 'normal' response.
cache.store(&request, &response);
// Second, mutate the response into a 304 response, and refresh the stored one.
response.status = Some(StatusCode::NotModified);
let mut done_chan = Some(channel());
let refreshed_response = cache.refresh(&request, response, &mut done_chan);
// Ensure a resource was found, and refreshed.
assert!(refreshed_response.is_some());
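// A body still in the Receiving state needs a done channel so the fetch can signal
// completion later; for Empty or Done bodies the refresh should clear the channel.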
match body {
ResponseBody::Receiving(_) => assert!(done_chan.is_some()),
ResponseBody::Empty | ResponseBody::Done(_) => assert!(done_chan.is_none())
}
})
}<๏ฝfimโend๏ฝ> | destination: Destination::Document,
origin: url.clone().origin(), |
<|file_name|>DataTypeUtils.java<|end_file_name|><๏ฝfimโbegin๏ฝ>/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.serialization.record.util;
import org.apache.nifi.serialization.SimpleRecordSchema;
import org.apache.nifi.serialization.record.DataType;
import org.apache.nifi.serialization.record.MapRecord;
import org.apache.nifi.serialization.record.Record;
import org.apache.nifi.serialization.record.RecordField;
import org.apache.nifi.serialization.record.RecordFieldType;
import org.apache.nifi.serialization.record.RecordSchema;
import org.apache.nifi.serialization.record.type.ArrayDataType;
import org.apache.nifi.serialization.record.type.ChoiceDataType;
import org.apache.nifi.serialization.record.type.DecimalDataType;
import org.apache.nifi.serialization.record.type.EnumDataType;
import org.apache.nifi.serialization.record.type.MapDataType;
import org.apache.nifi.serialization.record.type.RecordDataType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.InputStream;
import java.io.Reader;
import java.lang.reflect.Array;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.sql.Blob;
import java.sql.Clob;
import java.sql.Date;
import java.sql.Time;
import java.sql.Timestamp;
import java.sql.Types;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Queue;
import java.util.Set;
import java.util.TimeZone;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.function.Supplier;
import java.util.regex.Pattern;
public class DataTypeUtils {
private static final Logger logger = LoggerFactory.getLogger(DataTypeUtils.class);
// Regexes for parsing Floating-Point numbers
private static final String OptionalSign = "[\\-\\+]?";
private static final String Infinity = "(Infinity)";
private static final String NotANumber = "(NaN)";
private static final String Base10Digits = "\\d+";
private static final String Base10Decimal = "\\." + Base10Digits;
private static final String OptionalBase10Decimal = "(\\.\\d*)?";
private static final String Base10Exponent = "[eE]" + OptionalSign + Base10Digits;
private static final String OptionalBase10Exponent = "(" + Base10Exponent + ")?";
private static final String doubleRegex =
OptionalSign +
"(" +
Infinity + "|" +
NotANumber + "|" +
"(" + Base10Digits + OptionalBase10Decimal + ")" + "|" +
"(" + Base10Digits + OptionalBase10Decimal + Base10Exponent + ")" + "|" +
"(" + Base10Decimal + OptionalBase10Exponent + ")" +
")";
private static final String decimalRegex =
OptionalSign +
"(" +
"(" + Base10Digits + OptionalBase10Decimal + ")" + "|" +
"(" + Base10Digits + OptionalBase10Decimal + Base10Exponent + ")" + "|" +
"(" + Base10Decimal + OptionalBase10Exponent + ")" +
")";
private static final Pattern FLOATING_POINT_PATTERN = Pattern.compile(doubleRegex);
private static final Pattern DECIMAL_PATTERN = Pattern.compile(decimalRegex);
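// For illustration: "1", "-2.5" and "3.1e4" match both patterns, while "NaN" and
// "Infinity" match only FLOATING_POINT_PATTERN.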
private static final TimeZone gmt = TimeZone.getTimeZone("gmt");
private static final Supplier<DateFormat> DEFAULT_DATE_FORMAT = () -> getDateFormat(RecordFieldType.DATE.getDefaultFormat());
private static final Supplier<DateFormat> DEFAULT_TIME_FORMAT = () -> getDateFormat(RecordFieldType.TIME.getDefaultFormat());
private static final Supplier<DateFormat> DEFAULT_TIMESTAMP_FORMAT = () -> getDateFormat(RecordFieldType.TIMESTAMP.getDefaultFormat());
private static final int FLOAT_SIGNIFICAND_PRECISION = 24; // As specified in IEEE 754 binary32
private static final int DOUBLE_SIGNIFICAND_PRECISION = 53; // As specified in IEEE 754 binary64
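// Whole numbers with an absolute value of at most 2^24 (float) or 2^53 (double) are exactly
// representable; beyond those bounds, adjacent representable values are more than 1 apart.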
private static final Long MAX_GUARANTEED_PRECISE_WHOLE_IN_FLOAT = Double.valueOf(Math.pow(2, FLOAT_SIGNIFICAND_PRECISION)).longValue();
private static final Long MIN_GUARANTEED_PRECISE_WHOLE_IN_FLOAT = -MAX_GUARANTEED_PRECISE_WHOLE_IN_FLOAT;
private static final Long MAX_GUARANTEED_PRECISE_WHOLE_IN_DOUBLE = Double.valueOf(Math.pow(2, DOUBLE_SIGNIFICAND_PRECISION)).longValue();
private static final Long MIN_GUARANTEED_PRECISE_WHOLE_IN_DOUBLE = -MAX_GUARANTEED_PRECISE_WHOLE_IN_DOUBLE;
private static final BigInteger MAX_FLOAT_VALUE_IN_BIGINT = BigInteger.valueOf(MAX_GUARANTEED_PRECISE_WHOLE_IN_FLOAT);
private static final BigInteger MIN_FLOAT_VALUE_IN_BIGINT = BigInteger.valueOf(MIN_GUARANTEED_PRECISE_WHOLE_IN_FLOAT);
private static final BigInteger MAX_DOUBLE_VALUE_IN_BIGINT = BigInteger.valueOf(MAX_GUARANTEED_PRECISE_WHOLE_IN_DOUBLE);
private static final BigInteger MIN_DOUBLE_VALUE_IN_BIGINT = BigInteger.valueOf(MIN_GUARANTEED_PRECISE_WHOLE_IN_DOUBLE);
private static final double MAX_FLOAT_VALUE_IN_DOUBLE = Float.valueOf(Float.MAX_VALUE).doubleValue();
private static final double MIN_FLOAT_VALUE_IN_DOUBLE = -MAX_FLOAT_VALUE_IN_DOUBLE;
private static final Map<RecordFieldType, Predicate<Object>> NUMERIC_VALIDATORS = new EnumMap<>(RecordFieldType.class);
static {
NUMERIC_VALIDATORS.put(RecordFieldType.BIGINT, value -> value instanceof BigInteger);
NUMERIC_VALIDATORS.put(RecordFieldType.LONG, value -> value instanceof Long);
NUMERIC_VALIDATORS.put(RecordFieldType.INT, value -> value instanceof Integer);
NUMERIC_VALIDATORS.put(RecordFieldType.BYTE, value -> value instanceof Byte);
NUMERIC_VALIDATORS.put(RecordFieldType.SHORT, value -> value instanceof Short);
NUMERIC_VALIDATORS.put(RecordFieldType.DOUBLE, value -> value instanceof Double);
NUMERIC_VALIDATORS.put(RecordFieldType.FLOAT, value -> value instanceof Float);
NUMERIC_VALIDATORS.put(RecordFieldType.DECIMAL, value -> value instanceof BigDecimal);
}
public static Object convertType(final Object value, final DataType dataType, final String fieldName) {
return convertType(value, dataType, fieldName, StandardCharsets.UTF_8);
}
public static Object convertType(final Object value, final DataType dataType, final String fieldName, final Charset charset) {
return convertType(value, dataType, DEFAULT_DATE_FORMAT, DEFAULT_TIME_FORMAT, DEFAULT_TIMESTAMP_FORMAT, fieldName, charset);
}
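// Illustrative usage: converting a raw String into the value for an INT field
// (assumes the value is parseable; otherwise a NumberFormatException propagates):
//   Object converted = DataTypeUtils.convertType("42", RecordFieldType.INT.getDataType(), "myField");
//   // converted is an Integer holding 42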
public static DateFormat getDateFormat(final RecordFieldType fieldType, final Supplier<DateFormat> dateFormat,
final Supplier<DateFormat> timeFormat, final Supplier<DateFormat> timestampFormat) {
switch (fieldType) {
case DATE:
return dateFormat.get();
case TIME:
return timeFormat.get();
case TIMESTAMP:
return timestampFormat.get();
}
return null;
}
public static Object convertType(final Object value, final DataType dataType, final Supplier<DateFormat> dateFormat, final Supplier<DateFormat> timeFormat,
final Supplier<DateFormat> timestampFormat, final String fieldName) {
return convertType(value, dataType, dateFormat, timeFormat, timestampFormat, fieldName, StandardCharsets.UTF_8);
}
public static Object convertType(final Object value, final DataType dataType, final Supplier<DateFormat> dateFormat, final Supplier<DateFormat> timeFormat,
final Supplier<DateFormat> timestampFormat, final String fieldName, final Charset charset) {
if (value == null) {
return null;
}
switch (dataType.getFieldType()) {
case BIGINT:
return toBigInt(value, fieldName);
case BOOLEAN:
return toBoolean(value, fieldName);
case BYTE:
return toByte(value, fieldName);
case CHAR:
return toCharacter(value, fieldName);
case DATE:
return toDate(value, dateFormat, fieldName);
case DECIMAL:
return toBigDecimal(value, fieldName);
case DOUBLE:
return toDouble(value, fieldName);
case FLOAT:
return toFloat(value, fieldName);
case INT:
return toInteger(value, fieldName);
case LONG:
return toLong(value, fieldName);
case SHORT:
return toShort(value, fieldName);
case ENUM:
return toEnum(value, (EnumDataType) dataType, fieldName);
case STRING:
return toString(value, () -> getDateFormat(dataType.getFieldType(), dateFormat, timeFormat, timestampFormat), charset);
case TIME:
return toTime(value, timeFormat, fieldName);
case TIMESTAMP:
return toTimestamp(value, timestampFormat, fieldName);
case ARRAY:
return toArray(value, fieldName, ((ArrayDataType)dataType).getElementType(), charset);
case MAP:
return toMap(value, fieldName);
case RECORD:
final RecordDataType recordType = (RecordDataType) dataType;
final RecordSchema childSchema = recordType.getChildSchema();
return toRecord(value, childSchema, fieldName, charset);
case CHOICE: {
final ChoiceDataType choiceDataType = (ChoiceDataType) dataType;
final DataType chosenDataType = chooseDataType(value, choiceDataType);
if (chosenDataType == null) {
throw new IllegalTypeConversionException("Cannot convert value [" + value + "] of type " + value.getClass()
+ " for field " + fieldName + " to any of the following available Sub-Types for a Choice: " + choiceDataType.getPossibleSubTypes());
}
return convertType(value, chosenDataType, fieldName, charset);
}
}
return null;
}
public static boolean isCompatibleDataType(final Object value, final DataType dataType) {
switch (dataType.getFieldType()) {
case ARRAY:
return isArrayTypeCompatible(value, ((ArrayDataType) dataType).getElementType());
case BIGINT:
return isBigIntTypeCompatible(value);
case BOOLEAN:
return isBooleanTypeCompatible(value);
case BYTE:
return isByteTypeCompatible(value);
case CHAR:
return isCharacterTypeCompatible(value);
case DATE:
return isDateTypeCompatible(value, dataType.getFormat());
case DECIMAL:
return isDecimalTypeCompatible(value);
case DOUBLE:
return isDoubleTypeCompatible(value);
case FLOAT:
return isFloatTypeCompatible(value);
case INT:
return isIntegerTypeCompatible(value);
case LONG:
return isLongTypeCompatible(value);
case RECORD: {
final RecordSchema schema = ((RecordDataType) dataType).getChildSchema();
return isRecordTypeCompatible(schema, value);
}
case SHORT:
return isShortTypeCompatible(value);
case TIME:
return isTimeTypeCompatible(value, dataType.getFormat());
case TIMESTAMP:
return isTimestampTypeCompatible(value, dataType.getFormat());
case STRING:
return isStringTypeCompatible(value);
case ENUM:
return isEnumTypeCompatible(value, (EnumDataType) dataType);
case MAP:
return isMapTypeCompatible(value);
case CHOICE: {
final DataType chosenDataType = chooseDataType(value, (ChoiceDataType) dataType);
return chosenDataType != null;
}
}
return false;
}
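/**
 * Chooses the sub-type of a CHOICE that best fits the given value. Nested CHOICE
 * sub-types are flattened breadth-first, all compatible simple types are collected,
 * and when several match the most suitable one is picked (falling back to the first).
 */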
public static DataType chooseDataType(final Object value, final ChoiceDataType choiceType) {
Queue<DataType> possibleSubTypes = new LinkedList<>(choiceType.getPossibleSubTypes());
List<DataType> compatibleSimpleSubTypes = new ArrayList<>();
DataType subType;
while ((subType = possibleSubTypes.poll()) != null) {
if (subType instanceof ChoiceDataType) {
possibleSubTypes.addAll(((ChoiceDataType) subType).getPossibleSubTypes());
} else {
if (isCompatibleDataType(value, subType)) {
compatibleSimpleSubTypes.add(subType);
}
}
}
int nrOfCompatibleSimpleSubTypes = compatibleSimpleSubTypes.size();
final DataType chosenSimpleType;
if (nrOfCompatibleSimpleSubTypes == 0) {
chosenSimpleType = null;
} else if (nrOfCompatibleSimpleSubTypes == 1) {
chosenSimpleType = compatibleSimpleSubTypes.get(0);
} else {
chosenSimpleType = findMostSuitableType(value, compatibleSimpleSubTypes, Function.identity())
.orElse(compatibleSimpleSubTypes.get(0));
}
return chosenSimpleType;
}
public static <T> Optional<T> findMostSuitableType(Object value, List<T> types, Function<T, DataType> dataTypeMapper) {
if (value instanceof String) {
return findMostSuitableTypeByStringValue((String) value, types, dataTypeMapper);
} else {
DataType inferredDataType = inferDataType(value, null);
if (inferredDataType != null && !inferredDataType.getFieldType().equals(RecordFieldType.STRING)) {
for (T type : types) {
if (inferredDataType.equals(dataTypeMapper.apply(type))) {
return Optional.of(type);
}
}
for (T type : types) {
if (getWiderType(dataTypeMapper.apply(type), inferredDataType).isPresent()) {
return Optional.of(type);
}
}
}
}
return Optional.empty();
}
public static <T> Optional<T> findMostSuitableTypeByStringValue(String valueAsString, List<T> types, Function<T, DataType> dataTypeMapper) {
// Sorting based on the RecordFieldType enum ordering looks appropriate here as we want simpler types
// first and the enum's ordering seems to reflect that
Collections.sort(types, Comparator.comparing(type -> dataTypeMapper.apply(type).getFieldType()));
for (T type : types) {
try {
if (isCompatibleDataType(valueAsString, dataTypeMapper.apply(type))) {
return Optional.of(type);
}
} catch (Exception e) {
logger.error("Exception thrown while checking if '" + valueAsString + "' is compatible with '" + type + "'", e);
}
}
return Optional.empty();
}
public static Record toRecord(final Object value, final RecordSchema recordSchema, final String fieldName) {
return toRecord(value, recordSchema, fieldName, StandardCharsets.UTF_8);
}
public static Record toRecord(final Object value, final RecordSchema recordSchema, final String fieldName, final Charset charset) {
if (value == null) {
return null;
}
if (value instanceof Record) {
return ((Record) value);
}
if (value instanceof Map) {
if (recordSchema == null) {
throw new IllegalTypeConversionException("Cannot convert value [" + value + "] of type " + value.getClass()
+ " to Record for field " + fieldName + " because the value is a Map but no Record Schema was provided");
}
final Map<?, ?> map = (Map<?, ?>) value;
final Map<String, Object> coercedValues = new LinkedHashMap<>();
for (final Map.Entry<?, ?> entry : map.entrySet()) {
final Object keyValue = entry.getKey();
if (keyValue == null) {
continue;
}
final String key = keyValue.toString();
final Optional<DataType> desiredTypeOption = recordSchema.getDataType(key);
if (!desiredTypeOption.isPresent()) {
continue;
}
final Object rawValue = entry.getValue();
final Object coercedValue = convertType(rawValue, desiredTypeOption.get(), fieldName, charset);
coercedValues.put(key, coercedValue);
}
return new MapRecord(recordSchema, coercedValues);
}
throw new IllegalTypeConversionException("Cannot convert value [" + value + "] of type " + value.getClass() + " to Record for field " + fieldName);
}
public static Record toRecord(final Object value, final String fieldName) {
return toRecord(value, fieldName, StandardCharsets.UTF_8);
}
public static RecordSchema inferSchema(final Map<String, Object> values, final String fieldName, final Charset charset) {
if (values == null) {
return null;
}
final List<RecordField> inferredFieldTypes = new ArrayList<>();
for (final Map.Entry<?, ?> entry : values.entrySet()) {
final Object keyValue = entry.getKey();
if (keyValue == null) {
continue;
}
final String key = keyValue.toString();
final Object rawValue = entry.getValue();
final DataType inferredDataType = inferDataType(rawValue, RecordFieldType.STRING.getDataType());
inferredFieldTypes.add(new RecordField(key, inferredDataType, true));
}
return new SimpleRecordSchema(inferredFieldTypes);
}
public static Record toRecord(final Object value, final String fieldName, final Charset charset) {
if (value == null) {
return null;
}
if (value instanceof Record) {
return ((Record) value);
}
final List<RecordField> inferredFieldTypes = new ArrayList<>();
if (value instanceof Map) {
final Map<?, ?> map = (Map<?, ?>) value;
final Map<String, Object> coercedValues = new LinkedHashMap<>();
for (final Map.Entry<?, ?> entry : map.entrySet()) {
final Object keyValue = entry.getKey();
if (keyValue == null) {
continue;
}
final String key = keyValue.toString();
final Object rawValue = entry.getValue();
final DataType inferredDataType = inferDataType(rawValue, RecordFieldType.STRING.getDataType());
final RecordField recordField = new RecordField(key, inferredDataType, true);
inferredFieldTypes.add(recordField);
final Object coercedValue = convertType(rawValue, inferredDataType, fieldName, charset);
coercedValues.put(key, coercedValue);
}
final RecordSchema inferredSchema = new SimpleRecordSchema(inferredFieldTypes);
return new MapRecord(inferredSchema, coercedValues);
}
throw new IllegalTypeConversionException("Cannot convert value [" + value + "] of type " + value.getClass() + " to Record for field " + fieldName);
}
public static DataType inferDataType(final Object value, final DataType defaultType) {
if (value == null) {
return defaultType;
}
if (value instanceof String) {
return RecordFieldType.STRING.getDataType();
}
if (value instanceof Record) {
final RecordSchema schema = ((Record) value).getSchema();
return RecordFieldType.RECORD.getRecordDataType(schema);
}
if (value instanceof Number) {
if (value instanceof Long) {
return RecordFieldType.LONG.getDataType();
}
if (value instanceof Integer) {
return RecordFieldType.INT.getDataType();
}
if (value instanceof Short) {
return RecordFieldType.SHORT.getDataType();
}
if (value instanceof Byte) {
return RecordFieldType.BYTE.getDataType();
}
if (value instanceof Float) {
return RecordFieldType.FLOAT.getDataType();
}
if (value instanceof Double) {
return RecordFieldType.DOUBLE.getDataType();
}
if (value instanceof BigInteger) {
return RecordFieldType.BIGINT.getDataType();
}
if (value instanceof BigDecimal) {
final BigDecimal bigDecimal = (BigDecimal) value;
return RecordFieldType.DECIMAL.getDecimalDataType(bigDecimal.precision(), bigDecimal.scale());
}
}
if (value instanceof Boolean) {
return RecordFieldType.BOOLEAN.getDataType();
}
if (value instanceof java.sql.Time) {
return RecordFieldType.TIME.getDataType();
}
if (value instanceof java.sql.Timestamp) {
return RecordFieldType.TIMESTAMP.getDataType();
}
if (value instanceof java.util.Date) {
return RecordFieldType.DATE.getDataType();
}
if (value instanceof Character) {
return RecordFieldType.CHAR.getDataType();
}
// A value of a Map could be either a Record or a Map type. In either case, it must have Strings as keys.
if (value instanceof Map) {
final Map<String, Object> map;
// Only transform the map if the keys aren't strings
boolean allStrings = true;
for (final Object key : ((Map<?, ?>) value).keySet()) {
if (!(key instanceof String)) {
allStrings = false;
break;
}
}
if (allStrings) {
map = (Map<String, Object>) value;
} else {
final Map<?, ?> m = (Map<?, ?>) value;
map = new HashMap<>(m.size());
m.forEach((k, v) -> map.put(k == null ? null : k.toString(), v));
}
return inferRecordDataType(map);
}
if (value.getClass().isArray()) {
DataType mergedDataType = null;
int length = Array.getLength(value);
for(int index = 0; index < length; index++) {
final DataType inferredDataType = inferDataType(Array.get(value, index), RecordFieldType.STRING.getDataType());
mergedDataType = mergeDataTypes(mergedDataType, inferredDataType);
}
if (mergedDataType == null) {
mergedDataType = RecordFieldType.STRING.getDataType();
}
return RecordFieldType.ARRAY.getArrayDataType(mergedDataType);
}
if (value instanceof Iterable) {
final Iterable<?> iterable = (Iterable<?>) value;
DataType mergedDataType = null;
for (final Object arrayValue : iterable) {
final DataType inferredDataType = inferDataType(arrayValue, RecordFieldType.STRING.getDataType());
mergedDataType = mergeDataTypes(mergedDataType, inferredDataType);
}
if (mergedDataType == null) {
mergedDataType = RecordFieldType.STRING.getDataType();
}
return RecordFieldType.ARRAY.getArrayDataType(mergedDataType);
}
return defaultType;
}
private static DataType inferRecordDataType(final Map<String, ?> map) {
final List<RecordField> fields = new ArrayList<>(map.size());
for (final Map.Entry<String, ?> entry : map.entrySet()) {
final String key = entry.getKey();
final Object value = entry.getValue();
final DataType dataType = inferDataType(value, RecordFieldType.STRING.getDataType());
final RecordField field = new RecordField(key, dataType, true);
fields.add(field);
}
final RecordSchema schema = new SimpleRecordSchema(fields);
return RecordFieldType.RECORD.getRecordDataType(schema);
}
/**
 * Checks whether the given record-structured object is compatible with the schema.
 * @param schema record schema; schema validation will not be performed if schema is null
 * @param value the record-structured object, i.e. a Record or a Map
 * @return true if the object is compatible with the schema
*/
private static boolean isRecordTypeCompatible(RecordSchema schema, Object value) {
if (value == null) {
return false;
}
if (!(value instanceof Record) && !(value instanceof Map)) {
return false;
}
if (schema == null) {
return true;
}
for (final RecordField childField : schema.getFields()) {
final Object childValue;
if (value instanceof Record) {
childValue = ((Record) value).getValue(childField);
} else {
childValue = ((Map) value).get(childField.getFieldName());
}
if (childValue == null && !childField.isNullable()) {
logger.debug("Value is not compatible with schema because field {} has a null value, which is not allowed in the schema", childField.getFieldName());
return false;
}
if (childValue == null) {
continue; // consider compatible
}
if (!isCompatibleDataType(childValue, childField.getDataType())) {
return false;
}
}
return true;
}
public static Object[] toArray(final Object value, final String fieldName, final DataType elementDataType) {
return toArray(value, fieldName, elementDataType, StandardCharsets.UTF_8);
}
public static Object[] toArray(final Object value, final String fieldName, final DataType elementDataType, final Charset charset) {
if (value == null) {
return null;
}
if (value instanceof Object[]) {
return (Object[]) value;
}
if (value instanceof String && RecordFieldType.BYTE.getDataType().equals(elementDataType)) {
byte[] src = ((String) value).getBytes(charset);
Byte[] dest = new Byte[src.length];
for (int i = 0; i < src.length; i++) {
dest[i] = src[i];
}
return dest;
}
if (value instanceof byte[]) {
byte[] src = (byte[]) value;
Byte[] dest = new Byte[src.length];
for (int i = 0; i < src.length; i++) {
dest[i] = src[i];
}
return dest;
}
if (value instanceof List) {
final List<?> list = (List<?>)value;
return list.toArray();
}
try {
if (value instanceof Blob) {
Blob blob = (Blob) value;
long rawBlobLength = blob.length();
if(rawBlobLength > Integer.MAX_VALUE) {
throw new IllegalTypeConversionException("Value of type " + value.getClass() + " too large to convert to Object Array for field " + fieldName);
}
int blobLength = (int) rawBlobLength;
byte[] src = blob.getBytes(1, blobLength);
Byte[] dest = new Byte[blobLength];
for (int i = 0; i < src.length; i++) {
dest[i] = src[i];
}
return dest;
} else {
throw new IllegalTypeConversionException("Cannot convert value [" + value + "] of type " + value.getClass() + " to Object Array for field " + fieldName);
}
} catch (IllegalTypeConversionException itce) {
throw itce;
} catch (Exception e) {
throw new IllegalTypeConversionException("Cannot convert value [" + value + "] of type " + value.getClass() + " to Object Array for field " + fieldName, e);
}
}
public static boolean isArrayTypeCompatible(final Object value, final DataType elementDataType) {
if (value == null) {
return false;
}
// Either an object array (check the element type) or a String to be converted to byte[]
if (value instanceof Object[]) {
for (Object o : ((Object[]) value)) {
// Check each element to ensure its type is the same or can be coerced (if need be)
if (!isCompatibleDataType(o, elementDataType)) {
return false;
}
}
return true;
} else {
return value instanceof String && RecordFieldType.BYTE.getDataType().equals(elementDataType);
}
}
@SuppressWarnings("unchecked")
public static Map<String, Object> toMap(final Object value, final String fieldName) {
if (value == null) {
return null;
}
if (value instanceof Map) {
final Map<?, ?> original = (Map<?, ?>) value;
boolean keysAreStrings = true;
for (final Object key : original.keySet()) {
if (!(key instanceof String)) {
keysAreStrings = false;
}
}
if (keysAreStrings) {
return (Map<String, Object>) value;
}
final Map<String, Object> transformed = new LinkedHashMap<>();
for (final Map.Entry<?, ?> entry : original.entrySet()) {
final Object key = entry.getKey();
if (key == null) {
transformed.put(null, entry.getValue());
} else {
transformed.put(key.toString(), entry.getValue());
}
}
return transformed;
}
if (value instanceof Record) {
final Record record = (Record) value;
final RecordSchema recordSchema = record.getSchema();
if (recordSchema == null) {
throw new IllegalTypeConversionException("Cannot convert value [" + value + "] of type Record to Map for field " + fieldName
+ " because Record does not have an associated Schema");
}
final Map<String, Object> map = new LinkedHashMap<>();
for (final String recordFieldName : recordSchema.getFieldNames()) {
map.put(recordFieldName, record.getValue(recordFieldName));
}
return map;
}
throw new IllegalTypeConversionException("Cannot convert value [" + value + "] of type " + value.getClass() + " to Map for field " + fieldName);
}
/**
* Creates a native Java object from a given object of a specified type. Non-scalar (complex, nested, etc.) data types are processed iteratively/recursively, such that all
* included objects are native Java objects, rather than Record API objects or implementation-specific objects.
* @param value The object to be converted
* @param dataType The type of the provided object
* @return An object representing a native Java conversion of the given input object
*/
@SuppressWarnings({"unchecked", "rawtypes"})
public static Object convertRecordFieldtoObject(final Object value, final DataType dataType) {
if (value == null) {
return null;
}
if (value instanceof Record) {
Record record = (Record) value;
RecordSchema recordSchema = record.getSchema();
if (recordSchema == null) {
throw new IllegalTypeConversionException("Cannot convert value of type Record to Map because Record does not have an associated Schema");
}
final Map<String, Object> recordMap = new LinkedHashMap<>();
for (RecordField field : recordSchema.getFields()) {
final DataType fieldDataType = field.getDataType();
final String fieldName = field.getFieldName();
Object fieldValue = record.getValue(fieldName);
if (fieldValue == null) {
recordMap.put(fieldName, null);
} else if (isScalarValue(fieldDataType, fieldValue)) {
recordMap.put(fieldName, fieldValue);
} else if (fieldDataType instanceof RecordDataType) {
Record nestedRecord = (Record) fieldValue;
recordMap.put(fieldName, convertRecordFieldtoObject(nestedRecord, fieldDataType));
} else if (fieldDataType instanceof MapDataType) {
recordMap.put(fieldName, convertRecordMapToJavaMap((Map) fieldValue, ((MapDataType)fieldDataType).getValueType()));
} else if (fieldDataType instanceof ArrayDataType) {
recordMap.put(fieldName, convertRecordArrayToJavaArray((Object[])fieldValue, ((ArrayDataType) fieldDataType).getElementType()));
} else {
throw new IllegalTypeConversionException("Cannot convert value [" + fieldValue + "] of type " + fieldDataType.toString()
+ " to Map for field " + fieldName + " because the type is not supported");
}
}
return recordMap;
} else if (value instanceof Map) {
return convertRecordMapToJavaMap((Map) value, ((MapDataType) dataType).getValueType());
} else if (dataType != null && isScalarValue(dataType, value)) {
return value;
} else if (value instanceof Object[] && dataType instanceof ArrayDataType) {
// This is likely a Map whose values are represented as an array. Return a new array with each element converted to a Java object
return convertRecordArrayToJavaArray((Object[]) value, ((ArrayDataType) dataType).getElementType());
}
throw new IllegalTypeConversionException("Cannot convert value of class " + value.getClass().getName() + " because the type is not supported");
}
public static Map<String, Object> convertRecordMapToJavaMap(final Map<String, Object> map, DataType valueDataType) {
if (map == null) {
return null;
}
Map<String, Object> resultMap = new LinkedHashMap<>();
for (Map.Entry<String, Object> entry : map.entrySet()) {
resultMap.put(entry.getKey(), convertRecordFieldtoObject(entry.getValue(), valueDataType));
}
return resultMap;
}
public static Object[] convertRecordArrayToJavaArray(final Object[] array, DataType elementDataType) {
if (array == null || array.length == 0 || isScalarValue(elementDataType, array[0])) {
return array;
} else {
// Must be an array of complex types, build an array of converted values
Object[] resultArray = new Object[array.length];
for (int i = 0; i < array.length; i++) {
resultArray[i] = convertRecordFieldtoObject(array[i], elementDataType);
}
return resultArray;
}
}
public static boolean isMapTypeCompatible(final Object value) {
return value != null && (value instanceof Map || value instanceof MapRecord);
}
public static String toString(final Object value, final Supplier<DateFormat> format) {
return toString(value, format, StandardCharsets.UTF_8);
}
public static String toString(final Object value, final Supplier<DateFormat> format, final Charset charset) {
if (value == null) {
return null;
}
if (value instanceof String) {
return (String) value;
}
if (format == null && value instanceof java.util.Date) {
return String.valueOf(((java.util.Date) value).getTime());
}
if (value instanceof java.util.Date) {
return formatDate((java.util.Date) value, format);
}
if (value instanceof byte[]) {
return new String((byte[])value, charset);
}
if (value instanceof Byte[]) {
Byte[] src = (Byte[]) value;
byte[] dest = new byte[src.length];
for (int i = 0; i < src.length; i++) {
dest[i] = src[i];
}
return new String(dest, charset);
}
if (value instanceof Object[]) {
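// assumes the array elements are Byte values representing encoded text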
Object[] o = (Object[]) value;
if (o.length > 0) {
byte[] dest = new byte[o.length];
for (int i = 0; i < o.length; i++) {
dest[i] = (byte) o[i];
}
return new String(dest, charset);
} else {
return ""; // Empty array = empty string
}
}
if (value instanceof Clob) {
Clob clob = (Clob) value;
StringBuilder sb = new StringBuilder();
char[] buffer = new char[32 * 1024]; // 32K default buffer
try (Reader reader = clob.getCharacterStream()) {
int charsRead;
while ((charsRead = reader.read(buffer)) != -1) {
sb.append(buffer, 0, charsRead);
}
return sb.toString();
} catch (Exception e) {
throw new IllegalTypeConversionException("Cannot convert value " + value + " of type " + value.getClass() + " to a valid String", e);
}
}
return value.toString();
}
private static String formatDate(final java.util.Date date, final Supplier<DateFormat> formatSupplier) {
final DateFormat dateFormat = formatSupplier.get();
if (dateFormat == null) {
return String.valueOf(date.getTime());
}
return dateFormat.format(date);
}
public static String toString(final Object value, final String format) {
return toString(value, format, StandardCharsets.UTF_8);
}
public static String toString(final Object value, final String format, final Charset charset) {
if (value == null) {
return null;
}
if (value instanceof String) {
return (String) value;
}
if (format == null && value instanceof java.util.Date) {
return String.valueOf(((java.util.Date) value).getTime());
}
if (value instanceof java.util.Date) {
// also covers java.sql.Date, java.sql.Time and java.sql.Timestamp
return getDateFormat(format).format((java.util.Date) value);
}
if (value instanceof Blob) {
Blob blob = (Blob) value;
StringBuilder sb = new StringBuilder();
byte[] buffer = new byte[32 * 1024]; // 32K default buffer
try (InputStream inStream = blob.getBinaryStream()) {
int bytesRead;
while ((bytesRead = inStream.read(buffer)) != -1) {
sb.append(new String(buffer, 0, bytesRead, charset));
}
return sb.toString();
} catch (Exception e) {
throw new IllegalTypeConversionException("Cannot convert value " + value + " of type " + value.getClass() + " to a valid String", e);
}
}
if (value instanceof Clob) {
Clob clob = (Clob) value;
StringBuilder sb = new StringBuilder();
char[] buffer = new char[32 * 1024]; // 32K default buffer
try (Reader reader = clob.getCharacterStream()) {
int charsRead;
while ((charsRead = reader.read(buffer)) != -1) {
sb.append(buffer, 0, charsRead);
}
return sb.toString();
} catch (Exception e) {
throw new IllegalTypeConversionException("Cannot convert value " + value + " of type " + value.getClass() + " to a valid String", e);
}
}
if (value instanceof Object[]) {
return Arrays.toString((Object[]) value);
}
if (value instanceof byte[]) {
return new String((byte[]) value, charset);
}
return value.toString();
}
public static boolean isStringTypeCompatible(final Object value) {
return value != null;
}
public static boolean isEnumTypeCompatible(final Object value, final EnumDataType enumType) {
return enumType.getEnums() != null && enumType.getEnums().contains(value);
}
private static Object toEnum(Object value, EnumDataType dataType, String fieldName) {
if (dataType.getEnums() != null && dataType.getEnums().contains(value)) {
return value.toString();
}
throw new IllegalTypeConversionException("Cannot convert value [" + value + "] of type " + value.getClass() + " to " + dataType.toString() + " for field " + fieldName);
}
public static java.sql.Date toDate(final Object value, final Supplier<DateFormat> format, final String fieldName) {
if (value == null) {
return null;
}
if (value instanceof Date) {
return (Date) value;
}
if (value instanceof java.util.Date) {
final java.util.Date utilDate = (java.util.Date) value;
return new Date(utilDate.getTime());
}
if (value instanceof Number) {
final long longValue = ((Number) value).longValue();
return new Date(longValue);
}
if (value instanceof String) {
try {
final String string = ((String) value).trim();
if (string.isEmpty()) {
return null;
}
if (format == null) {
return new Date(Long.parseLong(string));
}
final DateFormat dateFormat = format.get();
if (dateFormat == null) {
return new Date(Long.parseLong(string));
}
final java.util.Date utilDate = dateFormat.parse(string);
return new Date(utilDate.getTime());
} catch (final ParseException | NumberFormatException e) {
throw new IllegalTypeConversionException("Could not convert value [" + value
+ "] of type java.lang.String to Date because the value is not in the expected date format: " + format + " for field " + fieldName);
}
}
throw new IllegalTypeConversionException("Cannot convert value [" + value + "] of type " + value.getClass() + " to Date for field " + fieldName);
}
public static boolean isDateTypeCompatible(final Object value, final String format) {
if (value == null) {
return false;
}
if (value instanceof java.util.Date || value instanceof Number) {
return true;
}
if (value instanceof String) {
if (format == null) {
return isInteger((String) value);
}
try {
getDateFormat(format).parse((String) value);
return true;
} catch (final ParseException e) {
return false;
}
}
return false;
}
private static boolean isInteger(final String value) {
if (value == null || value.isEmpty()) {
return false;
}
for (int i = 0; i < value.length(); i++) {
if (!Character.isDigit(value.charAt(i))) {
return false;
}
}
return true;
}
public static Time toTime(final Object value, final Supplier<DateFormat> format, final String fieldName) {
if (value == null) {
return null;
}
if (value instanceof Time) {
return (Time) value;
}
if (value instanceof Number) {
final long longValue = ((Number) value).longValue();
return new Time(longValue);
}
if (value instanceof String) {
try {
final String string = ((String) value).trim();
if (string.isEmpty()) {
return null;
}
if (format == null) {
return new Time(Long.parseLong(string));
}
final DateFormat dateFormat = format.get();
if (dateFormat == null) {
return new Time(Long.parseLong(string));
}
final java.util.Date utilDate = dateFormat.parse(string);
return new Time(utilDate.getTime());
} catch (final ParseException e) {
throw new IllegalTypeConversionException("Could not convert value [" + value
+ "] of type java.lang.String to Time for field " + fieldName + " because the value is not in the expected date format: " + format);
}
}
throw new IllegalTypeConversionException("Cannot convert value [" + value + "] of type " + value.getClass() + " to Time for field " + fieldName);
}
public static DateFormat getDateFormat(final String format) {
if (format == null) {
return null;
}
final DateFormat df = new SimpleDateFormat(format);
df.setTimeZone(gmt);
return df;
}
public static DateFormat getDateFormat(final String format, final String timezoneID) {
if (format == null || timezoneID == null) {
return null;
}
final DateFormat df = new SimpleDateFormat(format);
df.setTimeZone(TimeZone.getTimeZone(timezoneID));
return df;
}
public static boolean isTimeTypeCompatible(final Object value, final String format) {
return isDateTypeCompatible(value, format);
}
public static Timestamp toTimestamp(final Object value, final Supplier<DateFormat> format, final String fieldName) {
if (value == null) {
return null;
}
if (value instanceof Timestamp) {
return (Timestamp) value;
}
if (value instanceof java.util.Date) {
return new Timestamp(((java.util.Date)value).getTime());
}
if (value instanceof Number) {
final long longValue = ((Number) value).longValue();
return new Timestamp(longValue);
}
if (value instanceof String) {
final String string = ((String) value).trim();
if (string.isEmpty()) {
return null;
}
try {
if (format == null) {
return new Timestamp(Long.parseLong(string));
}
final DateFormat dateFormat = format.get();
if (dateFormat == null) {
return new Timestamp(Long.parseLong(string));
}
final java.util.Date utilDate = dateFormat.parse(string);
return new Timestamp(utilDate.getTime());
} catch (final ParseException e) {
final DateFormat dateFormat = format.get();
final String formatDescription;
if (dateFormat == null) {
formatDescription = "Numeric";
} else if (dateFormat instanceof SimpleDateFormat) {
formatDescription = ((SimpleDateFormat) dateFormat).toPattern();
} else {
formatDescription = dateFormat.toString();
}
throw new IllegalTypeConversionException("Could not convert value [" + value
+ "] of type java.lang.String to Timestamp for field " + fieldName + " because the value is not in the expected date format: "
+ formatDescription);
}
}
throw new IllegalTypeConversionException("Cannot convert value [" + value + "] of type " + value.getClass() + " to Timestamp for field " + fieldName);
}
public static boolean isTimestampTypeCompatible(final Object value, final String format) {
return isDateTypeCompatible(value, format);
}
public static BigInteger toBigInt(final Object value, final String fieldName) {
if (value == null) {
return null;
}
if (value instanceof BigInteger) {
return (BigInteger) value;
}
if (value instanceof Number) {
return BigInteger.valueOf(((Number) value).longValue());
}
if (value instanceof String) {
try {
return new BigInteger((String) value);
} catch (NumberFormatException nfe) {
throw new IllegalTypeConversionException("Cannot convert value [" + value + "] of type " + value.getClass() + " to BigInteger for field " + fieldName
+ ", value is not a valid representation of BigInteger", nfe);
}
}
throw new IllegalTypeConversionException("Cannot convert value [" + value + "] of type " + value.getClass() + " to BigInteger for field " + fieldName);
}
public static boolean isBigIntTypeCompatible(final Object value) {
return isNumberTypeCompatible(value, DataTypeUtils::isIntegral);
}
public static boolean isDecimalTypeCompatible(final Object value) {
return isNumberTypeCompatible(value, DataTypeUtils::isDecimal);
}
public static Boolean toBoolean(final Object value, final String fieldName) {
if (value == null) {
return null;
}
if (value instanceof Boolean) {
return (Boolean) value;
}
if (value instanceof String) {
final String string = (String) value;
if (string.equalsIgnoreCase("true")) {
return Boolean.TRUE;
} else if (string.equalsIgnoreCase("false")) {
return Boolean.FALSE;
}
}
throw new IllegalTypeConversionException("Cannot convert value [" + value + "] of type " + value.getClass() + " to Boolean for field " + fieldName);
}
public static boolean isBooleanTypeCompatible(final Object value) {
if (value == null) {
return false;
}
if (value instanceof Boolean) {
return true;
}
if (value instanceof String) {
final String string = (String) value;
return string.equalsIgnoreCase("true") || string.equalsIgnoreCase("false");
}
return false;
}
public static BigDecimal toBigDecimal(final Object value, final String fieldName) {
if (value == null) {
return null;
}
if (value instanceof BigDecimal) {
return (BigDecimal) value;
}
if (value instanceof Number) {
final Number number = (Number) value;
if (number instanceof Byte
|| number instanceof Short
|| number instanceof Integer
|| number instanceof Long) {
return BigDecimal.valueOf(number.longValue());
}
if (number instanceof BigInteger) {
return new BigDecimal((BigInteger) number);
}
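// Converting via the decimal string form avoids the binary-representation artifacts
// that new BigDecimal(double) would introduce (e.g. 0.1 becoming 0.1000000000000000055511151231257827...)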
if (number instanceof Float) {
return new BigDecimal(Float.toString((Float) number));
}
if (number instanceof Double) {
return new BigDecimal(Double.toString((Double) number));
}
}
if (value instanceof String) {
try {
return new BigDecimal((String) value);
} catch (NumberFormatException nfe) {
throw new IllegalTypeConversionException("Cannot convert value [" + value + "] of type " + value.getClass() + " to BigDecimal for field " + fieldName
+ ", value is not a valid representation of BigDecimal", nfe);
}
}
throw new IllegalTypeConversionException("Cannot convert value [" + value + "] of type " + value.getClass() + " to BigDecimal for field " + fieldName);
}
public static Double toDouble(final Object value, final String fieldName) {
if (value == null) {
return null;
}
if (value instanceof Number) {
return ((Number) value).doubleValue();
}
if (value instanceof String) {
return Double.parseDouble((String) value);
}
throw new IllegalTypeConversionException("Cannot convert value [" + value + "] of type " + value.getClass() + " to Double for field " + fieldName);
}
public static boolean isDoubleTypeCompatible(final Object value) {
return isNumberTypeCompatible(value, DataTypeUtils::isDouble);
}
private static boolean isNumberTypeCompatible(final Object value, final Predicate<String> stringPredicate) {
if (value == null) {
return false;
}
if (value instanceof Number) {
return true;
}
if (value instanceof String) {
return stringPredicate.test((String) value);
}
return false;
}
public static Float toFloat(final Object value, final String fieldName) {
if (value == null) {
return null;
}
if (value instanceof Number) {
return ((Number) value).floatValue();
}
if (value instanceof String) {
return Float.parseFloat((String) value);
}
throw new IllegalTypeConversionException("Cannot convert value [" + value + "] of type " + value.getClass() + " to Float for field " + fieldName);
}
public static boolean isFloatTypeCompatible(final Object value) {
return isNumberTypeCompatible(value, DataTypeUtils::isFloatingPoint);
}
private static boolean isDecimal(final String value) {
if (value == null || value.isEmpty()) {
return false;
}
return DECIMAL_PATTERN.matcher(value).matches();
}
private static boolean isFloatingPoint(final String value) {
if (value == null || value.isEmpty()) {
return false;
}
if (!FLOATING_POINT_PATTERN.matcher(value).matches()) {
return false;
}
// Just to ensure that the exponents are in range, etc.
try {
Float.parseFloat(value);
} catch (final NumberFormatException nfe) {
return false;
}
return true;
}
private static boolean isDouble(final String value) {
if (value == null || value.isEmpty()) {
return false;
}
if (!FLOATING_POINT_PATTERN.matcher(value).matches()) {
return false;
}
// Just to ensure that the exponents are in range, etc.
try {
Double.parseDouble(value);
} catch (final NumberFormatException nfe) {
return false;
}
return true;
}
public static Long toLong(final Object value, final String fieldName) {
if (value == null) {
return null;
}
if (value instanceof Number) {
return ((Number) value).longValue();
}
if (value instanceof String) {
return Long.parseLong((String) value);
}
if (value instanceof java.util.Date) {
return ((java.util.Date) value).getTime();
}
throw new IllegalTypeConversionException("Cannot convert value [" + value + "] of type " + value.getClass() + " to Long for field " + fieldName);
}
public static boolean isLongTypeCompatible(final Object value) {
if (value == null) {
return false;
}
if (value instanceof Number) {
return true;
}
if (value instanceof java.util.Date) {
return true;
}
if (value instanceof String) {
return isIntegral((String) value, Long.MIN_VALUE, Long.MAX_VALUE);
}
return false;
}
/**
* Check if the value is an integral.
*/
private static boolean isIntegral(final String value) {
if (value == null || value.isEmpty()) {
return false;
}
int initialPosition = 0;
final char firstChar = value.charAt(0);
if (firstChar == '+' || firstChar == '-') {
initialPosition = 1;
if (value.length() == 1) {
return false;
}
}
for (int i = initialPosition; i < value.length(); i++) {
if (!Character.isDigit(value.charAt(i))) {
return false;
}
}
return true;
}
/**
* Check if the value is an integral within a value range.
*/
private static boolean isIntegral(final String value, final long minValue, final long maxValue) {
if (!isIntegral(value)) {
return false;
}
try {
final long longValue = Long.parseLong(value);
return longValue >= minValue && longValue <= maxValue;
} catch (final NumberFormatException nfe) {
// In case the value actually exceeds the max value of a Long
return false;
}
}
public static Integer toInteger(final Object value, final String fieldName) {
if (value == null) {
return null;
}
if (value instanceof Number) {
try {
return Math.toIntExact(((Number) value).longValue());
} catch (ArithmeticException ae) {
throw new IllegalTypeConversionException("Cannot convert value [" + value + "] of type " + value.getClass() + " to Integer for field " + fieldName
+ " as it causes an arithmetic overflow (the value is too large, e.g.)", ae);
}
}
if (value instanceof String) {
return Integer.parseInt((String) value);
}
throw new IllegalTypeConversionException("Cannot convert value [" + value + "] of type " + value.getClass() + " to Integer for field " + fieldName);
}
public static boolean isIntegerTypeCompatible(final Object value) {
return isNumberTypeCompatible(value, s -> isIntegral(s, Integer.MIN_VALUE, Integer.MAX_VALUE));
}
public static Short toShort(final Object value, final String fieldName) {
if (value == null) {
return null;
}
if (value instanceof Number) {
return ((Number) value).shortValue();
}
if (value instanceof String) {<๏ฝfimโhole๏ฝ> }
throw new IllegalTypeConversionException("Cannot convert value [" + value + "] of type " + value.getClass() + " to Short for field " + fieldName);
}
public static boolean isShortTypeCompatible(final Object value) {
return isNumberTypeCompatible(value, s -> isIntegral(s, Short.MIN_VALUE, Short.MAX_VALUE));
}
public static Byte toByte(final Object value, final String fieldName) {
if (value == null) {
return null;
}
if (value instanceof Number) {
return ((Number) value).byteValue();
}
if (value instanceof String) {
return Byte.parseByte((String) value);
}
throw new IllegalTypeConversionException("Cannot convert value [" + value + "] of type " + value.getClass() + " to Byte for field " + fieldName);
}
public static boolean isByteTypeCompatible(final Object value) {
return isNumberTypeCompatible(value, s -> isIntegral(s, Byte.MIN_VALUE, Byte.MAX_VALUE));
}
public static Character toCharacter(final Object value, final String fieldName) {
if (value == null) {
return null;
}
if (value instanceof Character) {
return ((Character) value);
}
if (value instanceof CharSequence) {
final CharSequence charSeq = (CharSequence) value;
if (charSeq.length() == 0) {
throw new IllegalTypeConversionException("Cannot convert value [" + value + "] of type " + value.getClass()
+ " to Character because it has a length of 0 for field " + fieldName);
}
return charSeq.charAt(0);
}
throw new IllegalTypeConversionException("Cannot convert value [" + value + "] of type " + value.getClass() + " to Character for field " + fieldName);
}
public static boolean isCharacterTypeCompatible(final Object value) {
return value != null && (value instanceof Character || (value instanceof CharSequence && ((CharSequence) value).length() > 0));
}
public static RecordSchema merge(final RecordSchema thisSchema, final RecordSchema otherSchema) {
if (thisSchema == null) {
return otherSchema;
}
if (otherSchema == null) {
return thisSchema;
}
if (thisSchema == otherSchema) {
return thisSchema;
}
final List<RecordField> otherFields = otherSchema.getFields();
if (otherFields.isEmpty()) {
return thisSchema;
}
final List<RecordField> thisFields = thisSchema.getFields();
if (thisFields.isEmpty()) {
return otherSchema;
}
final Map<String, Integer> fieldIndices = new HashMap<>();
final List<RecordField> fields = new ArrayList<>();
for (int i = 0; i < thisFields.size(); i++) {
final RecordField field = thisFields.get(i);
final Integer index = Integer.valueOf(i);
fieldIndices.put(field.getFieldName(), index);
for (final String alias : field.getAliases()) {
fieldIndices.put(alias, index);
}
fields.add(field);
}
for (final RecordField otherField : otherFields) {
Integer fieldIndex = fieldIndices.get(otherField.getFieldName());
// Find the field in 'thisSchema' that corresponds to 'otherField',
// if one exists.
if (fieldIndex == null) {
for (final String alias : otherField.getAliases()) {
fieldIndex = fieldIndices.get(alias);
if (fieldIndex != null) {
break;
}
}
}
// If there is no field with the same name then just add 'otherField'.
if (fieldIndex == null) {
fields.add(otherField);
continue;
}
// Merge the two fields, if necessary
final RecordField thisField = fields.get(fieldIndex);
if (isMergeRequired(thisField, otherField)) {
final RecordField mergedField = merge(thisField, otherField);
fields.set(fieldIndex, mergedField);
}
}
return new SimpleRecordSchema(fields);
}
private static boolean isMergeRequired(final RecordField thisField, final RecordField otherField) {
if (!thisField.getDataType().equals(otherField.getDataType())) {
return true;
}
if (!thisField.getAliases().equals(otherField.getAliases())) {
return true;
}
if (!Objects.equals(thisField.getDefaultValue(), otherField.getDefaultValue())) {
return true;
}
return false;
}
public static RecordField merge(final RecordField thisField, final RecordField otherField) {
final String fieldName = thisField.getFieldName();
final Set<String> aliases = new HashSet<>();
aliases.addAll(thisField.getAliases());
aliases.addAll(otherField.getAliases());
final Object defaultValue;
if (thisField.getDefaultValue() == null && otherField.getDefaultValue() != null) {
defaultValue = otherField.getDefaultValue();
} else {
defaultValue = thisField.getDefaultValue();
}
final DataType dataType = mergeDataTypes(thisField.getDataType(), otherField.getDataType());
return new RecordField(fieldName, dataType, defaultValue, aliases, thisField.isNullable() || otherField.isNullable());
}
public static DataType mergeDataTypes(final DataType thisDataType, final DataType otherDataType) {
if (thisDataType == null) {
return otherDataType;
}
if (otherDataType == null) {
return thisDataType;
}
if (thisDataType.equals(otherDataType)) {
return thisDataType;
} else {
// If one type is 'wider' than the other (such as an INT and a LONG), just use the wider type (LONG, in this case),
// rather than using a CHOICE of the two.
final Optional<DataType> widerType = getWiderType(thisDataType, otherDataType);
if (widerType.isPresent()) {
return widerType.get();
}
final Set<DataType> possibleTypes = new LinkedHashSet<>();
if (thisDataType.getFieldType() == RecordFieldType.CHOICE) {
possibleTypes.addAll(((ChoiceDataType) thisDataType).getPossibleSubTypes());
} else {
possibleTypes.add(thisDataType);
}
if (otherDataType.getFieldType() == RecordFieldType.CHOICE) {
possibleTypes.addAll(((ChoiceDataType) otherDataType).getPossibleSubTypes());
} else {
possibleTypes.add(otherDataType);
}
ArrayList<DataType> possibleChildTypes = new ArrayList<>(possibleTypes);
Collections.sort(possibleChildTypes, Comparator.comparing(DataType::getFieldType));
return RecordFieldType.CHOICE.getChoiceDataType(possibleChildTypes);
}
}
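    // Rough sketch of mergeDataTypes behavior (illustrative, not from the
    // original sources): integral types collapse to the wider one, while
    // unrelated types fall back to a CHOICE of both:
    //   mergeDataTypes(INT, LONG)                 -> LONG
    //   mergeDataTypes(INT, STRING)               -> CHOICE[INT, STRING]
    //   mergeDataTypes(CHOICE[INT, STRING], LONG) -> CHOICE[INT, LONG, STRING]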
public static Optional<DataType> getWiderType(final DataType thisDataType, final DataType otherDataType) {
final RecordFieldType thisFieldType = thisDataType.getFieldType();
final RecordFieldType otherFieldType = otherDataType.getFieldType();
final int thisIntTypeValue = getIntegerTypeValue(thisFieldType);
final int otherIntTypeValue = getIntegerTypeValue(otherFieldType);
if (thisIntTypeValue > -1 && otherIntTypeValue > -1) {
if (thisIntTypeValue > otherIntTypeValue) {
return Optional.of(thisDataType);
}
return Optional.of(otherDataType);
}
switch (thisFieldType) {
case FLOAT:
if (otherFieldType == RecordFieldType.DOUBLE) {
return Optional.of(otherDataType);
} else if (otherFieldType == RecordFieldType.DECIMAL) {
return Optional.of(otherDataType);
}
break;
case DOUBLE:
if (otherFieldType == RecordFieldType.FLOAT) {
return Optional.of(thisDataType);
} else if (otherFieldType == RecordFieldType.DECIMAL) {
return Optional.of(otherDataType);
}
break;
case DECIMAL:
if (otherFieldType == RecordFieldType.DOUBLE) {
return Optional.of(thisDataType);
} else if (otherFieldType == RecordFieldType.FLOAT) {
return Optional.of(thisDataType);
} else if (otherFieldType == RecordFieldType.DECIMAL) {
final DecimalDataType thisDecimalDataType = (DecimalDataType) thisDataType;
final DecimalDataType otherDecimalDataType = (DecimalDataType) otherDataType;
final int precision = Math.max(thisDecimalDataType.getPrecision(), otherDecimalDataType.getPrecision());
final int scale = Math.max(thisDecimalDataType.getScale(), otherDecimalDataType.getScale());
return Optional.of(RecordFieldType.DECIMAL.getDecimalDataType(precision, scale));
}
break;
case CHAR:
if (otherFieldType == RecordFieldType.STRING) {
return Optional.of(otherDataType);
}
break;
case STRING:
if (otherFieldType == RecordFieldType.CHAR) {
return Optional.of(thisDataType);
}
break;
}
return Optional.empty();
}
private static int getIntegerTypeValue(final RecordFieldType fieldType) {
switch (fieldType) {
case BIGINT:
return 4;
case LONG:
return 3;
case INT:
return 2;
case SHORT:
return 1;
case BYTE:
return 0;
default:
return -1;
}
}
/**
     * Converts the specified field data type into a java.sql.Types constant (e.g. INTEGER = 4)
     *
     * @param dataType the DataType to be converted
     * @return the SQL type corresponding to the specified DataType
*/
public static int getSQLTypeValue(final DataType dataType) {
if (dataType == null) {
return Types.NULL;
}
RecordFieldType fieldType = dataType.getFieldType();
switch (fieldType) {
case BIGINT:
case LONG:
return Types.BIGINT;
case BOOLEAN:
return Types.BOOLEAN;
case BYTE:
return Types.TINYINT;
case CHAR:
return Types.CHAR;
case DATE:
return Types.DATE;
case DOUBLE:
return Types.DOUBLE;
case FLOAT:
return Types.FLOAT;
case DECIMAL:
return Types.NUMERIC;
case INT:
return Types.INTEGER;
case SHORT:
return Types.SMALLINT;
case STRING:
return Types.VARCHAR;
case TIME:
return Types.TIME;
case TIMESTAMP:
return Types.TIMESTAMP;
case ARRAY:
return Types.ARRAY;
case MAP:
case RECORD:
return Types.STRUCT;
case CHOICE:
throw new IllegalTypeConversionException("Cannot convert CHOICE, type must be explicit");
default:
throw new IllegalTypeConversionException("Cannot convert unknown type " + fieldType.name());
}
}
public static boolean isScalarValue(final DataType dataType, final Object value) {
final RecordFieldType fieldType = dataType.getFieldType();
final RecordFieldType chosenType;
if (fieldType == RecordFieldType.CHOICE) {
final ChoiceDataType choiceDataType = (ChoiceDataType) dataType;
final DataType chosenDataType = chooseDataType(value, choiceDataType);
if (chosenDataType == null) {
return false;
}
chosenType = chosenDataType.getFieldType();
} else {
chosenType = fieldType;
}
switch (chosenType) {
case ARRAY:
case MAP:
case RECORD:
return false;
}
return true;
}
public static Charset getCharset(String charsetName) {
if(charsetName == null) {
return StandardCharsets.UTF_8;
} else {
return Charset.forName(charsetName);
}
}
/**
     * Returns true if the given value is an Integer and fits into a float variable without precision loss. This is
     * decided based on the numerical value of the input and the number of significand bits available in a float.
     *
     * @param value The value to check.
     *
     * @return True if the value meets the conditions, false otherwise.
*/
public static boolean isIntegerFitsToFloat(final Object value) {
if (!(value instanceof Integer)) {
return false;
}
final int intValue = (Integer) value;
return MIN_GUARANTEED_PRECISE_WHOLE_IN_FLOAT <= intValue && intValue <= MAX_GUARANTEED_PRECISE_WHOLE_IN_FLOAT;
}
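    // Illustrative boundary, assuming the guard constants are +/- 2^24
    // (16,777,216), the largest whole number a 24-bit float significand
    // can represent exactly:
    //   isIntegerFitsToFloat(16777216) -> true
    //   isIntegerFitsToFloat(16777217) -> false (would be rounded as a float)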
/**
     * Returns true if the given value is a Long and fits into a float variable without precision loss. This is
     * decided based on the numerical value of the input and the number of significand bits available in a float.
     *
     * @param value The value to check.
     *
     * @return True if the value meets the conditions, false otherwise.
*/
public static boolean isLongFitsToFloat(final Object value) {
if (!(value instanceof Long)) {
return false;
}
final long longValue = (Long) value;
return MIN_GUARANTEED_PRECISE_WHOLE_IN_FLOAT <= longValue && longValue <= MAX_GUARANTEED_PRECISE_WHOLE_IN_FLOAT;
}
/**
     * Returns true if the given value is a Long and fits into a double variable without precision loss. This is
     * decided based on the numerical value of the input and the number of significand bits available in a double.
     *
     * @param value The value to check.
     *
     * @return True if the value meets the conditions, false otherwise.
*/
public static boolean isLongFitsToDouble(final Object value) {
if (!(value instanceof Long)) {
return false;
}
final long longValue = (Long) value;
return MIN_GUARANTEED_PRECISE_WHOLE_IN_DOUBLE <= longValue && longValue <= MAX_GUARANTEED_PRECISE_WHOLE_IN_DOUBLE;
}
/**
     * Returns true if the given value is a BigInteger and fits into a float variable without precision loss. This is
     * decided based on the numerical value of the input and the number of significand bits available in a float.
     *
     * @param value The value to check.
     *
     * @return True if the value meets the conditions, false otherwise.
*/
public static boolean isBigIntFitsToFloat(final Object value) {
if (!(value instanceof BigInteger)) {
return false;
}
final BigInteger bigIntValue = (BigInteger) value;
return bigIntValue.compareTo(MIN_FLOAT_VALUE_IN_BIGINT) >= 0 && bigIntValue.compareTo(MAX_FLOAT_VALUE_IN_BIGINT) <= 0;
}
/**
     * Returns true if the given value is a BigInteger and fits into a double variable without precision loss. This is
     * decided based on the numerical value of the input and the number of significand bits available in a double.
     *
     * @param value The value to check.
     *
     * @return True if the value meets the conditions, false otherwise.
*/
public static boolean isBigIntFitsToDouble(final Object value) {
if (!(value instanceof BigInteger)) {
return false;
}
final BigInteger bigIntValue = (BigInteger) value;
return bigIntValue.compareTo(MIN_DOUBLE_VALUE_IN_BIGINT) >= 0 && bigIntValue.compareTo(MAX_DOUBLE_VALUE_IN_BIGINT) <= 0;
}
/**
     * Returns true if the incoming value is a Double that lies within the range of the float type.
     *
     * <p>
     * Note: the method only considers the covered range, not precision. The reason for this is that at this point the
     * double representation might already differ slightly from the original text value.
     * </p>
     *
     * @param value The value to check.
     *
     * @return True if the double value fits into the float range, false otherwise.
*/
public static boolean isDoubleWithinFloatInterval(final Object value) {
if (!(value instanceof Double)) {
return false;
}
final Double doubleValue = (Double) value;
return MIN_FLOAT_VALUE_IN_DOUBLE <= doubleValue && doubleValue <= MAX_FLOAT_VALUE_IN_DOUBLE;
}
/**
     * Checks if an incoming value satisfies the requirements of a given (numeric) type or any of its narrower data types.
     *
     * @param value Incoming value.
     * @param fieldType The expected field type.
     *
     * @return Returns true if the incoming value satisfies the given data type or any of its narrower data types. Otherwise returns false. Only numeric data types are supported.
*/
public static boolean isFittingNumberType(final Object value, final RecordFieldType fieldType) {
if (NUMERIC_VALIDATORS.get(fieldType).test(value)) {
return true;
}
for (final RecordFieldType recordFieldType : fieldType.getNarrowDataTypes()) {
if (NUMERIC_VALIDATORS.get(recordFieldType).test(value)) {
return true;
}
}
return false;
}
}<๏ฝfimโend๏ฝ> | return Short.parseShort((String) value); |
<|file_name|>FLAT.py<|end_file_name|><๏ฝfimโbegin๏ฝ>################################################################
# LiveQ - An interactive volunteering computing batch system
# Copyright (C) 2013 Ioannis Charalampidis
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
################################################################
import numpy
import re
def parseFLATBuffer(buf, index=True):
"""
Parse FLAT buffer and return the structured data
"""
sections_list = []
section = None
activesection = None
# Pick appropriate return format
sections = None
if index:
sections = {}
else:
sections = []
# Start processing the buffer line-by-line
for line in buf.splitlines():
# Process lines
if not line:
# Empty line
pass
elif "# BEGIN " in line:
			# Ignore labels sometimes found in AIDA files
dat = line.split(" ")
section = dat[2]
sectiontype = 0
# Get additional section title
title = ""
if len(dat) > 3:
title = " ".join(dat[3:])
# Allocate section record
activesection = { "d": { }, "v": [ ], "t": title }
elif ("# END " in line) and (section != None):
# Section end
if index:
sections[section] = activesection
else:
activesection['n'] = section
sections.append(activesection)
section = None
elif line.startswith("#") or line.startswith(";"):
# Comment
pass
elif section:
# Data inside section
# "SPECIAL" section is not parsable here
if section == "SPECIAL":
continue<๏ฝfimโhole๏ฝ>
# Try to split
data = line.split("=",1)
# Could not split : They are histogram data
if len(data) == 1:
# Split data values
data = FLATParser.WHITESPACE.split(line.strip())
# Check for faulty values
if len(data) < 3:
continue
# Otherwise collect
activesection['v'].append( numpy.array(data, dtype=numpy.float64) )
else:
# Store value
activesection['d'][data[0]] = data[1]
# Return sections
return sections
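# Illustrative input/output for parseFLATBuffer (hypothetical histogram data,
# not taken from a real FLAT file):
#
#   buf = ("# BEGIN HISTOGRAM /ALEPH/1996_S3486095/d01\n"
#          "Title=Some title\n"
#          "1.0 2.0 3.0\n"
#          "# END HISTOGRAM\n")
#   parseFLATBuffer(buf)
#   # -> {'HISTOGRAM': {'d': {'Title': 'Some title'},
#   #                   'v': [array([1., 2., 3.])],
#   #                   't': '/ALEPH/1996_S3486095/d01'}}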
class FLATParser:
"""
Simple function to parser histograms in FLAT format
"""
# Precompiled regex entry
	WHITESPACE = re.compile(r"\s+")
@staticmethod
def parseFileObject(fileobject, index=True):
"""
Function to read a FLAT file (by the file object descriptor) into python structures
"""
# Read entire file and use parseBuffer
return parseFLATBuffer(fileobject.read(), index)
@staticmethod
def parse(filename, index=True):
"""
Function to read a FLAT file into python structures
"""
# Open file
with open(filename, 'r') as f:
# Use FileObject parser to read the file
return parseFLATBuffer(f.read(), index)
def parseBuffer(buf, index=True):
"""
Parse FLAT file from buffer
"""
return parseFLATBuffer(buf, index)<๏ฝfimโend๏ฝ> | |
<|file_name|>split.py<|end_file_name|><๏ฝfimโbegin๏ฝ>'''
split a file into two randomly, line by line.
Usage: split.py <input file> <output file 1> <output file 2> [<probability of writing to the first file>]
'''
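# Example invocation (illustrative file names):
#   python split.py data.csv train.csv test.csv 0.9
# sends roughly 90% of the rows to train.csv and the rest to test.csv.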
import csv
import sys<๏ฝfimโhole๏ฝ>try:
P = float( sys.argv[4] )
except IndexError:
P = 0.9
print "P = %s" % ( P )
input_file = sys.argv[1]
output_file1 = sys.argv[2]
output_file2 = sys.argv[3]
i = open( input_file )
o1 = open( output_file1, 'wb' )
o2 = open( output_file2, 'wb' )
reader = csv.reader( i )
writer1 = csv.writer( o1 )
writer2 = csv.writer( o2 )
#headers = reader.next()
#writer1.writerow( headers )
#writer2.writerow( headers )
counter = 0
for line in reader:
r = random.random()
if r > P:
writer2.writerow( line )
else:
writer1.writerow( line )
counter += 1
if counter % 100000 == 0:
print counter<๏ฝfimโend๏ฝ> | import random
|
<|file_name|>Location.py<|end_file_name|><๏ฝfimโbegin๏ฝ><๏ฝfimโhole๏ฝ>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
class Location(object):
"""
This object represents a point on the map.
"""
def __init__(self, longitude, latitude):
"""
:param longitude: Longitude as defined by sender
:type longitude: float
:param latitude: Latitude as defined by sender
:type latitude: float
"""
self.longitude = longitude
self.latitude = latitude
@staticmethod
def build_from_json(jlocation):
"""
:param jlocation: A dictionary that contains JSON-parsed object
:type jlocation: dict
:rtype: Location
"""
return Location(jlocation['longitude'], jlocation['latitude'])<๏ฝfimโend๏ฝ> | """
<This library provides a Python interface for the Telegram Bot API>
Copyright (C) <2015> <Jacopo De Luca> |
<|file_name|>types.py<|end_file_name|><๏ฝfimโbegin๏ฝ>from __future__ import annotations
from typing import (
Any,
Awaitable, Callable, Iterable,
AsyncIterator,
Tuple,
Mapping,
)
from aiohttp import web
import aiohttp_cors
WebRequestHandler = Callable[
[web.Request],
Awaitable[web.StreamResponse]
]
WebMiddleware = Callable[
[web.Request, WebRequestHandler],
Awaitable[web.StreamResponse]
]
CORSOptions = Mapping[str, aiohttp_cors.ResourceOptions]
AppCreator = Callable[
[CORSOptions],<๏ฝfimโhole๏ฝ> Tuple[web.Application, Iterable[WebMiddleware]]
]
CleanupContext = Callable[[web.Application], AsyncIterator[None]]<๏ฝfimโend๏ฝ> | Tuple[web.Application, Iterable[WebMiddleware]]
]
PluginAppCreator = Callable[
[Mapping[str, Any], CORSOptions], |
<|file_name|>cli.py<|end_file_name|><๏ฝfimโbegin๏ฝ>#
# Utility functions for the command line drivers
#
# Copyright 2006-2007, 2013, 2014 Red Hat, Inc.
# Jeremy Katz <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301 USA.
import argparse
import logging
import logging.handlers
import os
import shlex
import subprocess
import sys
import traceback
import libvirt
from virtcli import CLIConfig
from . import util
from .clock import Clock
from .cpu import CPU
from .deviceaudio import VirtualAudio
from .devicechar import (VirtualChannelDevice, VirtualConsoleDevice,
VirtualSerialDevice, VirtualParallelDevice)
from .devicecontroller import VirtualController
from .devicedisk import VirtualDisk
from .devicefilesystem import VirtualFilesystem
from .devicegraphics import VirtualGraphics
from .devicehostdev import VirtualHostDevice
from .deviceinput import VirtualInputDevice
from .deviceinterface import VirtualNetworkInterface
from .devicememballoon import VirtualMemballoon
from .devicepanic import VirtualPanicDevice
from .deviceredirdev import VirtualRedirDevice
from .devicerng import VirtualRNGDevice
from .devicesmartcard import VirtualSmartCardDevice
from .devicetpm import VirtualTPMDevice
from .devicevideo import VirtualVideoDevice
from .devicewatchdog import VirtualWatchdog
from .domainblkiotune import DomainBlkiotune
from .domainfeatures import DomainFeatures
from .domainmemorybacking import DomainMemorybacking
from .domainmemorytune import DomainMemorytune
from .domainnumatune import DomainNumatune
from .domainresource import DomainResource
from .idmap import IdMap
from .nodedev import NodeDevice
from .osxml import OSXML
from .pm import PM
from .seclabel import Seclabel
from .storage import StoragePool, StorageVolume
##########################
# Global option handling #
##########################
class _GlobalState(object):
def __init__(self):
self.quiet = False
self.all_checks = None
self._validation_checks = {}
def set_validation_check(self, checkname, val):
self._validation_checks[checkname] = val
def get_validation_check(self, checkname):
if self.all_checks is not None:
return self.all_checks
# Default to True for all checks
return self._validation_checks.get(checkname, True)
_globalstate = None
def get_global_state():
return _globalstate
def _reset_global_state():
global _globalstate
_globalstate = _GlobalState()
####################
# CLI init helpers #
####################
class VirtStreamHandler(logging.StreamHandler):
def emit(self, record):
"""
Based on the StreamHandler code from python 2.6: ripping out all
the unicode handling and just unconditionally logging seems to fix
logging backtraces with unicode locales (for me at least).
No doubt this is atrocious, but it WORKSFORME!
"""
try:
msg = self.format(record)
stream = self.stream
fs = "%s\n"
stream.write(fs % msg)
self.flush()
except (KeyboardInterrupt, SystemExit):
raise
except:
self.handleError(record)
class VirtHelpFormatter(argparse.RawDescriptionHelpFormatter):
'''
Subclass the default help formatter to allow printing newline characters
in --help output. The way we do this is a huge hack :(
Inspiration: http://groups.google.com/group/comp.lang.python/browse_thread/thread/6df6e6b541a15bc2/09f28e26af0699b1
'''
oldwrap = None
def _split_lines(self, *args, **kwargs):
def return_default():
return argparse.RawDescriptionHelpFormatter._split_lines(
self, *args, **kwargs)
if len(kwargs) != 0 and len(args) != 2:
return return_default()
try:
text = args[0]
if "\n" in text:
return text.splitlines()
return return_default()
except:
return return_default()
def setupParser(usage, description, introspection_epilog=False):
epilog = _("See man page for examples and full option syntax.")
if introspection_epilog:
epilog = _("Use '--option=?' or '--option help' to see "
"available suboptions") + "\n" + epilog
parser = argparse.ArgumentParser(
usage=usage, description=description,
formatter_class=VirtHelpFormatter,
epilog=epilog)
parser.add_argument('--version', action='version',
version=CLIConfig.version)
return parser
def earlyLogging():
logging.basicConfig(level=logging.DEBUG, format='%(message)s')
def setupLogging(appname, debug_stdout, do_quiet, cli_app=True):
_reset_global_state()
get_global_state().quiet = do_quiet
vi_dir = None
logfile = None
if "VIRTINST_TEST_SUITE" not in os.environ:
vi_dir = util.get_cache_dir()
logfile = os.path.join(vi_dir, appname + ".log")
try:
if vi_dir and not os.access(vi_dir, os.W_OK):
if os.path.exists(vi_dir):
raise RuntimeError("No write access to directory %s" % vi_dir)
try:
os.makedirs(vi_dir, 0751)
            except (IOError, OSError), e:
raise RuntimeError("Could not create directory %s: %s" %
(vi_dir, e))
if (logfile and
os.path.exists(logfile) and
not os.access(logfile, os.W_OK)):
raise RuntimeError("No write access to logfile %s" % logfile)
except Exception, e:
logging.warning("Error setting up logfile: %s", e)
logfile = None
dateFormat = "%a, %d %b %Y %H:%M:%S"
fileFormat = ("[%(asctime)s " + appname + " %(process)d] "
"%(levelname)s (%(module)s:%(lineno)d) %(message)s")
streamErrorFormat = "%(levelname)-8s %(message)s"
rootLogger = logging.getLogger()
# Undo early logging
for handler in rootLogger.handlers:
rootLogger.removeHandler(handler)
rootLogger.setLevel(logging.DEBUG)
if logfile:
fileHandler = logging.handlers.RotatingFileHandler(
logfile, "ae", 1024 * 1024, 5)
fileHandler.setFormatter(
logging.Formatter(fileFormat, dateFormat))
rootLogger.addHandler(fileHandler)
streamHandler = VirtStreamHandler(sys.stderr)
if debug_stdout:
streamHandler.setLevel(logging.DEBUG)
streamHandler.setFormatter(logging.Formatter(fileFormat,
dateFormat))
elif cli_app or not logfile:
if get_global_state().quiet:
level = logging.ERROR
else:
level = logging.WARN
streamHandler.setLevel(level)
streamHandler.setFormatter(logging.Formatter(streamErrorFormat))
else:
streamHandler = None
if streamHandler:
rootLogger.addHandler(streamHandler)
util.register_libvirt_error_handler()
# Log uncaught exceptions
def exception_log(typ, val, tb):
logging.debug("Uncaught exception:\n%s",
"".join(traceback.format_exception(typ, val, tb)))
sys.__excepthook__(typ, val, tb)
sys.excepthook = exception_log
logging.getLogger("requests").setLevel(logging.ERROR)
# Log the app command string
logging.debug("Launched with command line: %s", " ".join(sys.argv))
##############################
# Libvirt connection helpers #
##############################
def getConnection(uri):
from .connection import VirtualConnection
logging.debug("Requesting libvirt URI %s", (uri or "default"))
conn = VirtualConnection(uri)
conn.open(_do_creds_authname)
conn.cache_object_fetch = True
logging.debug("Received libvirt URI %s", conn.uri)
return conn
# SASL username/pass auth
def _do_creds_authname(creds):
retindex = 4
for cred in creds:
credtype, prompt, ignore, ignore, ignore = cred
prompt += ": "
res = cred[retindex]
if credtype == libvirt.VIR_CRED_AUTHNAME:
res = raw_input(prompt)
elif credtype == libvirt.VIR_CRED_PASSPHRASE:
import getpass
res = getpass.getpass(prompt)
else:
raise RuntimeError("Unknown auth type in creds callback: %d" %
credtype)
cred[retindex] = res
return 0
##############################
# Misc CLI utility functions #
##############################
def fail(msg, do_exit=True):
"""
Convenience function when failing in cli app
"""
logging.debug("".join(traceback.format_stack()))
logging.error(msg)
if traceback.format_exc().strip() != "None":
logging.debug("", exc_info=True)
if do_exit:
_fail_exit()
def print_stdout(msg, do_force=False):
if do_force or not get_global_state().quiet:
print msg
def print_stderr(msg):
logging.debug(msg)
print >> sys.stderr, msg
def _fail_exit():
sys.exit(1)
def nice_exit():
print_stdout(_("Exiting at user request."))
sys.exit(0)
def virsh_start_cmd(guest):
return ("virsh --connect %s start %s" % (guest.conn.uri, guest.name))
def install_fail(guest):
virshcmd = virsh_start_cmd(guest)
print_stderr(
_("Domain installation does not appear to have been successful.\n"
"If it was, you can restart your domain by running:\n"
" %s\n"
"otherwise, please restart your installation.") % virshcmd)
sys.exit(1)
def set_prompt(prompt):
# Set whether we allow prompts, or fail if a prompt pops up
if prompt:
logging.warning("--prompt mode is no longer supported.")
def validate_disk(dev, warn_overwrite=False):
def _optional_fail(msg, checkname):
do_check = get_global_state().get_validation_check(checkname)
if do_check:
fail(msg + (_(" (Use --check %s=off or "
"--check all=off to override)") % checkname))
logging.debug("Skipping --check %s error condition '%s'",
checkname, msg)
logging.warn(msg)
def check_path_exists(dev):
"""
Prompt if disk file already exists and preserve mode is not used
"""
if not warn_overwrite:
return
if not VirtualDisk.path_definitely_exists(dev.conn, dev.path):
return
_optional_fail(
_("This will overwrite the existing path '%s'") % dev.path,
"path_exists")
def check_inuse_conflict(dev):
"""
Check if disk is inuse by another guest
"""
names = dev.is_conflict_disk()
if not names:
return
_optional_fail(_("Disk %s is already in use by other guests %s." %
(dev.path, names)),
"path_in_use")
def check_size_conflict(dev):
"""
Check if specified size exceeds available storage
"""
isfatal, errmsg = dev.is_size_conflict()
# The isfatal case should have already caused us to fail
if not isfatal and errmsg:
_optional_fail(errmsg, "disk_size")
def check_path_search(dev):
user, broken_paths = dev.check_path_search(dev.conn, dev.path)
if not broken_paths:
return
logging.warning(_("%s may not be accessible by the hypervisor. "
"You will need to grant the '%s' user search permissions for "
"the following directories: %s"), dev.path, user, broken_paths)
check_path_exists(dev)
check_inuse_conflict(dev)
check_size_conflict(dev)
check_path_search(dev)
def _run_console(args):
logging.debug("Running: %s", " ".join(args))
child = os.fork()
if child:
return child
os.execvp(args[0], args)
os._exit(1) # pylint: disable=protected-access
def _gfx_console(guest):
args = ["virt-viewer",
"--connect", guest.conn.uri,
"--wait", guest.name]
logging.debug("Launching virt-viewer for graphics type '%s'",
guest.get_devices("graphics")[0].type)
return _run_console(args)
def _txt_console(guest):
args = ["virsh",
"--connect", guest.conn.uri,
"console", guest.name]
logging.debug("Connecting to text console")
return _run_console(args)
def connect_console(guest, consolecb, wait):
"""
Launched the passed console callback for the already defined
domain. If domain isn't running, return an error.
"""
child = None
if consolecb:
child = consolecb(guest)
if not child or not wait:
return
# If we connected the console, wait for it to finish
try:
os.waitpid(child, 0)
except OSError, e:
logging.debug("waitpid: %s: %s", e.errno, e.message)
def get_console_cb(guest):
gdevs = guest.get_devices("graphics")
if not gdevs:
return _txt_console
gtype = gdevs[0].type
if gtype not in ["default",
VirtualGraphics.TYPE_VNC,
VirtualGraphics.TYPE_SPICE]:
logging.debug("No viewer to launch for graphics type '%s'", gtype)
return
try:
subprocess.check_output(["virt-viewer", "--version"])
except OSError:
logging.warn(_("Unable to connect to graphical console: "
"virt-viewer not installed. Please install "
"the 'virt-viewer' package."))
return None
if not os.environ.get("DISPLAY", ""):
logging.warn(_("Graphics requested but DISPLAY is not set. "
"Not running virt-viewer."))
return None
return _gfx_console
def get_meter():
quiet = (get_global_state().quiet or "VIRTINST_TEST_SUITE" in os.environ)
return util.make_meter(quiet=quiet)
###########################
# Common CLI option/group #
###########################
def add_connect_option(parser, invoker=None):
if invoker == "virt-xml":
parser.add_argument("-c", "--connect", metavar="URI",
help=_("Connect to hypervisor with libvirt URI"))
else:
parser.add_argument("--connect", metavar="URI",
help=_("Connect to hypervisor with libvirt URI"))
def add_misc_options(grp, prompt=False, replace=False,
printxml=False, printstep=False,
noreboot=False, dryrun=False,
noautoconsole=False):
if prompt:
grp.add_argument("--prompt", action="store_true",
default=False, help=argparse.SUPPRESS)
grp.add_argument("--force", action="store_true",
default=False, help=argparse.SUPPRESS)
if noautoconsole:
grp.add_argument("--noautoconsole", action="store_false",
dest="autoconsole", default=True,
help=_("Don't automatically try to connect to the guest console"))
if noreboot:
grp.add_argument("--noreboot", action="store_true",
help=_("Don't boot guest after completing install."))
if replace:
grp.add_argument("--replace", action="store_true",
help=_("Don't check name collision, overwrite any guest "
"with the same name."))
if printxml:
print_kwargs = {
"dest": "xmlonly",
"default": False,
"help": _("Print the generated domain XML rather than create "
"the guest."),
}
if printstep:
print_kwargs["nargs"] = "?"
print_kwargs["const"] = "all"
else:
print_kwargs["action"] = "store_true"
grp.add_argument("--print-xml", **print_kwargs)
if printstep:
# Back compat, argparse allows us to use --print-xml
# for everything.
grp.add_argument("--print-step", dest="xmlstep",
help=argparse.SUPPRESS)
if dryrun:
grp.add_argument("--dry-run", action="store_true", dest="dry",
help=_("Run through install process, but do not "
"create devices or define the guest."))
if prompt:
grp.add_argument("--check",
help=_("Enable or disable validation checks. Example:\n"
"--check path_in_use=off\n"
"--check all=off"))
grp.add_argument("-q", "--quiet", action="store_true",
help=_("Suppress non-error output"))
grp.add_argument("-d", "--debug", action="store_true",
help=_("Print debugging information"))
def add_metadata_option(grp):
grp.add_argument("--metadata",
help=_("Configure guest metadata. Ex:\n"
"--metadata name=foo,title=\"My pretty title\",uuid=...\n"
"--metadata description=\"My nice long description\""))
def add_memory_option(grp, backcompat=False):
grp.add_argument("--memory",
help=_("Configure guest memory allocation. Ex:\n"
"--memory 1024 (in MiB)\n"
"--memory 512,maxmemory=1024"))
if backcompat:
grp.add_argument("-r", "--ram", type=int, dest="oldmemory",
help=argparse.SUPPRESS)
def vcpu_cli_options(grp, backcompat=True, editexample=False):
grp.add_argument("--vcpus",
help=_("Number of vcpus to configure for your guest. Ex:\n"
"--vcpus 5\n"
"--vcpus 5,maxcpus=10,cpuset=1-4,6,8\n"
"--vcpus sockets=2,cores=4,threads=2,"))
extramsg = "--cpu host"
if editexample:
extramsg = "--cpu host-model,clearxml=yes"
grp.add_argument("--cpu",
help=_("CPU model and features. Ex:\n"
"--cpu coreduo,+x2apic\n") + extramsg)
if backcompat:
grp.add_argument("--check-cpu", action="store_true",
help=argparse.SUPPRESS)
grp.add_argument("--cpuset", help=argparse.SUPPRESS)
def add_gfx_option(devg):
devg.add_argument("--graphics", action="append",
help=_("Configure guest display settings. Ex:\n"
"--graphics vnc\n"
"--graphics spice,port=5901,tlsport=5902\n"
"--graphics none\n"
"--graphics vnc,password=foobar,port=5910,keymap=ja"))
def add_net_option(devg):
devg.add_argument("-w", "--network", action="append",
help=_("Configure a guest network interface. Ex:\n"
"--network bridge=mybr0\n"
"--network network=my_libvirt_virtual_net\n"
"--network network=mynet,model=virtio,mac=00:11...\n"
"--network none\n"
"--network help"))
def add_device_options(devg, sound_back_compat=False):
devg.add_argument("--controller", action="append",
help=_("Configure a guest controller device. Ex:\n"
"--controller type=usb,model=ich9-ehci1"))
devg.add_argument("--input", action="append",
help=_("Configure a guest input device. Ex:\n"
"--input tablet\n"
"--input keyboard,bus=usb"))
devg.add_argument("--serial", action="append",
help=_("Configure a guest serial device"))
devg.add_argument("--parallel", action="append",
help=_("Configure a guest parallel device"))
devg.add_argument("--channel", action="append",
help=_("Configure a guest communication channel"))
devg.add_argument("--console", action="append",
help=_("Configure a text console connection between "
"the guest and host"))
devg.add_argument("--hostdev", action="append",
help=_("Configure physical USB/PCI/etc host devices "
"to be shared with the guest"))
devg.add_argument("--filesystem", action="append",
help=_("Pass host directory to the guest. Ex: \n"
"--filesystem /my/source/dir,/dir/in/guest\n"
"--filesystem template_name,/,type=template"))
# Back compat name
devg.add_argument("--host-device", action="append", dest="hostdev",
help=argparse.SUPPRESS)
# --sound used to be a boolean option, hence the nargs handling
sound_kwargs = {
"action": "append",
"help": _("Configure guest sound device emulation"),
}
if sound_back_compat:
sound_kwargs["nargs"] = '?'
devg.add_argument("--sound", **sound_kwargs)
if sound_back_compat:
devg.add_argument("--soundhw", action="append", dest="sound",
help=argparse.SUPPRESS)
devg.add_argument("--watchdog", action="append",
help=_("Configure a guest watchdog device"))
devg.add_argument("--video", action="append",
help=_("Configure guest video hardware."))
devg.add_argument("--smartcard", action="append",
help=_("Configure a guest smartcard device. Ex:\n"
"--smartcard mode=passthrough"))
devg.add_argument("--redirdev", action="append",
help=_("Configure a guest redirection device. Ex:\n"
"--redirdev usb,type=tcp,server=192.168.1.1:4000"))
devg.add_argument("--memballoon", action="append",
help=_("Configure a guest memballoon device. Ex:\n"
"--memballoon model=virtio"))
devg.add_argument("--tpm", action="append",
help=_("Configure a guest TPM device. Ex:\n"
"--tpm /dev/tpm"))
devg.add_argument("--rng", action="append",
help=_("Configure a guest RNG device. Ex:\n"
"--rng /dev/random"))
devg.add_argument("--panic", action="append",
help=_("Configure a guest panic device. Ex:\n"
"--panic default"))
def add_guest_xml_options(geng):
geng.add_argument("--security", action="append",
help=_("Set domain security driver configuration."))
geng.add_argument("--numatune",
help=_("Tune NUMA policy for the domain process."))
geng.add_argument("--memtune", action="append",
help=_("Tune memory policy for the domain process."))
geng.add_argument("--blkiotune", action="append",
help=_("Tune blkio policy for the domain process."))
geng.add_argument("--memorybacking", action="append",
help=_("Set memory backing policy for the domain process. Ex:\n"
"--memorybacking hugepages=on"))
geng.add_argument("--features",
help=_("Set domain <features> XML. Ex:\n"
"--features acpi=off\n"
"--features apic=on,eoi=on"))
geng.add_argument("--clock",
help=_("Set domain <clock> XML. Ex:\n"
"--clock offset=localtime,rtc_tickpolicy=catchup"))
geng.add_argument("--pm",
help=_("Configure VM power management features"))
geng.add_argument("--events",
help=_("Configure VM lifecycle management policy"))
geng.add_argument("--resource", action="append",
help=_("Configure VM resource partitioning (cgroups)"))
def add_boot_options(insg):
insg.add_argument("--boot",
help=_("Configure guest boot settings. Ex:\n"
"--boot hd,cdrom,menu=on\n"
"--boot init=/sbin/init (for containers)"))
insg.add_argument("--idmap",
help=_("Enable user namespace for LXC container. Ex:\n"
"--idmap uid_start=0,uid_target=1000,uid_count=10"))
def add_disk_option(stog, editexample=False):
editmsg = ""
if editexample:
editmsg += "\n--disk cache= (unset cache)"
stog.add_argument("--disk", action="append",
help=_("Specify storage with various options. Ex.\n"
"--disk size=10 (new 10GiB image in default location)\n"
"--disk /my/existing/disk,cache=none\n"
"--disk device=cdrom,bus=scsi\n"
"--disk=?") + editmsg)
#############################################
# CLI complex parsing helpers #
# (for options like --disk, --network, etc. #
#############################################
def _on_off_convert(key, val):
if val is None:
return None
def _yes_no_convert(s):
tvalues = ["y", "yes", "1", "true", "t", "on"]
fvalues = ["n", "no", "0", "false", "f", "off"]
s = (s or "").lower()
if s in tvalues:
return True
elif s in fvalues:
return False
return None
val = _yes_no_convert(val)
if val is not None:
return val
    fail(_("%(key)s must be 'yes' or 'no'") % {"key": key})
class _VirtCLIArgument(object):
def __init__(self, attrname, cliname,
setter_cb=None, ignore_default=False,
can_comma=False, aliases=None,
is_list=False, is_onoff=False,
lookup_cb=None, is_novalue=False):
"""
A single subargument passed to compound command lines like --disk,
--network, etc.
@attrname: The virtinst API attribute name the cliargument maps to.
If this is a virtinst object method, it will be called.
@cliname: The command line option name, 'path' for path=FOO
@setter_cb: Rather than set an attribute directly on the virtinst
object, (opts, inst, cliname, val) to this callback to handle it.
@ignore_default: If the value passed on the cli is 'default', don't
do anything.
@can_comma: If True, this option is expected to have embedded commas.
After the parser sees this option, it will iterate over the
option string until it finds another known argument name:
everything prior to that argument name is considered part of
the value of this option, '=' included. Should be used sparingly.
@aliases: List of cli aliases. Useful if we want to change a property
name on the cli but maintain back compat.
@is_list: This value should be stored as a list, so multiple instances
are appended.
@is_onoff: The value expected on the cli is on/off or yes/no, convert
it to true/false.
@lookup_cb: If specified, use this function for performing match
lookups.
@is_novalue: If specified, the parameter is not expected in the
form FOO=BAR, but just FOO.
"""
self.attrname = attrname
self.cliname = cliname
self.setter_cb = setter_cb
self.can_comma = can_comma
self.ignore_default = ignore_default
self.aliases = util.listify(aliases)
self.is_list = is_list
self.is_onoff = is_onoff
self.lookup_cb = lookup_cb
self.is_novalue = is_novalue
def parse(self, opts, inst, support_cb=None, lookup=False):
val = None
for cliname in self.aliases + [self.cliname]:
# We iterate over all values unconditionally, so they are
# removed from opts
foundval = opts.get_opt_param(cliname, self.is_novalue)
if foundval is not None:
val = foundval
if val is None:
return
if val == "":
val = None
if support_cb:
support_cb(inst, self.attrname, self.cliname)
if self.is_onoff:
val = _on_off_convert(self.cliname, val)
if val == "default" and self.ignore_default and not lookup:
return
if lookup and not self.attrname and not self.lookup_cb:
raise RuntimeError(
_("Don't know how to match device type '%(device_type)s' "
"property '%(property_name)s'") %
{"device_type": getattr(inst, "virtual_device_type", ""),
"property_name": self.cliname})
try:
if self.attrname:
eval("inst." + self.attrname) # pylint: disable=eval-used
except AttributeError:
raise RuntimeError("programming error: obj=%s does not have "
"member=%s" % (inst, self.attrname))
if lookup:
if self.lookup_cb:
return self.lookup_cb(opts, inst, self.cliname, val)
else:
return eval( # pylint: disable=eval-used
"inst." + self.attrname) == val
elif self.setter_cb:
self.setter_cb(opts, inst, self.cliname, val)
else:
exec( # pylint: disable=exec-used
"inst." + self.attrname + " = val")
class VirtOptionString(object):
def __init__(self, optstr, virtargs, remove_first):
"""
Helper class for parsing opt strings of the form
opt1=val1,opt2=val2,...
@optstr: The full option string
@virtargs: A list of VirtCLIArguments
        @remove_first: List of parameters to peel off the front of
            option string, and store in the returned dict.
            remove_first=["char_type"] for --serial pty,foo=bar
            maps to {"char_type": "pty", "foo": "bar"}
"""
self.fullopts = optstr
virtargmap = {}
for arg in virtargs:
virtargmap[arg.cliname] = arg
for alias in arg.aliases:
virtargmap[alias] = arg
# @opts: A dictionary of the mapping {cliname: val}
# @orderedopts: A list of tuples (cliname: val), in the order
# they appeared on the CLI.
self.opts, self.orderedopts = self._parse_optstr(
virtargmap, remove_first)
def get_opt_param(self, key, is_novalue=False):
if key not in self.opts:
return None
ret = self.opts.pop(key)
if ret is None:
if not is_novalue:
raise RuntimeError("Option '%s' had no value set." % key)
ret = ""
return ret
def check_leftover_opts(self):
if not self.opts:
return
raise fail(_("Unknown options %s") % self.opts.keys())
###########################
# Actual parsing routines #
###########################
def _parse_optstr_tuples(self, virtargmap, remove_first):<๏ฝfimโhole๏ฝ> """
Parse the command string into an ordered list of tuples (see
docs for orderedopts
"""
optstr = str(self.fullopts or "")
optlist = []
argsplitter = shlex.shlex(optstr, posix=True)
argsplitter.commenters = ""
argsplitter.whitespace = ","
argsplitter.whitespace_split = True
remove_first = util.listify(remove_first)[:]
commaopt = None
for opt in list(argsplitter):
if not opt:
continue
cliname = opt
val = None
if opt.count("="):
cliname, val = opt.split("=", 1)
remove_first = []
elif remove_first:
val = cliname
cliname = remove_first.pop(0)
if commaopt:
if cliname in virtargmap:
optlist.append(tuple(commaopt))
commaopt = None
else:
commaopt[1] += "," + cliname
if val:
commaopt[1] += "=" + val
continue
if (cliname in virtargmap and virtargmap[cliname].can_comma):
commaopt = [cliname, val]
continue
optlist.append((cliname, val))
if commaopt:
optlist.append(tuple(commaopt))
return optlist
def _parse_optstr(self, virtargmap, remove_first):
orderedopts = self._parse_optstr_tuples(virtargmap, remove_first)
optdict = {}
for cliname, val in orderedopts:
if (cliname not in optdict and
cliname in virtargmap and
virtargmap[cliname].is_list):
optdict[cliname] = []
if type(optdict.get(cliname)) is list:
optdict[cliname].append(val)
else:
optdict[cliname] = val
return optdict, orderedopts
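# Illustrative parse (hypothetical suboptions): for an option string like
#   "pty,path=/tmp/foo,mode=bind"
# with remove_first=["char_type"] and no can_comma/is_list arguments involved,
# _parse_optstr returns roughly:
#   optdict     = {"char_type": "pty", "path": "/tmp/foo", "mode": "bind"}
#   orderedopts = [("char_type", "pty"), ("path", "/tmp/foo"), ("mode", "bind")]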
class VirtCLIParser(object):
"""
Parse a compound arg string like --option foo=bar,baz=12. This is
the desired interface to VirtCLIArgument and VirtCLIOptionString.
A command line argument just extends this interface, implements
_init_params, and calls set_param in the order it wants the options
    parsed on the command line. See existing impls for examples of how to
    do all sorts of crazy stuff.
    set_param must be called unconditionally (e.g. from _init_params and not
    from an overridden _parse), so that we can show all options when the
user requests command line introspection like --disk=?
"""
objclass = None
def __init__(self, cli_arg_name):
"""
These values should be set by subclasses in _init_params
@cli_arg_name: The command line argument this maps to, so
"hostdev" for --hostdev
        @guest: Will be set in parse(); the toplevel Guest object
@remove_first: Passed to VirtOptionString
@check_none: If the parsed option string is just 'none', return None
@support_cb: An extra support check function for further validation.
Called before the virtinst object is altered. Take arguments
(inst, attrname, cliname)
@clear_attr: If the user requests to clear the XML (--disk clearxml),
this is the property name we grab from inst to actually clear
(so 'security' to get guest.security). If it's True, then
clear inst (in the case of devices)
"""
self.cli_arg_name = cli_arg_name
# This is the name of the variable that argparse will set in
# the result of parse_args()
self.option_variable_name = cli_arg_name.replace("-", "_")
self.guest = None
self.remove_first = None
self.check_none = False
self.support_cb = None
self.clear_attr = None
self._params = []
self._inparse = False
self.__init_global_params()
self._init_params()
def __init_global_params(self):
def set_clearxml_cb(opts, inst, cliname, val):
ignore = opts = cliname
if not self.objclass and not self.clear_attr:
raise RuntimeError("Don't know how to clearxml --%s" %
self.cli_arg_name)
if val is not True:
return
if self.clear_attr:
getattr(inst, self.clear_attr).clear()
else:
inst.clear()
self.set_param(None, "clearxml",
setter_cb=set_clearxml_cb, is_onoff=True)
def check_introspection(self, option):
for optstr in util.listify(option):
if optstr == "?" or optstr == "help":
print "--%s options:" % self.cli_arg_name
for arg in sorted(self._params, key=lambda p: p.cliname):
print " %s" % arg.cliname
print
return True
return False
def set_param(self, *args, **kwargs):
if self._inparse:
# Otherwise we might break command line introspection
raise RuntimeError("programming error: Can not call set_param "
"from parse handler.")
self._params.append(_VirtCLIArgument(*args, **kwargs))
def parse(self, guest, optlist, inst, validate=True):
optlist = util.listify(optlist)
        editing = bool(inst)
        if editing and optlist:
            # If an object is passed in, we are updating it in place, and
            # only use the last command line occurrence, e.g. from virt-xml
optlist = [optlist[-1]]
ret = []
for optstr in optlist:
new_object = False
optinst = inst
if self.objclass and not inst:
if guest.child_class_is_singleton(self.objclass):
optinst = guest.list_children_for_class(
self.objclass)[0]
else:
new_object = True
optinst = self.objclass(guest.conn) # pylint: disable=not-callable
try:
objs = self._parse_single_optstr(guest, optstr, optinst)
for obj in util.listify(objs):
if not new_object:
break
if validate:
obj.validate()
guest.add_child(obj)
ret += util.listify(objs)
except Exception, e:
logging.debug("Exception parsing inst=%s optstr=%s",
inst, optstr, exc_info=True)
fail(_("Error: --%(cli_arg_name)s %(options)s: %(err)s") %
{"cli_arg_name": self.cli_arg_name,
"options": optstr, "err": str(e)})
if not ret:
return None
if len(ret) == 1:
return ret[0]
return ret
def lookup_child_from_option_string(self, guest, optstr):
"""
Given a passed option string, search the guests' child list
for all objects which match the passed options.
Used only by virt-xml --edit lookups
"""
ret = []
objlist = guest.list_children_for_class(self.objclass)
for inst in objlist:
try:
opts = VirtOptionString(optstr, self._params,
self.remove_first)
valid = True
for param in self._params:
if param.parse(opts, inst,
support_cb=None, lookup=True) is False:
valid = False
break
if valid:
ret.append(inst)
except Exception, e:
logging.debug("Exception parsing inst=%s optstr=%s",
inst, optstr, exc_info=True)
fail(_("Error: --%(cli_arg_name)s %(options)s: %(err)s") %
{"cli_arg_name": self.cli_arg_name,
"options": optstr, "err": str(e)})
return ret
def _parse_single_optstr(self, guest, optstr, inst):
if not optstr:
return None
if self.check_none and optstr == "none":
return None
if not inst:
inst = guest
try:
self.guest = guest
self._inparse = True
opts = VirtOptionString(optstr, self._params, self.remove_first)
return self._parse(opts, inst)
finally:
self.guest = None
self._inparse = False
def _parse(self, opts, inst):
for param in self._params:
param.parse(opts, inst, self.support_cb)
opts.check_leftover_opts()
return inst
def _init_params(self):
raise NotImplementedError()
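# Minimal sketch of a concrete parser subclass (hypothetical example, not an
# actual parser from this module):
#
#   class ParserExample(VirtCLIParser):
#       def _init_params(self):
#           self.objclass = VirtualRNGDevice
#           self.remove_first = "type"
#           self.set_param("type", "type")
#           self.set_param("device", "device")
#
# A bare leading value ("--example egd,...") would then be mapped to the
# "type" subopt via remove_first before normal key=value parsing.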
###################
# --check parsing #
###################
def convert_old_force(options):
if options.force:
if not options.check:
options.check = "all=off"
del(options.force)
class ParseCLICheck(VirtCLIParser):
# This sets properties on the _GlobalState objects
def _init_params(self):
def _set_check(opts, inst, cliname, val):
ignore = opts
inst.set_validation_check(cliname, val)
self.set_param(None, "path_in_use",
is_onoff=True, setter_cb=_set_check)
self.set_param(None, "disk_size",
is_onoff=True, setter_cb=_set_check)
self.set_param(None, "path_exists",
is_onoff=True, setter_cb=_set_check)
self.set_param("all_checks", "all", is_onoff=True)
def parse_check(checkstr):
    # Create a fresh parser for each parse
parser = ParseCLICheck("check")
parser.parse(None, checkstr, get_global_state())
######################
# --metadata parsing #
######################
class ParserMetadata(VirtCLIParser):
def _init_params(self):
self.set_param("name", "name", can_comma=True)
self.set_param("title", "title", can_comma=True)
self.set_param("uuid", "uuid")
self.set_param("description", "description", can_comma=True)
####################
# --events parsing #
####################
class ParserEvents(VirtCLIParser):
def _init_params(self):
self.set_param("on_poweroff", "on_poweroff")
self.set_param("on_reboot", "on_reboot")
self.set_param("on_crash", "on_crash")
######################
# --resource parsing #
######################
class ParserResource(VirtCLIParser):
def _init_params(self):
self.objclass = DomainResource
self.remove_first = "partition"
self.set_param("partition", "partition")
######################
# --numatune parsing #
######################
class ParserNumatune(VirtCLIParser):
def _init_params(self):
self.objclass = DomainNumatune
self.remove_first = "nodeset"
self.set_param("memory_nodeset", "nodeset", can_comma=True)
self.set_param("memory_mode", "mode")
####################
# --memory parsing #
####################
class ParserMemory(VirtCLIParser):
def _init_params(self):
self.remove_first = "memory"
def set_memory_cb(opts, inst, cliname, val):
ignore = opts
setattr(inst, cliname, int(val) * 1024)
self.set_param("memory", "memory", setter_cb=set_memory_cb)
self.set_param("maxmemory", "maxmemory", setter_cb=set_memory_cb)
self.set_param("memoryBacking.hugepages", "hugepages", is_onoff=True)
#####################
# --memtune parsing #
#####################
class ParserMemorytune(VirtCLIParser):
def _init_params(self):
self.objclass = DomainMemorytune
self.remove_first = "soft_limit"
self.set_param("hard_limit", "hard_limit")
self.set_param("soft_limit", "soft_limit")
self.set_param("swap_hard_limit", "swap_hard_limit")
self.set_param("min_guarantee", "min_guarantee")
###################
# --vcpus parsing #
###################
class ParserVCPU(VirtCLIParser):
def _init_params(self):
self.remove_first = "vcpus"
self.set_param("cpu.sockets", "sockets")
self.set_param("cpu.cores", "cores")
self.set_param("cpu.threads", "threads")
def set_vcpus_cb(opts, inst, cliname, val):
ignore = cliname
attrname = ("maxvcpus" in opts.opts) and "curvcpus" or "vcpus"
setattr(inst, attrname, val)
self.set_param(None, "vcpus", setter_cb=set_vcpus_cb)
self.set_param("vcpus", "maxvcpus")
def set_cpuset_cb(opts, inst, cliname, val):
if val == "auto":
try:
val = DomainNumatune.generate_cpuset(
inst.conn, inst.memory)
logging.debug("Auto cpuset is: %s", val)
except Exception, e:
logging.error("Not setting cpuset: %s", str(e))
val = None
if val:
inst.cpuset = val
self.set_param(None, "cpuset", can_comma=True,
setter_cb=set_cpuset_cb)
def _parse(self, opts, inst):
set_from_top = ("maxvcpus" not in opts.opts and
"vcpus" not in opts.opts)
ret = VirtCLIParser._parse(self, opts, inst)
if set_from_top:
inst.vcpus = inst.cpu.vcpus_from_topology()
return ret
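        # e.g. "--vcpus sockets=2,cores=4,threads=2" gives no explicit count,
        # so the fallback above sets vcpus from the topology product,
        # 2 * 4 * 2 = 16 (assuming vcpus_from_topology multiplies
        # sockets * cores * threads).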
#################
# --cpu parsing #
#################
class ParserCPU(VirtCLIParser):
def _init_params(self):
self.objclass = CPU
self.remove_first = "model"
def set_model_cb(opts, inst, cliname, val):
ignore = opts
ignore = cliname
if val == "host":
val = inst.SPECIAL_MODE_HOST_MODEL
if val == "none":
val = inst.SPECIAL_MODE_CLEAR
if val in inst.SPECIAL_MODES:
inst.set_special_mode(val)
else:
inst.model = val
def set_feature_cb(opts, inst, cliname, val):
ignore = opts
policy = cliname
for feature_name in util.listify(val):
featureobj = None
for f in inst.features:
if f.name == feature_name:
featureobj = f
break
if featureobj:
featureobj.policy = policy
else:
inst.add_feature(feature_name, policy)
self.set_param(None, "model", setter_cb=set_model_cb)
self.set_param("mode", "mode")
self.set_param("match", "match")
self.set_param("vendor", "vendor")
self.set_param(None, "force", is_list=True, setter_cb=set_feature_cb)
self.set_param(None, "require", is_list=True, setter_cb=set_feature_cb)
self.set_param(None, "optional", is_list=True, setter_cb=set_feature_cb)
self.set_param(None, "disable", is_list=True, setter_cb=set_feature_cb)
self.set_param(None, "forbid", is_list=True, setter_cb=set_feature_cb)
def _parse(self, optsobj, inst):
opts = optsobj.opts
# Convert +feature, -feature into expected format
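# Illustrative example (hypothetical flags): --cpu host,+vmx,-3dnow
# yields opts {"force": ["vmx"], "disable": ["3dnow"]}.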
for key, value in opts.items():
policy = None
if value or len(key) == 1:
continue
if key.startswith("+"):
policy = "force"
elif key.startswith("-"):
policy = "disable"
if policy:
del(opts[key])
if opts.get(policy) is None:
opts[policy] = []
opts[policy].append(key[1:])
return VirtCLIParser._parse(self, optsobj, inst)
##################
# --boot parsing #
##################
class ParserBoot(VirtCLIParser):
def _init_params(self):
self.clear_attr = "os"
# UEFI depends on these bits, so set them first
self.set_param("os.arch", "arch")
self.set_param("type", "domain_type")
self.set_param("os.os_type", "os_type")
self.set_param("emulator", "emulator")
def set_uefi(opts, inst, cliname, val):
ignore = opts
ignore = cliname
ignore = val
inst.set_uefi_default()
self.set_param(None, "uefi", setter_cb=set_uefi, is_novalue=True)
self.set_param("os.useserial", "useserial", is_onoff=True)
self.set_param("os.enable_bootmenu", "menu", is_onoff=True)
self.set_param("os.kernel", "kernel")
self.set_param("os.initrd", "initrd")
self.set_param("os.dtb", "dtb")
self.set_param("os.loader", "loader")
self.set_param("os.loader_ro", "loader_ro", is_onoff=True)
self.set_param("os.loader_type", "loader_type")
self.set_param("os.nvram", "nvram")
self.set_param("os.nvram_template", "nvram_template")
self.set_param("os.kernel_args", "kernel_args",
aliases=["extra_args"], can_comma=True)
self.set_param("os.init", "init")
self.set_param("os.machine", "machine")
def set_initargs_cb(opts, inst, cliname, val):
ignore = opts
ignore = cliname
inst.os.set_initargs_string(val)
self.set_param("os.initargs", "initargs", setter_cb=set_initargs_cb)
# Order matters for boot devices, we handle it specially in parse
def noset_cb(val):
ignore = val
for b in OSXML.BOOT_DEVICES:
self.set_param(noset_cb, b)
def _parse(self, opts, inst):
# Build boot order
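# Illustrative example: --boot cdrom,hd collects ["cdrom", "hd"] in the
# order the options were given on the command line.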
boot_order = []
for cliname, ignore in opts.orderedopts:
if cliname not in inst.os.BOOT_DEVICES:
continue
del(opts.opts[cliname])
if cliname not in boot_order:
boot_order.append(cliname)
if boot_order:
inst.os.bootorder = boot_order
VirtCLIParser._parse(self, opts, inst)
###################
# --idmap parsing #
###################
class ParserIdmap(VirtCLIParser):
def _init_params(self):
self.objclass = IdMap
self.set_param("uid_start", "uid_start")
self.set_param("uid_target", "uid_target")
self.set_param("uid_count", "uid_count")
self.set_param("gid_start", "gid_start")
self.set_param("gid_target", "gid_target")
self.set_param("gid_count", "gid_count")
######################
# --security parsing #
######################
class ParserSecurity(VirtCLIParser):
def _init_params(self):
self.objclass = Seclabel
self.set_param("type", "type")
self.set_param("model", "model")
self.set_param("relabel", "relabel", is_onoff=True)
self.set_param("label", "label", can_comma=True)
self.set_param("baselabel", "label", can_comma=True)
######################
# --features parsing #
######################
class ParserFeatures(VirtCLIParser):
def _init_params(self):
self.objclass = DomainFeatures
self.set_param("acpi", "acpi", is_onoff=True)
self.set_param("apic", "apic", is_onoff=True)
self.set_param("pae", "pae", is_onoff=True)
self.set_param("privnet", "privnet",
is_onoff=True)
self.set_param("hap", "hap",
is_onoff=True)
self.set_param("viridian", "viridian",
is_onoff=True)
self.set_param("eoi", "eoi", is_onoff=True)
self.set_param("pmu", "pmu", is_onoff=True)
self.set_param("hyperv_vapic", "hyperv_vapic",
is_onoff=True)
self.set_param("hyperv_relaxed", "hyperv_relaxed",
is_onoff=True)
self.set_param("hyperv_spinlocks", "hyperv_spinlocks",
is_onoff=True)
self.set_param("hyperv_spinlocks_retries",
"hyperv_spinlocks_retries")
self.set_param("vmport", "vmport", is_onoff=True)
self.set_param("kvm_hidden", "kvm_hidden", is_onoff=True)
self.set_param("pvspinlock", "pvspinlock", is_onoff=True)
###################
# --clock parsing #
###################
class ParserClock(VirtCLIParser):
def _init_params(self):
self.objclass = Clock
self.set_param("offset", "offset")
def set_timer(opts, inst, cliname, val):
ignore = opts
tname, attrname = cliname.split("_")
timerobj = None
for t in inst.timers:
if t.name == tname:
timerobj = t
break
if not timerobj:
timerobj = inst.add_timer()
timerobj.name = tname
setattr(timerobj, attrname, val)
for tname in Clock.TIMER_NAMES:
self.set_param(None, tname + "_present",
is_onoff=True,
setter_cb=set_timer)
self.set_param(None, tname + "_tickpolicy", setter_cb=set_timer)
################
# --pm parsing #
################
class ParserPM(VirtCLIParser):
def _init_params(self):
self.objclass = PM
self.set_param("suspend_to_mem", "suspend_to_mem", is_onoff=True)
self.set_param("suspend_to_disk", "suspend_to_disk", is_onoff=True)
##########################
# Guest <device> parsing #
##########################
##################
# --disk parsing #
##################
def _default_image_file_format(conn):
if conn.check_support(conn.SUPPORT_CONN_DEFAULT_QCOW2):
return "qcow2"
return "raw"
def _get_default_image_format(conn, poolobj):
tmpvol = StorageVolume(conn)
tmpvol.pool = poolobj
if tmpvol.file_type != StorageVolume.TYPE_FILE:
return None
return _default_image_file_format(conn)
def _generate_new_volume_name(guest, poolobj, fmt):
collidelist = []
for disk in guest.get_devices("disk"):
if (disk.get_vol_install() and
disk.get_vol_install().pool.name() == poolobj.name()):
collidelist.append(os.path.basename(disk.path))
ext = StorageVolume.get_file_extension_for_format(fmt)
return StorageVolume.find_free_name(
poolobj, guest.name, suffix=ext, collidelist=collidelist)
class ParserDisk(VirtCLIParser):
def _init_params(self):
self.objclass = VirtualDisk
self.remove_first = "path"
def noset_cb(opts, inst, cliname, val):
ignore = opts, inst, cliname, val
# These are all handled specially in _parse
self.set_param(None, "backing_store", setter_cb=noset_cb)
self.set_param(None, "pool", setter_cb=noset_cb)
self.set_param(None, "vol", setter_cb=noset_cb)
self.set_param(None, "size", setter_cb=noset_cb)
self.set_param(None, "format", setter_cb=noset_cb)
self.set_param(None, "sparse", setter_cb=noset_cb)
self.set_param("source_pool", "source_pool")
self.set_param("source_volume", "source_volume")
self.set_param("source_name", "source_name")
self.set_param("source_protocol", "source_protocol")
self.set_param("source_host_name", "source_host_name")
self.set_param("source_host_port", "source_host_port")
self.set_param("source_host_socket", "source_host_socket")
self.set_param("source_host_transport", "source_host_transport")
self.set_param("path", "path")
self.set_param("device", "device")
self.set_param("bus", "bus")
self.set_param("removable", "removable", is_onoff=True)
self.set_param("driver_cache", "cache")
self.set_param("driver_discard", "discard")
self.set_param("driver_name", "driver_name")
self.set_param("driver_type", "driver_type")
self.set_param("driver_io", "io")
self.set_param("error_policy", "error_policy")
self.set_param("serial", "serial")
self.set_param("target", "target")
self.set_param("startup_policy", "startup_policy")
self.set_param("read_only", "readonly", is_onoff=True)
self.set_param("shareable", "shareable", is_onoff=True)
self.set_param("boot.order", "boot_order")
self.set_param("iotune_rbs", "read_bytes_sec")
self.set_param("iotune_wbs", "write_bytes_sec")
self.set_param("iotune_tbs", "total_bytes_sec")
self.set_param("iotune_ris", "read_iops_sec")
self.set_param("iotune_wis", "write_iops_sec")
self.set_param("iotune_tis", "total_iops_sec")
self.set_param("sgio", "sgio")
def _parse(self, opts, inst):
if opts.fullopts == "none":
return
def parse_size(val):
if val is None:
return None
try:
return float(val)
except Exception, e:
fail(_("Improper value for 'size': %s") % str(e))
def convert_perms(val):
if val is None:
return
if val == "ro":
opts.opts["readonly"] = "on"
elif val == "sh":
opts.opts["shareable"] = "on"
elif val == "rw":
# It's the default. Nothing to do.

pass
else:
fail(_("Unknown '%s' value '%s'") % ("perms", val))
has_path = "path" in opts.opts
backing_store = opts.get_opt_param("backing_store")
poolname = opts.get_opt_param("pool")
volname = opts.get_opt_param("vol")
size = parse_size(opts.get_opt_param("size"))
fmt = opts.get_opt_param("format")
sparse = _on_off_convert("sparse", opts.get_opt_param("sparse"))
convert_perms(opts.get_opt_param("perms"))
has_type_volume = ("source_pool" in opts.opts or
"source_volume" in opts.opts)
has_type_network = ("source_protocol" in opts.opts)
optcount = sum([bool(p) for p in [has_path, poolname, volname,
has_type_volume, has_type_network]])
if optcount > 1:
fail(_("Cannot specify more than 1 storage path"))
if optcount == 0 and size:
# Saw something like --disk size=X, have it imply pool=default
poolname = "default"
if volname:
if volname.count("/") != 1:
raise ValueError(_("Storage volume must be specified as "
"vol=poolname/volname"))
poolname, volname = volname.split("/")
logging.debug("Parsed --disk volume as: pool=%s vol=%s",
poolname, volname)
VirtCLIParser._parse(self, opts, inst)
# Generate and fill in the disk source info
newvolname = None
poolobj = None
if poolname:
if poolname == "default":
StoragePool.build_default_pool(self.guest.conn)
poolobj = self.guest.conn.storagePoolLookupByName(poolname)
if volname:
vol_object = poolobj.storageVolLookupByName(volname)
inst.set_vol_object(vol_object, poolobj)
poolobj = None
if ((poolobj or inst.wants_storage_creation()) and
(fmt or size or sparse or backing_store)):
if not poolobj:
poolobj = inst.get_parent_pool()
newvolname = os.path.basename(inst.path)
if poolobj and not fmt:
fmt = _get_default_image_format(self.guest.conn, poolobj)
if newvolname is None:
newvolname = _generate_new_volume_name(self.guest, poolobj,
fmt)
vol_install = VirtualDisk.build_vol_install(
self.guest.conn, newvolname, poolobj, size, sparse,
fmt=fmt, backing_store=backing_store)
inst.set_vol_install(vol_install)
if not inst.target:
skip_targets = [d.target for d in self.guest.get_devices("disk")]
inst.generate_target(skip_targets)
inst.cli_generated_target = True
return inst
#####################
# --network parsing #
#####################
class ParserNetwork(VirtCLIParser):
def _init_params(self):
self.objclass = VirtualNetworkInterface
self.remove_first = "type"
def set_mac_cb(opts, inst, cliname, val):
ignore = opts
ignore = cliname
if val == "RANDOM":
return None
inst.macaddr = val
return val
def set_type_cb(opts, inst, cliname, val):
ignore = opts
ignore = cliname
if val == "default":
inst.set_default_source()
else:
inst.type = val
self.set_param("type", "type", setter_cb=set_type_cb)
self.set_param("source", "source")
self.set_param("source_mode", "source_mode")
self.set_param("portgroup", "portgroup")
self.set_param("target_dev", "target")
self.set_param("model", "model")
self.set_param("macaddr", "mac", setter_cb=set_mac_cb)
self.set_param("filterref", "filterref")
self.set_param("boot.order", "boot_order")
self.set_param("driver_name", "driver_name")
self.set_param("driver_queues", "driver_queues")
self.set_param("virtualport.type", "virtualport_type")
# For 802.1Qbg
self.set_param("virtualport.managerid", "virtualport_managerid")
self.set_param("virtualport.typeid", "virtualport_typeid")
self.set_param("virtualport.typeidversion",
"virtualport_typeidversion")
self.set_param("virtualport.instanceid", "virtualport_instanceid")
# For openvswitch & 802.1Qbh
self.set_param("virtualport.profileid", "virtualport_profileid")
# For openvswitch & midonet
self.set_param("virtualport.interfaceid", "virtualport_interfaceid")
def _parse(self, optsobj, inst):
if optsobj.fullopts == "none":
return
opts = optsobj.opts
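# Back-compat shorthand handled below: network=NAME implies type=virtual
# and bridge=NAME implies type=bridge.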
if "type" not in opts:
if "network" in opts:
opts["type"] = VirtualNetworkInterface.TYPE_VIRTUAL
opts["source"] = opts.pop("network")
elif "bridge" in opts:
opts["type"] = VirtualNetworkInterface.TYPE_BRIDGE
opts["source"] = opts.pop("bridge")
return VirtCLIParser._parse(self, optsobj, inst)
######################
# --graphics parsing #
######################
class ParserGraphics(VirtCLIParser):
def _init_params(self):
self.objclass = VirtualGraphics
self.remove_first = "type"
def set_keymap_cb(opts, inst, cliname, val):
ignore = opts
ignore = cliname
from . import hostkeymap
if not val:
val = None
elif val.lower() == "local":
val = VirtualGraphics.KEYMAP_LOCAL
elif val.lower() == "none":
val = None
else:
use_keymap = hostkeymap.sanitize_keymap(val)
if not use_keymap:
raise ValueError(
_("Didn't match keymap '%s' in keytable!") % val)
val = use_keymap
inst.keymap = val
def set_type_cb(opts, inst, cliname, val):
ignore = opts
if val == "default":
return
inst.type = val
self.set_param(None, "type", setter_cb=set_type_cb)
self.set_param("port", "port")
self.set_param("tlsPort", "tlsport")
self.set_param("listen", "listen")
self.set_param(None, "keymap", setter_cb=set_keymap_cb)
self.set_param("passwd", "password")
self.set_param("passwdValidTo", "passwordvalidto")
self.set_param("connected", "connected")
self.set_param("defaultMode", "defaultMode")
self.set_param("image_compression", "image_compression")
self.set_param("streaming_mode", "streaming_mode")
self.set_param("clipboard_copypaste", "clipboard_copypaste",
is_onoff=True)
self.set_param("mouse_mode", "mouse_mode")
self.set_param("filetransfer_enable", "filetransfer_enable",
is_onoff=True)
def _parse(self, opts, inst):
if opts.fullopts == "none":
self.guest.skip_default_graphics = True
return
return VirtCLIParser._parse(self, opts, inst)
########################
# --controller parsing #
########################
class ParserController(VirtCLIParser):
def _init_params(self):
self.objclass = VirtualController
self.remove_first = "type"
self.set_param("type", "type")
self.set_param("model", "model")
self.set_param("index", "index")
self.set_param("master_startport", "master")
def set_server_cb(opts, inst, cliname, val):
ignore = opts = cliname
inst.address.set_addrstr(val)
self.set_param(None, "address", setter_cb=set_server_cb)
def _parse(self, opts, inst):
if opts.fullopts == "usb2":
return VirtualController.get_usb2_controllers(inst.conn)
elif opts.fullopts == "usb3":
inst.type = "usb"
inst.model = "nec-xhci"
return inst
return VirtCLIParser._parse(self, opts, inst)
###################
# --input parsing #
###################
class ParserInput(VirtCLIParser):
def _init_params(self):
self.objclass = VirtualInputDevice
self.remove_first = "type"
self.set_param("type", "type")
self.set_param("bus", "bus")
#######################
# --smartcard parsing #
#######################
class ParserSmartcard(VirtCLIParser):
def _init_params(self):
self.objclass = VirtualSmartCardDevice
self.remove_first = "mode"
self.check_none = True
self.set_param("mode", "mode")
self.set_param("type", "type")
######################
# --redirdev parsing #
######################
class ParserRedir(VirtCLIParser):
def _init_params(self):
self.objclass = VirtualRedirDevice
self.remove_first = "bus"
self.set_param("bus", "bus")
self.set_param("type", "type")
self.set_param("boot.order", "boot_order")
def set_server_cb(opts, inst, cliname, val):
ignore = opts = cliname
inst.parse_friendly_server(val)
self.set_param(None, "server", setter_cb=set_server_cb)
def _parse(self, opts, inst):
if opts.fullopts == "none":
self.guest.skip_default_usbredir = True
return
return VirtCLIParser._parse(self, opts, inst)
#################
# --tpm parsing #
#################
class ParserTPM(VirtCLIParser):
def _init_params(self):
self.objclass = VirtualTPMDevice
self.remove_first = "type"
self.check_none = True
self.set_param("type", "type")
self.set_param("model", "model")
self.set_param("device_path", "path")
def _parse(self, opts, inst):
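# Allow a bare device path as the positional type, e.g. --tpm /dev/tpm0
# (hypothetical invocation).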
if (opts.opts.get("type", "").startswith("/")):
opts.opts["path"] = opts.opts.pop("type")
return VirtCLIParser._parse(self, opts, inst)
#################
# --rng parsing #
#################
class ParserRNG(VirtCLIParser):
def _init_params(self):
self.objclass = VirtualRNGDevice
self.remove_first = "type"
self.check_none = True
def set_hosts_cb(opts, inst, cliname, val):
namemap = {}
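# Map the generic backend_* options onto connect_*/bind_* attributes,
# depending on the previously parsed backend mode and type.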
inst.backend_type = self._cli_backend_type
if self._cli_backend_mode == "connect":
namemap["backend_host"] = "connect_host"
namemap["backend_service"] = "connect_service"
if self._cli_backend_mode == "bind":
namemap["backend_host"] = "bind_host"
namemap["backend_service"] = "bind_service"
if self._cli_backend_type == "udp":
namemap["backend_connect_host"] = "connect_host"
namemap["backend_connect_service"] = "connect_service"
if cliname in namemap:
setattr(inst, namemap[cliname], val)
def set_backend_cb(opts, inst, cliname, val):
ignore = opts
ignore = inst
# pylint: disable=attribute-defined-outside-init
if cliname == "backend_mode":
self._cli_backend_mode = val
elif cliname == "backend_type":
self._cli_backend_type = val
self.set_param("type", "type")
self.set_param(None, "backend_mode", setter_cb=set_backend_cb)
self.set_param(None, "backend_type", setter_cb=set_backend_cb)
self.set_param(None, "backend_host", setter_cb=set_hosts_cb)
self.set_param(None, "backend_service", setter_cb=set_hosts_cb)
self.set_param(None, "backend_connect_host", setter_cb=set_hosts_cb)
self.set_param(None, "backend_connect_service", setter_cb=set_hosts_cb)
self.set_param("device", "device")
self.set_param("model", "model")
self.set_param("rate_bytes", "rate_bytes")
self.set_param("rate_period", "rate_period")
def _parse(self, optsobj, inst):
opts = optsobj.opts
# pylint: disable=attribute-defined-outside-init
# Defined outside init, but it's easier this way
self._cli_backend_mode = "connect"
self._cli_backend_type = "udp"
# pylint: enable=attribute-defined-outside-init
if opts.get("type", "").startswith("/"):
# Allow --rng /dev/random
opts["device"] = opts.pop("type")
opts["type"] = "random"
return VirtCLIParser._parse(self, optsobj, inst)
######################
# --watchdog parsing #
######################
class ParserWatchdog(VirtCLIParser):
def _init_params(self):
self.objclass = VirtualWatchdog
self.remove_first = "model"
self.set_param("model", "model")
self.set_param("action", "action")
########################
# --memballoon parsing #
########################
class ParserMemballoon(VirtCLIParser):
def _init_params(self):
self.objclass = VirtualMemballoon
self.remove_first = "model"
self.set_param("model", "model")
###################
# --panic parsing #
###################
class ParserPanic(VirtCLIParser):
def _init_params(self):
self.objclass = VirtualPanicDevice
self.remove_first = "iobase"
def set_iobase_cb(opts, inst, cliname, val):
ignore = opts
ignore = cliname
if val == "default":
return
inst.iobase = val
self.set_param(None, "iobase", setter_cb=set_iobase_cb)
#######################
# --blkiotune parsing #
#######################
class ParserBlkiotune(VirtCLIParser):
def _init_params(self):
self.objclass = DomainBlkiotune
self.remove_first = "weight"
self.set_param("weight", "weight")
self.set_param("device_path", "device_path")
self.set_param("device_weight", "device_weight")
###########################
# --memorybacking parsing #
###########################
class ParserMemorybacking(VirtCLIParser):
def _init_params(self):
self.objclass = DomainMemorybacking
self.set_param("hugepages", "hugepages", is_onoff=True)
self.set_param("page_size", "size")
self.set_param("page_unit", "unit")
self.set_param("page_nodeset", "nodeset", can_comma=True)
self.set_param("nosharepages", "nosharepages", is_onoff=True)
self.set_param("locked", "locked", is_onoff=True)
######################################################
# --serial, --parallel, --channel, --console parsing #
######################################################
class _ParserChar(VirtCLIParser):
def _init_params(self):
self.remove_first = "char_type"
def support_check(inst, attrname, cliname):
if type(attrname) is not str:
return
if not inst.supports_property(attrname):
raise ValueError(_("%(devtype)s type '%(chartype)s' does not "
"support '%(optname)s' option.") %
{"devtype" : inst.virtual_device_type,
"chartype": inst.type,
"optname" : cliname})
self.support_cb = support_check
self.set_param("type", "char_type")
self.set_param("source_path", "path")
self.set_param("protocol", "protocol")
self.set_param("target_type", "target_type")
self.set_param("target_name", "name")
def set_host_cb(opts, inst, cliname, val):
ignore = cliname
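# With mode=bind and no explicit bind_host, host= names the bind address;
# otherwise it names the connect source.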
if ("bind_host" not in opts.opts and
opts.opts.get("mode", None) == "bind"):
inst.set_friendly_bind(val)
else:
inst.set_friendly_source(val)
self.set_param(None, "host", setter_cb=set_host_cb)
def set_bind_cb(opts, inst, cliname, val):
ignore = opts = cliname
inst.set_friendly_bind(val)
self.set_param(None, "bind_host", setter_cb=set_bind_cb)
def set_target_cb(opts, inst, cliname, val):
ignore = opts = cliname
inst.set_friendly_target(val)
self.set_param(None, "target_address", setter_cb=set_target_cb)
self.set_param("source_mode", "mode")
def _parse(self, opts, inst):
if opts.fullopts == "none" and inst.virtual_device_type == "console":
self.guest.skip_default_console = True
return
if opts.fullopts == "none" and inst.virtual_device_type == "channel":
self.guest.skip_default_channel = True
return
return VirtCLIParser._parse(self, opts, inst)
class ParserSerial(_ParserChar):
objclass = VirtualSerialDevice
class ParserParallel(_ParserChar):
objclass = VirtualParallelDevice
class ParserChannel(_ParserChar):
objclass = VirtualChannelDevice
class ParserConsole(_ParserChar):
objclass = VirtualConsoleDevice
########################
# --filesystem parsing #
########################
class ParserFilesystem(VirtCLIParser):
def _init_params(self):
self.objclass = VirtualFilesystem
self.remove_first = ["source", "target"]
self.set_param("type", "type")
self.set_param("accessmode", "accessmode", aliases=["mode"])
self.set_param("source", "source")
self.set_param("target", "target")
###################
# --video parsing #
###################
class ParserVideo(VirtCLIParser):
def _init_params(self):
self.objclass = VirtualVideoDevice
self.remove_first = "model"
self.set_param("model", "model", ignore_default=True)
###################
# --sound parsing #
###################
class ParserSound(VirtCLIParser):
def _init_params(self):
self.objclass = VirtualAudio
self.remove_first = "model"
self.set_param("model", "model", ignore_default=True)
def _parse(self, opts, inst):
if opts.fullopts == "none":
self.guest.skip_default_sound = True
return
return VirtCLIParser._parse(self, opts, inst)
#####################
# --hostdev parsing #
#####################
class ParserHostdev(VirtCLIParser):
def _init_params(self):
self.objclass = VirtualHostDevice
self.remove_first = "name"
# If using the name_lookup_cb, this saves us repeatedly trying to
# look up the nodedev
_nodedev_lookup_cache = {}
def set_name_cb(opts, inst, cliname, val):
ignore = opts
ignore = cliname
val = NodeDevice.lookupNodedevFromString(inst.conn, val)
inst.set_from_nodedev(val)
def name_lookup_cb(opts, inst, cliname, val):
ignore = opts
ignore = cliname
if val not in _nodedev_lookup_cache:
_nodedev_lookup_cache[val] = \
NodeDevice.lookupNodedevFromString(inst.conn, val)
nodedev = _nodedev_lookup_cache[val]
return nodedev.compare_to_hostdev(inst)
self.set_param(None, "name",
setter_cb=set_name_cb, lookup_cb=name_lookup_cb)
self.set_param("driver_name", "driver_name")
self.set_param("boot.order", "boot_order")
self.set_param("rom_bar", "rom_bar", is_onoff=True)
###########################
# Register parser classes #
###########################
def build_parser_map(options, skip=None, only=None):
"""
Build a dictionary mapping cli-name -> parser instance, e.g.
--vcpus -> a ParserVCPU object.
"""
parsermap = {}
def register_parser(cli_arg_name, parserclass):
if cli_arg_name in util.listify(skip):
return
if only and cli_arg_name not in util.listify(only):
return
parserobj = parserclass(cli_arg_name)
if not hasattr(options, parserobj.option_variable_name):
raise RuntimeError("programming error: unknown option=%s "
"cliname=%s class=%s" %
(parserobj.option_variable_name,
parserobj.cli_arg_name, parserclass))
parsermap[parserobj.option_variable_name] = parserobj
register_parser("metadata", ParserMetadata)
register_parser("events", ParserEvents)
register_parser("resource", ParserResource)
register_parser("memory", ParserMemory)
register_parser("memtune", ParserMemorytune)
register_parser("vcpus", ParserVCPU)
register_parser("cpu", ParserCPU)
register_parser("numatune", ParserNumatune)
register_parser("blkiotune", ParserBlkiotune)
register_parser("memorybacking", ParserMemorybacking)
register_parser("idmap", ParserIdmap)
register_parser("boot", ParserBoot)
register_parser("security", ParserSecurity)
register_parser("features", ParserFeatures)
register_parser("clock", ParserClock)
register_parser("pm", ParserPM)
register_parser("features", ParserFeatures)
register_parser("disk", ParserDisk)
register_parser("network", ParserNetwork)
register_parser("graphics", ParserGraphics)
register_parser("controller", ParserController)
register_parser("input", ParserInput)
register_parser("smartcard", ParserSmartcard)
register_parser("redirdev", ParserRedir)
register_parser("tpm", ParserTPM)
register_parser("rng", ParserRNG)
register_parser("watchdog", ParserWatchdog)
register_parser("memballoon", ParserMemballoon)
register_parser("serial", ParserSerial)
register_parser("parallel", ParserParallel)
register_parser("channel", ParserChannel)
register_parser("console", ParserConsole)
register_parser("filesystem", ParserFilesystem)
register_parser("video", ParserVideo)
register_parser("sound", ParserSound)
register_parser("hostdev", ParserHostdev)
register_parser("panic", ParserPanic)
return parsermap
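# Illustrative usage sketch (hypothetical option names): the map is keyed by
# each parser's option_variable_name, so for argparse options carrying a
# --vcpus flag one would expect roughly:
#
#   parsermap = build_parser_map(options, only=["vcpus"])
#   # parsermap == {"vcpus": <ParserVCPU instance>}
#   parse_option_strings(parsermap, options, guest, None)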
def parse_option_strings(parsermap, options, guest, instlist, update=False):
"""
Iterate over the parsermap, and launch the associated parser
function for every value that was filled in on 'options', which
came from argparse/the command line.
@update: If we are updating an existing guest, like from virt-xml
"""
instlist = util.listify(instlist)
if not instlist:
instlist = [None]
ret = []
for option_variable_name in dir(options):
if option_variable_name not in parsermap:
continue
for inst in util.listify(instlist):
parseret = parsermap[option_variable_name].parse(
guest, getattr(options, option_variable_name), inst,
validate=not update)
ret += util.listify(parseret)
return ret
def check_option_introspection(options, parsermap):
"""
Check if the user requested option introspection, e.g. '--disk=?'
"""
ret = False
for option_variable_name in dir(options):
if option_variable_name not in parsermap:
continue
if parsermap[option_variable_name].check_introspection(
getattr(options, option_variable_name)):
ret = True
return ret<๏ฝfimโend๏ฝ> | |
<|file_name|>_helpers.py<|end_file_name|><๏ฝfimโbegin๏ฝ># Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Shared helper functions for RuntimeConfig API classes."""
<๏ฝfimโhole๏ฝ>
def config_name_from_full_name(full_name):
"""Extract the config name from a full resource name.
>>> config_name_from_full_name('projects/my-proj/configs/my-config')
"my-config"
:type full_name: str
:param full_name:
The full resource name of a config. The full resource name looks like
``projects/project-name/configs/config-name`` and is returned as the
``name`` field of a config resource. See:
https://cloud.google.com/deployment-manager/runtime-configurator/reference/rest/v1beta1/projects.configs
:rtype: str
:returns: The config's short name, given its full resource name.
:raises: :class:`ValueError` if ``full_name`` is not the expected format
"""
projects, _, configs, result = full_name.split('/')
if projects != 'projects' or configs != 'configs':
raise ValueError(
'Unexpected format of resource', full_name,
'Expected "projects/{proj}/configs/{cfg}"')
return result
def variable_name_from_full_name(full_name):
"""Extract the variable name from a full resource name.
>>> variable_name_from_full_name(
'projects/my-proj/configs/my-config/variables/var-name')
"var-name"
>>> variable_name_from_full_name(
'projects/my-proj/configs/my-config/variables/another/var/name')
"another/var/name"
:type full_name: str
:param full_name:
The full resource name of a variable. The full resource name looks like
``projects/prj-name/configs/cfg-name/variables/var-name`` and is
returned as the ``name`` field of a variable resource. See:
https://cloud.google.com/deployment-manager/runtime-configurator/reference/rest/v1beta1/projects.configs.variables
:rtype: str
:returns: The variable's short name, given its full resource name.
:raises: :class:`ValueError` if ``full_name`` is not the expected format
"""
projects, _, configs, _, variables, result = full_name.split('/', 5)
if (projects != 'projects' or configs != 'configs' or
variables != 'variables'):
raise ValueError(
'Unexpected format of resource', full_name,
'Expected "projects/{proj}/configs/{cfg}/variables/..."')
return result<๏ฝfimโend๏ฝ> | |
<|file_name|>identify.js<|end_file_name|><๏ฝfimโbegin๏ฝ>/* globals describe, beforeEach, $fixture, qq, assert, it, qqtest, helpme, purl */
if (qq.supportedFeatures.imagePreviews && qqtest.canDownloadFileAsBlob) {
describe("identify.js", function() {
"use strict";
function testPreviewability(expectedToBePreviewable, key, expectedMime, done) {
qqtest.downloadFileAsBlob(key, expectedMime).then(function(blob) {
new qq.Identify(blob, function() {}).isPreviewable().then(function(mime) {
!expectedToBePreviewable && assert.fail();
assert.equal(mime, expectedMime);
done();
}, function() {
expectedToBePreviewable && assert.fail();
assert.ok(true);
done();
});
}, function() {
assert.fail("Problem downloading test file");
});
}
it("classifies gif as previewable", function(done) {
testPreviewability(true, "drop-background.gif", "image/gif", done);
});
it("classifies jpeg as previewable", function(done) {
testPreviewability(true, "fearless.jpeg", "image/jpeg", done);
});
it("classifies bmp as previewable", function(done) {
testPreviewability(true, "g04.bmp", "image/bmp", done);
});
it("classifies png as previewable", function(done) {
testPreviewability(true, "not-available_l.png", "image/png", done);
});
it("classifies tiff as previewable", function(done) {
testPreviewability(qq.supportedFeatures.tiffPreviews, "sample.tif", "image/tiff", done);
});
it("marks a non-image as not previewable", function(done) {
testPreviewability(false, "simpletext.txt", null, done);
});<๏ฝfimโhole๏ฝ><๏ฝfimโend๏ฝ> | });
} |
<|file_name|>extinfo.cc<|end_file_name|><๏ฝfimโbegin๏ฝ>#include "peerreview/vrf.h"
#define SUBSYSTEM "VrfExtInfoPolicy"
VrfExtInfoPolicy::VrfExtInfoPolicy(VerifiablePRNG *vprng) : ExtInfoPolicy()
{
this->vprng = vprng;
}
VrfExtInfoPolicy::~VrfExtInfoPolicy()
{
}
int VrfExtInfoPolicy::storeExtInfo(SecureHistory *history, long long followingSeq, unsigned char *buffer, unsigned int maxlen)
{
int extInfoLen = vprng->storeExtInfo(buffer, maxlen);
if (extInfoLen > 0) {
unsigned char ty = EVT_VRF;
int ne = history->findNextEntry(&ty, 1, followingSeq);
if (ne >= 0) {
//plog(3, "GETTING VRF @%d/%lld", ne, followingSeq);<๏ฝfimโhole๏ฝ> //plog(3, "=> %d", extInfoLen);
}
}
return extInfoLen;
}<๏ฝfimโend๏ฝ> | extInfoLen = history->getEntry(ne, buffer, maxlen); |
<|file_name|>__init__.py<|end_file_name|><๏ฝfimโbegin๏ฝ>#!/usr/bin/env python
"""
The scripts that compose this module contain a set of functions
needed to properly perform background subtraction for each camera
in a dataset.
"""
<๏ฝfimโhole๏ฝ>import numpy as np
import sys
from gui import trackbar
from threedgeometry import frameretriever<๏ฝfimโend๏ฝ> | import cbackground
import cv2 |
<|file_name|>worldMap.py<|end_file_name|><๏ฝfimโbegin๏ฝ># Copyright (C) 2001 Steve Howell
# You must read the file called INFO.txt before distributing this code.
# ---
# Worlds for Karel are defined with simple text files that we parse out
# in this module. See the worlds folder for examples.
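# A minimal, hypothetical world file accepted by readWorld() below:
#
#   SIZE 10 10        # at least 7x7
#   ROBOT 1 1 E 0     # x, y, direction (N/S/E/W), beeper count
#   WALL 3 3 N
#   BEEPERS 5 5 2
#
# Lines starting with '#' are comments; keywords may appear translated.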
from world import NORTH,SOUTH,EAST,WEST
from utils import trace_error
import re,gettext
# Use this to be able to extract strings for translation by pygettext.py
try:
#print "current _", _
old_ = _
except Exception,info:
print >> sys.stderr, "in gvrparser locale switch:\n",info
_ = gettext.gettext
KEYWORDS = (
_('ROBOT'),
_('WALL'),
_('BEEPERS'),
_('SIZE'))
DIRECTIONS = (NORTH,SOUTH,EAST,WEST)
####################### Start I18N part #####################################
# Now we install a gettext file in the builtin namespace
# If this fails the bogus '_()' function is used and we end up with an
# english-to-english look-up table :-(
# A possible solution would be to test for locales but I think it won't matter much in speed.
_ = old_
#print _
# get a list with the translated strings
trans_commands, org_commands = [],[]
words = KEYWORDS
for i in words:
trans_commands.append(_(i))
org_commands.append(i) # this is the english one
# With this we build a look-up dictionary that is used in the Program class.
# The look-up dict: {'beweeg':'move','rechtsaf':turnright',....}
# the keys are the gettext strings and the vals are the original names.
lookup_dict = {}
for k,v in map(None,trans_commands,org_commands):
lookup_dict[k] = v
lookup_dir_dict = {_('N'):'N',_('S'):'S',_('E'):'E',_('W'):'W'}#
class WorldMapException(Exception):
def __init__(self, line, str):
self.line = line
self.str = str
def __str__(self): return self.str
def checkDirection(line, dir):
if dir not in lookup_dir_dict.values():
raise WorldMapException(line,
_("In line %d:\n%s is not a valid direction -- use N, S, E, or W")
% (line, dir))
def removeComment(line):
foundComment = False
for i in range(len(line)):
if line[i] == "#":
foundComment = True
break
if foundComment:
return line[:i]
else:
return line
def readWorld(lines, world):
definedRobot = 0
useGuido = False
linenumber = 0
worldSize = None
#print "worldMap lines",lines
for line in lines:
linenumber += 1
try:
if re.search("\S", line) and not re.match("\s*#", line):
line = removeComment(line)
tokens = line.split()
tokens = [x.upper() for x in tokens]
keyword = tokens[0]
if lookup_dict.has_key(keyword):
keyword = lookup_dict[keyword]
dir = tokens[3]
if lookup_dir_dict.has_key(dir):
dir = lookup_dir_dict[dir]
tokens[3] = dir
if keyword ==_('WALL') or keyword == 'WALL':
tokens[0] = keyword
#print "wall",tokens
checkDirection(linenumber, dir)
#print "tokens",tokens
world.setWall(*tokens[1:])
elif keyword == _('ROBOT') or keyword == 'ROBOT':
if definedRobot:
raise WorldMapException(linenumber, _('You may only have one robot definition.'))
definedRobot = 1
tokens = [x.upper() for x in tokens]
if len(tokens) == 5:
x, y, dir, numBeepers = tokens[1:]
else:
x, y, dir = tokens[1:]<๏ฝfimโhole๏ฝ> robotX, robotY = int(x), int(y)
world.positionRobot(robotX, robotY, dir)
if numBeepers == "unlimited":
world.unlimitedBeepers = True
numBeepers = 0
world.setRobotBeepers(int(numBeepers))
elif keyword == _('BEEPERS') or keyword == 'BEEPERS':
x, y, numBeepers = tokens[1:]
world.setBeepers(int(x), int(y), int(numBeepers))
elif keyword == 'BDFL':
useGuido = True
elif keyword == _('SIZE') or keyword == 'SIZE':
if worldSize:
raise WorldMapException(linenumber,
_('You may only have one size statement'))
try:
avenues, streets = [int(coord) for coord in tokens[1:]]
except ValueError:
raise WorldMapException(linenumber,
_('Size statement should have 2 integers'))
if avenues < 7 or streets < 7:
raise WorldMapException(linenumber,
_('Size coordinates must be at least 7'))
worldSize = (avenues, streets)
else:
raise WorldMapException(linenumber,_("Cannot understand: %s") % line)
except Exception,info:
info = _("Error in line %s:\n%s\nCheck your world file for syntax errors") % (linenumber,line)
raise WorldMapException(linenumber, info)
if not definedRobot:
raise WorldMapException(linenumber, _("The world map seems to be missing information."))
world.useGuido = useGuido
return worldSize<๏ฝfimโend๏ฝ> | numBeepers = 0 |
<|file_name|>M_InOutLine_Shipment.java<|end_file_name|><๏ฝfimโbegin๏ฝ>package de.metas.inoutcandidate.modelvalidator;
/*
* #%L<๏ฝfimโhole๏ฝ> * Copyright (C) 2015 metas GmbH
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 2 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/gpl-2.0.html>.
* #L%
*/
import org.adempiere.ad.modelvalidator.annotations.ModelChange;
import org.adempiere.ad.modelvalidator.annotations.Validator;
import org.compiere.model.I_M_InOutLine;
import org.compiere.model.ModelValidator;
@Validator(I_M_InOutLine.class)
public class M_InOutLine_Shipment
{
@ModelChange(
timings = { ModelValidator.TYPE_AFTER_CHANGE, ModelValidator.TYPE_AFTER_DELETE, ModelValidator.TYPE_AFTER_NEW },
ifColumnsChanged = I_M_InOutLine.COLUMNNAME_MovementQty)
public void onMovementQtyChange(final I_M_InOutLine inOutLine)
{
// All code from here was moved to de.metas.handlingunits.model.validator.M_InOutLine.onMovementQtyChange(I_M_InOutLine)
// because we need to be aware of whether this is about HUs or not....
// TODO: implement a generic approach that applies the algorithm without actually going through HUs stuff
}
}<๏ฝfimโend๏ฝ> | * de.metas.swat.base
* %% |
<|file_name|>rule_profiler.cc<|end_file_name|><๏ฝfimโbegin๏ฝ>//--------------------------------------------------------------------------
// Copyright (C) 2015-2016 Cisco and/or its affiliates. All rights reserved.
//
// This program is free software; you can redistribute it and/or modify it
// under the terms of the GNU General Public License Version 2 as published
// by the Free Software Foundation. You may not use, modify or distribute
// this program under any other version of the GNU General Public License.
//
// This program is distributed in the hope that it will be useful, but
// WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// General Public License for more details.
//
// You should have received a copy of the GNU General Public License along
// with this program; if not, write to the Free Software Foundation, Inc.,
// 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
//--------------------------------------------------------------------------
// rule_profiler.cc author Joel Cornett <[email protected]>
#include "rule_profiler.h"
#if HAVE_CONFIG_H
#include "config.h"
#endif
#include <algorithm>
#include <functional>
#include <iostream>
#include <sstream>
#include <vector>
#include "detection/detection_options.h"
#include "detection/treenodes.h"
#include "hash/sfghash.h"
#include "main/snort_config.h"
#include "main/thread_config.h"
#include "parser/parser.h"
#include "target_based/snort_protocols.h"
#include "profiler_printer.h"
#include "profiler_stats_table.h"
#include "rule_profiler_defs.h"
#ifdef UNIT_TEST
#include "catch/catch.hpp"
#endif
#define s_rule_table_title "Rule Profile Statistics"
static inline OtnState& operator+=(OtnState& lhs, const OtnState& rhs)
{
lhs.elapsed += rhs.elapsed;
lhs.elapsed_match += rhs.elapsed_match;
lhs.checks += rhs.checks;
lhs.matches += rhs.matches;
lhs.alerts += rhs.alerts;
return lhs;
}
namespace rule_stats
{
static const StatsTable::Field fields[] =
{
{ "#", 5, '\0', 0, std::ios_base::left },
{ "gid", 6, '\0', 0, std::ios_base::fmtflags() },
{ "sid", 6, '\0', 0, std::ios_base::fmtflags() },
{ "rev", 4, '\0', 0, std::ios_base::fmtflags() },
{ "checks", 7, '\0', 0, std::ios_base::fmtflags() },
{ "matches", 8, '\0', 0, std::ios_base::fmtflags() },
{ "alerts", 7, '\0', 0, std::ios_base::fmtflags() },
{ "time (us)", 10, '\0', 0, std::ios_base::fmtflags() },
{ "avg/check", 10, '\0', 1, std::ios_base::fmtflags() },
{ "avg/match", 10, '\0', 1, std::ios_base::fmtflags() },
{ "avg/non-match", 14, '\0', 1, std::ios_base::fmtflags() },
{ "timeouts", 9, '\0', 0, std::ios_base::fmtflags() },
{ "suspends", 9, '\0', 0, std::ios_base::fmtflags() },
{ nullptr, 0, '\0', 0, std::ios_base::fmtflags() }
};
struct View
{
OtnState state;
SigInfo sig_info;
hr_duration elapsed() const
{ return state.elapsed; }
hr_duration elapsed_match() const
{ return state.elapsed_match; }
hr_duration elapsed_no_match() const
{ return elapsed() - elapsed_match(); }
uint64_t checks() const
{ return state.checks; }
uint64_t matches() const
{ return state.matches; }
uint64_t no_matches() const
{ return checks() - matches(); }
uint64_t alerts() const
{ return state.alerts; }
uint64_t timeouts() const
{ return state.latency_timeouts; }
uint64_t suspends() const
{ return state.latency_suspends; }
hr_duration time_per(hr_duration d, uint64_t v) const
{
if ( v == 0 )
return 0_ticks;
return hr_duration(d.count() / v);
}
hr_duration avg_match() const
{ return time_per(elapsed_match(), matches()); }
hr_duration avg_no_match() const
{ return time_per(elapsed_no_match(), no_matches()); }
hr_duration avg_check() const
{ return time_per(elapsed(), checks()); }
View(const OtnState& otn_state, const SigInfo* si = nullptr) :
state(otn_state)
{
if ( si )
// FIXIT-L J does sig_info need to be initialized otherwise?
sig_info = *si;
}
};
static const ProfilerSorter<View> sorters[] =
{
{ "", nullptr },
{
"checks",
[](const View& lhs, const View& rhs)
{ return lhs.checks() >= rhs.checks(); }
},
{
"avg_check",
[](const View& lhs, const View& rhs)
{ return lhs.avg_check() >= rhs.avg_check(); }
},
{
"total_time",
[](const View& lhs, const View& rhs)
{ return lhs.elapsed().count() >= rhs.elapsed().count(); }
},
{
"matches",
[](const View& lhs, const View& rhs)
{ return lhs.matches() >= rhs.matches(); }
},
{
"no_matches",
[](const View& lhs, const View& rhs)
{ return lhs.no_matches() >= rhs.no_matches(); }
},
{
"avg_match",
[](const View& lhs, const View& rhs)
{ return lhs.avg_match() >= rhs.avg_match(); }
},
{
"avg_no_match",
[](const View& lhs, const View& rhs)
{ return lhs.avg_no_match() >= rhs.avg_no_match(); }
}
};
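// Per-thread OTN state is kept in an array indexed by instance id; fold
// every slot into slot 0 so reporting reads a single consolidated total.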
static void consolidate_otn_states(OtnState* states)
{
for ( unsigned i = 1; i < ThreadConfig::get_instance_max(); ++i )
states[0] += states[i];
}
static std::vector<View> build_entries()
{
assert(snort_conf);
detection_option_tree_update_otn_stats(snort_conf->detection_option_tree_hash_table);
auto* otn_map = snort_conf->otn_map;
std::vector<View> entries;
for ( auto* h = sfghash_findfirst(otn_map); h; h = sfghash_findnext(otn_map) )
{
auto* otn = static_cast<OptTreeNode*>(h->data);
assert(otn);
auto* states = otn->state;
consolidate_otn_states(states);
auto& state = states[0];
if ( !state )
continue;
// FIXIT-L J should we assert(otn->sigInfo)?
entries.emplace_back(state, &otn->sigInfo);
}
return entries;
}
// FIXIT-L J logic duplicated from ProfilerPrinter
static void print_single_entry(const View& v, unsigned n)
{
using std::chrono::duration_cast;
using std::chrono::microseconds;
std::ostringstream ss;
{
StatsTable table(fields, ss);
table << StatsTable::ROW;
table << n; // #
table << v.sig_info.generator; // gid
table << v.sig_info.id; // sid
table << v.sig_info.rev; // rev
table << v.checks(); // checks
table << v.matches(); // matches
table << v.alerts(); // alerts
table << duration_cast<microseconds>(v.elapsed()).count(); // time
table << duration_cast<microseconds>(v.avg_check()).count(); // avg/check
table << duration_cast<microseconds>(v.avg_match()).count(); // avg/match
table << duration_cast<microseconds>(v.avg_no_match()).count(); // avg/non-match
table << v.timeouts();
table << v.suspends();
}
LogMessage("%s", ss.str().c_str());
}
// FIXIT-L J logic duplicated from ProfilerPrinter
static void print_entries(std::vector<View>& entries, ProfilerSorter<View> sort, unsigned count)
{
std::ostringstream ss;
{
StatsTable table(fields, ss);
table << StatsTable::SEP;
table << s_rule_table_title;
if ( count )
table << " (worst " << count;
else
table << " (all";
if ( sort )
table << ", sorted by " << sort.name;
table << ")\n";
table << StatsTable::HEADER;
}
LogMessage("%s", ss.str().c_str());
if ( !count || count > entries.size() )
count = entries.size();
if ( sort )
std::partial_sort(entries.begin(), entries.begin() + count, entries.end(), sort);
for ( unsigned i = 0; i < count; ++i )
print_single_entry(entries[i], i + 1);
}
}
void show_rule_profiler_stats(const RuleProfilerConfig& config)
{
if ( !config.show )
return;
auto entries = rule_stats::build_entries();
// if there aren't any eval'd rules, don't sort or print
if ( entries.empty() )
return;
auto sort = rule_stats::sorters[config.sort];
// FIXIT-L J do we eventually want to be able to print rule totals, too?
print_entries(entries, sort, config.count);
}
void reset_rule_profiler_stats()
{
assert(snort_conf);
auto* otn_map = snort_conf->otn_map;
for ( auto* h = sfghash_findfirst(otn_map); h; h = sfghash_findnext(otn_map) )
{
auto* otn = static_cast<OptTreeNode*>(h->data);
assert(otn);
auto* rtn = getRtnFromOtn(otn);
if ( !rtn || !is_network_protocol(rtn->proto) )
continue;
for ( unsigned i = 0; i < ThreadConfig::get_instance_max(); ++i )
{
auto& state = otn->state[i];
state = OtnState();
}
}
}
void RuleContext::stop(bool match)
{
if ( finished )
return;
finished = true;
stats.update(sw.get(), match);
}
#ifdef UNIT_TEST
namespace
{
using RuleEntryVector = std::vector<rule_stats::View>;
using RuleStatsVector = std::vector<OtnState>;
} // anonymous namespace
static inline bool operator==(const RuleEntryVector& lhs, const RuleStatsVector& rhs)
{
if ( lhs.size() != rhs.size() )
return false;
for ( unsigned i = 0; i < lhs.size(); ++i )
if ( lhs[i].state != rhs[i] )
return false;
return true;
}
static inline OtnState make_otn_state(
hr_duration elapsed, hr_duration elapsed_match,
uint64_t checks, uint64_t matches)
{
OtnState state;
state.elapsed = elapsed;
state.elapsed_match = elapsed_match;
state.checks = checks;
state.matches = matches;
return state;
}
static inline rule_stats::View make_rule_entry(
hr_duration elapsed, hr_duration elapsed_match,
uint64_t checks, uint64_t matches)
{
return {
make_otn_state(elapsed, elapsed_match, checks, matches),
nullptr
};
}
static void avoid_optimization()
{ for ( int i = 0; i < 1024; ++i ); }
TEST_CASE( "otn state", "[profiler][rule_profiler]" )
{
OtnState state_a;
state_a.elapsed = 1_ticks;
state_a.elapsed_match = 2_ticks;
state_a.elapsed_no_match = 2_ticks;
state_a.checks = 1;
state_a.matches = 2;
state_a.noalerts = 3;
state_a.alerts = 4;
SECTION( "incremental addition" )
{
OtnState state_b;
state_b.elapsed = 4_ticks;
state_b.elapsed_match = 5_ticks;
state_b.elapsed_no_match = 6_ticks;
state_b.checks = 5;
state_b.matches = 6;
state_b.noalerts = 7;
state_b.alerts = 8;
state_a += state_b;
CHECK( state_a.elapsed == 5_ticks );
CHECK( state_a.elapsed_match == 7_ticks );
CHECK( state_a.checks == 6 );
CHECK( state_a.matches == 8 );
CHECK( state_a.alerts == 12 );
}
SECTION( "reset" )
{
state_a = OtnState();
CHECK( state_a.elapsed == 0_ticks );
CHECK( state_a.elapsed_match == 0_ticks );
CHECK( state_a.checks == 0 );
CHECK( state_a.matches == 0 );
CHECK( state_a.alerts == 0 );
}
SECTION( "bool()" )
{
CHECK( state_a );
OtnState state_c = OtnState();
CHECK_FALSE( state_c );
state_c.elapsed = 1_ticks;
CHECK( state_c );
state_c.elapsed = 0_ticks;
state_c.checks = 1;
CHECK( state_c );
}
}
TEST_CASE( "rule entry", "[profiler][rule_profiler]" )
{
SigInfo sig_info;
auto entry = make_rule_entry(3_ticks, 2_ticks, 3, 2);
entry.state.alerts = 77;
entry.state.latency_timeouts = 5;
entry.state.latency_suspends = 2;
SECTION( "copy assignment" )
{
auto copy = entry;<๏ฝfimโhole๏ฝ> CHECK( copy.sig_info.generator == entry.sig_info.generator );
CHECK( copy.sig_info.id == entry.sig_info.id );
CHECK( copy.sig_info.rev == entry.sig_info.rev );
CHECK( copy.state == entry.state );
}
SECTION( "copy construction" )
{
rule_stats::View copy(entry);
CHECK( copy.sig_info.generator == entry.sig_info.generator );
CHECK( copy.sig_info.id == entry.sig_info.id );
CHECK( copy.sig_info.rev == entry.sig_info.rev );
CHECK( copy.state == entry.state );
}
SECTION( "elapsed" )
{
CHECK( entry.elapsed() == 3_ticks );
}
SECTION( "elapsed_match" )
{
CHECK( entry.elapsed_match() == 2_ticks );
}
SECTION( "elapsed_no_match" )
{
CHECK( entry.elapsed_no_match() == 1_ticks );
}
SECTION( "checks" )
{
CHECK( entry.checks() == 3 );
}
SECTION( "matches" )
{
CHECK( entry.matches() == 2 );
}
SECTION( "no_matches" )
{
CHECK( entry.no_matches() == 1 );
}
SECTION( "alerts" )
{
CHECK( entry.alerts() == 77 );
}
SECTION( "timeouts" )
{
CHECK( entry.timeouts() == 5 );
}
SECTION( "suspends" )
{
CHECK( entry.suspends() == 2 );
}
SECTION( "avg_match" )
{
auto ticks = entry.avg_match();
INFO( ticks.count() << " == " << (1_ticks).count() );
CHECK( ticks == 1_ticks );
}
SECTION( "avg_no_match" )
{
auto ticks = entry.avg_no_match();
INFO( ticks.count() << " == " << (1_ticks).count() );
CHECK( ticks == 1_ticks );
}
SECTION( "avg_check" )
{
auto ticks = entry.avg_check();
INFO( ticks.count() << " == " << (1_ticks).count() );
CHECK( ticks == 1_ticks );
}
}
TEST_CASE( "rule profiler sorting", "[profiler][rule_profiler]" )
{
using Sort = RuleProfilerConfig::Sort;
SECTION( "checks" )
{
RuleEntryVector entries {
make_rule_entry(0_ticks, 0_ticks, 0, 0),
make_rule_entry(0_ticks, 0_ticks, 1, 0),
make_rule_entry(0_ticks, 0_ticks, 2, 0)
};
RuleStatsVector expected {
make_otn_state(0_ticks, 0_ticks, 2, 0),
make_otn_state(0_ticks, 0_ticks, 1, 0),
make_otn_state(0_ticks, 0_ticks, 0, 0)
};
const auto& sorter = rule_stats::sorters[Sort::SORT_CHECKS];
std::partial_sort(entries.begin(), entries.end(), entries.end(), sorter);
CHECK( entries == expected );
}
SECTION( "avg_check" )
{
RuleEntryVector entries {
make_rule_entry(2_ticks, 0_ticks, 2, 0),
make_rule_entry(8_ticks, 0_ticks, 4, 0),
make_rule_entry(4_ticks, 0_ticks, 1, 0)
};
RuleStatsVector expected {
make_otn_state(4_ticks, 0_ticks, 1, 0),
make_otn_state(8_ticks, 0_ticks, 4, 0),
make_otn_state(2_ticks, 0_ticks, 2, 0)
};
const auto& sorter = rule_stats::sorters[Sort::SORT_AVG_CHECK];
std::partial_sort(entries.begin(), entries.end(), entries.end(), sorter);
CHECK( entries == expected );
}
SECTION( "total_time" )
{
RuleEntryVector entries {
make_rule_entry(0_ticks, 0_ticks, 0, 0),
make_rule_entry(1_ticks, 0_ticks, 0, 0),
make_rule_entry(2_ticks, 0_ticks, 0, 0)
};
RuleStatsVector expected {
make_otn_state(2_ticks, 0_ticks, 0, 0),
make_otn_state(1_ticks, 0_ticks, 0, 0),
make_otn_state(0_ticks, 0_ticks, 0, 0)
};
const auto& sorter = rule_stats::sorters[Sort::SORT_TOTAL_TIME];
std::partial_sort(entries.begin(), entries.end(), entries.end(), sorter);
CHECK( entries == expected );
}
SECTION( "matches" )
{
RuleEntryVector entries {
make_rule_entry(0_ticks, 0_ticks, 0, 0),
make_rule_entry(0_ticks, 0_ticks, 0, 1),
make_rule_entry(0_ticks, 0_ticks, 0, 2)
};
RuleStatsVector expected {
make_otn_state(0_ticks, 0_ticks, 0, 2),
make_otn_state(0_ticks, 0_ticks, 0, 1),
make_otn_state(0_ticks, 0_ticks, 0, 0)
};
const auto& sorter = rule_stats::sorters[Sort::SORT_MATCHES];
std::partial_sort(entries.begin(), entries.end(), entries.end(), sorter);
CHECK( entries == expected );
}
SECTION( "no matches" )
{
RuleEntryVector entries {
make_rule_entry(0_ticks, 0_ticks, 4, 3),
make_rule_entry(0_ticks, 0_ticks, 3, 1),
make_rule_entry(0_ticks, 0_ticks, 4, 1)
};
RuleStatsVector expected {
make_otn_state(0_ticks, 0_ticks, 4, 1),
make_otn_state(0_ticks, 0_ticks, 3, 1),
make_otn_state(0_ticks, 0_ticks, 4, 3)
};
const auto& sorter = rule_stats::sorters[Sort::SORT_NO_MATCHES];
std::partial_sort(entries.begin(), entries.end(), entries.end(), sorter);
CHECK( entries == expected );
}
SECTION( "avg match" )
{
RuleEntryVector entries {
make_rule_entry(4_ticks, 0_ticks, 0, 2),
make_rule_entry(6_ticks, 0_ticks, 0, 2),
make_rule_entry(8_ticks, 0_ticks, 0, 2)
};
RuleStatsVector expected {
make_otn_state(8_ticks, 0_ticks, 0, 2),
make_otn_state(6_ticks, 0_ticks, 0, 2),
make_otn_state(4_ticks, 0_ticks, 0, 2)
};
const auto& sorter = rule_stats::sorters[Sort::SORT_AVG_MATCH];
std::partial_sort(entries.begin(), entries.end(), entries.end(), sorter);
CHECK( entries == expected );
}
SECTION( "avg no match" )
{
RuleEntryVector entries {
make_rule_entry(4_ticks, 0_ticks, 6, 2),
make_rule_entry(6_ticks, 0_ticks, 5, 2),
make_rule_entry(8_ticks, 0_ticks, 2, 0)
};
RuleStatsVector expected {
make_otn_state(8_ticks, 0_ticks, 2, 0),
make_otn_state(6_ticks, 0_ticks, 5, 2),
make_otn_state(4_ticks, 0_ticks, 6, 2)
};
const auto& sorter = rule_stats::sorters[Sort::SORT_AVG_NO_MATCH];
std::partial_sort(entries.begin(), entries.end(), entries.end(), sorter);
CHECK( entries == expected );
}
}
TEST_CASE( "rule profiler time context", "[profiler][rule_profiler]" )
{
dot_node_state_t stats;
stats.elapsed = 0_ticks;
stats.checks = 0;
stats.elapsed_match = 0_ticks;
SECTION( "automatically updates stats" )
{
{
RuleContext ctx(stats);
avoid_optimization();
}
INFO( "elapsed: " << stats.elapsed.count() );
CHECK( stats.elapsed > 0_ticks );
CHECK( stats.checks == 1 );
INFO( "elapsed_match: " << stats.elapsed_match.count() );
CHECK( stats.elapsed_match == 0_ticks );
}
SECTION( "explicitly calling stop" )
{
dot_node_state_t save;
SECTION( "stop(true)" )
{
{
RuleContext ctx(stats);
avoid_optimization();
ctx.stop(true);
INFO( "elapsed: " << stats.elapsed.count() );
CHECK( stats.elapsed > 0_ticks );
CHECK( stats.checks == 1 );
CHECK( stats.elapsed_match == stats.elapsed );
save = stats;
}
}
SECTION( "stop(false)" )
{
{
RuleContext ctx(stats);
avoid_optimization();
ctx.stop(false);
INFO( "elapsed: " << stats.elapsed.count() );
CHECK( stats.elapsed > 0_ticks );
CHECK( stats.checks == 1 );
CHECK( stats.elapsed_match == 0_ticks );
save = stats;
}
}
INFO( "elapsed: " << stats.elapsed.count() );
CHECK( stats.elapsed == save.elapsed );
CHECK( stats.elapsed_match == save.elapsed_match );
CHECK( stats.checks == save.checks );
}
}
#endif<๏ฝfimโend๏ฝ> | |
<|file_name|>test_whoosh_backend.py<|end_file_name|><๏ฝfimโbegin๏ฝ># encoding: utf-8
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import unittest
from datetime import timedelta
from decimal import Decimal
from django.conf import settings
from django.test import TestCase
from django.test.utils import override_settings
from django.utils.datetime_safe import date, datetime
from whoosh.fields import BOOLEAN, DATETIME, KEYWORD, NUMERIC, TEXT
from whoosh.qparser import QueryParser
from haystack import connections, indexes, reset_search_queries
from haystack.exceptions import SkipDocument, SearchBackendError
from haystack.inputs import AutoQuery
from haystack.models import SearchResult
from haystack.query import SearchQuerySet, SQ
from haystack.utils.loading import UnifiedIndex
from ..core.models import AFourthMockModel, AnotherMockModel, MockModel
from ..mocks import MockSearchResult
from .testcases import WhooshTestCase
class WhooshMockSearchIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
name = indexes.CharField(model_attr='author')
pub_date = indexes.DateTimeField(model_attr='pub_date')
def get_model(self):
return MockModel
class WhooshMockSearchIndexWithSkipDocument(WhooshMockSearchIndex):
def prepare_text(self, obj):
if obj.author == 'daniel3':
raise SkipDocument
return obj.author
class WhooshAnotherMockSearchIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True)
name = indexes.CharField(model_attr='author')
pub_date = indexes.DateTimeField(model_attr='pub_date')
def get_model(self):
return AnotherMockModel
def prepare_text(self, obj):
return obj.author
class AllTypesWhooshMockSearchIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
name = indexes.CharField(model_attr='author', indexed=False)
pub_date = indexes.DateTimeField(model_attr='pub_date')
sites = indexes.MultiValueField()
seen_count = indexes.IntegerField(indexed=False)
is_active = indexes.BooleanField(default=True)
def get_model(self):
return MockModel
class WhooshMaintainTypeMockSearchIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True)
month = indexes.CharField(indexed=False)
pub_date = indexes.DateTimeField(model_attr='pub_date')
def get_model(self):
return MockModel
def prepare_text(self, obj):
return "Indexed!\n%s" % obj.pk
def prepare_month(self, obj):
return "%02d" % obj.pub_date.month
class WhooshBoostMockSearchIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(
document=True, use_template=True,
template_name='search/indexes/core/mockmodel_template.txt'
)
author = indexes.CharField(model_attr='author', weight=2.0)
editor = indexes.CharField(model_attr='editor')
pub_date = indexes.DateTimeField(model_attr='pub_date')
def get_model(self):
return AFourthMockModel
def prepare(self, obj):
data = super(WhooshBoostMockSearchIndex, self).prepare(obj)
if obj.pk % 2 == 0:
data['boost'] = 2.0
return data
class WhooshAutocompleteMockModelSearchIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(model_attr='foo', document=True)
name = indexes.CharField(model_attr='author')
pub_date = indexes.DateTimeField(model_attr='pub_date')
text_auto = indexes.EdgeNgramField(model_attr='foo')
name_auto = indexes.EdgeNgramField(model_attr='author')
def get_model(self):
return MockModel
class WhooshSearchBackendTestCase(WhooshTestCase):
fixtures = ['bulk_data.json']
def setUp(self):
super(WhooshSearchBackendTestCase, self).setUp()
self.old_ui = connections['whoosh'].get_unified_index()
self.ui = UnifiedIndex()
self.wmmi = WhooshMockSearchIndex()
self.wmmidni = WhooshMockSearchIndexWithSkipDocument()
self.wmtmmi = WhooshMaintainTypeMockSearchIndex()
self.ui.build(indexes=[self.wmmi])
self.sb = connections['whoosh'].get_backend()
connections['whoosh']._index = self.ui
self.sb.setup()
self.raw_whoosh = self.sb.index
self.parser = QueryParser(self.sb.content_field_name, schema=self.sb.schema)
self.sb.delete_index()
self.sample_objs = MockModel.objects.all()
def tearDown(self):
connections['whoosh']._index = self.old_ui
super(WhooshSearchBackendTestCase, self).tearDown()
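# Query the raw Whoosh index directly, bypassing haystack's query layer,
# so tests can check what was actually written to the index.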
def whoosh_search(self, query):
self.raw_whoosh = self.raw_whoosh.refresh()
searcher = self.raw_whoosh.searcher()
return searcher.search(self.parser.parse(query), limit=1000)
def test_non_silent(self):
bad_sb = connections['whoosh'].backend('bad', PATH='/tmp/bad_whoosh', SILENTLY_FAIL=False)
bad_sb.use_file_storage = False
bad_sb.storage = 'omg.wtf.bbq'
# Each of these operations should raise against the broken storage.
self.assertRaises(Exception, bad_sb.update, self.wmmi, self.sample_objs)
self.assertRaises(Exception, bad_sb.remove, 'core.mockmodel.1')
self.assertRaises(Exception, bad_sb.clear)
self.assertRaises(Exception, bad_sb.search, 'foo')
def test_update(self):
self.sb.update(self.wmmi, self.sample_objs)
# Check what Whoosh thinks is there.
self.assertEqual(len(self.whoosh_search(u'*')), 23)
self.assertEqual([doc.fields()['id'] for doc in self.whoosh_search(u'*')], [u'core.mockmodel.%s' % i for i in range(1, 24)])
def test_update_with_SkipDocument_raised(self):
self.sb.update(self.wmmidni, self.sample_objs)
# Check what Whoosh thinks is there.
res = self.whoosh_search(u'*')
self.assertEqual(len(res), 14)
ids = [1, 2, 5, 6, 7, 8, 9, 11, 12, 14, 15, 18, 20, 21]
self.assertListEqual(
[doc.fields()['id'] for doc in res],
[u'core.mockmodel.%s' % i for i in ids]
)
def test_remove(self):
self.sb.update(self.wmmi, self.sample_objs)
self.assertEqual(self.sb.index.doc_count(), 23)
self.sb.remove(self.sample_objs[0])
self.assertEqual(self.sb.index.doc_count(), 22)
def test_clear(self):
self.sb.update(self.wmmi, self.sample_objs)
self.assertEqual(self.sb.index.doc_count(), 23)
self.sb.clear()
self.assertEqual(self.sb.index.doc_count(), 0)
self.sb.update(self.wmmi, self.sample_objs)
self.assertEqual(self.sb.index.doc_count(), 23)
self.sb.clear([AnotherMockModel])
self.assertEqual(self.sb.index.doc_count(), 23)
self.sb.clear([MockModel])
self.assertEqual(self.sb.index.doc_count(), 0)
self.sb.index.refresh()
self.sb.update(self.wmmi, self.sample_objs)
self.assertEqual(self.sb.index.doc_count(), 23)
self.sb.clear([AnotherMockModel, MockModel])
self.assertEqual(self.raw_whoosh.doc_count(), 0)
def test_search(self):
self.sb.update(self.wmmi, self.sample_objs)
self.assertEqual(len(self.whoosh_search(u'*')), 23)
# An empty query string should always yield zero results.
self.assertEqual(self.sb.search(u''), {'hits': 0, 'results': []})
# A one letter query string gets nabbed by a stopwords filter. Should
# always yield zero results.
self.assertEqual(self.sb.search(u'a'), {'hits': 0, 'results': []})
# Possible AttributeError?
# self.assertEqual(self.sb.search(u'a b'), {'hits': 0, 'results': [], 'spelling_suggestion': '', 'facets': {}})
self.assertEqual(self.sb.search(u'*')['hits'], 23)
self.assertEqual([result.pk for result in self.sb.search(u'*')['results']], [u'%s' % i for i in range(1, 24)])
self.assertEqual(self.sb.search(u'Indexe')['hits'], 23)
self.assertEqual(self.sb.search(u'Indexe')['spelling_suggestion'], u'indexed')
self.assertEqual(self.sb.search(u'', facets=['name']), {'hits': 0, 'results': []})
results = self.sb.search(u'Index*', facets=['name'])
results = self.sb.search(u'index*', facets=['name'])
self.assertEqual(results['hits'], 23)
self.assertEqual(results['facets'], {})
self.assertEqual(self.sb.search(u'', date_facets={'pub_date': {'start_date': date(2008, 2, 26), 'end_date': date(2008, 2, 26), 'gap': '/MONTH'}}), {'hits': 0, 'results': []})
results = self.sb.search(u'Index*', date_facets={'pub_date': {'start_date': date(2008, 2, 26), 'end_date': date(2008, 2, 26), 'gap': '/MONTH'}})
results = self.sb.search(u'index*', date_facets={'pub_date': {'start_date': date(2008, 2, 26), 'end_date': date(2008, 2, 26), 'gap': '/MONTH'}})
self.assertEqual(results['hits'], 23)
self.assertEqual(results['facets'], {})
self.assertEqual(self.sb.search(u'', query_facets={'name': '[* TO e]'}), {'hits': 0, 'results': []})
results = self.sb.search(u'Index*', query_facets={'name': '[* TO e]'})
results = self.sb.search(u'index*', query_facets={'name': '[* TO e]'})
self.assertEqual(results['hits'], 23)
self.assertEqual(results['facets'], {})
# self.assertEqual(self.sb.search('', narrow_queries=set(['name:daniel1'])), {'hits': 0, 'results': []})
# results = self.sb.search('Index*', narrow_queries=set(['name:daniel1']))
# self.assertEqual(results['hits'], 1)
# Ensure that swapping the ``result_class`` works.
self.assertTrue(isinstance(self.sb.search(u'Index*', result_class=MockSearchResult)['results'][0], MockSearchResult))
# Check the use of ``limit_to_registered_models``.
self.assertEqual(self.sb.search(u'', limit_to_registered_models=False), {'hits': 0, 'results': []})
self.assertEqual(self.sb.search(u'*', limit_to_registered_models=False)['hits'], 23)
self.assertEqual([result.pk for result in self.sb.search(u'*', limit_to_registered_models=False)['results']], [u'%s' % i for i in range(1, 24)])
# Stow.
old_limit_to_registered_models = getattr(settings, 'HAYSTACK_LIMIT_TO_REGISTERED_MODELS', True)
settings.HAYSTACK_LIMIT_TO_REGISTERED_MODELS = False
self.assertEqual(self.sb.search(u''), {'hits': 0, 'results': []})
self.assertEqual(self.sb.search(u'*')['hits'], 23)
self.assertEqual([result.pk for result in self.sb.search(u'*')['results']], [u'%s' % i for i in range(1, 24)])
# Restore.
settings.HAYSTACK_LIMIT_TO_REGISTERED_MODELS = old_limit_to_registered_models
def test_highlight(self):
self.sb.update(self.wmmi, self.sample_objs)
self.assertEqual(len(self.whoosh_search(u'*')), 23)
self.assertEqual(self.sb.search(u'', highlight=True), {'hits': 0, 'results': []})
self.assertEqual(self.sb.search(u'index*', highlight=True)['hits'], 23)
query = self.sb.search('Index*', highlight=True)['results']
result = [result.highlighted['text'][0] for result in query]
self.assertEqual(result, ['<em>Indexed</em>!\n%d' % i for i in range(1, 24)])
def test_search_all_models(self):
wamsi = WhooshAnotherMockSearchIndex()
self.ui.build(indexes=[self.wmmi, wamsi])
self.sb.update(self.wmmi, self.sample_objs)
self.sb.update(wamsi, AnotherMockModel.objects.all())
self.assertEqual(len(self.whoosh_search(u'*')), 25)
self.ui.build(indexes=[self.wmmi])
def test_more_like_this(self):
self.sb.update(self.wmmi, self.sample_objs)
self.assertEqual(len(self.whoosh_search(u'*')), 23)
# Now supported by Whoosh (as of 1.8.4). See the ``LiveWhooshMoreLikeThisTestCase``.
self.assertEqual(self.sb.more_like_this(self.sample_objs[0])['hits'], 22)
# Make sure that swapping the ``result_class`` doesn't blow up.
try:
self.sb.more_like_this(self.sample_objs[0], result_class=MockSearchResult)
except:
self.fail()
def test_delete_index(self):
self.sb.update(self.wmmi, self.sample_objs)
self.assertTrue(self.sb.index.doc_count() > 0)
self.sb.delete_index()
self.assertEqual(self.sb.index.doc_count(), 0)
def test_order_by(self):
self.sb.update(self.wmmi, self.sample_objs)
results = self.sb.search(u'*', sort_by=['pub_date'])
self.assertEqual([result.pk for result in results['results']], [u'1', u'3', u'2', u'4', u'5', u'6', u'7', u'8', u'9', u'10', u'11', u'12', u'13', u'14', u'15', u'16', u'17', u'18', u'19', u'20', u'21', u'22', u'23'])
results = self.sb.search(u'*', sort_by=['-pub_date'])
self.assertEqual([result.pk for result in results['results']], [u'23', u'22', u'21', u'20', u'19', u'18', u'17', u'16', u'15', u'14', u'13', u'12', u'11', u'10', u'9', u'8', u'7', u'6', u'5', u'4', u'2', u'3', u'1'])
results = self.sb.search(u'*', sort_by=['id'])
self.assertEqual([result.pk for result in results['results']], [u'1', u'10', u'11', u'12', u'13', u'14', u'15', u'16', u'17', u'18', u'19', u'2', u'20', u'21', u'22', u'23', u'3', u'4', u'5', u'6', u'7', u'8', u'9'])
results = self.sb.search(u'*', sort_by=['-id'])
self.assertEqual([result.pk for result in results['results']], [u'9', u'8', u'7', u'6', u'5', u'4', u'3', u'23', u'22', u'21', u'20', u'2', u'19', u'18', u'17', u'16', u'15', u'14', u'13', u'12', u'11', u'10', u'1'])
results = self.sb.search(u'*', sort_by=['-pub_date', '-id'])
self.assertEqual([result.pk for result in results['results']],
[u'23', u'22', u'21', u'20', u'19', u'18', u'17', u'16', u'15', u'14', u'13', u'12',
u'11', u'10', u'9', u'8', u'7', u'6', u'5', u'4', u'2', u'3', u'1' ])
self.assertRaises(SearchBackendError, self.sb.search, u'*', sort_by=['-pub_date', 'id'])
def test__from_python(self):
self.assertEqual(self.sb._from_python('abc'), u'abc')
self.assertEqual(self.sb._from_python(1), 1)
self.assertEqual(self.sb._from_python(2653), 2653)
self.assertEqual(self.sb._from_python(25.5), 25.5)
self.assertEqual(self.sb._from_python([1, 2, 3]), u'1,2,3')
self.assertTrue("a': 1" in self.sb._from_python({'a': 1, 'c': 3, 'b': 2}))
self.assertEqual(self.sb._from_python(datetime(2009, 5, 9, 16, 14)), datetime(2009, 5, 9, 16, 14))
self.assertEqual(self.sb._from_python(datetime(2009, 5, 9, 0, 0)), datetime(2009, 5, 9, 0, 0))
self.assertEqual(self.sb._from_python(datetime(1899, 5, 18, 0, 0)), datetime(1899, 5, 18, 0, 0))
self.assertEqual(self.sb._from_python(datetime(2009, 5, 18, 1, 16, 30, 250)), datetime(2009, 5, 18, 1, 16, 30, 250))
def test__to_python(self):
self.assertEqual(self.sb._to_python('abc'), 'abc')
self.assertEqual(self.sb._to_python('1'), 1)
self.assertEqual(self.sb._to_python('2653'), 2653)
self.assertEqual(self.sb._to_python('25.5'), 25.5)
self.assertEqual(self.sb._to_python('[1, 2, 3]'), [1, 2, 3])
self.assertEqual(self.sb._to_python('{"a": 1, "b": 2, "c": 3}'), {'a': 1, 'c': 3, 'b': 2})
self.assertEqual(self.sb._to_python('2009-05-09T16:14:00'), datetime(2009, 5, 9, 16, 14))
self.assertEqual(self.sb._to_python('2009-05-09T00:00:00'), datetime(2009, 5, 9, 0, 0))
self.assertEqual(self.sb._to_python(None), None)
def test_range_queries(self):
self.sb.update(self.wmmi, self.sample_objs)
self.assertEqual(len(self.whoosh_search(u'[d TO]')), 23)
self.assertEqual(len(self.whoosh_search(u'name:[d TO]')), 23)
self.assertEqual(len(self.whoosh_search(u'Ind* AND name:[d to]')), 23)
self.assertEqual(len(self.whoosh_search(u'Ind* AND name:[to c]')), 0)
def test_date_queries(self):
self.sb.update(self.wmmi, self.sample_objs)
self.assertEqual(len(self.whoosh_search(u"pub_date:20090717003000")), 1)
self.assertEqual(len(self.whoosh_search(u"pub_date:20090717000000")), 0)
self.assertEqual(len(self.whoosh_search(u'Ind* AND pub_date:[to 20090717003000]')), 3)
def test_escaped_characters_queries(self):
self.sb.update(self.wmmi, self.sample_objs)
self.assertEqual(len(self.whoosh_search(u"Indexed\!")), 23)
self.assertEqual(len(self.whoosh_search(u"http\:\/\/www\.example\.com")), 0)
def test_build_schema(self):
ui = UnifiedIndex()
ui.build(indexes=[AllTypesWhooshMockSearchIndex()])
(content_field_name, schema) = self.sb.build_schema(ui.all_searchfields())
self.assertEqual(content_field_name, 'text')
schema_names = set(schema.names())
required_schema = {'django_ct', 'django_id', 'id', 'is_active', 'name', 'pub_date', 'seen_count',
'sites', 'text'}
self.assertTrue(required_schema.issubset(schema_names))
self.assertIsInstance(schema._fields['text'], TEXT)
self.assertIsInstance(schema._fields['pub_date'], DATETIME)
self.assertIsInstance(schema._fields['seen_count'], NUMERIC)
self.assertIsInstance(schema._fields['sites'], KEYWORD)
self.assertIsInstance(schema._fields['is_active'], BOOLEAN)
def test_verify_type(self):
old_ui = connections['whoosh'].get_unified_index()
ui = UnifiedIndex()
wmtmmi = WhooshMaintainTypeMockSearchIndex()
ui.build(indexes=[wmtmmi])
connections['whoosh']._index = ui
sb = connections['whoosh'].get_backend()
sb.setup()
sb.update(wmtmmi, self.sample_objs)
self.assertEqual(sb.search(u'*')['hits'], 23)
self.assertEqual([result.month for result in sb.search(u'*')['results']], [u'06', u'07', u'06', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07'])
connections['whoosh']._index = old_ui
@unittest.skipIf(settings.HAYSTACK_CONNECTIONS['whoosh'].get('STORAGE') != 'file',
'testing writability requires Whoosh to use STORAGE=file')
def test_writable(self):
if not os.path.exists(settings.HAYSTACK_CONNECTIONS['whoosh']['PATH']):
os.makedirs(settings.HAYSTACK_CONNECTIONS['whoosh']['PATH'])
os.chmod(settings.HAYSTACK_CONNECTIONS['whoosh']['PATH'], 0o400)
try:
self.sb.setup()
self.fail()
except IOError:
# Yay. We failed
pass
os.chmod(settings.HAYSTACK_CONNECTIONS['whoosh']['PATH'], 0o755)
def test_slicing(self):
self.sb.update(self.wmmi, self.sample_objs)
page_1 = self.sb.search(u'*', start_offset=0, end_offset=20)
page_2 = self.sb.search(u'*', start_offset=20, end_offset=30)
self.assertEqual(len(page_1['results']), 20)
self.assertEqual([result.pk for result in page_1['results']], [u'%s' % i for i in range(1, 21)])
self.assertEqual(len(page_2['results']), 3)
self.assertEqual([result.pk for result in page_2['results']], [u'21', u'22', u'23'])
# This used to throw an error.
page_0 = self.sb.search(u'*', start_offset=0, end_offset=0)
self.assertEqual(len(page_0['results']), 1)
@unittest.expectedFailure
def test_scoring(self):
self.sb.update(self.wmmi, self.sample_objs)
page_1 = self.sb.search(u'index', start_offset=0, end_offset=20)
page_2 = self.sb.search(u'index', start_offset=20, end_offset=30)
self.assertEqual(len(page_1['results']), 20)
self.assertEqual(["%0.2f" % result.score for result in page_1['results']], ['0.51', '0.51', '0.51', '0.51', '0.51', '0.51', '0.51', '0.51', '0.51', '0.40', '0.40', '0.40', '0.40', '0.40', '0.40', '0.40', '0.40', '0.40', '0.40', '0.40'])
self.assertEqual(len(page_2['results']), 3)
self.assertEqual(["%0.2f" % result.score for result in page_2['results']], ['0.40', '0.40', '0.40'])
class WhooshBoostBackendTestCase(WhooshTestCase):
def setUp(self):
super(WhooshBoostBackendTestCase, self).setUp()
self.old_ui = connections['whoosh'].get_unified_index()
self.ui = UnifiedIndex()
self.wmmi = WhooshBoostMockSearchIndex()
self.ui.build(indexes=[self.wmmi])
self.sb = connections['whoosh'].get_backend()
connections['whoosh']._index = self.ui
self.sb.setup()
self.raw_whoosh = self.sb.index
self.parser = QueryParser(self.sb.content_field_name, schema=self.sb.schema)
self.sb.delete_index()
self.sample_objs = []
for i in range(1, 5):
mock = AFourthMockModel()
mock.id = i
if i % 2:
mock.author = 'daniel'
mock.editor = 'david'
else:
mock.author = 'david'
mock.editor = 'daniel'
mock.pub_date = date(2009, 2, 25) - timedelta(days=i)
self.sample_objs.append(mock)
def tearDown(self):
connections['whoosh']._index = self.ui
super(WhooshBoostBackendTestCase, self).tearDown()
@unittest.expectedFailure
def test_boost(self):
self.sb.update(self.wmmi, self.sample_objs)
self.raw_whoosh = self.raw_whoosh.refresh()
searcher = self.raw_whoosh.searcher()
self.assertEqual(len(searcher.search(self.parser.parse(u'*'), limit=1000)), 2)
results = SearchQuerySet('whoosh').filter(SQ(author='daniel') | SQ(editor='daniel'))
self.assertEqual([result.id for result in results], [
'core.afourthmockmodel.1',
'core.afourthmockmodel.3',
])
self.assertEqual(results[0].boost, 1.1)
class LiveWhooshSearchQueryTestCase(WhooshTestCase):
def setUp(self):
super(LiveWhooshSearchQueryTestCase, self).setUp()
# Stow.
self.old_ui = connections['whoosh'].get_unified_index()
self.ui = UnifiedIndex()
self.wmmi = WhooshMockSearchIndex()
self.wmtmmi = WhooshMaintainTypeMockSearchIndex()
self.ui.build(indexes=[self.wmmi])
self.sb = connections['whoosh'].get_backend()
connections['whoosh']._index = self.ui
self.sb.setup()
self.raw_whoosh = self.sb.index
self.parser = QueryParser(self.sb.content_field_name, schema=self.sb.schema)
self.sb.delete_index()
self.sample_objs = []
for i in range(1, 4):
mock = MockModel()
mock.id = i
mock.author = 'daniel%s' % i
mock.pub_date = date(2009, 2, 25) - timedelta(days=i)
self.sample_objs.append(mock)
self.sq = connections['whoosh'].get_query()
def tearDown(self):
connections['whoosh']._index = self.old_ui
super(LiveWhooshSearchQueryTestCase, self).tearDown()
def test_get_spelling(self):
self.sb.update(self.wmmi, self.sample_objs)
self.sq.add_filter(SQ(content='Indexe'))
self.assertEqual(self.sq.get_spelling_suggestion(), u'indexed')
def test_log_query(self):
from django.conf import settings
reset_search_queries()
self.assertEqual(len(connections['whoosh'].queries), 0)
# Stow.
with self.settings(DEBUG=False):
len(self.sq.get_results())
self.assertEqual(len(connections['whoosh'].queries), 0)
with self.settings(DEBUG=True):
# Redefine it to clear out the cached results.
self.sq = connections['whoosh'].get_query()
self.sq.add_filter(SQ(name='bar'))
len(self.sq.get_results())
self.assertEqual(len(connections['whoosh'].queries), 1)
self.assertEqual(connections['whoosh'].queries[0]['query_string'], 'name:(bar)')
# And again, for good measure.
self.sq = connections['whoosh'].get_query()
self.sq.add_filter(SQ(name='baz'))
self.sq.add_filter(SQ(text='foo'))
len(self.sq.get_results())
self.assertEqual(len(connections['whoosh'].queries), 2)
self.assertEqual(connections['whoosh'].queries[0]['query_string'], 'name:(bar)')
self.assertEqual(connections['whoosh'].queries[1]['query_string'], u'(name:(baz) AND text:(foo))')
@override_settings(DEBUG=True)
class LiveWhooshSearchQuerySetTestCase(WhooshTestCase):
def setUp(self):
super(LiveWhooshSearchQuerySetTestCase, self).setUp()
# Stow.
self.old_ui = connections['whoosh'].get_unified_index()
self.ui = UnifiedIndex()
self.wmmi = WhooshMockSearchIndex()
self.ui.build(indexes=[self.wmmi])
self.sb = connections['whoosh'].get_backend()
connections['whoosh']._index = self.ui
self.sb.setup()<๏ฝfimโhole๏ฝ>
self.sample_objs = []
for i in range(1, 4):
mock = MockModel()
mock.id = i
mock.author = 'daniel%s' % i
mock.pub_date = date(2009, 2, 25) - timedelta(days=i)
self.sample_objs.append(mock)
self.sq = connections['whoosh'].get_query()
self.sqs = SearchQuerySet('whoosh')
def tearDown(self):
connections['whoosh']._index = self.old_ui
super(LiveWhooshSearchQuerySetTestCase, self).tearDown()
def test_various_searchquerysets(self):
self.sb.update(self.wmmi, self.sample_objs)
sqs = self.sqs.filter(content='Index')
self.assertEqual(sqs.query.build_query(), u'(Index)')
self.assertEqual(len(sqs), 3)
sqs = self.sqs.auto_query('Indexed!')
self.assertEqual(sqs.query.build_query(), u"('Indexed!')")
self.assertEqual(len(sqs), 3)
sqs = self.sqs.auto_query('Indexed!').filter(pub_date__lte=date(2009, 8, 31))
self.assertEqual(sqs.query.build_query(), u"(('Indexed!') AND pub_date:([to 20090831000000]))")
self.assertEqual(len(sqs), 3)
sqs = self.sqs.auto_query('Indexed!').filter(pub_date__lte=date(2009, 2, 23))
self.assertEqual(sqs.query.build_query(), u"(('Indexed!') AND pub_date:([to 20090223000000]))")
self.assertEqual(len(sqs), 2)
sqs = self.sqs.auto_query('Indexed!').filter(pub_date__lte=date(2009, 2, 25)).filter(django_id__in=[1, 2]).exclude(name='daniel1')
self.assertEqual(sqs.query.build_query(), u'((\'Indexed!\') AND pub_date:([to 20090225000000]) AND django_id:(1 OR 2) AND NOT (name:(daniel1)))')
self.assertEqual(len(sqs), 1)
sqs = self.sqs.auto_query('re-inker')
self.assertEqual(sqs.query.build_query(), u"('re-inker')")
self.assertEqual(len(sqs), 0)
sqs = self.sqs.auto_query('0.7 wire')
self.assertEqual(sqs.query.build_query(), u"('0.7' wire)")
self.assertEqual(len(sqs), 0)
sqs = self.sqs.auto_query("daler-rowney pearlescent 'bell bronze'")
self.assertEqual(sqs.query.build_query(), u"('daler-rowney' pearlescent 'bell bronze')")
self.assertEqual(len(sqs), 0)
sqs = self.sqs.models(MockModel)
self.assertEqual(sqs.query.build_query(), u'*')
self.assertEqual(len(sqs), 3)
def test_all_regression(self):
sqs = SearchQuerySet('whoosh')
self.assertEqual([result.pk for result in sqs], [])
self.sb.update(self.wmmi, self.sample_objs)
self.assertTrue(self.sb.index.doc_count() > 0)
sqs = SearchQuerySet('whoosh')
self.assertEqual(len(sqs), 3)
self.assertEqual(sorted([result.pk for result in sqs]), [u'1', u'2', u'3'])
try:
sqs = repr(SearchQuerySet('whoosh'))
except:
self.fail()
def test_regression_space_query(self):
self.sb.update(self.wmmi, self.sample_objs)
self.assertTrue(self.sb.index.doc_count() > 0)
sqs = SearchQuerySet('whoosh').auto_query(" ")
self.assertEqual(len(sqs), 3)
sqs = SearchQuerySet('whoosh').filter(content=" ")
self.assertEqual(len(sqs), 0)
def test_iter(self):
self.sb.update(self.wmmi, self.sample_objs)
reset_search_queries()
self.assertEqual(len(connections['whoosh'].queries), 0)
sqs = self.sqs.auto_query('Indexed!')
results = [int(result.pk) for result in iter(sqs)]
self.assertEqual(sorted(results), [1, 2, 3])
self.assertEqual(len(connections['whoosh'].queries), 1)
def test_slice(self):
self.sb.update(self.wmmi, self.sample_objs)
reset_search_queries()
self.assertEqual(len(connections['whoosh'].queries), 0)
results = self.sqs.auto_query('Indexed!')
self.assertEqual(sorted([int(result.pk) for result in results[1:3]]), [1, 2])
self.assertEqual(len(connections['whoosh'].queries), 1)
reset_search_queries()
self.assertEqual(len(connections['whoosh'].queries), 0)
results = self.sqs.auto_query('Indexed!')
self.assertEqual(int(results[0].pk), 1)
self.assertEqual(len(connections['whoosh'].queries), 1)
def test_values_slicing(self):
self.sb.update(self.wmmi, self.sample_objs)
reset_search_queries()
self.assertEqual(len(connections['whoosh'].queries), 0)
# TODO: this would be a good candidate for refactoring into a TestCase subclass shared across backends
# The values will come back as strings because Haystack doesn't assume PKs are integers.
# We'll prepare this set once since we're going to query the same results in multiple ways:
expected_pks = ['3', '2', '1']
results = self.sqs.all().order_by('pub_date').values('pk')
self.assertListEqual([i['pk'] for i in results[1:11]], expected_pks)
results = self.sqs.all().order_by('pub_date').values_list('pk')
self.assertListEqual([i[0] for i in results[1:11]], expected_pks)
results = self.sqs.all().order_by('pub_date').values_list('pk', flat=True)
self.assertListEqual(results[1:11], expected_pks)
self.assertEqual(len(connections['whoosh'].queries), 3)
def test_manual_iter(self):
self.sb.update(self.wmmi, self.sample_objs)
results = self.sqs.auto_query('Indexed!')
reset_search_queries()
self.assertEqual(len(connections['whoosh'].queries), 0)
results = [int(result.pk) for result in results._manual_iter()]
self.assertEqual(sorted(results), [1, 2, 3])
self.assertEqual(len(connections['whoosh'].queries), 1)
def test_fill_cache(self):
self.sb.update(self.wmmi, self.sample_objs)
reset_search_queries()
self.assertEqual(len(connections['whoosh'].queries), 0)
results = self.sqs.auto_query('Indexed!')
self.assertEqual(len(results._result_cache), 0)
self.assertEqual(len(connections['whoosh'].queries), 0)
results._fill_cache(0, 10)
self.assertEqual(len([result for result in results._result_cache if result is not None]), 3)
self.assertEqual(len(connections['whoosh'].queries), 1)
results._fill_cache(10, 20)
self.assertEqual(len([result for result in results._result_cache if result is not None]), 3)
self.assertEqual(len(connections['whoosh'].queries), 2)
def test_cache_is_full(self):
self.sb.update(self.wmmi, self.sample_objs)
reset_search_queries()
self.assertEqual(len(connections['whoosh'].queries), 0)
self.assertEqual(self.sqs._cache_is_full(), False)
results = self.sqs.auto_query('Indexed!')
result_list = [i for i in iter(results)]
self.assertEqual(results._cache_is_full(), True)
self.assertEqual(len(connections['whoosh'].queries), 1)
def test_count(self):
more_samples = []
for i in range(1, 50):
mock = MockModel()
mock.id = i
mock.author = 'daniel%s' % i
mock.pub_date = date(2009, 2, 25) - timedelta(days=i)
more_samples.append(mock)
self.sb.update(self.wmmi, more_samples)
reset_search_queries()
self.assertEqual(len(connections['whoosh'].queries), 0)
results = self.sqs.all()
self.assertEqual(len(results), 49)
self.assertEqual(results._cache_is_full(), False)
self.assertEqual(len(connections['whoosh'].queries), 1)
def test_query_generation(self):
sqs = self.sqs.filter(SQ(content=AutoQuery("hello world")) | SQ(title=AutoQuery("hello world")))
self.assertEqual(sqs.query.build_query(), u"((hello world) OR title:(hello world))")
def test_result_class(self):
self.sb.update(self.wmmi, self.sample_objs)
# Assert that we're defaulting to ``SearchResult``.
sqs = self.sqs.all()
self.assertTrue(isinstance(sqs[0], SearchResult))
# Custom class.
sqs = self.sqs.result_class(MockSearchResult).all()
self.assertTrue(isinstance(sqs[0], MockSearchResult))
# Reset to default.
sqs = self.sqs.result_class(None).all()
self.assertTrue(isinstance(sqs[0], SearchResult))
class LiveWhooshMultiSearchQuerySetTestCase(WhooshTestCase):
fixtures = ['bulk_data.json']
def setUp(self):
super(LiveWhooshMultiSearchQuerySetTestCase, self).setUp()
# Stow.
self.old_ui = connections['whoosh'].get_unified_index()
self.ui = UnifiedIndex()
self.wmmi = WhooshMockSearchIndex()
self.wamsi = WhooshAnotherMockSearchIndex()
self.ui.build(indexes=[self.wmmi, self.wamsi])
self.sb = connections['whoosh'].get_backend()
connections['whoosh']._index = self.ui
self.sb.setup()
self.raw_whoosh = self.sb.index
self.parser = QueryParser(self.sb.content_field_name, schema=self.sb.schema)
self.sb.delete_index()
self.wmmi.update(using='whoosh')
self.wamsi.update(using='whoosh')
self.sqs = SearchQuerySet('whoosh')
def tearDown(self):
connections['whoosh']._index = self.old_ui
super(LiveWhooshMultiSearchQuerySetTestCase, self).tearDown()
def test_searchquerysets_with_models(self):
sqs = self.sqs.all()
self.assertEqual(sqs.query.build_query(), u'*')
self.assertEqual(len(sqs), 25)
sqs = self.sqs.models(MockModel)
self.assertEqual(sqs.query.build_query(), u'*')
self.assertEqual(len(sqs), 23)
sqs = self.sqs.models(AnotherMockModel)
self.assertEqual(sqs.query.build_query(), u'*')
self.assertEqual(len(sqs), 2)
class LiveWhooshMoreLikeThisTestCase(WhooshTestCase):
fixtures = ['bulk_data.json']
def setUp(self):
super(LiveWhooshMoreLikeThisTestCase, self).setUp()
# Stow.
self.old_ui = connections['whoosh'].get_unified_index()
self.ui = UnifiedIndex()
self.wmmi = WhooshMockSearchIndex()
self.wamsi = WhooshAnotherMockSearchIndex()
self.ui.build(indexes=[self.wmmi, self.wamsi])
self.sb = connections['whoosh'].get_backend()
connections['whoosh']._index = self.ui
self.sb.setup()
self.raw_whoosh = self.sb.index
self.parser = QueryParser(self.sb.content_field_name, schema=self.sb.schema)
self.sb.delete_index()
self.wmmi.update()
self.wamsi.update()
self.sqs = SearchQuerySet('whoosh')
def tearDown(self):
connections['whoosh']._index = self.old_ui
super(LiveWhooshMoreLikeThisTestCase, self).tearDown()
# We expect failure here because, despite not changing the code, Whoosh
# 2.5.1 returns incorrect counts/results. Huzzah.
@unittest.expectedFailure
def test_more_like_this(self):
mlt = self.sqs.more_like_this(MockModel.objects.get(pk=22))
self.assertEqual(mlt.count(), 22)
self.assertEqual(sorted([result.pk for result in mlt]), sorted([u'9', u'8', u'7', u'6', u'5', u'4', u'3', u'2', u'1', u'21', u'20', u'19', u'18', u'17', u'16', u'15', u'14', u'13', u'12', u'11', u'10', u'23']))
self.assertEqual(len([result.pk for result in mlt]), 22)
alt_mlt = self.sqs.filter(name='daniel3').more_like_this(MockModel.objects.get(pk=13))
self.assertEqual(alt_mlt.count(), 8)
self.assertEqual(sorted([result.pk for result in alt_mlt]), sorted([u'4', u'3', u'22', u'19', u'17', u'16', u'10', u'23']))
self.assertEqual(len([result.pk for result in alt_mlt]), 8)
alt_mlt_with_models = self.sqs.models(MockModel).more_like_this(MockModel.objects.get(pk=11))
self.assertEqual(alt_mlt_with_models.count(), 22)
self.assertEqual(sorted([result.pk for result in alt_mlt_with_models]), sorted([u'9', u'8', u'7', u'6', u'5', u'4', u'3', u'2', u'1', u'22', u'21', u'20', u'19', u'18', u'17', u'16', u'15', u'14', u'13', u'12', u'10', u'23']))
self.assertEqual(len([result.pk for result in alt_mlt_with_models]), 22)
if hasattr(MockModel.objects, 'defer'):
# Make sure MLT works with deferred bits.
mi = MockModel.objects.defer('foo').get(pk=21)
self.assertEqual(mi._deferred, True)
deferred = self.sqs.models(MockModel).more_like_this(mi)
self.assertEqual(deferred.count(), 0)
self.assertEqual([result.pk for result in deferred], [])
self.assertEqual(len([result.pk for result in deferred]), 0)
# Ensure that swapping the ``result_class`` works.
self.assertTrue(isinstance(self.sqs.result_class(MockSearchResult).more_like_this(MockModel.objects.get(pk=21))[0], MockSearchResult))
@override_settings(DEBUG=True)
class LiveWhooshAutocompleteTestCase(WhooshTestCase):
fixtures = ['bulk_data.json']
def setUp(self):
super(LiveWhooshAutocompleteTestCase, self).setUp()
# Stow.
self.old_ui = connections['whoosh'].get_unified_index()
self.ui = UnifiedIndex()
self.wacsi = WhooshAutocompleteMockModelSearchIndex()
self.ui.build(indexes=[self.wacsi])
self.sb = connections['whoosh'].get_backend()
connections['whoosh']._index = self.ui
# Stow.
import haystack
self.sb.setup()
self.sqs = SearchQuerySet('whoosh')
# Wipe it clean.
self.sqs.query.backend.clear()
self.wacsi.update(using='whoosh')
def tearDown(self):
connections['whoosh']._index = self.old_ui
super(LiveWhooshAutocompleteTestCase, self).tearDown()
def test_autocomplete(self):
autocomplete = self.sqs.autocomplete(text_auto='mod')
self.assertEqual(autocomplete.count(), 5)
self.assertEqual([result.pk for result in autocomplete], [u'1', u'12', u'6', u'7', u'14'])
self.assertTrue('mod' in autocomplete[0].text.lower())
self.assertTrue('mod' in autocomplete[1].text.lower())
self.assertTrue('mod' in autocomplete[2].text.lower())
self.assertTrue('mod' in autocomplete[3].text.lower())
self.assertTrue('mod' in autocomplete[4].text.lower())
self.assertEqual(len([result.pk for result in autocomplete]), 5)
def test_edgengram_regression(self):
autocomplete = self.sqs.autocomplete(text_auto='ngm')
self.assertEqual(autocomplete.count(), 0)
def test_extra_whitespace(self):
autocomplete = self.sqs.autocomplete(text_auto='mod ')
self.assertEqual(autocomplete.count(), 5)
class WhooshRoundTripSearchIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, default='')
name = indexes.CharField()
is_active = indexes.BooleanField()
post_count = indexes.IntegerField()
average_rating = indexes.FloatField()
price = indexes.DecimalField()
pub_date = indexes.DateField()
created = indexes.DateTimeField()
tags = indexes.MultiValueField()
sites = indexes.MultiValueField()
# For a regression involving lists with nothing in them.
empty_list = indexes.MultiValueField()
def get_model(self):
return MockModel
def prepare(self, obj):
prepped = super(WhooshRoundTripSearchIndex, self).prepare(obj)
prepped.update({
'text': 'This is some example text.',
'name': 'Mister Pants',
'is_active': True,
'post_count': 25,
'average_rating': 3.6,
'price': Decimal('24.99'),
'pub_date': date(2009, 11, 21),
'created': datetime(2009, 11, 21, 21, 31, 00),
'tags': ['staff', 'outdoor', 'activist', 'scientist'],
'sites': [3, 5, 1],
'empty_list': [],
})
return prepped
@override_settings(DEBUG=True)
class LiveWhooshRoundTripTestCase(WhooshTestCase):
def setUp(self):
super(LiveWhooshRoundTripTestCase, self).setUp()
# Stow.
self.old_ui = connections['whoosh'].get_unified_index()
self.ui = UnifiedIndex()
self.wrtsi = WhooshRoundTripSearchIndex()
self.ui.build(indexes=[self.wrtsi])
self.sb = connections['whoosh'].get_backend()
connections['whoosh']._index = self.ui
self.sb.setup()
self.raw_whoosh = self.sb.index
self.parser = QueryParser(self.sb.content_field_name, schema=self.sb.schema)
self.sb.delete_index()
self.sqs = SearchQuerySet('whoosh')
# Wipe it clean.
self.sqs.query.backend.clear()
# Fake indexing.
mock = MockModel()
mock.id = 1
self.sb.update(self.wrtsi, [mock])
def tearDown(self):
super(LiveWhooshRoundTripTestCase, self).tearDown()
def test_round_trip(self):
results = self.sqs.filter(id='core.mockmodel.1')
# Sanity check.
self.assertEqual(results.count(), 1)
# Check the individual fields.
result = results[0]
self.assertEqual(result.id, 'core.mockmodel.1')
self.assertEqual(result.text, 'This is some example text.')
self.assertEqual(result.name, 'Mister Pants')
self.assertEqual(result.is_active, True)
self.assertEqual(result.post_count, 25)
self.assertEqual(result.average_rating, 3.6)
self.assertEqual(result.price, u'24.99')
self.assertEqual(result.pub_date, datetime(2009, 11, 21, 0, 0))
self.assertEqual(result.created, datetime(2009, 11, 21, 21, 31, 00))
self.assertEqual(result.tags, ['staff', 'outdoor', 'activist', 'scientist'])
self.assertEqual(result.sites, [u'3', u'5', u'1'])
self.assertEqual(result.empty_list, [])
# Check boolean filtering...
results = self.sqs.filter(id='core.mockmodel.1', is_active=True)
self.assertEqual(results.count(), 1)
@override_settings(DEBUG=True)
class LiveWhooshRamStorageTestCase(TestCase):
def setUp(self):
super(LiveWhooshRamStorageTestCase, self).setUp()
# Stow.
self.old_whoosh_storage = settings.HAYSTACK_CONNECTIONS['whoosh'].get('STORAGE', 'file')
settings.HAYSTACK_CONNECTIONS['whoosh']['STORAGE'] = 'ram'
self.old_ui = connections['whoosh'].get_unified_index()
self.ui = UnifiedIndex()
self.wrtsi = WhooshRoundTripSearchIndex()
self.ui.build(indexes=[self.wrtsi])
self.sb = connections['whoosh'].get_backend()
connections['whoosh']._index = self.ui
# Stow.
import haystack
self.sb.setup()
self.raw_whoosh = self.sb.index
self.parser = QueryParser(self.sb.content_field_name, schema=self.sb.schema)
self.sqs = SearchQuerySet('whoosh')
# Wipe it clean.
self.sqs.query.backend.clear()
# Fake indexing.
mock = MockModel()
mock.id = 1
self.sb.update(self.wrtsi, [mock])
def tearDown(self):
self.sqs.query.backend.clear()
settings.HAYSTACK_CONNECTIONS['whoosh']['STORAGE'] = self.old_whoosh_storage
connections['whoosh']._index = self.old_ui
super(LiveWhooshRamStorageTestCase, self).tearDown()
def test_ram_storage(self):
results = self.sqs.filter(id='core.mockmodel.1')
# Sanity check.
self.assertEqual(results.count(), 1)
# Check the individual fields.
result = results[0]
self.assertEqual(result.id, 'core.mockmodel.1')
self.assertEqual(result.text, 'This is some example text.')
self.assertEqual(result.name, 'Mister Pants')
self.assertEqual(result.is_active, True)
self.assertEqual(result.post_count, 25)
self.assertEqual(result.average_rating, 3.6)
self.assertEqual(result.pub_date, datetime(2009, 11, 21, 0, 0))
self.assertEqual(result.created, datetime(2009, 11, 21, 21, 31, 00))
self.assertEqual(result.tags, ['staff', 'outdoor', 'activist', 'scientist'])
self.assertEqual(result.sites, [u'3', u'5', u'1'])
self.assertEqual(result.empty_list, [])<๏ฝfimโend๏ฝ> | self.raw_whoosh = self.sb.index
self.parser = QueryParser(self.sb.content_field_name, schema=self.sb.schema)
self.sb.delete_index() |
<|file_name|>manifest.js<|end_file_name|><๏ฝfimโbegin๏ฝ>import ResourceProcessorBase from './resource-processor-base';
class ManifestProcessor extends ResourceProcessorBase {
processResource (manifest, ctx, charset, urlReplacer) {
var lines = manifest.split('\n');<๏ฝfimโhole๏ฝ> if (line && line !== 'CACHE MANIFEST' && line !== 'NETWORK:' && line !== 'FALLBACK:' &&
line !== 'CACHE:' && line[0] !== '#' && line !== '*') {
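// A FALLBACK entry pairs "<namespace> <fallback-url>" on one line,
// which is why both halves get run through urlReplacer below.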
var isFallbackItem = line.indexOf(' ') !== -1;
if (isFallbackItem) {
var urls = line.split(' ');
lines[i] = urlReplacer(urls[0]) + ' ' + urlReplacer(urls[1]);
}
else
lines[i] = urlReplacer(line);
}
}
return lines.join('\n');
}
shouldProcessResource (ctx) {
return ctx.contentInfo.isManifest;
}
}
export default new ManifestProcessor();<๏ฝfimโend๏ฝ> |
for (var i = 0; i < lines.length; i++) {
var line = lines[i].trim();
|
<|file_name|>vip.py<|end_file_name|><๏ฝfimโbegin๏ฝ>import urllib
from models.vipsubscriber import VipSubscriber
from base import BaseHandler
class Vip(BaseHandler):
LOCATION = "../views/vip.html"
def GetContext(self):
tContext = {}
tVipList = []
tVipKey = urllib.unquote(self.request.get('key'))
if(tVipKey != None and len(tVipKey) > 0):
tVip = VipSubscriber.get(tVipKey)
tContext['tVip'] = tVip
return tContext
def PostContext(self):
tContext = {}
tVip = VipSubscriber()
tVipForumName = urllib.unquote(self.request.get('forumname'))
tVipKey = urllib.unquote(self.request.get('key'))
if(tVipKey != None and len(tVipKey) > 0):<๏ฝfimโhole๏ฝ> tVip = VipSubscriber.get(tVipKey)
tContext['tVip'] = tVip
if(tVipForumName != None and len(tVipForumName) > 0):
tVip.subscriberForumName = tVipForumName
tVip.put()
return tContext<๏ฝfimโend๏ฝ> | |
<|file_name|>_exception.py<|end_file_name|><๏ฝfimโbegin๏ฝ>from __future__ import absolute_import, division, print_function
# ----------------------------------------------------------------------------
# Copyright (c) 2013--, scikit-bio development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
class TreeError(Exception):
"""General tree error"""
pass
class NoLengthError(TreeError):
"""Missing length when expected"""
pass
<๏ฝfimโhole๏ฝ>
class MissingNodeError(TreeError):
"""Expecting a node"""
pass
class NoParentError(MissingNodeError):
"""Missing a parent"""
pass<๏ฝfimโend๏ฝ> | class DuplicateNodeError(TreeError):
"""Duplicate nodes with identical names"""
pass
|
<|file_name|>unify.rs<|end_file_name|><๏ฝfimโbegin๏ฝ>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use extra::smallintmap::SmallIntMap;
use middle::ty::{Vid, expected_found, IntVarValue};
use middle::ty;
use middle::typeck::infer::{Bounds, uok, ures};
use middle::typeck::infer::InferCtxt;
use middle::typeck::infer::to_str::InferStr;
use syntax::ast;
#[deriving(Clone)]
pub enum VarValue<V, T> {
Redirect(V),
Root(T, uint),
}
pub struct ValsAndBindings<V, T> {
vals: SmallIntMap<VarValue<V, T>>,
bindings: ~[(V, VarValue<V, T>)],
}
pub struct Node<V, T> {
root: V,
possible_types: T,
rank: uint,
}
pub trait UnifyVid<T> {
fn appropriate_vals_and_bindings<'v>(infcx: &'v mut InferCtxt)
-> &'v mut ValsAndBindings<Self, T>;
}
pub trait UnifyInferCtxtMethods {
fn get<T:Clone,
V:Clone + Eq + Vid + UnifyVid<T>>(
&mut self,
vid: V)
-> Node<V, T>;
fn set<T:Clone + InferStr,
V:Clone + Vid + ToStr + UnifyVid<T>>(
&mut self,
vid: V,
new_v: VarValue<V, T>);
fn unify<T:Clone + InferStr,
V:Clone + Vid + ToStr + UnifyVid<T>>(
&mut self,
node_a: &Node<V, T>,
node_b: &Node<V, T>)
-> (V, uint);
}
impl UnifyInferCtxtMethods for InferCtxt {
fn get<T:Clone,
V:Clone + Eq + Vid + UnifyVid<T>>(
&mut self,
vid: V)
-> Node<V, T> {
/*!
*
* Find the root node for `vid`. This uses the standard
* union-find algorithm with path compression:
* http://en.wikipedia.org/wiki/Disjoint-set_data_structure
*/
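// Illustration: with bindings v0 -> Redirect(v1), v1 -> Redirect(v2),
// v2 -> Root(t, r), get(v0) returns the node rooted at v2; along the
// way the Redirect(vid) arm below repoints intermediate entries (here
// v1) straight at v2, shortening the chain for later lookups.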
let tcx = self.tcx;
let vb = UnifyVid::appropriate_vals_and_bindings(self);
return helper(tcx, vb, vid);
fn helper<T:Clone, V:Clone+Eq+Vid>(
tcx: ty::ctxt,
vb: &mut ValsAndBindings<V,T>,
vid: V) -> Node<V, T>
{
let vid_u = vid.to_uint();
let var_val = match vb.vals.find(&vid_u) {
Some(&ref var_val) => (*var_val).clone(),
None => {
tcx.sess.bug(format!(
"failed lookup of vid `{}`", vid_u));
}
};
match var_val {
Redirect(vid) => {
let node: Node<V,T> = helper(tcx, vb, vid.clone());
if node.root != vid {
// Path compression
vb.vals.insert(vid.to_uint(),
Redirect(node.root.clone()));
}
node
}
Root(pt, rk) => {
Node {root: vid, possible_types: pt, rank: rk}
}
}
}
}
fn set<T:Clone + InferStr,
V:Clone + Vid + ToStr + UnifyVid<T>>(
&mut self,
vid: V,
new_v: VarValue<V, T>) {
/*!
*
* Sets the value for `vid` to `new_v`. `vid` MUST be a root node!
*/
debug2!("Updating variable {} to {}",
vid.to_str(), new_v.inf_str(self));
let vb = UnifyVid::appropriate_vals_and_bindings(self);
let old_v = (*vb.vals.get(&vid.to_uint())).clone();
vb.bindings.push((vid.clone(), old_v));
vb.vals.insert(vid.to_uint(), new_v);
}
fn unify<T:Clone + InferStr,
V:Clone + Vid + ToStr + UnifyVid<T>>(
&mut self,
node_a: &Node<V, T>,
node_b: &Node<V, T>)
-> (V, uint) {
// Rank optimization: if you don't know what it is, check
// out <http://en.wikipedia.org/wiki/Disjoint-set_data_structure>
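// In practice: the higher-rank root absorbs the other, and rank only
// grows when two equal-rank roots meet, which keeps redirect chains
// logarithmic in the number of unified variables.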
debug2!("unify(node_a(id={:?}, rank={:?}), \
node_b(id={:?}, rank={:?}))",
node_a.root, node_a.rank,
node_b.root, node_b.rank);
if node_a.rank > node_b.rank {
// a has greater rank, so a should become b's parent,
// i.e., b should redirect to a.
self.set(node_b.root.clone(), Redirect(node_a.root.clone()));
(node_a.root.clone(), node_a.rank)
} else if node_a.rank < node_b.rank {
// b has greater rank, so a should redirect to b.
self.set(node_a.root.clone(), Redirect(node_b.root.clone()));
(node_b.root.clone(), node_b.rank)
} else {
// If equal, redirect one to the other and increment the
// other's rank.
assert_eq!(node_a.rank, node_b.rank);
self.set(node_b.root.clone(), Redirect(node_a.root.clone()));
(node_a.root.clone(), node_a.rank + 1)
}
}
}
// ______________________________________________________________________
// Code to handle simple variables like ints, floats---anything that
// doesn't have a subtyping relationship we need to worry about.
pub trait SimplyUnifiable {
fn to_type_err(expected_found<Self>) -> ty::type_err;
}
pub fn mk_err<T:SimplyUnifiable>(a_is_expected: bool,
a_t: T,
b_t: T) -> ures {
if a_is_expected {
Err(SimplyUnifiable::to_type_err(
ty::expected_found {expected: a_t, found: b_t}))
} else {
Err(SimplyUnifiable::to_type_err(
ty::expected_found {expected: b_t, found: a_t}))
}
}
pub trait InferCtxtMethods {
fn simple_vars<T:Clone + Eq + InferStr + SimplyUnifiable,
V:Clone + Eq + Vid + ToStr + UnifyVid<Option<T>>>(
&mut self,
a_is_expected: bool,
a_id: V,
b_id: V)
-> ures;
fn simple_var_t<T:Clone + Eq + InferStr + SimplyUnifiable,
V:Clone + Eq + Vid + ToStr + UnifyVid<Option<T>>>(
&mut self,
a_is_expected: bool,
a_id: V,
b: T)
-> ures;
}
<๏ฝfimโhole๏ฝ> V:Clone + Eq + Vid + ToStr + UnifyVid<Option<T>>>(
&mut self,
a_is_expected: bool,
a_id: V,
b_id: V)
-> ures {
/*!
*
* Unifies two simple variables. Because simple variables do
* not have any subtyping relationships, if both variables
* have already been associated with a value, then those two
* values must be the same. */
let node_a = self.get(a_id);
let node_b = self.get(b_id);
let a_id = node_a.root.clone();
let b_id = node_b.root.clone();
if a_id == b_id { return uok(); }
let combined = match (&node_a.possible_types, &node_b.possible_types)
{
(&None, &None) => None,
(&Some(ref v), &None) | (&None, &Some(ref v)) => {
Some((*v).clone())
}
(&Some(ref v1), &Some(ref v2)) => {
if *v1 != *v2 {
return mk_err(a_is_expected, (*v1).clone(), (*v2).clone())
}
Some((*v1).clone())
}
};
let (new_root, new_rank) = self.unify(&node_a, &node_b);
self.set(new_root, Root(combined, new_rank));
return uok();
}
fn simple_var_t<T:Clone + Eq + InferStr + SimplyUnifiable,
V:Clone + Eq + Vid + ToStr + UnifyVid<Option<T>>>(
&mut self,
a_is_expected: bool,
a_id: V,
b: T)
-> ures {
/*!
*
* Sets the value of the variable `a_id` to `b`. Because
* simple variables do not have any subtyping relationships,
* if `a_id` already has a value, it must be the same as
* `b`. */
let node_a = self.get(a_id);
let a_id = node_a.root.clone();
match node_a.possible_types {
None => {
self.set(a_id, Root(Some(b), node_a.rank));
return uok();
}
Some(ref a_t) => {
if *a_t == b {
return uok();
} else {
return mk_err(a_is_expected, (*a_t).clone(), b);
}
}
}
}
}
// ______________________________________________________________________
impl UnifyVid<Bounds<ty::t>> for ty::TyVid {
fn appropriate_vals_and_bindings<'v>(infcx: &'v mut InferCtxt)
-> &'v mut ValsAndBindings<ty::TyVid, Bounds<ty::t>> {
return &mut infcx.ty_var_bindings;
}
}
impl UnifyVid<Option<IntVarValue>> for ty::IntVid {
fn appropriate_vals_and_bindings<'v>(infcx: &'v mut InferCtxt)
-> &'v mut ValsAndBindings<ty::IntVid, Option<IntVarValue>> {
return &mut infcx.int_var_bindings;
}
}
impl SimplyUnifiable for IntVarValue {
fn to_type_err(err: expected_found<IntVarValue>) -> ty::type_err {
return ty::terr_int_mismatch(err);
}
}
impl UnifyVid<Option<ast::float_ty>> for ty::FloatVid {
fn appropriate_vals_and_bindings<'v>(infcx: &'v mut InferCtxt)
-> &'v mut ValsAndBindings<ty::FloatVid, Option<ast::float_ty>> {
return &mut infcx.float_var_bindings;
}
}
impl SimplyUnifiable for ast::float_ty {
fn to_type_err(err: expected_found<ast::float_ty>) -> ty::type_err {
return ty::terr_float_mismatch(err);
}
}<๏ฝfimโend๏ฝ> | impl InferCtxtMethods for InferCtxt {
fn simple_vars<T:Clone + Eq + InferStr + SimplyUnifiable, |
<|file_name|>logcontrol.py<|end_file_name|><๏ฝfimโbegin๏ฝ>"Messages used to internally control thesplog settings."
from thespian.actors import ActorSystemMessage<๏ฝfimโhole๏ฝ>class SetLogging(ActorSystemMessage):
def __init__(self, threshold, useLogging, useFile):
self.threshold = threshold
self.useLogging = useLogging
self.useFile = useFile<๏ฝfimโend๏ฝ> | |
<|file_name|>http_cache.rs<|end_file_name|><๏ฝfimโbegin๏ฝ>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![deny(missing_docs)]
//! A memory cache implementing the logic specified in <http://tools.ietf.org/html/rfc7234>
//! and <http://tools.ietf.org/html/rfc7232>.
use fetch::methods::{Data, DoneChannel};
use hyper::header;
use hyper::header::ContentType;
use hyper::header::Headers;
use hyper::method::Method;
use hyper::status::StatusCode;
use hyper_serde::Serde;
use malloc_size_of::{MallocSizeOf, MallocSizeOfOps, MallocUnconditionalSizeOf, MallocUnconditionalShallowSizeOf};
use malloc_size_of::Measurable;
use net_traits::{Metadata, FetchMetadata};
use net_traits::request::Request;
use net_traits::response::{HttpsState, Response, ResponseBody};
use servo_arc::Arc;
use servo_config::prefs::PREFS;
use servo_url::ServoUrl;
use std::collections::HashMap;
use std::str;
use std::sync::Mutex;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::mpsc::{channel, Sender};
use time;
use time::{Duration, Tm};
/// The key used to differentiate requests in the cache.
#[derive(Clone, Eq, Hash, MallocSizeOf, PartialEq)]
pub struct CacheKey {
url: ServoUrl
}
impl CacheKey {
fn new(request: Request) -> CacheKey {
CacheKey {
url: request.current_url().clone()
}
}
fn from_servo_url(servo_url: &ServoUrl) -> CacheKey {
CacheKey {
url: servo_url.clone()
}
}
/// Retrieve the URL associated with this key
pub fn url(&self) -> ServoUrl {
self.url.clone()
}
}
/// A complete cached resource.
#[derive(Clone)]
struct CachedResource {
request_headers: Arc<Mutex<Headers>>,
body: Arc<Mutex<ResponseBody>>,
aborted: Arc<AtomicBool>,
awaiting_body: Arc<Mutex<Vec<Sender<Data>>>>,
data: Measurable<MeasurableCachedResource>
}
#[derive(Clone, MallocSizeOf)]
struct MeasurableCachedResource {
metadata: CachedMetadata,
location_url: Option<Result<ServoUrl, String>>,
https_state: HttpsState,
status: Option<StatusCode>,
raw_status: Option<(u16, Vec<u8>)>,
url_list: Vec<ServoUrl>,
expires: Duration,
last_validated: Tm,
}
impl MallocSizeOf for CachedResource {
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
self.request_headers.unconditional_size_of(ops) +
self.body.unconditional_size_of(ops) +
self.aborted.unconditional_size_of(ops) +
self.awaiting_body.unconditional_size_of(ops) +
self.data.size_of(ops)
}
}
/// Metadata about a loaded resource, such as is obtained from HTTP headers.
#[derive(Clone)]
struct CachedMetadata {
/// Headers
pub headers: Arc<Mutex<Headers>>,
/// Fields that implement MallocSizeOf
pub data: Measurable<MeasurableCachedMetadata>
}
#[derive(Clone, MallocSizeOf)]
struct MeasurableCachedMetadata {
/// Final URL after redirects.
pub final_url: ServoUrl,
/// MIME type / subtype.
pub content_type: Option<Serde<ContentType>>,
/// Character set.
pub charset: Option<String>,
/// HTTP Status
pub status: Option<(u16, Vec<u8>)>
}
impl MallocSizeOf for CachedMetadata {
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
self.headers.unconditional_shallow_size_of(ops) +
self.headers.size_of(ops) +
self.data.size_of(ops)
}
}
/// Wrapper around a cached response, including information on re-validation needs
pub struct CachedResponse {
/// The response constructed from the cached resource
pub response: Response,
/// The revalidation flag for the stored response
pub needs_validation: bool
}
/// A memory cache.
#[derive(MallocSizeOf)]
pub struct HttpCache {
/// cached responses.
entries: HashMap<CacheKey, Vec<CachedResource>>,
}
/// Determine if a given response is cacheable based on the initial metadata received.
/// Based on <https://tools.ietf.org/html/rfc7234#section-3>
fn response_is_cacheable(metadata: &Metadata) -> bool {
// TODO: if we determine that this cache should be considered shared:
// 1. check for absence of private response directive <https://tools.ietf.org/html/rfc7234#section-5.2.2.6>
// 2. check for absence of the Authorization header field.
let mut is_cacheable = false;
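// e.g. a response carrying only `Cache-Control: no-store` ends up
// uncacheable below, while one with an ETag or Last-Modified header
// (or an explicit public/max-age directive) is storable.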
let headers = metadata.headers.as_ref().unwrap();
if headers.has::<header::Expires>() ||
headers.has::<header::LastModified>() ||
headers.has::<header::ETag>() {
is_cacheable = true;
}
if let Some(&header::CacheControl(ref directives)) = headers.get::<header::CacheControl>() {
for directive in directives.iter() {
match *directive {
header::CacheDirective::NoStore => return false,
header::CacheDirective::Public | header::CacheDirective::SMaxAge(_)
| header::CacheDirective::MaxAge(_) | header::CacheDirective::NoCache => is_cacheable = true,
_ => {},
}
}
}
if let Some(&header::Pragma::NoCache) = headers.get::<header::Pragma>() {
return false;
}
is_cacheable
}
/// Calculating Age
/// <https://tools.ietf.org/html/rfc7234#section-4.2.3>
fn calculate_response_age(response: &Response) -> Duration {
// TODO: follow the spec more closely (Date headers, request/response lag, ...)
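// Illustrative: a response carrying `Age: 60` counts as 60 seconds old
// already, so a `max-age=300` directive elsewhere leaves it only 240
// seconds of remaining freshness.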
if let Some(secs) = response.headers.get_raw("Age") {
let seconds_string = String::from_utf8_lossy(&secs[0]);
if let Ok(secs) = seconds_string.parse::<i64>() {
return Duration::seconds(secs);
}
}
Duration::seconds(0i64)
}
/// Determine the expiry date from relevant headers,
/// or uses a heuristic if none are present.
fn get_response_expiry(response: &Response) -> Duration {
// Calculating Freshness Lifetime <https://tools.ietf.org/html/rfc7234#section-4.2.1>
let age = calculate_response_age(&response);
if let Some(&header::CacheControl(ref directives)) = response.headers.get::<header::CacheControl>() {
let has_no_cache_directive = directives.iter().any(|directive| {
header::CacheDirective::NoCache == *directive
});
if has_no_cache_directive {
// Requires validation on first use.
return Duration::seconds(0i64);
} else {
for directive in directives {
match *directive {
header::CacheDirective::SMaxAge(secs) | header::CacheDirective::MaxAge(secs) => {
let max_age = Duration::seconds(secs as i64);
if max_age < age {
return Duration::seconds(0i64);
}
return max_age - age;
},
_ => (),
}
}
}
}
if let Some(&header::Expires(header::HttpDate(t))) = response.headers.get::<header::Expires>() {
// store the period of time from now until expiry
let desired = t.to_timespec();
let current = time::now().to_timespec();
if desired > current {
return desired - current;
} else {
return Duration::seconds(0i64);
}
} else {
if let Some(_) = response.headers.get_raw("Expires") {
// Malformed Expires header, shouldn't be used to construct a valid response.
return Duration::seconds(0i64);
}
}
// Calculating Heuristic Freshness
// <https://tools.ietf.org/html/rfc7234#section-4.2.2>
if let Some((ref code, _)) = response.raw_status {
// <https://tools.ietf.org/html/rfc7234#section-5.5.4>
// Since presently we do not generate a Warning header field with a 113 warn-code,
// 24 hours minus response age is the max for heuristic calculation.
let max_heuristic = Duration::hours(24) - age;
let heuristic_freshness = if let Some(&header::LastModified(header::HttpDate(t))) =
// If the response has a Last-Modified header field,
// caches are encouraged to use a heuristic expiration value
// that is no more than some fraction of the interval since that time.
response.headers.get::<header::LastModified>() {
let last_modified = t.to_timespec();
let current = time::now().to_timespec();
// A typical setting of this fraction might be 10%.
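// e.g. a resource last modified 30 days ago yields 3 days here, which
// the max_heuristic cap then clamps to at most 24 hours minus age.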
let raw_heuristic_calc = (current - last_modified) / 10;
let result = if raw_heuristic_calc < max_heuristic {
raw_heuristic_calc
} else {
max_heuristic
};
result
} else {
max_heuristic
};
match *code {
200 | 203 | 204 | 206 | 300 | 301 | 404 | 405 | 410 | 414 | 501 => {
// Status codes that are cacheable by default <https://tools.ietf.org/html/rfc7231#section-6.1>
return heuristic_freshness
},
_ => {
// Other status codes can only use heuristic freshness if the public cache directive is present.
if let Some(&header::CacheControl(ref directives)) = response.headers.get::<header::CacheControl>() {
let has_public_directive = directives.iter().any(|directive| {
header::CacheDirective::Public == *directive
});
if has_public_directive {
return heuristic_freshness;
}
}
},
}
}
// Requires validation upon first use as default.
Duration::seconds(0i64)
}
/// Request Cache-Control Directives
/// <https://tools.ietf.org/html/rfc7234#section-5.2.1>
fn get_expiry_adjustment_from_request_headers(request: &Request, expires: Duration) -> Duration {
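// Illustrative: with 100s of freshness left, `max-stale=30` stretches
// it to 130s, `min-fresh=200` collapses it to Duration::min_value()
// (forcing revalidation), and `no-cache`/`no-store` always collapse it.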
let directive_data = match request.headers.get_raw("cache-control") {
Some(data) => data,
None => return expires,
};
let directives_string = String::from_utf8_lossy(&directive_data[0]);
for directive in directives_string.split(",") {
let mut directive_info = directive.split("=");
match (directive_info.next(), directive_info.next()) {
(Some("max-stale"), Some(sec_str)) => {
if let Ok(secs) = sec_str.parse::<i64>() {
return expires + Duration::seconds(secs);
}
},
(Some("max-age"), Some(sec_str)) => {
if let Ok(secs) = sec_str.parse::<i64>() {
let max_age = Duration::seconds(secs);
if expires > max_age {
return Duration::min_value();
}
return expires - max_age;
}
},
(Some("min-fresh"), Some(sec_str)) => {
if let Ok(secs) = sec_str.parse::<i64>() {
let min_fresh = Duration::seconds(secs);
if expires < min_fresh {
return Duration::min_value();
}
return expires - min_fresh;
}
},
(Some("no-cache"), _) | (Some("no-store"), _) => return Duration::min_value(),
_ => {}
}
}
expires
}
/// Create a CachedResponse from a request and a CachedResource.
fn create_cached_response(request: &Request,
cached_resource: &CachedResource,
cached_headers: &Headers,
done_chan: &mut DoneChannel)
-> CachedResponse {
let mut response = Response::new(cached_resource.data.metadata.data.final_url.clone());
response.headers = cached_headers.clone();
response.body = cached_resource.body.clone();
if let ResponseBody::Receiving(_) = *cached_resource.body.lock().unwrap() {
let (done_sender, done_receiver) = channel();
*done_chan = Some((done_sender.clone(), done_receiver));
cached_resource.awaiting_body.lock().unwrap().push(done_sender);
}
response.location_url = cached_resource.data.location_url.clone();
response.status = cached_resource.data.status.clone();
response.raw_status = cached_resource.data.raw_status.clone();
response.url_list = cached_resource.data.url_list.clone();
response.https_state = cached_resource.data.https_state.clone();
response.referrer = request.referrer.to_url().cloned();
response.referrer_policy = request.referrer_policy.clone();
response.aborted = cached_resource.aborted.clone();
let expires = cached_resource.data.expires;
let adjusted_expires = get_expiry_adjustment_from_request_headers(request, expires);
let now = Duration::seconds(time::now().to_timespec().sec);
let last_validated = Duration::seconds(cached_resource.data.last_validated.to_timespec().sec);
let time_since_validated = now - last_validated;
// TODO: take must-revalidate into account <https://tools.ietf.org/html/rfc7234#section-5.2.2.1>
// TODO: if this cache is to be considered shared, take proxy-revalidate into account
// <https://tools.ietf.org/html/rfc7234#section-5.2.2.7>
let has_expired = (adjusted_expires < time_since_validated) ||
(adjusted_expires == time_since_validated);
CachedResponse { response: response, needs_validation: has_expired }
}<๏ฝfimโhole๏ฝ>
/// Create a new resource, based on the bytes requested, and an existing resource,
/// with a status-code of 206.
fn create_resource_with_bytes_from_resource(bytes: &[u8], resource: &CachedResource)
-> CachedResource {
CachedResource {
request_headers: resource.request_headers.clone(),
body: Arc::new(Mutex::new(ResponseBody::Done(bytes.to_owned()))),
aborted: Arc::new(AtomicBool::new(false)),
awaiting_body: Arc::new(Mutex::new(vec![])),
data: Measurable(MeasurableCachedResource {
metadata: resource.data.metadata.clone(),
location_url: resource.data.location_url.clone(),
https_state: resource.data.https_state.clone(),
status: Some(StatusCode::PartialContent),
raw_status: Some((206, b"Partial Content".to_vec())),
url_list: resource.data.url_list.clone(),
expires: resource.data.expires.clone(),
last_validated: resource.data.last_validated.clone(),
})
}
}
/// Support for range requests <https://tools.ietf.org/html/rfc7233>.
fn handle_range_request(request: &Request,
candidates: Vec<&CachedResource>,
range_spec: &[header::ByteRangeSpec],
done_chan: &mut DoneChannel)
-> Option<CachedResponse> {
let mut complete_cached_resources = candidates.iter().filter(|resource| {
match resource.data.raw_status {
Some((ref code, _)) => *code == 200,
None => false
}
});
let partial_cached_resources = candidates.iter().filter(|resource| {
match resource.data.raw_status {
Some((ref code, _)) => *code == 206,
None => false
}
});
match (range_spec.first().unwrap(), complete_cached_resources.next()) {
// TODO: take the full range spec into account.
// If we have a complete resource, take the request range from the body.
// When there isn't a complete resource available, we loop over cached partials,
// and see if any individual partial response can fulfill the current request for a byte range.
// TODO: combine partials that in combination could satisfy the requested range?
// see <https://tools.ietf.org/html/rfc7233#section-4.3>.
// TODO: add support for complete and partial resources,
// whose body is in the ResponseBody::Receiving state.
(&header::ByteRangeSpec::FromTo(beginning, end), Some(ref complete_resource)) => {
if let ResponseBody::Done(ref body) = *complete_resource.body.lock().unwrap() {
let b = beginning as usize;
let e = end as usize + 1;
let requested = body.get(b..e);
if let Some(bytes) = requested {
let new_resource = create_resource_with_bytes_from_resource(bytes, complete_resource);
let cached_headers = new_resource.data.metadata.headers.lock().unwrap();
let cached_response = create_cached_response(request, &new_resource, &*cached_headers, done_chan);
return Some(cached_response);
}
}
},
(&header::ByteRangeSpec::FromTo(beginning, end), None) => {
for partial_resource in partial_cached_resources {
let headers = partial_resource.data.metadata.headers.lock().unwrap();
let content_range = headers.get::<header::ContentRange>();
let (res_beginning, res_end) = match content_range {
Some(&header::ContentRange(
header::ContentRangeSpec::Bytes {
range: Some((res_beginning, res_end)), .. })) => (res_beginning, res_end),
_ => continue,
};
if res_beginning <= beginning && res_end >= end { // inclusive cover check; avoids u64 underflow when res_beginning is 0
let resource_body = &*partial_resource.body.lock().unwrap();
let requested = match resource_body {
&ResponseBody::Done(ref body) => {
let b = beginning as usize - res_beginning as usize;
let e = end as usize - res_beginning as usize + 1;
body.get(b..e)
},
_ => continue,
};
if let Some(bytes) = requested {
let new_resource = create_resource_with_bytes_from_resource(&bytes, partial_resource);
let cached_response = create_cached_response(request, &new_resource, &*headers, done_chan);
return Some(cached_response);
}
}
}
},
(&header::ByteRangeSpec::AllFrom(beginning), Some(ref complete_resource)) => {
if let ResponseBody::Done(ref body) = *complete_resource.body.lock().unwrap() {
let b = beginning as usize;
let requested = body.get(b..);
if let Some(bytes) = requested {
let new_resource = create_resource_with_bytes_from_resource(bytes, complete_resource);
let cached_headers = new_resource.data.metadata.headers.lock().unwrap();
let cached_response = create_cached_response(request, &new_resource, &*cached_headers, done_chan);
return Some(cached_response);
}
}
},
(&header::ByteRangeSpec::AllFrom(beginning), None) => {
for partial_resource in partial_cached_resources {
let headers = partial_resource.data.metadata.headers.lock().unwrap();
let content_range = headers.get::<header::ContentRange>();
let (res_beginning, res_end, total) = match content_range {
Some(&header::ContentRange(
header::ContentRangeSpec::Bytes {
range: Some((res_beginning, res_end)),
instance_length: Some(total) })) => (res_beginning, res_end, total),
_ => continue,
};
if res_beginning <= beginning && res_end == total - 1 { // <= so an exact-start partial also qualifies
let resource_body = &*partial_resource.body.lock().unwrap();
let requested = match resource_body {
&ResponseBody::Done(ref body) => {
let from_byte = beginning as usize - res_beginning as usize;
body.get(from_byte..)
},
_ => continue,
};
if let Some(bytes) = requested {
let new_resource = create_resource_with_bytes_from_resource(&bytes, partial_resource);
let cached_response = create_cached_response(request, &new_resource, &*headers, done_chan);
return Some(cached_response);
}
}
}
},
(&header::ByteRangeSpec::Last(offset), Some(ref complete_resource)) => {
if let ResponseBody::Done(ref body) = *complete_resource.body.lock().unwrap() {
let from_byte = body.len().saturating_sub(offset as usize); // a suffix longer than the body selects the whole body (RFC 7233)
let requested = body.get(from_byte..);
if let Some(bytes) = requested {
let new_resource = create_resource_with_bytes_from_resource(bytes, complete_resource);
let cached_headers = new_resource.data.metadata.headers.lock().unwrap();
let cached_response = create_cached_response(request, &new_resource, &*cached_headers, done_chan);
return Some(cached_response);
}
}
},
(&header::ByteRangeSpec::Last(offset), None) => {
for partial_resource in partial_cached_resources {
let headers = partial_resource.data.metadata.headers.lock().unwrap();
let content_range = headers.get::<header::ContentRange>();
let (res_beginning, res_end, total) = match content_range {
Some(&header::ContentRange(
header::ContentRangeSpec::Bytes {
range: Some((res_beginning, res_end)),
instance_length: Some(total) })) => (res_beginning, res_end, total),
_ => continue,
};
if (total - res_beginning) >= offset && (total - res_end) <= offset { // rewritten to avoid u64 underflow when offset is 0
let resource_body = &*partial_resource.body.lock().unwrap();
let requested = match resource_body {
&ResponseBody::Done(ref body) => {
let from_byte = body.len().saturating_sub(offset as usize); // saturate to avoid underflow on short partials
body.get(from_byte..)
},
_ => continue,
};
if let Some(bytes) = requested {
let new_resource = create_resource_with_bytes_from_resource(&bytes, partial_resource);
let cached_response = create_cached_response(request, &new_resource, &*headers, done_chan);
return Some(cached_response);
}
}
}
}
}
None
}
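// --- Editorial aside (hypothetical sketch) ---
// The FromTo arms above map an inclusive HTTP byte range onto a half-open
// Rust slice; `get` returns None when the range falls outside the body:
fn slice_for_range(body: &[u8], beginning: u64, end: u64) -> Option<&[u8]> {
    let b = beginning as usize;
    let e = end as usize + 1; // HTTP ranges are inclusive, slices are exclusive
    body.get(b..e)
}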
impl HttpCache {
/// Create a new memory cache instance.
pub fn new() -> HttpCache {
HttpCache {
entries: HashMap::new()
}
}
/// Constructing Responses from Caches.
/// <https://tools.ietf.org/html/rfc7234#section-4>
pub fn construct_response(&self, request: &Request, done_chan: &mut DoneChannel) -> Option<CachedResponse> {
// TODO: generate warning headers as appropriate <https://tools.ietf.org/html/rfc7234#section-5.5>
if request.method != Method::Get {
// Only Get requests are cached, avoid a url based match for others.
return None;
}
let entry_key = CacheKey::new(request.clone());
let resources = self.entries.get(&entry_key)?.into_iter().filter(|r| { !r.aborted.load(Ordering::Relaxed) });
let mut candidates = vec![];
for cached_resource in resources {
let mut can_be_constructed = true;
let cached_headers = cached_resource.data.metadata.headers.lock().unwrap();
let original_request_headers = cached_resource.request_headers.lock().unwrap();
if let Some(vary_data) = cached_headers.get_raw("Vary") {
// Calculating Secondary Keys with Vary <https://tools.ietf.org/html/rfc7234#section-4.1>
let vary_data_string = String::from_utf8_lossy(&vary_data[0]);
let vary_values = vary_data_string.split(",").map(|val| val.trim());
for vary_val in vary_values {
// For every header name found in the Vary header of the stored response.
if vary_val == "*" {
// A Vary header field-value of "*" always fails to match.
can_be_constructed = false;
break;
}
match request.headers.get_raw(vary_val) {
Some(header_data) => {
// If the header is present in the request.
let request_header_data_string = String::from_utf8_lossy(&header_data[0]);
if let Some(original_header_data) = original_request_headers.get_raw(vary_val) {
// Check that the value of the nominated header field,
// in the original request, matches the value in the current request.
let original_request_header_data_string =
String::from_utf8_lossy(&original_header_data[0]);
if original_request_header_data_string != request_header_data_string {
can_be_constructed = false;
break;
}
}
},
None => {
// If a header field is absent from a request,
// it can only match a stored response if those headers,
// were also absent in the original request.
can_be_constructed = original_request_headers.get_raw(vary_val).is_none();
},
}
if !can_be_constructed {
break;
}
}
}
if can_be_constructed {
candidates.push(cached_resource);
}
}
// Support for range requests
if let Some(&header::Range::Bytes(ref range_spec)) = request.headers.get::<header::Range>() {
return handle_range_request(request, candidates, &range_spec, done_chan);
} else {
// Not a Range request.
if let Some(ref cached_resource) = candidates.first() {
// Returning the first response that can be constructed
// TODO: select the most appropriate one, using a known mechanism from a selecting header field,
// or using the Date header to return the most recent one.
let cached_headers = cached_resource.data.metadata.headers.lock().unwrap();
let cached_response = create_cached_response(request, cached_resource, &*cached_headers, done_chan);
return Some(cached_response);
}
}
None
}
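// --- Editorial aside (hypothetical sketch mirroring the Vary check above) ---
fn vary_header_matches(vary_val: &str, original: Option<&str>, current: Option<&str>) -> bool {
    if vary_val == "*" {
        return false; // a Vary value of "*" always fails to match
    }
    match (current, original) {
        (Some(cur), Some(orig)) => cur == orig, // nominated values must agree
        (Some(_), None) => true, // the code above performs no check in this case
        (None, orig) => orig.is_none(), // absent now => must have been absent then
    }
}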
/// Updating consumers who received a response constructed with a ResponseBody::Receiving.
pub fn update_awaiting_consumers(&mut self, request: &Request, response: &Response) {
if let ResponseBody::Done(ref completed_body) = *response.body.lock().unwrap() {
let entry_key = CacheKey::new(request.clone());
if let Some(cached_resources) = self.entries.get(&entry_key) {
for cached_resource in cached_resources.iter() {
let mut awaiting_consumers = cached_resource.awaiting_body.lock().unwrap();
for done_sender in awaiting_consumers.drain(..) {
if cached_resource.aborted.load(Ordering::Relaxed) {
let _ = done_sender.send(Data::Cancelled);
} else {
let _ = done_sender.send(Data::Payload(completed_body.clone()));
let _ = done_sender.send(Data::Done);
}
};
}
}
}
}
/// Freshening Stored Responses upon Validation.
/// <https://tools.ietf.org/html/rfc7234#section-4.3.4>
pub fn refresh(&mut self, request: &Request, response: Response, done_chan: &mut DoneChannel) -> Option<Response> {
assert_eq!(response.status, Some(StatusCode::NotModified));
let entry_key = CacheKey::new(request.clone());
if let Some(cached_resources) = self.entries.get_mut(&entry_key) {
for cached_resource in cached_resources.iter_mut() {
// done_chan will have been set to Some(..) by http_network_fetch.
// If the body is not receiving data, set the done_chan back to None.
// Otherwise, create a new dedicated channel to update the consumer.
// The response constructed here will replace the 304 one from the network.
let in_progress_channel = match *cached_resource.body.lock().unwrap() {
ResponseBody::Receiving(..) => {
Some(channel())
},
ResponseBody::Empty | ResponseBody::Done(..) => None
};
match in_progress_channel {
Some((done_sender, done_receiver)) => {
*done_chan = Some((done_sender.clone(), done_receiver));
cached_resource.awaiting_body.lock().unwrap().push(done_sender);
},
None => *done_chan = None
}
// Received a response with 304 status code, in response to a request that matches a cached resource.
// 1. update the headers of the cached resource.
// 2. return a response, constructed from the cached resource.
let mut constructed_response = Response::new(cached_resource.data.metadata.data.final_url.clone());
constructed_response.body = cached_resource.body.clone();
constructed_response.status = cached_resource.data.status.clone();
constructed_response.https_state = cached_resource.data.https_state.clone();
constructed_response.referrer = request.referrer.to_url().cloned();
constructed_response.referrer_policy = request.referrer_policy.clone();
constructed_response.raw_status = cached_resource.data.raw_status.clone();
constructed_response.url_list = cached_resource.data.url_list.clone();
cached_resource.data.expires = get_response_expiry(&constructed_response);
let mut stored_headers = cached_resource.data.metadata.headers.lock().unwrap();
stored_headers.extend(response.headers.iter());
constructed_response.headers = stored_headers.clone();
return Some(constructed_response);
}
}
None
}
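// --- Editorial note on refresh(): on a 304 the stored entry is freshened in
// place (validated headers merged in, expiry recomputed), and the response
// constructed here replaces the 304 from the network, so consumers never
// observe NotModified directly.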
fn invalidate_for_url(&mut self, url: &ServoUrl) {
let entry_key = CacheKey::from_servo_url(url);
if let Some(cached_resources) = self.entries.get_mut(&entry_key) {
for cached_resource in cached_resources.iter_mut() {
cached_resource.data.expires = Duration::seconds(0i64);
}
}
}
/// Invalidation.
/// <https://tools.ietf.org/html/rfc7234#section-4.4>
pub fn invalidate(&mut self, request: &Request, response: &Response) {
if let Some(&header::Location(ref location)) = response.headers.get::<header::Location>() {
if let Ok(url) = request.current_url().join(location) {
self.invalidate_for_url(&url);
}
}
// TODO: update hyper to use typed getter.
if let Some(url_data) = response.headers.get_raw("Content-Location") {
if let Ok(content_location) = str::from_utf8(&url_data[0]) {
if let Ok(url) = request.current_url().join(content_location) {
self.invalidate_for_url(&url);
}
}
}
self.invalidate_for_url(&request.url());
}
/// Storing Responses in Caches.
/// <https://tools.ietf.org/html/rfc7234#section-3>
pub fn store(&mut self, request: &Request, response: &Response) {
if PREFS.get("network.http-cache.disabled").as_boolean().unwrap_or(false) {
return
}
if request.method != Method::Get {
// Only Get requests are cached.
return
}
let entry_key = CacheKey::new(request.clone());
let metadata = match response.metadata() {
Ok(FetchMetadata::Filtered {
filtered: _,
unsafe_: metadata }) |
Ok(FetchMetadata::Unfiltered(metadata)) => metadata,
_ => return,
};
if !response_is_cacheable(&metadata) {
return;
}
let expiry = get_response_expiry(&response);
let cacheable_metadata = CachedMetadata {
headers: Arc::new(Mutex::new(response.headers.clone())),
data: Measurable(MeasurableCachedMetadata {
final_url: metadata.final_url,
content_type: metadata.content_type,
charset: metadata.charset,
status: metadata.status
})
};
let entry_resource = CachedResource {
request_headers: Arc::new(Mutex::new(request.headers.clone())),
body: response.body.clone(),
aborted: response.aborted.clone(),
awaiting_body: Arc::new(Mutex::new(vec![])),
data: Measurable(MeasurableCachedResource {
metadata: cacheable_metadata,
location_url: response.location_url.clone(),
https_state: response.https_state.clone(),
status: response.status.clone(),
raw_status: response.raw_status.clone(),
url_list: response.url_list.clone(),
expires: expiry,
last_validated: time::now()
})
};
let entry = self.entries.entry(entry_key).or_insert(vec![]);
entry.push(entry_resource);
}
}<๏ฝfimโend๏ฝ> | |
<|file_name|>online.py<|end_file_name|><๏ฝfimโbegin๏ฝ>'''
Online link spider test
'''
from __future__ import print_function
from future import standard_library
standard_library.install_aliases()
from builtins import next
import unittest
from unittest import TestCase
import time
import sys
from os import path
sys.path.append(path.dirname(path.dirname(path.abspath(__file__))))
import scrapy
import redis
from redis.exceptions import ConnectionError
import json
import threading
from crawling.spiders.link_spider import LinkSpider
from scrapy.utils.project import get_project_settings
from twisted.internet import reactor
from scrapy.crawler import CrawlerRunner
from kafka import KafkaConsumer
class CustomSpider(LinkSpider):
'''
Overridden link spider for testing
'''
name = "test-spider"
class TestLinkSpider(TestCase):
example_feed = "{\"allowed_domains\":null,\"allow_regex\":null,\""\
"crawlid\":\"abc12345\",\"url\":\"http://dmoztools.net/\",\"expires\":0,\""\
"ts\":1461549923.7956631184,\"priority\":1,\"deny_regex\":null,\""\
"cookie\":null,\"attrs\":null,\"appid\":\"test\",\"spiderid\":\""\
"test-link\",\"useragent\":null,\"deny_extensions\":null,\"maxdepth\":0}"
def setUp(self):
self.settings = get_project_settings()
self.settings.set('KAFKA_TOPIC_PREFIX', "demo_test")
# set up redis
self.redis_conn = redis.Redis(host=self.settings['REDIS_HOST'],
port=self.settings['REDIS_PORT'],
db=self.settings['REDIS_DB'])
try:
self.redis_conn.info()
except ConnectionError:
print("Could not connect to Redis")
# plugin is essential to functionality<๏ฝfimโhole๏ฝ> keys = self.redis_conn.keys("test-spider:*")
for key in keys:
self.redis_conn.delete(key)
# set up a kafka consumer to consume the potential result
self.consumer = KafkaConsumer(
"demo_test.crawled_firehose",
bootstrap_servers=self.settings['KAFKA_HOSTS'],
group_id="demo-id",
auto_commit_interval_ms=10,
consumer_timeout_ms=5000,
auto_offset_reset='earliest'
)
time.sleep(1)
def test_crawler_process(self):
runner = CrawlerRunner(self.settings)
d = runner.crawl(CustomSpider)
d.addBoth(lambda _: reactor.stop())
# add crawl to redis
key = "test-spider:dmoztools.net:queue"
self.redis_conn.zadd(key, self.example_feed, -99)
# run the spider, give 20 seconds to see the url, crawl it,
# and send to kafka. Then we kill the reactor
def thread_func():
time.sleep(20)
reactor.stop()
thread = threading.Thread(target=thread_func)
thread.start()
reactor.run()
message_count = 0
# consumer_timeout_ms makes next() raise StopIteration when no message arrives
try:
m = next(self.consumer)
except StopIteration:
m = None
if m is None:
pass
else:
the_dict = json.loads(m.value)
if the_dict is not None and the_dict['appid'] == 'test' \
and the_dict['crawlid'] == 'abc12345':
message_count += 1
self.assertEqual(message_count, 1)
def tearDown(self):
keys = self.redis_conn.keys('stats:crawler:*:test-spider:*')
keys = keys + self.redis_conn.keys('test-spider:*')
for key in keys:
self.redis_conn.delete(key)
# if for some reason the tests fail, we end up falling behind on
# the consumer
for m in self.consumer:
pass
self.consumer.close()
if __name__ == '__main__':
unittest.main()<๏ฝfimโend๏ฝ> | sys.exit(1)
# clear out older test keys if any |
<|file_name|>LoginUsuarioServlet.java<|end_file_name|><๏ฝfimโbegin๏ฝ>package br.edu.utfpr.cm.pi.controller;
import java.io.IOException;
<๏ฝfimโhole๏ฝ>import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import br.edu.utfpr.cm.pi.beans.UsuarioSistema;
import br.edu.utfpr.cm.pi.ldap.LoginLDAP;
@WebServlet(name = "LoginUsuarioServlet", urlPatterns = { "/LoginUsuarioServlet" })
public class LoginUsuarioServlet extends HttpServlet {
private static final long serialVersionUID = 1L;
public LoginUsuarioServlet() {
super();
}
protected void doGet(HttpServletRequest request,
HttpServletResponse response) throws ServletException, IOException {
doPost(request, response);
}
protected void doPost(HttpServletRequest request,
HttpServletResponse response) throws ServletException, IOException {
service(request, response);
}
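// Editorial note: both doGet and doPost funnel into the overridden
// service(...) below, which performs the LDAP login and redirects to a
// JSP page depending on the result.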
protected void service(HttpServletRequest request,
HttpServletResponse response) throws ServletException, IOException {
LoginLDAP ldap = new LoginLDAP();
String login = request.getParameter("login");
String senha = request.getParameter("senha");
UsuarioSistema usuario = ldap.logarNoLDAP(login, senha);
if (usuario != null) {
HttpSession sessao = request.getSession(true);
sessao.setAttribute("usuario", usuario);
response.sendRedirect("bemvindoUsuario.jsp");
} else {
response.sendRedirect("loginInvalidoUsuario.jsp");
}
}
}<๏ฝfimโend๏ฝ> | import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
|
<|file_name|>transformation.py<|end_file_name|><๏ฝfimโbegin๏ฝ>import numpy<๏ฝfimโhole๏ฝ>
"""Combined translational and rotational transformation.
This is a subclass of Transformation.
Objects of this class are not created directly, but can be the
result of a composition of rotations and translations.
"""
def __init__(self, tensor, vector):
self.tensor = tensor.copy()
self.vector = vector.copy()
is_rotation_translation = 1
def __mul__(self, other):
if hasattr(other, 'is_rotation'):
return RotationTranslation(numpy.dot(self.tensor, other.tensor),
self.vector)
elif hasattr(other, 'is_translation'):
return RotationTranslation(self.tensor,
numpy.dot(self.tensor, other.vector)+self.vector)
elif hasattr(other, 'is_rotation_translation'):
return RotationTranslation(numpy.dot(self.tensor, other.tensor),
numpy.dot(self.tensor, other.vector)+self.vector)
else:
raise ValueError, 'incompatible object'
def __call__(self, vector):
return numpy.dot(self.tensor, vector) + self.vector
def inverse(self):
return RotationTranslation(numpy.transpose(self.tensor),
numpy.dot(numpy.transpose(self.tensor),
-self.vector))<๏ฝfimโend๏ฝ> |
class RotationTranslation: |
<|file_name|>yaml-comparison.py<|end_file_name|><๏ฝfimโbegin๏ฝ>#!/usr/bin/env python
from __future__ import division
import sys, os
import numpy as np
import readnew
from glob import glob
#import re
import yaml
import os.path
import time # Need to wait some time if file is being written
# Example: /home/jordan/sad-monte-carlo/
filename_location = sys.argv[1]
# Example: data/samc-1e4-256-cpp-reference-lndos.dat
reference = sys.argv[2]
# Used for where we save the data.: s000/periodic-ww1.50-ff0.17-N256
filebase = sys.argv[3]
# The number to divide moves by! N is added back in comparison-plot
N = int(sys.argv[4])
# Energy range
Smin = int(sys.argv[5])
Smax = int(sys.argv[6])
# Are you comparing to a yaml reference?
yamlRef = sys.argv[7].lower() in ('1', 'true', 'yes')  # bool("False") would be True
filename = sys.argv[8:]
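# Editorial aside (hypothetical invocation, built from the examples in the
# comments above; all paths and numbers are illustrative):
#   python yaml-comparison.py /home/jordan/sad-monte-carlo/ \
#       data/samc-1e4-256-cpp-reference-lndos.dat \
#       s000/periodic-ww1.50-ff0.17-N256 256 1 255 1 samc-1e4-256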
print('filenames are', filename)
for f in filename:
name = '%s.yaml' % (f)
print('trying filename', name)
while not os.path.exists(filename_location + name):
print('I am waiting for the file to be written.')
time.sleep(30)
# Read YAML file
if os.path.isfile(filename_location + name):
with open(filename_location + name, 'r') as stream:
yaml_data = yaml.safe_load(stream)
else:
raise ValueError("%s isn't a file!" % (filename_location + name))
#print(data_loaded)
data = yaml_data
data['bins']['histogram'] = np.array(data['bins']['histogram'])
data['bins']['lnw'] = np.array(data['bins']['lnw'])
data['movies']['energy']
minyaml = data['movies']['energy'].index(-Smax)
maxyaml = data['movies']['energy'].index(-Smin)
#print(data['bins']['lnw'])
moves = data['moves']
data['movies']['entropy'] = np.array(data['movies']['entropy'])
lndos = data['movies']['entropy']
N_save_times = len(data['movies']['entropy'])
ref = reference
if ref[:len('data/')] != 'data/':
ref = 'data/' + ref
maxref = Smax #int(readnew.max_entropy_state(ref))
minref = Smin # int(readnew.min_important_energy(ref))
n_energies = int(minref - maxref+1)
#print maxref, minref
try:
eref, lndosref, Nrt_ref = readnew.e_lndos_ps(ref)
except:
eref, lndosref = readnew.e_lndos(ref)
errorinentropy = np.zeros(N_save_times)
maxerror = np.zeros(N_save_times)
for i in range(0, N_save_times):
# below just set average S equal between lndos and lndosref<๏ฝfimโhole๏ฝ> norm_factor = np.mean(lndos[i][maxyaml:minyaml+1]) - np.mean(lndosref[0:(minyaml+1-maxyaml)])
doserror = lndos[i][maxyaml:minyaml+1][::-1] - lndosref[0:(minyaml+1-maxyaml)] - norm_factor
else:
norm_factor = np.mean(lndos[i][maxyaml:minyaml+1]) - np.mean(lndosref[maxref:minref+1])
doserror = lndos[i][maxyaml:minyaml+1][::-1] - lndosref[maxref:minref+1] - norm_factor
errorinentropy[i] = np.sum(abs(doserror))/len(doserror)
maxerror[i] = np.amax(doserror) - np.amin(doserror)
# remove N from moves in yaml file because N is added back in the
# comparison-plot script
moves = list(map(int, data['movies']['time']))
moves = [x / N for x in moves]
errorinentropy = errorinentropy[:len(moves)]
maxerror = maxerror[:len(moves)]
dirname = 'data/comparison/%s-%s' % (filebase, name.replace('.yaml', ''))
print('saving to', dirname)
try:
os.mkdir(dirname)
except OSError:
pass
else:
print(("Successfully created the directory %s " % dirname))
np.savetxt('%s/errors.txt' %(dirname),
np.c_[moves, errorinentropy, maxerror],
fmt = ('%.4g'),
delimiter = '\t',
header = 'iterations\t errorinentropy\t maxerror\t(generated with python %s' % ' '.join(sys.argv))
# The following is intended for testing whether there is a
# systematic error in any of our codes.
#np.savetxt('%s/error-vs-energy.txt' %(dirname),
#np.c_[eref, doserror],
#fmt = ('%.4g'),
#delimiter = '\t', header = 'E\t Serror')<๏ฝfimโend๏ฝ> | if yamlRef:
# if using yaml as a reference the range is from 0 to len while for C++ the range is
# from maxref to minref + 1 |
<|file_name|>com_biomedcentral.py<|end_file_name|><๏ฝfimโbegin๏ฝ>from share.transform.chain import * # noqa
class AgentIdentifier(Parser):
uri = IRI(ctx)
class WorkIdentifier(Parser):
uri = IRI(ctx)
class Tag(Parser):
name = ctx
class ThroughTags(Parser):
tag = Delegate(Tag, ctx)
class Person(Parser):
given_name = ParseName(ctx.creator).first
family_name = ParseName(ctx.creator).last
additional_name = ParseName(ctx.creator).middle
suffix = ParseName(ctx.creator).suffix
class Creator(Parser):
agent = Delegate(Person, ctx)
cited_as = ctx.creator
order_cited = ctx('index')
class Organization(Parser):
name = ctx.publisher
identifiers = Map(Delegate(AgentIdentifier), ctx.issn)
class Publisher(Parser):
agent = Delegate(Organization, ctx)
class Extra:
publication_name = ctx.publicationName
class Article(Parser):
title = ctx.title
description = ctx.abstract
rights = ctx.copyright
date_published = ParseDate(ctx.publicationDate)
date_updated = ParseDate(ctx.publicationDate)
identifiers = Map(
Delegate(WorkIdentifier),
ctx.doi,
ctx.identifier,
Map(ctx.value, ctx.url),
)
related_agents = Concat(
Map(Delegate(Creator), ctx.creators),
Map(Delegate(Publisher), ctx)
)
tags = Map(Delegate(ThroughTags), ctx.genre)<๏ฝfimโhole๏ฝ> ending_page = Try(ctx.endingPage)
issue_type = Try(ctx.issuetype)
number = ctx.number
starting_page = ctx.startingPage
topicalCollection = Try(ctx.topicalCollection)
journalid = Try(ctx.journalid)
issn = Try(ctx.issn)
class BioMedCentralTransformer(ChainTransformer):
VERSION = 1
root_parser = Article<๏ฝfimโend๏ฝ> |
class Extra:
openaccess = ctx.openaccess |
<|file_name|>reload.py<|end_file_name|><๏ฝfimโbegin๏ฝ>"""Class to reload platforms."""
from __future__ import annotations
import asyncio
from collections.abc import Iterable
import logging
from typing import Any
from homeassistant import config as conf_util
from homeassistant.const import SERVICE_RELOAD
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.loader import async_get_integration
from homeassistant.setup import async_setup_component
from . import config_per_platform
from .entity_platform import EntityPlatform, async_get_platforms
from .typing import ConfigType
# mypy: disallow-any-generics
_LOGGER = logging.getLogger(__name__)
async def async_reload_integration_platforms(
hass: HomeAssistant, integration_name: str, integration_platforms: Iterable[str]
) -> None:
"""Reload an integration's platforms.
The platform must support being re-setup.
This functionality is only intended to be used for integrations that process
Home Assistant data and make this available to other integrations.
Examples are template, stats, derivative, utility meter.
"""
try:
unprocessed_conf = await conf_util.async_hass_config_yaml(hass)
except HomeAssistantError as err:
_LOGGER.error(err)
return
tasks = [
_resetup_platform(
hass, integration_name, integration_platform, unprocessed_conf
)
for integration_platform in integration_platforms
]
await asyncio.gather(*tasks)
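# Editorial aside (illustrative; "my_filter" is a hypothetical integration):
#   await async_reload_integration_platforms(hass, "my_filter", ["sensor"])
# re-reads configuration.yaml and re-sets up every my_filter sensor platform.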
async def _resetup_platform(
hass: HomeAssistant,
integration_name: str,
integration_platform: str,
unprocessed_conf: ConfigType,
) -> None:
"""Resetup a platform."""
integration = await async_get_integration(hass, integration_platform)
conf = await conf_util.async_process_component_config(
hass, unprocessed_conf, integration
)
if not conf:
return
root_config: dict[str, Any] = {integration_platform: []}
# Extract only the config for template, ignore the rest.
for p_type, p_config in config_per_platform(conf, integration_platform):
if p_type != integration_name:
continue
root_config[integration_platform].append(p_config)
component = integration.get_component()
if hasattr(component, "async_reset_platform"):
# If the integration has its own way to reset
# use this method.
await component.async_reset_platform(hass, integration_name)
await component.async_setup(hass, root_config)
return
# If it's an entity platform, we use the entity_platform
# async_reset method
platform = async_get_platform_without_config_entry(
hass, integration_name, integration_platform
)
if platform:
await _async_reconfig_platform(platform, root_config[integration_platform])
return
if not root_config[integration_platform]:
# No config for this platform
# and it's not loaded. Nothing to do.
return
await _async_setup_platform(
hass, integration_name, integration_platform, root_config[integration_platform]
)
async def _async_setup_platform(
hass: HomeAssistant,
integration_name: str,
integration_platform: str,
platform_configs: list[dict[str, Any]],
) -> None:
"""Platform for the first time when new configuration is added."""
if integration_platform not in hass.data:
await async_setup_component(
hass, integration_platform, {integration_platform: platform_configs}
)
return
entity_component = hass.data[integration_platform]
tasks = [
entity_component.async_setup_platform(integration_name, p_config)<๏ฝfimโhole๏ฝ>
async def _async_reconfig_platform(
platform: EntityPlatform, platform_configs: list[dict[str, Any]]
) -> None:
"""Reconfigure an already loaded platform."""
await platform.async_reset()
tasks = [platform.async_setup(p_config) for p_config in platform_configs]
await asyncio.gather(*tasks)
async def async_integration_yaml_config(
hass: HomeAssistant, integration_name: str
) -> ConfigType | None:
"""Fetch the latest yaml configuration for an integration."""
integration = await async_get_integration(hass, integration_name)
return await conf_util.async_process_component_config(
hass, await conf_util.async_hass_config_yaml(hass), integration
)
@callback
def async_get_platform_without_config_entry(
hass: HomeAssistant, integration_name: str, integration_platform_name: str
) -> EntityPlatform | None:
"""Find an existing platform that is not a config entry."""
for integration_platform in async_get_platforms(hass, integration_name):
if integration_platform.config_entry is not None:
continue
if integration_platform.domain == integration_platform_name:
platform: EntityPlatform = integration_platform
return platform
return None
async def async_setup_reload_service(
hass: HomeAssistant, domain: str, platforms: Iterable[str]
) -> None:
"""Create the reload service for the domain."""
if hass.services.has_service(domain, SERVICE_RELOAD):
return
async def _reload_config(call: Event) -> None:
"""Reload the platforms."""
await async_reload_integration_platforms(hass, domain, platforms)
hass.bus.async_fire(f"event_{domain}_reloaded", context=call.context)
hass.helpers.service.async_register_admin_service(
domain, SERVICE_RELOAD, _reload_config
)
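# Editorial aside (illustrative; domain and platform names are hypothetical):
# an integration calls this once during setup, e.g.
#   await async_setup_reload_service(hass, "my_domain", ["sensor", "switch"])
# after which calling the `my_domain.reload` service re-runs the YAML setup.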
def setup_reload_service(
hass: HomeAssistant, domain: str, platforms: Iterable[str]
) -> None:
"""Sync version of async_setup_reload_service."""
asyncio.run_coroutine_threadsafe(
async_setup_reload_service(hass, domain, platforms),
hass.loop,
).result()<๏ฝfimโend๏ฝ> | for p_config in platform_configs
]
await asyncio.gather(*tasks) |