prompt (large_string, lengths 70 to 991k) | completion (large_string, lengths 0 to 1.02k) |
---|---|
<|file_name|>regions-addr-of-self.rs<|end_file_name|><|fim▁begin|>struct Dog {
cats_chased: usize,
}
impl Dog {
pub fn chase_cat(&mut self) {
let p: &'static mut usize = &mut self.cats_chased; //~ ERROR E0759
*p += 1;
}
pub fn chase_cat_2(&mut self) {
let p: &mut usize = &mut self.cats_chased;
*p += 1;
}
}
fn dog() -> Dog {
Dog {
cats_chased: 0
}
}<|fim▁hole|>fn main() {
let mut d = dog();
d.chase_cat();
println!("cats_chased: {}", d.cats_chased);
}<|fim▁end|> | |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>extern crate encoding;
extern crate minifb;
extern crate cpal;
extern crate futures;
#[macro_use]
extern crate clap;
extern crate combine;
extern crate rustual_boy_core;
extern crate rustual_boy_middleware;
mod argparse;
#[macro_use]
mod logging;
mod command;
mod cpal_driver;
mod emulator;
mod system_time_source;
mod wave_file_buffer_sink;
use rustual_boy_core::rom::*;
use rustual_boy_core::sram::*;
use rustual_boy_core::vsu::*;
use cpal_driver::*;
use emulator::*;
fn main() {
let config = argparse::parse_args();
logln!("Loading ROM file {}", config.rom_path);<|fim▁hole|> let rom = Rom::load(&config.rom_path).unwrap();
log!("ROM size: ");
if rom.size() >= 1024 * 1024 {
logln!("{}MB", rom.size() / 1024 / 1024);
} else {
logln!("{}KB", rom.size() / 1024);
}
logln!("Header info:");
logln!(" name: \"{}\"", rom.name().unwrap());
logln!(" maker code: \"{}\"", rom.maker_code().unwrap());
logln!(" game code: \"{}\"", rom.game_code().unwrap());
logln!(" game version: 1.{:#02}", rom.game_version_byte());
logln!("Attempting to load SRAM file: {}", config.sram_path);
let sram = match Sram::load(&config.sram_path) {
Ok(sram) => {
logln!(" SRAM loaded successfully");
sram
}
Err(err) => {
logln!(" Couldn't load SRAM file: {}", err);
Sram::new()
}
};
let audio_driver = CpalDriver::new(SAMPLE_RATE as _, 100).unwrap();
let audio_buffer_sink = audio_driver.sink();
let time_source = audio_driver.time_source();
let mut emulator = Emulator::new(rom, sram, audio_buffer_sink, time_source);
emulator.run();
if emulator.virtual_boy.interconnect.sram.size() > 0 {
logln!("SRAM used, saving to {}", config.sram_path);
emulator.virtual_boy.interconnect.sram.save(config.sram_path).unwrap();
}
}<|fim▁end|> | |
<|file_name|>run_tests.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
import sys
import os
sys.path.append(os.path.realpath("."))
import unittest
import cleanstream
import tagger
import pretransfer
import transfer
import interchunk
import postchunk
import adaptdocx
if __name__ == "__main__":
os.chdir(os.path.dirname(__file__))<|fim▁hole|> transfer,
interchunk,
postchunk,
adaptdocx,
cleanstream]:
suite = unittest.TestLoader().loadTestsFromModule(module)
res = unittest.TextTestRunner(verbosity=2).run(suite)
if(not(res.wasSuccessful())):
failures += 1
sys.exit(min(failures, 255))<|fim▁end|> | failures = 0
for module in [tagger,
pretransfer, |
<|file_name|>post_to_rails.js<|end_file_name|><|fim▁begin|>var querystring = require('querystring');
var http = require('http');
var postData = querystring.stringify({
'value' : '55',
'room_id' : '1'
});
var options = {
hostname: 'localhost',
port: 80,
path: '/temperatures',
method: 'POST',
headers: {
'Content-Type': 'application/x-www-form-urlencoded',
'Content-Length': postData.length
}<|fim▁hole|>};
var req = http.request(options, function(res) {
console.log('STATUS: ' + res.statusCode);
console.log('HEADERS: ' + JSON.stringify(res.headers));
res.setEncoding('utf8');
res.on('data', function (chunk) {
console.log('BODY: ' + chunk);
});
});
req.on('error', function(e) {
console.log('problem with request: ' + e.message);
});
// write data to request body
req.write(postData);
req.end();<|fim▁end|> | |
<|file_name|>a.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![crate_type="rlib"]
#[cfg(rpass1)]
pub struct X {
pub x: u32
}
#[cfg(rpass2)]
pub struct X {<|fim▁hole|>pub struct EmbedX {
pub x: X
}
pub struct Y {
pub y: char
}<|fim▁end|> | pub x: i32
}
|
<|file_name|>mallocstacks.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#
# mallocstacks Trace malloc() calls in a process and print the full
# stack trace for all callsites.
# For Linux, uses BCC, eBPF. Embedded C.
#
# This script is a basic example of the new Linux 4.6+ BPF_STACK_TRACE
# table API.
#
# Copyright 2016 GitHub, Inc.
# Licensed under the Apache License, Version 2.0 (the "License")
from __future__ import print_function
from bcc import BPF
from bcc.utils import printb
from time import sleep
import sys
if len(sys.argv) < 2:
print("USAGE: mallocstacks PID [NUM_STACKS=1024]")
exit()
pid = int(sys.argv[1])
if len(sys.argv) == 3:
try:
assert int(sys.argv[2]) > 0, ""
except (ValueError, AssertionError) as e:
print("USAGE: mallocstacks PID [NUM_STACKS=1024]")
print("NUM_STACKS must be a non-zero, positive integer")
exit()
stacks = sys.argv[2]
else:
stacks = "1024"
# load BPF program<|fim▁hole|>b = BPF(text="""
#include <uapi/linux/ptrace.h>
BPF_HASH(calls, int);
BPF_STACK_TRACE(stack_traces, """ + stacks + """);
int alloc_enter(struct pt_regs *ctx, size_t size) {
int key = stack_traces.get_stackid(ctx, BPF_F_USER_STACK);
if (key < 0)
return 0;
// could also use `calls.increment(key, size);`
u64 zero = 0, *val;
val = calls.lookup_or_try_init(&key, &zero);
if (val) {
(*val) += size;
}
return 0;
};
""")
b.attach_uprobe(name="c", sym="malloc", fn_name="alloc_enter", pid=pid)
print("Attaching to malloc in pid %d, Ctrl+C to quit." % pid)
# sleep until Ctrl-C
try:
sleep(99999999)
except KeyboardInterrupt:
pass
calls = b.get_table("calls")
stack_traces = b.get_table("stack_traces")
for k, v in reversed(sorted(calls.items(), key=lambda c: c[1].value)):
print("%d bytes allocated at:" % v.value)
if k.value > 0 :
for addr in stack_traces.walk(k.value):
printb(b"\t%s" % b.sym(addr, pid, show_offset=True))<|fim▁end|> | |
<|file_name|>index.tsx<|end_file_name|><|fim▁begin|>import * as React from 'react'
import * as css from './styles.scss'
import { Classes } from 'helpers'
export interface P {
name?: string;
type?: 'button' | 'checkbox' | 'file' | 'hidden' | 'password' | 'radio' | 'text';
label?: string;
value?: string | number;
focus?: boolean;
integer?: boolean;
floating?: boolean;
clearable?: boolean;
placeholder?: string;
className: string;
wrapperClassName: string;
handleBlur?: (value: string) => void | boolean;
handleFocus?: (e: Event) => void | boolean;
handleChange?: (e: Event) => void | boolean;
handleKeyPress?: (keyCode: number) => void | boolean;
}
export interface S {
value?: string | number;
}
const cx = Classes.bind(css)
export default class SimpleInput extends React.Component<P, S> {
static defaultProps = {
type: 'text',
label: '',
value: '',
focus: false,
integer: false,
floating: false,
clearable: false,
}
static getDerivedStateFromProps (props: P, state: S) {
if (state.value !== props.value) {
return {
value: props.value,
touch: false,
}
}
return {
touch: false,
}
}
state = {
value: '',
}
input: any = React.createRef()
componentDidMount () {
if (this.props.focus) {
this.input.current.focus()
}
}
prepared = (value: string) => {
const { integer, floating } = this.props
if (integer) {
return parseInt(value, 10)
} else if (floating) {
return parseFloat(value)
}
return value
}
handleClear = () => {
this.setState((state: S) => ({
...state, value: '',
}), () => {
if (this.props.handleChange) {
this.props.handleChange('')
}
})
}
handleChange = (e: any) => {
const value = this.prepared(e.target.value)
this.setState((state: S) => ({
...state, value,
}), () => {
if (this.props.handleChange) {
this.props.handleChange(value)
}
})
}<|fim▁hole|> this.props.handleFocus(e)
}
}
handleBlur = (e: any) => {
const value = this.prepared(e.target.value)
if (this.props.handleBlur) {
this.props.handleBlur(value)
}
}
handleKeyPress = (e: any) => {
const keyCode = e.keyCode || e.charCode || e.which
if (this.props.handleKeyPress) {
this.props.handleKeyPress(keyCode)
}
}
render () {
const { value } = this.state
const { label, clearable, placeholder, wrapperClassName, className } = this.props
return (
<label className={cx({ wrapper: true }, wrapperClassName)}>
{label &&
<div className={cx(css.label)}>{label}</div>
}
<input
ref={this.input}
value={value}
onBlur={this.handleBlur}
onFocus={this.handleFocus}
onChange={this.handleChange}
onKeyPress={this.handleKeyPress}
placeholder={placeholder}
className={cx(css.control, css.controlInput, className)}
/>
{value && clearable &&
<button type="button" onClick={this.handleClear} className={css.cleaner} />
}
</label>
)
}
}<|fim▁end|> |
handleFocus = (e: any) => {
if (this.props.handleFocus) { |
<|file_name|>model_classes.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import sys
import os
import logging
import random
import PyQt4
from PyQt4.QtCore import *
#from PyQt4.QtCore import QAbstractTableModel
import constants
class Model(QAbstractTableModel):
keys = list()
modelType = None
def __init__(self, parent = None):
''' '''
self.log = logging.getLogger('Model')
#self.log.debug('__init__ start')<|fim▁hole|> #self.log.debug('rowCount start')
#self.log.debug('rowCount end')
if hasattr(self, 'album') and self.album:
if hasattr(self.album, 'rows'):
return len(self.album.rows)
return 0
def columnCount(self, parent = None):
''' '''
#self.log.debug('columnCount start')
#self.log.debug('columnCount end')
return len(self.keys)
def data(self, index, role = None):
''' '''
#self.log.debug('data start')
if index.isValid():
if index.row() >= 0 or index.row() < len(self.rows):
if role == Qt.DisplayRole or role == Qt.ToolTipRole or role == Qt.EditRole:
return self.album.rows[index.row()][self.keys[index.column()]]
#self.log.debug('data end')
return QVariant()
def setData(self, index, value, role):
''' '''
#self.log.debug('setData start')
if index.isValid() and role == Qt.EditRole:
key = self.keys[index.column()]
row = index.row()
value = unicode(value.toString())
self.album.rows[index.row()][key] = value
self.emit(SIGNAL('dataChanged'), index, index)
#self.log.debug('setData end')
return True
def headerData(self, section, orientation, role):
''' '''
#self.log.debug('headerData start' + str(section))
if section >= 0 and section < len(self.keys):
if orientation == Qt.Horizontal and role == Qt.DisplayRole:
return self.keys[section]
#self.log.debug('headerData end ')
return QVariant()
def flags(self, index):
''' '''
#self.log.debug('flags start')
if self.modelType == constants.ModelType.ModelTypeFinal:
return super(QAbstractTableModel, self).flags(index) | Qt.ItemIsEditable
#self.log.debug('flags end')
return super(QAbstractTableModel, self).flags(index)
def getModelType(self):
''' '''
#self.log.debug('getModelType start')
#self.log.debug('getModelType end')
return self.modelType
#def getState(self):
#''' '''
##self.log.debug('getState start')
##self.log.debug('getState end')
#return None<|fim▁end|> | super(QAbstractTableModel, self).__init__(parent)
def rowCount(self, parent = None):
''' ''' |
<|file_name|>routes.js<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2011-2013 Lp digital system<|fim▁hole|> * it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* BackBuilder5 is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with BackBuilder5. If not, see <http://www.gnu.org/licenses/>.
*/
define(['tb.core'], function (Core) {
'use strict';
/**
* Register every routes of bundle application into Core.routeManager
*/
Core.RouteManager.registerRoute('user', {
prefix: 'user',
routes: {
index: {
url: '/index',
action: 'MainController:index'
}
}
});
});<|fim▁end|> | *
* This file is part of BackBuilder5.
*
* BackBuilder5 is free software: you can redistribute it and/or modify |
<|file_name|>copy_if.hpp<|end_file_name|><|fim▁begin|>#ifndef BOOST_MPL_COPY_IF_HPP_INCLUDED<|fim▁hole|>// Copyright David Abrahams 2003-2004
//
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
// See http://www.boost.org/libs/mpl for documentation.
// $Source$
// $Date: 2004-09-02 11:41:37 -0400 (Thu, 02 Sep 2004) $
// $Revision: 24874 $
#include <boost/mpl/fold.hpp>
#include <boost/mpl/reverse_fold.hpp>
#include <boost/mpl/apply.hpp>
#include <boost/mpl/eval_if.hpp>
#include <boost/mpl/identity.hpp>
#include <boost/mpl/protect.hpp>
#include <boost/mpl/aux_/inserter_algorithm.hpp>
#include <boost/mpl/aux_/config/forwarding.hpp>
namespace boost { namespace mpl {
namespace aux {
template<
typename Operation
, typename Predicate
>
struct copy_if_op
{
template< typename Sequence, typename T > struct apply
#if !defined(BOOST_MPL_CFG_NO_NESTED_FORWARDING)
: eval_if<
typename apply1<Predicate,T>::type
, apply2<Operation,Sequence,T>
, identity<Sequence>
>
{
#else
{
typedef typename eval_if<
typename apply1<Predicate,T>::type
, apply2<Operation,Sequence,T>
, identity<Sequence>
>::type type;
#endif
};
};
template<
typename Sequence
, typename Predicate
, typename Inserter
>
struct copy_if_impl
: fold<
Sequence
, typename Inserter::state
, protect< aux::copy_if_op<
typename Inserter::operation
, Predicate
> >
>
{
};
template<
typename Sequence
, typename Predicate
, typename Inserter
>
struct reverse_copy_if_impl
: reverse_fold<
Sequence
, typename Inserter::state
, protect< aux::copy_if_op<
typename Inserter::operation
, Predicate
> >
>
{
};
} // namespace aux
BOOST_MPL_AUX_INSERTER_ALGORITHM_DEF(3, copy_if)
}}
#endif // BOOST_MPL_COPY_IF_HPP_INCLUDED<|fim▁end|> | #define BOOST_MPL_COPY_IF_HPP_INCLUDED
// Copyright Aleksey Gurtovoy 2000-2004 |
<|file_name|>0005_auto__add_field_person_education__add_field_person_birthday.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Person.education'
db.add_column('person_person', 'education',
self.gf('django.db.models.fields.CharField')(default='', max_length=255, blank=True),
keep_default=False)
# Adding field 'Person.birthday'
db.add_column('person_person', 'birthday',
self.gf('django.db.models.fields.DateTimeField')(null=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Person.education'
db.delete_column('person_person', 'education')
# Deleting field 'Person.birthday'
db.delete_column('person_person', 'birthday')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},<|fim▁hole|> 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'person.person': {
'Meta': {'object_name': 'Person'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'birthday': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'education': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'fullname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'default': "'unknown'", 'max_length': '7'}),
'nickname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'occupation': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'normal'", 'max_length': '20'}),
'subscribing': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'primary_key': 'True'})
}
}
complete_apps = ['person']<|fim▁end|> | 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), |
<|file_name|>value_prop_tests.rs<|end_file_name|><|fim▁begin|>// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::values::{prop::layout_and_value_strategy, Value};
use move_core_types::value::MoveValue;
use proptest::prelude::*;
proptest! {
#[test]
fn serializer_round_trip((layout, value) in layout_and_value_strategy()) {
let blob = value.simple_serialize(&layout).expect("must serialize");
let value_deserialized = Value::simple_deserialize(&blob, &layout).expect("must deserialize");<|fim▁hole|> assert!(value.equals(&value_deserialized).unwrap());
let move_value = value.as_move_value(&layout);
let blob2 = move_value.simple_serialize().expect("must serialize");
assert_eq!(blob, blob2);
let move_value_deserialized = MoveValue::simple_deserialize(&blob2, &layout).expect("must deserialize.");
assert_eq!(move_value, move_value_deserialized);
}
}<|fim▁end|> | |
<|file_name|>senateisvp.py<|end_file_name|><|fim▁begin|># coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
ExtractorError,
unsmuggle_url,
)
from ..compat import (
compat_parse_qs,
compat_urlparse,
)
class SenateISVPIE(InfoExtractor):
_COMM_MAP = [
['ag', '76440', 'http://ag-f.akamaihd.net'],
['aging', '76442', 'http://aging-f.akamaihd.net'],
['approps', '76441', 'http://approps-f.akamaihd.net'],
['armed', '76445', 'http://armed-f.akamaihd.net'],
['banking', '76446', 'http://banking-f.akamaihd.net'],
['budget', '76447', 'http://budget-f.akamaihd.net'],
['cecc', '76486', 'http://srs-f.akamaihd.net'],
['commerce', '80177', 'http://commerce1-f.akamaihd.net'],
['csce', '75229', 'http://srs-f.akamaihd.net'],
['dpc', '76590', 'http://dpc-f.akamaihd.net'],
['energy', '76448', 'http://energy-f.akamaihd.net'],
['epw', '76478', 'http://epw-f.akamaihd.net'],
['ethics', '76449', 'http://ethics-f.akamaihd.net'],
['finance', '76450', 'http://finance-f.akamaihd.net'],
['foreign', '76451', 'http://foreign-f.akamaihd.net'],
['govtaff', '76453', 'http://govtaff-f.akamaihd.net'],
['help', '76452', 'http://help-f.akamaihd.net'],
['indian', '76455', 'http://indian-f.akamaihd.net'],
['intel', '76456', 'http://intel-f.akamaihd.net'],
['intlnarc', '76457', 'http://intlnarc-f.akamaihd.net'],
['jccic', '85180', 'http://jccic-f.akamaihd.net'],
['jec', '76458', 'http://jec-f.akamaihd.net'],
['judiciary', '76459', 'http://judiciary-f.akamaihd.net'],
['rpc', '76591', 'http://rpc-f.akamaihd.net'],
['rules', '76460', 'http://rules-f.akamaihd.net'],
['saa', '76489', 'http://srs-f.akamaihd.net'],
['smbiz', '76461', 'http://smbiz-f.akamaihd.net'],
['srs', '75229', 'http://srs-f.akamaihd.net'],
['uscc', '76487', 'http://srs-f.akamaihd.net'],
['vetaff', '76462', 'http://vetaff-f.akamaihd.net'],
['arch', '', 'http://ussenate-f.akamaihd.net/']
]
_IE_NAME = 'senate.gov'
_VALID_URL = r'https?://(?:www\.)?senate\.gov/isvp/?\?(?P<qs>.+)'
_TESTS = [{
'url': 'http://www.senate.gov/isvp/?comm=judiciary&type=live&stt=&filename=judiciary031715&auto_play=false&wmode=transparent&poster=http%3A%2F%2Fwww.judiciary.senate.gov%2Fthemes%2Fjudiciary%2Fimages%2Fvideo-poster-flash-fit.png',
'info_dict': {
'id': 'judiciary031715',
'ext': 'mp4',
'title': 'Integrated Senate Video Player',
'thumbnail': r're:^https?://.*\.(?:jpg|png)$',
},
'params': {
# m3u8 download
'skip_download': True,
},
}, {
'url': 'http://www.senate.gov/isvp/?type=live&comm=commerce&filename=commerce011514.mp4&auto_play=false',
'info_dict': {
'id': 'commerce011514',
'ext': 'mp4',
'title': 'Integrated Senate Video Player'
},
'params': {
# m3u8 download
'skip_download': True,
},
}, {
'url': 'http://www.senate.gov/isvp/?type=arch&comm=intel&filename=intel090613&hc_location=ufi',
# checksum differs each time
'info_dict': {
'id': 'intel090613',
'ext': 'mp4',
'title': 'Integrated Senate Video Player'
}
}, {
# From http://www.c-span.org/video/?96791-1
'url': 'http://www.senate.gov/isvp?type=live&comm=banking&filename=banking012715',
'only_matching': True,
}]
@staticmethod
def _search_iframe_url(webpage):
mobj = re.search(
r"<iframe[^>]+src=['\"](?P<url>http://www\.senate\.gov/isvp/?\?[^'\"]+)['\"]",
webpage)
if mobj:
return mobj.group('url')
def _get_info_for_comm(self, committee):
for entry in self._COMM_MAP:
if entry[0] == committee:<|fim▁hole|>
def _real_extract(self, url):
url, smuggled_data = unsmuggle_url(url, {})
qs = compat_parse_qs(re.match(self._VALID_URL, url).group('qs'))
if not qs.get('filename') or not qs.get('type') or not qs.get('comm'):
raise ExtractorError('Invalid URL', expected=True)
video_id = re.sub(r'.mp4$', '', qs['filename'][0])
webpage = self._download_webpage(url, video_id)
if smuggled_data.get('force_title'):
title = smuggled_data['force_title']
else:
title = self._html_search_regex(r'<title>([^<]+)</title>', webpage, video_id)
poster = qs.get('poster')
thumbnail = poster[0] if poster else None
video_type = qs['type'][0]
committee = video_type if video_type == 'arch' else qs['comm'][0]
stream_num, domain = self._get_info_for_comm(committee)
formats = []
if video_type == 'arch':
filename = video_id if '.' in video_id else video_id + '.mp4'
formats = [{
# All parameters in the query string are necessary to prevent a 403 error
'url': compat_urlparse.urljoin(domain, filename) + '?v=3.1.0&fp=&r=&g=',
}]
else:
hdcore_sign = 'hdcore=3.1.0'
url_params = (domain, video_id, stream_num)
f4m_url = '%s/z/%s_1@%s/manifest.f4m?' % url_params + hdcore_sign
m3u8_url = '%s/i/%s_1@%s/master.m3u8' % url_params
for entry in self._extract_f4m_formats(f4m_url, video_id, f4m_id='f4m'):
# URLs without the extra param induce an 404 error
entry.update({'extra_param_to_segment_url': hdcore_sign})
formats.append(entry)
for entry in self._extract_m3u8_formats(m3u8_url, video_id, ext='mp4', m3u8_id='m3u8'):
mobj = re.search(r'(?P<tag>(?:-p|-b)).m3u8', entry['url'])
if mobj:
entry['format_id'] += mobj.group('tag')
formats.append(entry)
self._sort_formats(formats)
return {
'id': video_id,
'title': title,
'formats': formats,
'thumbnail': thumbnail,
}<|fim▁end|> | return entry[1:] |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.shortcuts import render
from django.http import HttpResponse
import json<|fim▁hole|>
def services(request):
return render(request, 'services/services.html', {})<|fim▁end|> | |
<|file_name|>match-bot.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT<|fim▁hole|>// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub fn main() {
let i: int =
match Some::<int>(3) { None::<int> => { panic!() } Some::<int>(_) => { 5 } };
println!("{}", i);
}<|fim▁end|> | // file at the top-level directory of this distribution and at |
<|file_name|>test_caching.py<|end_file_name|><|fim▁begin|>import datetime
import gzip
from itertools import count
import os
curdir = os.path.join(os.getcwd(), os.path.dirname(__file__))
import sys
import threading
import time
import urllib
import cherrypy
from cherrypy._cpcompat import next, ntob, quote, xrange
from cherrypy.lib import httputil
gif_bytes = ntob(
'GIF89a\x01\x00\x01\x00\x82\x00\x01\x99"\x1e\x00\x00\x00\x00\x00'
'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
'\x00,\x00\x00\x00\x00\x01\x00\x01\x00\x02\x03\x02\x08\t\x00;'
)
from cherrypy.test import helper
class CacheTest(helper.CPWebCase):
def setup_server():
@cherrypy.config(**{'tools.caching.on': True})
class Root:
def __init__(self):
self.counter = 0
self.control_counter = 0
self.longlock = threading.Lock()
@cherrypy.expose
def index(self):
self.counter += 1
msg = "visit #%s" % self.counter
return msg
@cherrypy.expose
def control(self):
self.control_counter += 1
return "visit #%s" % self.control_counter
@cherrypy.expose
def a_gif(self):
cherrypy.response.headers[
'Last-Modified'] = httputil.HTTPDate()
return gif_bytes
@cherrypy.expose
def long_process(self, seconds='1'):
try:
self.longlock.acquire()
time.sleep(float(seconds))
finally:
self.longlock.release()
return 'success!'
@cherrypy.expose
def clear_cache(self, path):
cherrypy._cache.store[cherrypy.request.base + path].clear()
@cherrypy.config(**{
'tools.caching.on': True,
'tools.response_headers.on': True,
'tools.response_headers.headers': [
('Vary', 'Our-Varying-Header')
],
})
class VaryHeaderCachingServer(object):
def __init__(self):
self.counter = count(1)
@cherrypy.expose
def index(self):
return "visit #%s" % next(self.counter)
@cherrypy.config(**{
'tools.expires.on': True,
'tools.expires.secs': 60,
'tools.staticdir.on': True,
'tools.staticdir.dir': 'static',
'tools.staticdir.root': curdir,
})
class UnCached(object):
@cherrypy.expose
@cherrypy.config(**{'tools.expires.secs': 0})
def force(self):
cherrypy.response.headers['Etag'] = 'bibbitybobbityboo'
self._cp_config['tools.expires.force'] = True
self._cp_config['tools.expires.secs'] = 0
return "being forceful"
@cherrypy.expose
def dynamic(self):
cherrypy.response.headers['Etag'] = 'bibbitybobbityboo'
cherrypy.response.headers['Cache-Control'] = 'private'
return "D-d-d-dynamic!"
@cherrypy.expose
def cacheable(self):
cherrypy.response.headers['Etag'] = 'bibbitybobbityboo'
return "Hi, I'm cacheable."
@cherrypy.expose
@cherrypy.config(**{'tools.expires.secs': 86400})
def specific(self):
cherrypy.response.headers[
'Etag'] = 'need_this_to_make_me_cacheable'
return "I am being specific"
class Foo(object):
pass
@cherrypy.expose
@cherrypy.config(**{'tools.expires.secs': Foo()})
def wrongtype(self):
cherrypy.response.headers[<|fim▁hole|>
cherrypy.tree.mount(Root())
cherrypy.tree.mount(UnCached(), "/expires")
cherrypy.tree.mount(VaryHeaderCachingServer(), "/varying_headers")
cherrypy.config.update({'tools.gzip.on': True})
setup_server = staticmethod(setup_server)
def testCaching(self):
elapsed = 0.0
for trial in range(10):
self.getPage("/")
# The response should be the same every time,
# except for the Age response header.
self.assertBody('visit #1')
if trial != 0:
age = int(self.assertHeader("Age"))
self.assert_(age >= elapsed)
elapsed = age
# POST, PUT, DELETE should not be cached.
self.getPage("/", method="POST")
self.assertBody('visit #2')
# Because gzip is turned on, the Vary header should always Vary for
# content-encoding
self.assertHeader('Vary', 'Accept-Encoding')
# The previous request should have invalidated the cache,
# so this request will recalc the response.
self.getPage("/", method="GET")
self.assertBody('visit #3')
# ...but this request should get the cached copy.
self.getPage("/", method="GET")
self.assertBody('visit #3')
self.getPage("/", method="DELETE")
self.assertBody('visit #4')
# The previous request should have invalidated the cache,
# so this request will recalc the response.
self.getPage("/", method="GET", headers=[('Accept-Encoding', 'gzip')])
self.assertHeader('Content-Encoding', 'gzip')
self.assertHeader('Vary')
self.assertEqual(
cherrypy.lib.encoding.decompress(self.body), ntob("visit #5"))
# Now check that a second request gets the gzip header and gzipped body
# This also tests a bug in 3.0 to 3.0.2 whereby the cached, gzipped
# response body was being gzipped a second time.
self.getPage("/", method="GET", headers=[('Accept-Encoding', 'gzip')])
self.assertHeader('Content-Encoding', 'gzip')
self.assertEqual(
cherrypy.lib.encoding.decompress(self.body), ntob("visit #5"))
# Now check that a third request that doesn't accept gzip
# skips the cache (because the 'Vary' header denies it).
self.getPage("/", method="GET")
self.assertNoHeader('Content-Encoding')
self.assertBody('visit #6')
def testVaryHeader(self):
self.getPage("/varying_headers/")
self.assertStatus("200 OK")
self.assertHeaderItemValue('Vary', 'Our-Varying-Header')
self.assertBody('visit #1')
# Now check that different 'Vary'-fields don't evict each other.
# This test creates 2 requests with different 'Our-Varying-Header'
# and then tests if the first one still exists.
self.getPage("/varying_headers/",
headers=[('Our-Varying-Header', 'request 2')])
self.assertStatus("200 OK")
self.assertBody('visit #2')
self.getPage("/varying_headers/",
headers=[('Our-Varying-Header', 'request 2')])
self.assertStatus("200 OK")
self.assertBody('visit #2')
self.getPage("/varying_headers/")
self.assertStatus("200 OK")
self.assertBody('visit #1')
def testExpiresTool(self):
# test setting an expires header
self.getPage("/expires/specific")
self.assertStatus("200 OK")
self.assertHeader("Expires")
# test exceptions for bad time values
self.getPage("/expires/wrongtype")
self.assertStatus(500)
self.assertInBody("TypeError")
# static content should not have "cache prevention" headers
self.getPage("/expires/index.html")
self.assertStatus("200 OK")
self.assertNoHeader("Pragma")
self.assertNoHeader("Cache-Control")
self.assertHeader("Expires")
# dynamic content that sets indicators should not have
# "cache prevention" headers
self.getPage("/expires/cacheable")
self.assertStatus("200 OK")
self.assertNoHeader("Pragma")
self.assertNoHeader("Cache-Control")
self.assertHeader("Expires")
self.getPage('/expires/dynamic')
self.assertBody("D-d-d-dynamic!")
# the Cache-Control header should be untouched
self.assertHeader("Cache-Control", "private")
self.assertHeader("Expires")
# configure the tool to ignore indicators and replace existing headers
self.getPage("/expires/force")
self.assertStatus("200 OK")
# This also gives us a chance to test 0 expiry with no other headers
self.assertHeader("Pragma", "no-cache")
if cherrypy.server.protocol_version == "HTTP/1.1":
self.assertHeader("Cache-Control", "no-cache, must-revalidate")
self.assertHeader("Expires", "Sun, 28 Jan 2007 00:00:00 GMT")
# static content should now have "cache prevention" headers
self.getPage("/expires/index.html")
self.assertStatus("200 OK")
self.assertHeader("Pragma", "no-cache")
if cherrypy.server.protocol_version == "HTTP/1.1":
self.assertHeader("Cache-Control", "no-cache, must-revalidate")
self.assertHeader("Expires", "Sun, 28 Jan 2007 00:00:00 GMT")
# the cacheable handler should now have "cache prevention" headers
self.getPage("/expires/cacheable")
self.assertStatus("200 OK")
self.assertHeader("Pragma", "no-cache")
if cherrypy.server.protocol_version == "HTTP/1.1":
self.assertHeader("Cache-Control", "no-cache, must-revalidate")
self.assertHeader("Expires", "Sun, 28 Jan 2007 00:00:00 GMT")
self.getPage('/expires/dynamic')
self.assertBody("D-d-d-dynamic!")
# dynamic sets Cache-Control to private but it should be
# overwritten here ...
self.assertHeader("Pragma", "no-cache")
if cherrypy.server.protocol_version == "HTTP/1.1":
self.assertHeader("Cache-Control", "no-cache, must-revalidate")
self.assertHeader("Expires", "Sun, 28 Jan 2007 00:00:00 GMT")
def testLastModified(self):
self.getPage("/a.gif")
self.assertStatus(200)
self.assertBody(gif_bytes)
lm1 = self.assertHeader("Last-Modified")
# this request should get the cached copy.
self.getPage("/a.gif")
self.assertStatus(200)
self.assertBody(gif_bytes)
self.assertHeader("Age")
lm2 = self.assertHeader("Last-Modified")
self.assertEqual(lm1, lm2)
# this request should match the cached copy, but raise 304.
self.getPage("/a.gif", [('If-Modified-Since', lm1)])
self.assertStatus(304)
self.assertNoHeader("Last-Modified")
if not getattr(cherrypy.server, "using_apache", False):
self.assertHeader("Age")
def test_antistampede(self):
SECONDS = 4
# We MUST make an initial synchronous request in order to create the
# AntiStampedeCache object, and populate its selecting_headers,
# before the actual stampede.
self.getPage("/long_process?seconds=%d" % SECONDS)
self.assertBody('success!')
self.getPage("/clear_cache?path=" +
quote('/long_process?seconds=%d' % SECONDS, safe=''))
self.assertStatus(200)
start = datetime.datetime.now()
def run():
self.getPage("/long_process?seconds=%d" % SECONDS)
# The response should be the same every time
self.assertBody('success!')
ts = [threading.Thread(target=run) for i in xrange(100)]
for t in ts:
t.start()
for t in ts:
t.join()
self.assertEqualDates(start, datetime.datetime.now(),
# Allow a second (two, for slow hosts)
# for our thread/TCP overhead etc.
seconds=SECONDS + 2)
def test_cache_control(self):
self.getPage("/control")
self.assertBody('visit #1')
self.getPage("/control")
self.assertBody('visit #1')
self.getPage("/control", headers=[('Cache-Control', 'no-cache')])
self.assertBody('visit #2')
self.getPage("/control")
self.assertBody('visit #2')
self.getPage("/control", headers=[('Pragma', 'no-cache')])
self.assertBody('visit #3')
self.getPage("/control")
self.assertBody('visit #3')
time.sleep(1)
self.getPage("/control", headers=[('Cache-Control', 'max-age=0')])
self.assertBody('visit #4')
self.getPage("/control")
self.assertBody('visit #4')<|fim▁end|> | 'Etag'] = 'need_this_to_make_me_cacheable'
return "Woops" |
<|file_name|>newlambdas-ret-infer2.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.<|fim▁hole|>// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that the lambda kind is inferred correctly as a return
// expression
fn unique() -> proc():'static { proc() () }
pub fn main() {
}<|fim▁end|> | // |
<|file_name|>overlapping.cpp<|end_file_name|><|fim▁begin|>#include "overlapping.hpp"
/* TODO:
* grow seeds from edges, not nodes.
* Why are the results changing?
* Then, speed up delV
* ZEntropy shouldn't consider the number of groups, that should be taken out to another function.
* That factorial expression would be better
* Write down some of the stats, and be sure they're correct.
* unordered_map for efficiency more often?
* a big vector for the comm_count_per_edge?
* keep track of frontier properly in growingSeed
* type tags for the multi_index_container, instead of get<1>
* random tie-breaking in frontier
* update _p_in also.
*
* PLAN:
* Fully abstract interface to grouping
* Track count of types of edges.
* Varying p_in and p_out
* Each edge to know how many communities it's in.
* Calculate global objective function every now and then.
* More efficient finding of seeds
* Random tie breaking
* Stop seed growing at first positive?
*/
/*
* Sources of randomness:
* 246: Choice of initial edge seed
* 249: (arbitrary, not random) Randomized p_in
* 459: (arbitrary, not random) Tie breaker in seed expansion
*/
#include <list>
#include <map>
#include <algorithm>
#include <functional>
#include <math.h>
#include <string.h>
#include <fstream>
#include <sstream>
#include <float.h>
#include <boost/multi_index_container.hpp>
#include <boost/multi_index/ordered_index.hpp>
#include <boost/multi_index/hashed_index.hpp>
//#include <boost/multi_index/identity.hpp>
#include <boost/multi_index/indexed_by.hpp>
#include <boost/multi_index/member.hpp>
#include <boost/unordered_map.hpp>
#include <boost/unordered_set.hpp>
#include "iterative.hpp"
#include "options.hpp"
#include "Range.hpp"
#include "grouping.hpp"
#include <set>
using namespace std;
using namespace std::tr1 ;
using namespace grouping;
char option_saveMOSESscores[1000] = "";
long option_seed = 0;
int flag_justCalcObjective = 0;
static void runMutual(void) {
//system("echo;echo -n \" >> ;${x};${gt};${me}; \" ; bash -c '~/Code/Mutual3/mutual3/mutual \"${x}\"{\"${me}\",\"${gt}\"}' 1>&2 ");
}
namespace overlapping {
bool flag_save_group_id_in_output = true;
template <class N> static void overlappingT(bloomGraph<N> &g);
void overlapping(SimpleIntGraph &g) { overlappingT(g); }
void overlapping(SimpleStringGraph &g) { overlappingT(g); }
void addSeed(Grouping &ging, const set<V> &nodes, bool randomized_p_in);
static pair<long double, set<V> > growThisEdge(Grouping &ging, V edgeNumber, const long double &boost, bool randomized_p_in);
static void update_p_out(Grouping &ging);
static void estimate_p_in_and_p_out(Grouping &ging);
long double MOSES_objective(const Grouping &ging);
void addSeed(Grouping &ging, const set<V> &nodes, bool randomized_p_in) {
assert(nodes.size()>0);
Group *g = ging.newG(randomized_p_in);
ForeachContainer(V v, nodes) {
ging.addV(g, v);
}
}
void seedGroupingWithPartition(Grouping &ging, const iterative::Partition &p);
template <class N> static void useOneNodeSeeds(Grouping &ging, bloomGraph<N> &g, bool randomize_p_in);
template <class N> static void groupStats(const Grouping &ging, bloomGraph<N> &g);
template <class N> static void save(Grouping &ging, bloomGraph<N> &g);
template <class N> static void louvainStyle(Grouping &ging, bloomGraph<N> &g);
static void tryMerges(Grouping &ging);
static void tryDeletions(Grouping &ging, bool SaveScores = false);
static bool tryAndApplyThisOne(Grouping &ging, V e, bool randomized_p_in);
template <class N> static void overlappingT(bloomGraph<N> &g) {
printf("env: ALLOW_BOOST000 %s\n", getenv("ALLOW_BOOST000"));
printf("env: Lookahead %s\n", getenv("Lookahead"));
printf("env: MaxCommSize %s\n", getenv("MaxCommSize"));
printf("env: OUTER_ITERS %s\n", getenv("OUTER_ITERS"));
printf("env: LOUVAIN_ITERS %s\n", getenv("LOUVAIN_ITERS"));
printf("NoBroken\n");
srand48(option_seed); // default seed. Will be changeable by command line arg.
/*
iterative::Partition p(g);
iterative::findPartition(p, g);
assert(option_planted[0]==0);
strcpy(option_planted, option_overlapping);
strcat(option_planted, "_partition");
savePartition(g, p, -1);
long double p_in = (long double) p.m_i / p.a;
long double p_out;
{
int64 b = p.N * (p.N-1L) / 2L;
int64 m_e = p.m-p.m_i;
p_out = (long double) (m_e) / (b - p.a);
}
PP(p_in);
PP(p_out);
//Grouping ging(g, p_in, p_out);
//seedGroupingWithPartition(ging, p);
*/
Grouping ging(g, option_p_in, option_p_out);
estimate_p_in_and_p_out(ging);
ging.value_of_objective_with_no_communities = MOSES_objective(ging);
//PP(ging._p_in);
//PP(ging._p_out);
if(option_loadOverlapping[0]) { // load preexisting grouping
Pn("Preloading grouping '%s'", option_loadOverlapping);
ifstream inFile(option_loadOverlapping);
//for (int i=0; i<5; ++i)
int lineNo=0;
while(inFile.peek()!=EOF)
{
lineNo ++;
string line;
getline(inFile, line);
if(line.length()==0) break;
Group *grp = ging.newG();
istringstream linestream(line);
//PP(line);//
if(linestream.peek()=='"') {
char ch;
linestream >> ch;
while(linestream.peek() != EOF) {
linestream >> ch;
if(ch=='"')
break;
}
if(linestream.peek()!='\t') Die("grouping file should have a tab after any \" \". line %d {%d '%c'}", lineNo, (int) ch, ch);
}
while(linestream >> ws, linestream.peek() != EOF) {
N n;
linestream >> n;
V v = g.key_for_vertexName(n);
assert(v>=0);
assert(v<g.vcount());
ging.addV(grp, v);
}
}
save(ging, g);
}
estimate_p_in_and_p_out(ging);
groupStats(ging, g);
MOSES_objective(ging);
if(flag_justCalcObjective)
exit(0);
const int max_OuterIters = atoi(getenv("OUTER_ITERS") ? : "20");
PP(max_OuterIters);
for (int k=0; k<max_OuterIters; k++) {
MOSES_objective(ging);
const size_t num_groups_before = ging.groups.size();
ostringstream s;
s << "Whole outer iter " << k << "/" << max_OuterIters;
Timer timer(s.str());
Pn("\ngrow seeds %d/%d", k, max_OuterIters);
bool randomize_p_in;
if(k < max_OuterIters / 2) {
randomize_p_in = true;
Pn("Random p_i for each edge that we try");
estimate_p_in_and_p_out(ging); // this is just to estimate p_out
useOneNodeSeeds(ging, g, true);
estimate_p_in_and_p_out(ging);
tryDeletions(ging);
} else {
randomize_p_in = false;
useOneNodeSeeds(ging, g, false);
tryDeletions(ging);
}
groupStats(ging, g);
save(ging, g);
estimate_p_in_and_p_out(ging);
//tryDeletions(ging);
//save(ging, g);
const size_t num_groups_after = ging.groups.size();
if(!randomize_p_in && /*num_groups_after > 1000 && */ 0.99L * num_groups_after <= num_groups_before) {
Pn("breaking after %d growing passes, as VERY FEW more have been found among the most recent subset", k+1);
break;
}
}
int louvainStyle_iter=0;
const int max_louvainStyleIters = atoi(getenv("LOUVAIN_ITERS") ? : "10");
PP(max_louvainStyleIters);
while(louvainStyle_iter != max_louvainStyleIters) {
MOSES_objective(ging);
Timer timer("Whole Louvain iter");
Pn("\nLouvain-style iteration %d/%d", louvainStyle_iter++, max_louvainStyleIters);
louvainStyle(ging, g);
tryDeletions(ging, louvainStyle_iter==max_louvainStyleIters);
estimate_p_in_and_p_out(ging);
groupStats(ging, g);
save(ging, g);
}
if(0) {
tryMerges(ging);
save(ging, g);
groupStats(ging, g);
}
Pn("\n\nFINAL Grouping");
groupStats(ging, g);
estimate_p_in_and_p_out(ging);
MOSES_objective(ging);
}
static bool tryAndApplyThisOne(Grouping &ging, V e, bool randomized_p_in) {
static long double boost = 0.0L;
if(!getenv("ALLOW_BOOST000"))
boost=0.0L; // reset it back to zero unless ALLOW_BOOST000 is defined in the environment
pair<long double, set<V> > bestSeed = growThisEdge(ging, e, boost, randomized_p_in);
if(bestSeed.first + boost> 0.0L && bestSeed.second.size()>0 ) {
addSeed(ging, bestSeed.second, randomized_p_in);
boost /= 2.0; // if(boost<0.0L) boost=0.0L;
return true;
//Pn("Applied best 1-seed. Now returning. %zd nodes (+%Lg). Now there are %zd communities", bestSeed.second.size(), bestSeed.first, ging.groups.size());
// if(bestSeed.second.size() < 20) ForeachContainer (V v, bestSeed.second) { cout << "|" << g.name(v); } P("\n");
}
boost += 0.1;
return false;
}
template <class N> static void useOneNodeSeeds(Grouping &ging, bloomGraph<N> &g, bool randomize_p_in) {
Timer timer(__FUNCTION__);
const int numTries = g.ecount()/5;
P(" \n Now just use one-EDGE seeds at a time. Try %d edges\n", numTries);
// groupStats(ging, g);
for(int x=0; x<numTries; ++x) {
if(x && numTries>5 && x%(numTries/5)==0) {
PP(x);
PP(ging.groups.size());
}
// choose an edge at random, but prefer to use it iff it's sharedCommunities score is low.
V e = V(drand48() * (2*ging._g.ecount()));
assert(e >= 0);
assert(e < 2*ging._g.ecount());
if(randomize_p_in) {
ging._p_in = 0.01L + 0.98L*drand48();
assert(ging._p_in < 1.0L);
}
tryAndApplyThisOne(ging, e, randomize_p_in);
#if 0
int sharedCommunities = ging.comm_count_per_edge(ging._g.neighbours(0).first[e], &(ging._g.neighbours(0).first[e])); // a little time in here
//PP(sharedCommunities);
if( (double(rand())/RAND_MAX)
< powl(0.5, sharedCommunities))
{
//Pn(" X %d", sharedCommunities);
tryAndApplyThisOne(ging, e);
} else {
//Pn(" %d", sharedCommunities);
}
#endif
}
}
template <class N> static void groupStats(const Grouping &ging, bloomGraph<N> &g) {
map<size_t, int> group_sizes_of_the_randomized;
map<size_t, int> group_sizes;
int64 totalAssignments = 0; // to help calculate average communities per node.
ForeachContainer(Group *group, ging.groups) {
DYINGWORDS(group->vs.size()>0) {
PP(group->vs.size());
}
group_sizes[group->vs.size()]++;
totalAssignments += group->vs.size();
if(group->_randomized_p_in)
group_sizes_of_the_randomized[group->vs.size()]++;
}
//Perror(" %zd\n", ging.groups.size());
Pn("#groups=%zd. %zd nodes, out of %d, are in at least one community. avgs grps/node=%g", ging.groups.size(), ging.vgroups_size(), g.vcount(), (double) totalAssignments / g.vcount());
pair<size_t, int> group_size;
size_t max_group_size = group_sizes.size()==0 ? 0 : group_sizes.rbegin()->first;
int entries_per_row = 15;
int number_of_rows = (max_group_size / entries_per_row) + 1;
for(int r = 0; r<number_of_rows ; r++) {
for(size_t c = r; c <= max_group_size; c+=number_of_rows) {
if(group_sizes[c]>0)
P("%6d{%3zd}", group_sizes[c], c);
else
P(" ");
}
P("\n");
}
{ // now, just the randomized ones
size_t max_group_size = group_sizes_of_the_randomized.size()==0 ? 0 : group_sizes_of_the_randomized.rbegin()->first;
int entries_per_row = 15;
int number_of_rows = (max_group_size / entries_per_row) + 1;
for(int r = 0; r<number_of_rows ; r++) {
for(size_t c = r; c <= max_group_size; c+=number_of_rows) {
if(group_sizes_of_the_randomized[c]>0)
P("%6d{%3zd}", group_sizes_of_the_randomized[c], c);
else
P(" ");
}
P("\n");
}
}
#if 0
set<V> lonelyNodesInLargestGroup;
Group *largestGroup = NULL;
ForeachContainer(Group *group, ging.groups) {
if(group->vs.size() == max_group_size) {
ForeachContainer(V v, group->vs) {
P("(%zd)", ging.vgroups(v).size());
if(1 == ging.vgroups(v).size() && 0 == rand()%2)
lonelyNodesInLargestGroup.insert(v);
}
P("\n");
largestGroup = group;
break;
}
}
#endif
{
int print_count = 0;
for(map<int, V>::const_iterator it = ging.global_edge_counts.begin(); it!=ging.global_edge_counts.end(); ++it) {
P("%6d(%3d)", (int) it->second ,(int) it->first);
print_count++;
if(print_count%15==0)
P("\n");
}
P("\n");
//if(0) update_p_out(ging);
//PP(ging._p_in);
//PP(ging._p_out);
}
}
<|fim▁hole|>void seedGroupingWithPartition(Grouping &ging, const iterative::Partition &p) {
for(V c=0; c<p.g.vcount(); ++c) {
if(p.p[c].c == c) { // we're at the head node
//Pn(" order %d", p.p[c].order);
Group *grp = ging.newG();
V v = c;
do {
assert(c == p.p[v].c);
ging.addV(grp, v);
//Pn ("%d is in %d", v ,c);
v = p.p[v].next;
} while (v != c);
}
}
}
template <class It>
static size_t count_intersection(It it1b, It it1e, It it2b, It it2e) {
vector< typename It::value_type > is;
set_intersection(it1b, it1e, it2b, it2e, back_inserter(is));
return is.size();
}
template <class Container>
static size_t count_intersection(const Container &container1, const Container &container2) {
return count_intersection(
container1.begin()
, container1.end()
, container2.begin()
, container2.end()
);
}
struct DeltaSeed {
const V _v; // for this node
const int _group_size_smaller; // ...and this group (which v is NOT currently in)
const Grouping &_ging;
// what'd be the change in entropy (edge+Z+Q) from joining it?
long double _deltadeltaEdgeEntropy;
int count_edges_back_into_this_group;
long double deltadeltaPairEntropy() const {
return _deltadeltaEdgeEntropy + log2l(1.0L - _ging._p_in) * (_group_size_smaller - count_edges_back_into_this_group);
}
explicit DeltaSeed(V v, int group_size_smaller, Grouping &ging) : _v(v), _group_size_smaller(group_size_smaller), _ging(ging), _deltadeltaEdgeEntropy(0.0L), count_edges_back_into_this_group(0) {
//assert(_grp.vs.count(v)==0);
}
void addEdge2(V n, const V* edgeVN_ptr) { // n is connected to _v
assert(*edgeVN_ptr == n);
this->addEdge(n, _ging.comm_count_per_edge(n, edgeVN_ptr));
}
void addEdge(V , int sharedCommunities) { // n is connected to _v // TODO: might be quicker to pass in the count of sharedCommunities too
//assert(_ging._g.are_connected(_v, n)); // TODO: remove these assertions
//assert(_grp.vs.count(n) == 1);
//assert(_grp.vs.count(_v) == 0);
count_edges_back_into_this_group ++;
_deltadeltaEdgeEntropy += log2l(1.0L - (1.0L-_ging._p_out)*powl(1.0L - _ging._p_in, 1+sharedCommunities))
- log2l(1.0L - (1.0L-_ging._p_out)*powl(1.0L - _ging._p_in, sharedCommunities));
}
void redoEdge(V , int previous_sharedCommunities) { // n is connected to _v // TODO: might be quicker to pass in the count of sharedCommunities too
_deltadeltaEdgeEntropy -=(log2l(1.0L - (1.0L-_ging._p_out)*powl(1.0L - _ging._p_in, 1+previous_sharedCommunities))
- log2l(1.0L - (1.0L-_ging._p_out)*powl(1.0L - _ging._p_in, previous_sharedCommunities)));
_deltadeltaEdgeEntropy += log2l(1.0L - (1.0L-_ging._p_out)*powl(1.0L - _ging._p_in, 2+previous_sharedCommunities))
- log2l(1.0L - (1.0L-_ging._p_out)*powl(1.0L - _ging._p_in, 1+previous_sharedCommunities));
}
long double _deltaZentropy() const {
const size_t N = _ging._g.vcount();
const size_t x = _group_size_smaller;
const size_t x2 = 1+x;
return( x2 * log2l(x2) + (N-x2) * log2l(N-x2)
-x * log2l(x) - (N-x ) * log2l(N-x ) );
/*
(x2 * (log2l(x2)-log2l(N)) + (N-x2) * (log2l(N-x2)-log2l(N)) + log2l(1+ging.groups.size()) - log2l(N))
-(x * (log2l(x) -log2l(N)) + (N-x ) * (log2l(N-x )-log2l(N)) + log2l(1+ging.groups.size()) - log2l(N))
= (x2 * (log2l(x2)-log2l(N)) + (N-x2) * (log2l(N-x2)-log2l(N)) )
-(x * (log2l(x) -log2l(N)) + (N-x ) * (log2l(N-x )-log2l(N)) )
= (x2 * (log2l(x2) ) + (N-x2) * (log2l(N-x2) ) )
-(x * (log2l(x) ) + (N-x ) * (log2l(N-x ) ) )
= x2 * log2l(x2) + (N-x2) * log2l(N-x2)
-x * log2l(x) - (N-x ) * log2l(N-x )
*/
}
long double _deltaTotalentropy() const {
return this->deltadeltaPairEntropy() + this->_deltaZentropy();
}
};
struct FrontierNode {
FrontierNode(long double &score, V v) : _score(score), _v(v) {}
long double _score;
V _v;
struct Incrementer {
long double _x;
Incrementer(long double &x) : _x(x) {}
void operator() (FrontierNode &fn) const { fn._score += _x; }
};
};
using namespace boost::multi_index;
struct VertexTag {};
struct Frontier : private multi_index_container < // TODO: Some sort of binary tree sometime?
FrontierNode,
indexed_by<
ordered_non_unique< member<FrontierNode,long double,&FrontierNode::_score>, greater<long double> >,
hashed_unique< tag<VertexTag>, member<FrontierNode,V,&FrontierNode::_v> >
>
> {
// vertices, and their scores.
// easy removal of the highest-score vertices.
// easy increase of score of arbitrary members, adding them if they don't exist already.
public:
static long double __attribute__ ((noinline)) calcddEE(const Grouping &ging, int sharedCommunities) {
return log2l(1.0L - (1.0L-ging._p_out) * powl(1.0L - ging._p_in, 1+sharedCommunities))
- log2l(1.0L - (1.0L-ging._p_out) * powl(1.0L - ging._p_in, sharedCommunities))
- log2l(1.0L - ging._p_in)
+ 1e-20L * drand48() // random tie breaking
;
}
void addNode(const Grouping &ging, V to, const V *edgeFT_ptr) {
// to is being added to the frontier, BUT it may already be in the frontier.
// from is in the seed.
//assert(*edgeFT_ptr == to);
int sharedCommunities = ging.comm_count_per_edge(to, edgeFT_ptr); // a little time in here
long double deltadeltaEdgeEntropy = calcddEE(ging, sharedCommunities /*, to*/); // a little time in here
Frontier::nth_index<1>::type::iterator addOrModifyThis = this->get<1>().find(to);
if(addOrModifyThis==this->get<1>().end()) { // TODO: faster if search for to is done just once?
this->insert(FrontierNode(deltadeltaEdgeEntropy, to));
} else {
this->get<1>().modify(addOrModifyThis, FrontierNode::Incrementer(deltadeltaEdgeEntropy));
}
}
void erase_best_node() {
Frontier::iterator best_node = this->get<0>().begin();
this->erase(best_node);
}
int erase_this_node(V to) {
return this->get<1>().erase(to);
}
long double best_node_score() const {
Frontier::iterator best_node = this->get<0>().begin();
return best_node -> _score;
}
V best_node_v() const {
Frontier::iterator best_node = this->get<0>().begin();
return best_node -> _v;
}
bool Empty() const {
return this->empty();
}
};
static long double logNchoose(int64 N, int64 n_c) {
if(n_c==N)
return 0;
static vector<long double> logNchoose_vector;
// static int64 usedN;
assert(n_c>0);
assert(n_c<=N);
if (logNchoose_vector.size()==0) {
Timer t("logNchoose Initialization");
logNchoose_vector.resize(N+1);
// usedN = N;
long double lN = 0.0L;
for(int64 x1=1; x1<=N; x1++) {
if(x1>1) {
int64 i = x1-1;
lN += log2l(i) - log2l(N-i);
}
logNchoose_vector.at(x1) = lN;
}
}
assert(logNchoose_vector.at(0) == 0);
// DYINGWORDS(logNchoose_vector.at(N) == 0) { // will never be exactly zero, but it should be, in theory
assert(logNchoose_vector.size()>0);
// assert(usedN == N);
assert( size_t(N+1) == logNchoose_vector.size());
assert( size_t(n_c) < logNchoose_vector.size());
return logNchoose_vector.at(n_c);
}
pair<long double, set<V> > growingSeed(Grouping &ging, int lookahead
, set<V> &seed
, pair<long double, set<V> > bestSoFar
, long double seedEdgeEntropy
, int seed_size
, Frontier &frontier
, int edges_in_seed
, const long double &boost // to allow some negative communities to persist. The deletion phase will fix them later. This is to ensure that we have the best chance of filling the graph up quickly.
, bool randomized_p_in
)
// Find the expansion among the frontier that best improves the score. Then recurse to it.
// Stop growing if dead end is reached (empty frontier) or the seed isn't increasing and we already have at least 5 nodes.
// Return the best set of nodes
// TODO: Profile, then make this damn efficient
{
assert((int) seed.size() == seed_size);
if(frontier.Empty())
return bestSoFar;
const V best_v = frontier.best_node_v();
#if 0
{
IteratorRange<const V *> ns(ging._g.neighbours(best_v));
Foreach(V n, ns) {
if(1==seed.count(n)) { // This neighbour is already in the seed. That means the count of edges within the seed is about to be increased. Should update randomized p_in in light of this.
++edges_in_seed;
}
}
int64 pairsInSeed = seed_size * (1+seed_size) / 2;
long double new_p_in = 1.0L * (2*edges_in_seed+1) / (2 * pairsInSeed + 2) ;
if(randomized_p_in) ging._p_in = new_p_in;
/*
cout << edges_in_seed << "/" << (1+seed_size)
<< "\t" << 100.0 * edges_in_seed / (seed_size * (1+seed_size) / 2)
// << "\t" << ging._p_in << "\t" << randomized_p_in
<< "\t" << new_p_in
<< endl;
*/
assert(edges_in_seed <= (seed_size * (1+seed_size) / 2));
}
#endif
const long double newseedEdgeEntropy = seedEdgeEntropy + frontier.best_node_score() + log2l(1.0L-ging._p_in) * seed_size;
const int N = ging._g.vcount();
//const int x = seed_size;
const int x1= 1+seed_size;
//long double seed_totalDeltaEntropy = seedEdgeEntropy + x * (log2l(x)-log2l(N)) + (N-x) * (log2l(N-x)-log2l(N)) + log2l(1+ging.groups.size())/*equivalent groupings*/ - log2l(N)/*encoding of size of the group*/;
UNUSED int64 q = ging.groups.size(); if (q==0) q=1;
UNUSED const int64 q_= 1 + q;
const long double logNchoosen = logNchoose(N,x1);
const long double newseed_totalDeltaEntropy = newseedEdgeEntropy
//+ x1 * (log2l(x1)-log2l(N)) + (N-x1) * (log2l(N-x1)-log2l(N))
+ logNchoosen
- log2l(N+1)/*encoding of size of the group*/
+ log2l(1+ging.groups.size())/*equivalent groupings*/
//+ (getenv("UseBroken") ? ( ( q_ * (log2l(q_) - log2l(exp(1))) + log2l(N+1) - log2l(N+1-q_) ) - ( q * (log2l(q ) - log2l(exp(1))) + log2l(N+1) - log2l(N+1-q ) ) ) : 0)
;
if(bestSoFar.first < newseed_totalDeltaEntropy) {
bestSoFar.first = newseed_totalDeltaEntropy;
bestSoFar.second = seed;
bestSoFar.second.insert(best_v);
}
if( (size_t) seed_size >= lookahead +bestSoFar.second.size() ) // lookahead
return bestSoFar;
const size_t max_commsize = atoi(getenv("MaxCommSize") ? : "10000000");
if( (size_t) seed_size >= max_commsize ) { // max comm size
bestSoFar.first = -10000000; return bestSoFar;
}
//if( bestSoFar.first > 0.0L) // once positive, return immediately!!!!!!!!!!!!!
//return bestSoFar;
if( (size_t) seed_size > bestSoFar.second.size() && bestSoFar.first + boost > 0.0L) // once positive, return immediately if it drops.
return bestSoFar;
//if(bestSoFar.first > 0.0L)
//return bestSoFar; // This isn't working so well. Too many communities (I think), and no faster (for Oklahoma at least)
frontier.erase_best_node();
IteratorRange<const V *> ns(ging._g.neighbours(best_v));
const V *edgeVN_offset = ging._g.neighbours(best_v).first;
Foreach(V n, ns) {
assert( *edgeVN_offset == n);
//assert(ging._g.neighbours(0).first[ging._comm_count_per_edge2[edgeVN_offset].other_index] == best_v);
if(0==seed.count(n))
frontier.addNode(ging, n
, edgeVN_offset
);
++edgeVN_offset;
}
seed.insert(best_v);
return growingSeed(ging
, lookahead
, seed
, bestSoFar
, newseedEdgeEntropy
, 1 + seed_size
, frontier
, edges_in_seed
, boost
, randomized_p_in
);
}
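// ThrowingIterator + emptyIntersection: check whether two sorted ranges share any element.
// set_intersection is given an output iterator that throws on its first write, so the scan
// stops as soon as one common element is found instead of materialising the intersection.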
struct ThrowingIterator {
struct Dereferenced {};
V & operator *() { throw Dereferenced(); }
void operator ++() { throw Dereferenced(); }
};
bool emptyIntersection(const pair<const V*, const V*> &l, const pair<const V*, const V*> &r) {
try {
set_intersection(
l.first
,l.second
,r.first
,r.second
,ThrowingIterator()
);
} catch (ThrowingIterator::Dereferenced &) {
return false;
}
return true; //inter.empty();
}
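// growThisEdge: seed a candidate community from a single edge (endpoints l and r).
// The edge must close at least one triangle; the initial frontier is built from the
// neighbours of both endpoints and the actual expansion is delegated to growingSeed.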
static pair<long double, set<V> > growThisEdge(Grouping &ging, const V edgeNumber, const long double &boost, bool randomized_p_in) {
assert(edgeNumber < 2*ging._g.ecount());
V r = ging._g.neighbours(0).first[edgeNumber];
V l = ging._g.neighbours(0).first[ging.comm_count_per_edge2[edgeNumber].other_index];
// there must be a triangle available
if(emptyIntersection(ging._g.neighbours(l),ging._g.neighbours(r)))
return make_pair(-1.0L, set<V>());
Frontier frontier;
{
IteratorRange<const V *> ns(ging._g.neighbours(l));
const V *edgeIN_ptr = ging._g.neighbours(l).first;
Foreach(V n, ns) {
frontier.addNode(ging, n, edgeIN_ptr);
++edgeIN_ptr;
}
}
//PP(frontier.size());
const int erased = frontier.erase_this_node(r);
DYINGWORDS(erased==1) {
PP(erased);
PP(l);
PP(r);
//PP(frontier.size());
PP(ging._g.degree(l));
PP(ging._g.degree(r));
IteratorRange<const V *> ns(ging._g.neighbours(l));
Foreach(V n, ns) { PP(n); }
}
assert(erased==1);
{
IteratorRange<const V *> ns(ging._g.neighbours(r));
const V *edgeIN_ptr = ging._g.neighbours(r).first;
Foreach(V n, ns) {
if(n!=l)
frontier.addNode(ging, n, edgeIN_ptr);
++edgeIN_ptr;
}
}
set<V> seed;
seed.insert(l);
seed.insert(r);
int sharedCommunities = ging.comm_count_per_edge(r, &(ging._g.neighbours(0).first[edgeNumber])); // a little time in here
pair<long double, set<V> > grownSeed = growingSeed(ging
, atoi(getenv("Lookahead") ? : "2")
, seed
, make_pair(-10000000.0L, seed) // This score is too low, but then we don't expect a singleton community to come out best anyway! Only positive scores are used.
, log2l(1.0L - (1.0L-ging._p_out) * powl(1.0L - ging._p_in, 1+sharedCommunities))
- log2l(1.0L - (1.0L-ging._p_out) * powl(1.0L - ging._p_in, sharedCommunities))
, 2
, frontier
, 1
, boost
, randomized_p_in
);
return grownSeed;
}
template <class N> static void save(Grouping &ging, bloomGraph<N> &g) {
ofstream saveFile(option_overlapping);
ForeachContainer(Group *grp, ging.groups) {
bool firstLine = true;
if(flag_save_group_id_in_output)
saveFile << '\"' << grp->_id << "\"\t";
ForeachContainer(V v, grp->vs) {
saveFile << (firstLine?"":" ") << g.name(v);
firstLine = false;
}
saveFile << endl;
}
runMutual();
}
struct hashGroup {
int operator() (Group *grp) const {
return grp->_id;
}
};
typedef boost::unordered_map<Group *, DeltaSeed, hashGroup> SeedDeltasT;
template <class N> static void louvainStyle(Grouping &ging, bloomGraph<N> &g) {
Timer t(__FUNCTION__);
    // For each node: isolate it from all of its communities, then add it back one
    // community at a time for as long as doing so improves the score.
const bool DEBUG_louvainStyle = 0;
for(V v=0; v<g.vcount(); v++) {
if(0) update_p_out(ging);
// if(v%(g.vcount()/20)==0) PP(v);
if(DEBUG_louvainStyle)
groupStats(ging, g);
if(DEBUG_louvainStyle)
cout << "removing node in these many groups: " << ging.vgroups(v).size() << endl;
ging.isolate(v);
SeedDeltasT _seedDeltas;
IteratorRange<const V *> ns(ging._g.neighbours(v));
{
const V * edgeVN_ptr = ging._g.neighbours(v).first;
Foreach(V n, ns) {
int sharedCommunities = ging.comm_count_per_edge(n,edgeVN_ptr);
// TODO: Could prepare the results for addEdge of v<>n
ForeachContainer(Group *grp, ging.vgroups(n)) {
_seedDeltas.insert(make_pair(grp, DeltaSeed(v, grp->vs.size(), ging))).first->second.addEdge(n, sharedCommunities);
//_seedDeltas.find(grp)->second.addEdge(n, sharedCommunities);
}
++edgeVN_ptr;
}
}
for(int addedBack = 0; _seedDeltas.size()>0 ; addedBack++) {
// for each neighbouring group, calculate the delta-entropy of expanding back in here.
pair<long double, Group *> bestGroup(-LDBL_MAX, (Group*) NULL);
int num_positive = 0;
for(SeedDeltasT::iterator i = _seedDeltas.begin(); i!=_seedDeltas.end(); ) {
if(i->second._deltaTotalentropy()<=0.0L) {
i = _seedDeltas.erase(i);
continue;
} else {
long double delta2 = i->second._deltaTotalentropy();
// TODO: Count the positive scores. No point proceeding if there aren't any more positive scores, as they can only decrease
if(bestGroup.first < delta2)
bestGroup = make_pair(delta2, i->first);
if(delta2>0.0L)
++num_positive;
}
++i;
}
if(bestGroup.first > 0.0L) {
assert(num_positive>=1);
ging.addV(bestGroup.second, v);
if(num_positive==1) { // if just one was positive, then there's no point continuing, as the rest will only lose more score.
break;
}
_seedDeltas.erase(bestGroup.second);
// the other potential groups on the end of this edge need to have their addEdge undone
const V * edgeVN_ptr = ging._g.neighbours(v).first;
IteratorRange<const V*> ns(ging._g.neighbours(v));
Foreach(V n, ns) {
assert(*edgeVN_ptr == n);
if(bestGroup.second->vs.count(n)) {
int previous_sharedCommunities = ging.comm_count_per_edge(n, edgeVN_ptr) - 1;
ForeachContainer(Group *grp, ging.vgroups(n)) {
SeedDeltasT::iterator grpInSeed =_seedDeltas.find(grp);
if(grpInSeed != _seedDeltas.end())
{
const long double before = grpInSeed->second._deltaTotalentropy();
grpInSeed->second.redoEdge(n, previous_sharedCommunities);
const long double after = grpInSeed->second._deltaTotalentropy();
if(after > before) {
Perror("%s:%d _deltaTotalentropy %Lg -> %Lg\n", __FILE__, __LINE__, before, after);
}
}
}
}
++edgeVN_ptr;
}
}
else
break;
}
}
}
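// update_p_out: re-estimate the background edge probability from the edges currently
// covered by no community, capped at 0.1.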
static void update_p_out(Grouping &ging) {
long double new_p_out = 2.0L * ging.global_edge_counts[0] / ging._g.vcount() / (ging._g.vcount()-1) ;
if(new_p_out > 0.1L)
ging._p_out = 0.1L;
else
ging._p_out = new_p_out;
}
struct PairGroupHash {
int operator() (const pair<Group*,Group*> &pg) const {
return pg.first->_id + pg.second->_id;
}
};
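// tryMerges: scan every edge once to accumulate a score for each candidate pair of
// communities to merge (pairs seen across an edge, and pairs that both already contain
// the edge), then apply the proposed merges whose total score is positive.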
static void tryMerges(Grouping &ging) {
Timer timer(__FUNCTION__);
typedef boost::unordered_map< pair<Group*,Group*> , long double, PairGroupHash> MergesT;
MergesT proposed_merges;
size_t counter=0;
	for(V e = 0; e < 2*ging._g.ecount() /*100000*/; e++) {
//if(e % (2*ging._g.ecount()/10) ==0) { PP(e); }
V e2 = ging.comm_count_per_edge2[e].other_index;
V l = ging._g.neighbours(0).first[e2];
V r = ging._g.neighbours(0).first[e];
if(r<l) continue; // no point considering each edge twice
V sharedCommunities = ging.comm_count_per_edge2[e].shared_comms;
assert(sharedCommunities == ging.comm_count_per_edge2[e2].shared_comms);
//Pn("%d\t%d", l, r);
counter++;
const set<Group *> &lgrps = ging.vgroups(l);
const set<Group *> &rgrps = ging.vgroups(r);
//PP(lgrps.size());
//PP(rgrps.size());
vector<Group *> lonly, ronly;
set_difference(lgrps.begin(),lgrps.end(),rgrps.begin(),rgrps.end(),back_inserter(lonly));
set_difference(rgrps.begin(),rgrps.end(),lgrps.begin(),lgrps.end(),back_inserter(ronly));
//Pn("# unmatched %zd,%zd", lonly.size(), ronly.size());
ForeachContainer(Group *lg, lonly) {
ForeachContainer(Group *rg, ronly) {
long double qi = 1.0L - ging._p_in;
long double qo = 1.0L - ging._p_out;
//const int64 oldQz = ging.groups.size();
Group * lg1 = lg;
Group * rg1 = rg;
if(lg1 > rg1) swap(lg1, rg1);
MergesT::key_type key = make_pair(lg1, rg1);
MergesT::iterator pm = proposed_merges.find(key);
if(pm == proposed_merges.end()) {
const int64 s1 = lg1->vs.size();
const int64 s2 = rg1->vs.size();
vector<V> Union;
set_union(lg1->vs.begin(),lg1->vs.end(),rg1->vs.begin(),rg1->vs.end(),back_inserter(Union));
const int64 N = ging._g.vcount();
pm = proposed_merges.insert(make_pair(key,
log2l(qi)*0.5L* (long double)( Union.size() * (Union.size()-1) - s1*(s1-1) - s2*(s2-1) )
// + ( (oldQz) *log2l(oldQz-1) + log2l(exp(1)) - log2l(oldQz-2-N) )
// - ( (oldQz+1)*log2l(oldQz ) - log2l(oldQz-1-N) )
+ (s1+s2) * (log2l(s1+s2) /*- log2l(N)*/)
- (s1 ) * (log2l(s1 ) /*- log2l(N)*/)
- (s2 ) * (log2l(s2 ) /*- log2l(N)*/)
+ log2l(N) // one fewer community whose pi has to be encoded
)).first;
}
pm -> second+=
log2l(1.0L - qo * powl(qi, 1+sharedCommunities))
- log2l(1.0L - qo * powl(qi, sharedCommunities))
                    - log2l(qi) // for a given member of the map, each edge will be found exactly once. So here we cancel the effect of assuming it was disconnected
;
}
}
}
	for(V e = 0; e < 2*ging._g.ecount() /*100000*/; e++) {
if(e % (2*ging._g.ecount()/10) ==0) {
PP(e);
}
V e2 = ging.comm_count_per_edge2[e].other_index;
V l = ging._g.neighbours(0).first[e2];
V r = ging._g.neighbours(0).first[e];
if(r<l) continue; // no point considering each edge twice
V sharedCommunities = ging.comm_count_per_edge2[e].shared_comms;
assert(sharedCommunities == ging.comm_count_per_edge2[e2].shared_comms);
//Pn("%d\t%d", l, r);
counter++;
const set<Group *> &lgrps = ging.vgroups(l);
const set<Group *> &rgrps = ging.vgroups(r);
//PP(lgrps.size());
//PP(rgrps.size());
vector<Group *> inter;
set_intersection(lgrps.begin(),lgrps.end(),rgrps.begin(),rgrps.end(),back_inserter(inter));
//Pn("# unmatched %zd,%zd", lonly.size(), ronly.size());
ForeachContainer(Group *lg, inter) {
ForeachContainer(Group *rg, inter) {
if(lg < rg) { // no point proposing a merge between a group and itself
long double qi = 1.0L - ging._p_in;
long double qo = 1.0L - ging._p_out;
Group * lg1 = lg;
Group * rg1 = rg;
if(lg1 > rg1) swap(lg1, rg1);
MergesT::key_type key = make_pair(lg1, rg1);
MergesT::iterator pm = proposed_merges.find(key);
if(pm != proposed_merges.end())
pm -> second+=
log2l(1.0L - qo * powl(qi, sharedCommunities-1))
- log2l(1.0L - qo * powl(qi, sharedCommunities))
                        + log2l(qi) // for a given member of the map, each edge will be found exactly once. So here we cancel the effect of assuming it was disconnected
;
}
}
}
}
int64 merges_accepted = 0;
int64 merges_applied = 0;
    boost::unordered_set<Group *> already_merged; // groups already merged in this pass; each group is merged at most once
    for(MergesT::const_iterator pm = proposed_merges.begin(); pm != proposed_merges.end(); ++pm) {
        const long double score = pm->second;
        //const int64 N = ging._g.vcount();
if(score >0.0) {
//PP(scoreEdges);
//PP(scoreZ);
merges_accepted++;
MergesT::key_type merge_these = pm->first;
if(already_merged.count(merge_these.first)==0 && already_merged.count(merge_these.second)==0) {
Group * l = merge_these.first;
Group * r = merge_these.second;
const set<V> these_nodes(l->vs); // copy them, so as to iterate properly over them.
//P(" "); PP(ging.groups.size());
ForeachContainer(V v, these_nodes) {
if(r->vs.count(v)==0) {
ging.addV(r,v);
}
assert(r->vs.count(v)==1);
//ging.delV(l,v);
}
//PP(ging.groups.size());
already_merged.insert(merge_these.first);
already_merged.insert(merge_these.second);
merges_applied++;
}
}
//if(score > -25.0) { printf("merge: %-11.2Lf\n", score); }
}
PP(proposed_merges.size());
PP(merges_accepted);
PP(merges_applied);
}
static void tryDeletions(Grouping &ging, bool SaveScores /*= true*/) { // delete groups which aren't making a positive contribution any more.
Timer timer(__FUNCTION__);
typedef boost::unordered_map< Group* , long double, hashGroup> DeletionsT;
DeletionsT proposed_deletions;
const int64 N = ging._g.vcount();
    ForeachContainer(Group *grp, ging.groups) { // pre-seed with every group, because we also want the groups that don't have any edge inside them
const int64 sz = grp->vs.size();
//int64 q = ging.groups.size() -1; if (q<=0) q=1;
//const int64 q_= 1 + q;
const long double logNchoosen = logNchoose(N,sz);
proposed_deletions.insert(make_pair(grp,
log2l(1.0L - ging._p_in)*(sz*(sz-1)/2)
+ logNchoosen
- log2l(N+1)/*encoding of size of the group*/
+ log2l(1+ging.groups.size())/*equivalent groupings*/
//+ (getenv("UseBroken") ? ( ( q_ * (log2l(q_) - log2l(exp(1))) + log2l(N+1) - log2l(N+1-q_) ) - ( q * (log2l(q ) - log2l(exp(1))) + log2l(N+1) - log2l(N+1-q ) ) ) : 0)
));
}
	for(V e = 0; e < 2*ging._g.ecount() /*100000*/; e++) {
/*
if(e % (2*ging._g.ecount()/10) ==0) {
PP(e);
}
*/
V e2 = ging.comm_count_per_edge2[e].other_index;
V sharedCommunities = ging.comm_count_per_edge2[e].shared_comms;
assert(sharedCommunities == ging.comm_count_per_edge2[e2].shared_comms);
V l = ging._g.neighbours(0).first[e2];
V r = ging._g.neighbours(0).first[e];
if(r<l) continue; // no point considering each edge twice
const set<Group *> &lgrps = ging.vgroups(l);
const set<Group *> &rgrps = ging.vgroups(r);
vector<Group *> sharedComms;
set_intersection(lgrps.begin(),lgrps.end(),rgrps.begin(),rgrps.end(),back_inserter(sharedComms));
assert((size_t)sharedCommunities == sharedComms.size());
ForeachContainer(Group *grp, sharedComms) {
DeletionsT::iterator pm = proposed_deletions.find(grp);
assert(pm != proposed_deletions.end());
pm -> second+=
log2l(1.0L - (1.0L-ging._p_out) * powl(1.0L - ging._p_in, sharedCommunities))
- log2l(1.0L - (1.0L-ging._p_out) * powl(1.0L - ging._p_in, sharedCommunities-1))
- log2l(1.0L - ging._p_in)
;
}
}
assert(proposed_deletions.size() <= ging.groups.size()); // maybe some communities didn't have an edge in them
V deletions_accepted = 0;
map<V, int> deletions_sizes;
PP(ging.groups.size());
for(DeletionsT::const_iterator pm = proposed_deletions.begin(); pm != proposed_deletions.end(); ++pm) {
const long double score = pm->second;
//const int64 N = ging._g.vcount();
if(score < 0.0) {
//PP(scoreEdges);
//PP(scoreZ);
deletions_accepted++;
deletions_sizes[pm->first->vs.size()]++;
{ // delete the group
set<V> vs = pm->first->vs; // COPY the vertices in
ForeachContainer(V v, vs) {
ging.delV(pm->first, v);
}
                    // By now, pm->first will be an invalid pointer, as it will have been delete'd
}
}
}
P("deletions_accepted: %d\t", deletions_accepted);
pair<V, int> delete_size;
ForeachContainer(delete_size, deletions_sizes) {
P("%d{%d} ", delete_size.second, delete_size.first);
}
P("\n");
PP(ging.groups.size());
//if(SaveScores && option_saveMOSESscores[0]) Pn("NOT Saving the delta-scores for each comm");
if(SaveScores && option_saveMOSESscores[0]) {
Pn("Saving the MOSES delta-score for each community as per the --saveMOSESscores option");
ofstream saveFile(option_saveMOSESscores);
        ForeachContainer(Group *grp, ging.groups) { // write out the delta-score and size of every group, including groups without any edge inside them
saveFile << proposed_deletions[grp] << '\t' << grp->vs.size() << endl;
}
}
}
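// P_x_given_z: log2-likelihood of the observed edges given the grouping z, under the model
// where a pair of nodes sharing s communities is connected with probability
// 1 - (1-p_out) * (1-p_in)^s.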
long double P_x_given_z(const Grouping &ging, long double p_o, long double p_i, int sigma_shared_Xis1) {
const int64 N = ging._g.vcount();
const int64 m = ging._g.ecount() / 2L;
long double logP_XgivenZ = 0.0;
logP_XgivenZ += log2l(1.0L - p_o) * (N * (N-1) / 2 - m);
logP_XgivenZ += log2l(1.0L - p_i) * (ging._sigma_shared - sigma_shared_Xis1);
typedef pair <int,V> edge_countT;
ForeachContainer(const edge_countT &edge_count, ging.global_edge_counts) {
const int64 s = edge_count.first;
const int64 m_s = edge_count.second;
logP_XgivenZ += log2l(1.0L - (1.0L - p_o)*powl(1.0L - p_i, s)) * m_s;
}
return logP_XgivenZ;
}
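// MOSES_objective: total objective for the current grouping = data likelihood P(x|z) plus
// the prior cost of encoding the communities; also prints the compression ratio relative
// to the objective of the empty (no-community) grouping.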
long double MOSES_objective(const Grouping &ging) {
Timer t(__FUNCTION__);
// Three components
// P(x|z)
// Qz!
// product of binomial/N+1
int64 sigma_shared_Xis1 = 0;
pair <int,V> edge_count;
ForeachContainer(edge_count, ging.global_edge_counts) {
sigma_shared_Xis1 += edge_count.first * edge_count.second;
}
long double Pxz = P_x_given_z(ging, ging._p_out, ging._p_in, sigma_shared_Xis1);
long double Pz = 0.0;
for (size_t i = 1; i<=ging.groups.size(); i++) {
Pz += log2l(i); //+ log2l(1+ging.groups.size())/*equivalent groupings*/
//P(Pz);
}
// PP(Pz);
int64 N = ging._g.vcount();
ForeachContainer(const Group *grp, ging.groups) {
long double logNchoosen = logNchoose(N,grp->vs.size());
DYINGWORDS(logNchoosen <= 0.0) {
PP(logNchoosen);
}
assert(logNchoosen <= 0.0);
Pz += logNchoosen - log2l(N+1) ;
}
// PP(Pxz);
// PP(Pz);
// PP(Pxz + Pz);
// PP(ging.value_of_objective_with_no_communities);
if(ging.value_of_objective_with_no_communities==1.0)
Pn("Compression:\t%Lf\t%Lg\t%Lg", 1.0L
,Pz
,Pxz
);
else
Pn("Compression:\t%Lf\t%Lg\t%Lg", (Pxz + Pz) / ging.value_of_objective_with_no_communities
,Pz
,Pxz
);
return Pxz + Pz;
}
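// estimate_p_in_and_p_out: brute-force grid search over (p_in, p_out) maximising P(x|z)
// for the current grouping; the best pair found is written back into ging.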
static void estimate_p_in_and_p_out(Grouping &ging) {
Timer t(__FUNCTION__);
//PP(ging._sigma_shared);
/*
int64 _sigma_shared2 = 0;
ForeachContainer(const Group *grp, ging.groups) {
_sigma_shared2 += int64(grp->vs.size()) * int64(grp->vs.size()-1);
}
PP(_sigma_shared2/2);
assert(_sigma_shared2 = 2 * ging._sigma_shared);
*/
const int64 N = ging._g.vcount();
const int64 m = ging._g.ecount() / 2L;
int64 sigma_shared_Xis1 = 0;
pair <int,V> edge_count;
ForeachContainer(edge_count, ging.global_edge_counts) {
sigma_shared_Xis1 += edge_count.first * edge_count.second;
}
//PP(sigma_shared_Xis1);
map<long double, pair<long double, long double>, greater<long double> > ALLlogP_XgivenZ;
for(long double p_i = 0.0L; (p_i+=0.001L) < 1.0L; ) {
for(long double p_o = 1e-11L; (p_o*=1.1L) < 1.0L; ) {
long double logP_XgivenZ = 0.0;
logP_XgivenZ += log2l(1.0L - p_o) * (N * (N-1) / 2 - m);
logP_XgivenZ += log2l(1.0L - p_i) * (ging._sigma_shared - sigma_shared_Xis1);
ForeachContainer(edge_count, ging.global_edge_counts) {
const int64 s = edge_count.first;
const int64 m_s = edge_count.second;
logP_XgivenZ += log2l(1.0L - (1.0L - p_o)*powl(1.0L - p_i, s)) * m_s;
}
assert(logP_XgivenZ == P_x_given_z(ging ,p_o ,p_i ,sigma_shared_Xis1));
ALLlogP_XgivenZ[logP_XgivenZ] = make_pair(p_i, p_o);
}
}
pair<long double, pair<long double, long double> > best;
ForeachContainer(best, ALLlogP_XgivenZ) {
Pn("BEST: %Lg,%Lg -> %9.0Lf ", best.second.first, best.second.second, best.first);
ging._p_in = best.second.first;
ging._p_out= best.second.second;
break;
}
}
} // namespace overlapping<|fim▁end|> | |
<|file_name|>__helper.py<|end_file_name|><|fim▁begin|>""" This provides some useful code used by other modules. This is not to be
used by the end user, which is why it is hidden. """
import string, sys
class LinkError(Exception):
pass
def refine_import_err(mod_name, extension_name, exc):
""" Checks to see if the ImportError was because the library
itself was not there or because there was a link error. If there
    was a link error it raises a LinkError; if not, it does nothing.
Keyword arguments
-----------------<|fim▁hole|> imported by the module having mod_name.
- exc : The exception raised when the module called mod_name was
imported.
To see example usage look at __init__.py.
"""
try:
del sys.modules['vtk.%s'%mod_name]
except KeyError:
pass
if string.find(str(exc), extension_name) == -1:
raise LinkError, str(exc)<|fim▁end|> |
- mod_name : The name of the Python module that was imported.
- extension_name : The name of the extension module that is to be |
<|file_name|>driver.py<|end_file_name|><|fim▁begin|># EMU code from https://github.com/rainforestautomation/Emu-Serial-API
from emu import *
import sys
import json
import msgpack
from xbos import get_client
from bw2python.bwtypes import PayloadObject
import time
with open("params.json") as f:
try:
params = json.loads(f.read())
except ValueError as e:
print "Invalid parameter file"
sys.exit(1)
emu_instance = emu(params["port"])<|fim▁hole|># get network info
emu_instance.get_network_info()
while not hasattr(emu_instance, 'NetworkInfo'):
time.sleep(10)
macid = emu_instance.NetworkInfo.DeviceMacId
c = get_client(agent=params["agent"], entity=params["entity"])
PONUM = (2,0,9,1)
baseuri = params["baseuri"]
signaluri = "{0}/s.emu2/{1}/i.meter/signal/meter".format(baseuri, macid)
print ">",signaluri
def send_message(msg):
"""
msg has keys:
current_demand
current_price
current_tier
current_summation_delivered
current_summation_received
"""
po = PayloadObject(PONUM, None, msgpack.packb(msg))
c.publish(signaluri, payload_objects=(po,))
msg = {}
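# Main polling loop: every 10 seconds ask the EMU-2 for the current summation, instantaneous
# demand and price, then publish whatever readings have arrived on the XBOS signal URI.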
while True:
#print emu_instance.get_instantaneous_demand()
emu_instance.get_current_summation_delivered()
emu_instance.get_instantaneous_demand('Y')
emu_instance.get_current_price('Y')
time.sleep(10)
msg['current_time'] = time.time()#int(pc.TimeStamp) + 00:00:00 1 Jan 2000
# handle PriceCluster
if hasattr(emu_instance, "PriceCluster"):
pc = emu_instance.PriceCluster
print dir(emu_instance.PriceCluster)
msg['current_price'] = float(int(pc.Price, 16)) / (10**int(pc.TrailingDigits,16))
msg['current_tier'] = int(pc.Tier, 16)
# handle demand
if hasattr(emu_instance, "InstantaneousDemand"):
d = emu_instance.InstantaneousDemand
msg['current_demand'] = int(d.Demand, 16)
print dir(emu_instance)
# handle current summation
if hasattr(emu_instance, "CurrentSummationDelivered"):
d = emu_instance.CurrentSummationDelivered
multiplier = int(d.Multiplier, 16)
divisor = float(int(d.Divisor, 16))
msg['current_summation_delivered'] = int(d.SummationDelivered, 16) * multiplier / divisor
msg['current_summation_received'] = int(d.SummationReceived, 16) * multiplier / divisor
send_message(msg)
emu_instance.stop_serial()<|fim▁end|> | emu_instance.start_serial()
|
<|file_name|>sfsetup.js<|end_file_name|><|fim▁begin|>"use strict";
var async = require('async');
var fs = require('fs');
var util = require('util');
var prompt = require('prompt');
var httpRequest = require('emsoap').subsystems.httpRequest;
var common = require('./common');
var mms = require('./mms');
var mmscmd = require('./mmscmd');
var deploy = require('./deploy');
var session; // MMS session
var modelFile = "sfmodel.json";
var externalSystemType = 'NODEX';
var externalSystem;
var accessAddress;
var credentials;
var mappedObjects;
var verboseLoggingForExternalSystem;
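// afterAuth: runs once the external system has been authorised via OAuth. It normalises the
// mapped-object definitions, asks MMS to build the Salesforce model (createSfModel) and then
// deploys the generated model file.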
function afterAuth(cb) {
// munge mappedObjects as required
for (var name in mappedObjects) {
var map = mappedObjects[name];
if (!map.typeBindingProperties) {
map.typeBindingProperties = {};
for (var propName in map) {
switch(propName) {
case "target":
case "properties":
;
default:
map.typeBindingProperties[name] = map[name];
}
}
}
}
// invoke op to create model
session.directive(
{
op : "INVOKE",
targetType: "CdmExternalSystem",
name: "invokeExternal",
params: {
externalSystem: externalSystem,
opName : "createSfModel",
params : {
sfVersion : credentials.sfVersion,
externalSystem : externalSystem,
typeDescs : mappedObjects
}
}
},
function (err, result) {
if (err) {
return cb(err);
}
fs.writeFileSync(modelFile, JSON.stringify(result.results, null, 2));
mmscmd.execFile(modelFile,session, deploy.outputWriter, cb);
}
);
}
exports.deployModel = function deployModel(externalSystemName,mmsSession,cb) {
session = mmsSession;
externalSystem = externalSystemName;
var text;
if(!session.creds.externalCredentials) {
console.log("Profile must include externalCredentials");
process.exit(1);
}
credentials = session.creds.externalCredentials[externalSystemName];
if(!credentials) {
console.log("Profile does not provide externalCredentials for " + externalSystemName);
process.exit(1);
}
if(!credentials.oauthKey || !credentials.oauthSecret) {
        console.log("externalCredentials for " + externalSystemName + " must contain the oAuth key and secret.");
}
accessAddress = credentials.host;
try {<|fim▁hole|> }
try {
mappedObjects = JSON.parse(text);
} catch(err) {
console.log('Error parsing JSON in salesforce.json:' + err);
process.exit(1);
}
if(mappedObjects._verbose_logging_) {
verboseLoggingForExternalSystem = mappedObjects._verbose_logging_;
}
delete mappedObjects._verbose_logging_;
createExternalSystem(function(err) {
if (err) {
return cb(err);
}
var addr = common.global.session.creds.server + "/oauth/" + externalSystem + "/authenticate";
if (common.global.argv.nonInteractive) {
console.log("Note: what follows will fail unless Emotive has been authorized at " + addr);
afterAuth(cb);
}
else {
console.log("Please navigate to " + addr.underline + " with your browser");
prompt.start();
prompt.colors = false;
prompt.message = 'Press Enter when done';
prompt.delimiter = '';
var props = {
properties: {
q: {
description : ":"
}
}
}
prompt.get(props, function (err, result) {
if (err) {
return cb(err);
}
afterAuth(cb);
});
}
});
}
function createExternalSystem(cb) {
if (!session.creds.username)
{
console.log("session.creds.username was null");
process.exit(1);
}
if(verboseLoggingForExternalSystem) console.log('VERBOSE LOGGING IS ON FOR ' + externalSystem);
session.directive({
op: 'INVOKE',
targetType: 'CdmExternalSystem',
name: "updateOAuthExternalSystem",
params: {
name: externalSystem,
typeName: externalSystemType,
"oauthCredentials" : {
"oauthType": "salesforce",
"oauthKey": credentials.oauthKey,
"oauthSecret": credentials.oauthSecret
},
properties: {
proxyConfiguration: {verbose: verboseLoggingForExternalSystem, sfVersion: credentials.sfVersion},
globalPackageName : "sfProxy"
}
}
},
cb);
}<|fim▁end|> | text = fs.readFileSync("salesforce.json");
} catch(err) {
console.log('Error reading file salesforce.json:' + err);
process.exit(1); |
<|file_name|>clean.rs<|end_file_name|><|fim▁begin|>extern crate hamcrest;
extern crate cargotest;
use std::env;
use cargotest::support::{git, project, execs, main_file, basic_bin_manifest};
use cargotest::support::registry::Package;
use hamcrest::{assert_that, existing_dir, existing_file, is_not};
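// Integration tests for `cargo clean`: each test builds a sample project and then checks
// which artifacts `cargo clean` (optionally with `-p` or `--release`) removes.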
#[test]
fn cargo_clean_simple() {
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]));
assert_that(p.cargo_process("build"), execs().with_status(0));
assert_that(&p.build_dir(), existing_dir());
assert_that(p.cargo("clean"),
execs().with_status(0));
assert_that(&p.build_dir(), is_not(existing_dir()));
}
#[test]
fn different_dir() {
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.file("src/bar/a.rs", "");
assert_that(p.cargo_process("build"), execs().with_status(0));
assert_that(&p.build_dir(), existing_dir());
assert_that(p.cargo("clean").cwd(&p.root().join("src")),
execs().with_status(0).with_stdout(""));
assert_that(&p.build_dir(), is_not(existing_dir()));
}
#[test]
fn clean_multiple_packages() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies.d1]
path = "d1"
[dependencies.d2]
path = "d2"
[[bin]]
name = "foo"
"#)
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.file("d1/Cargo.toml", r#"
[package]
name = "d1"
version = "0.0.1"
authors = []
[[bin]]
name = "d1"
"#)
.file("d1/src/main.rs", "fn main() { println!(\"d1\"); }")
.file("d2/Cargo.toml", r#"
[package]
name = "d2"
version = "0.0.1"
authors = []
[[bin]]
name = "d2"
"#)
.file("d2/src/main.rs", "fn main() { println!(\"d2\"); }");
p.build();
assert_that(p.cargo_process("build").arg("-p").arg("d1").arg("-p").arg("d2")
.arg("-p").arg("foo"),
execs().with_status(0));
let d1_path = &p.build_dir().join("debug").join("deps")
.join(format!("d1{}", env::consts::EXE_SUFFIX));
let d2_path = &p.build_dir().join("debug").join("deps")
.join(format!("d2{}", env::consts::EXE_SUFFIX));
assert_that(&p.bin("foo"), existing_file());
assert_that(d1_path, existing_file());
assert_that(d2_path, existing_file());
assert_that(p.cargo("clean").arg("-p").arg("d1").arg("-p").arg("d2")
.cwd(&p.root().join("src")),
execs().with_status(0).with_stdout(""));
assert_that(&p.bin("foo"), existing_file());
assert_that(d1_path, is_not(existing_file()));
assert_that(d2_path, is_not(existing_file()));
}
#[test]
fn clean_release() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies]
a = { path = "a" }
"#)
.file("src/main.rs", "fn main() {}")
.file("a/Cargo.toml", r#"
[package]
name = "a"
version = "0.0.1"
authors = []
"#)
.file("a/src/lib.rs", "");
p.build();
assert_that(p.cargo_process("build").arg("--release"),
execs().with_status(0));
assert_that(p.cargo("clean").arg("-p").arg("foo"),
execs().with_status(0));
assert_that(p.cargo("build").arg("--release"),
execs().with_status(0).with_stdout(""));
assert_that(p.cargo("clean").arg("-p").arg("foo").arg("--release"),
execs().with_status(0));
assert_that(p.cargo("build").arg("--release"),
execs().with_status(0).with_stderr("\
[COMPILING] foo v0.0.1 ([..])
[FINISHED] release [optimized] target(s) in [..]
"));
}
#[test]
fn build_script() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
build = "build.rs"
"#)
.file("src/main.rs", "fn main() {}")
.file("build.rs", r#"
use std::path::PathBuf;
use std::env;
fn main() {
let out = PathBuf::from(env::var_os("OUT_DIR").unwrap());
if env::var("FIRST").is_ok() {
std::fs::File::create(out.join("out")).unwrap();
} else {
assert!(!std::fs::metadata(out.join("out")).is_ok());
}
}
"#)
.file("a/src/lib.rs", "");
p.build();
assert_that(p.cargo_process("build").env("FIRST", "1"),
execs().with_status(0));
assert_that(p.cargo("clean").arg("-p").arg("foo"),
execs().with_status(0));
assert_that(p.cargo("build").arg("-v"),
execs().with_status(0).with_stderr("\
[COMPILING] foo v0.0.1 ([..])
[RUNNING] `rustc build.rs [..]`
[RUNNING] `[..]build-script-build`
[RUNNING] `rustc src[/]main.rs [..]`
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..]
"));
}
#[test]
fn clean_git() {
let git = git::new("dep", |project| {
project.file("Cargo.toml", r#"
[project]
name = "dep"
version = "0.5.0"
authors = []
"#)
.file("src/lib.rs", "")
}).unwrap();
let p = project("foo")
.file("Cargo.toml", &format!(r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies]
dep = {{ git = '{}' }}
"#, git.url()))
.file("src/main.rs", "fn main() {}");
p.build();
assert_that(p.cargo_process("build"),
execs().with_status(0));
assert_that(p.cargo("clean").arg("-p").arg("dep"),
execs().with_status(0).with_stdout(""));
assert_that(p.cargo("build"),
execs().with_status(0));
}
#[test]
fn registry() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies]
bar = "0.1"
"#)
.file("src/main.rs", "fn main() {}");
p.build();<|fim▁hole|>
assert_that(p.cargo_process("build"),
execs().with_status(0));
assert_that(p.cargo("clean").arg("-p").arg("bar"),
execs().with_status(0).with_stdout(""));
assert_that(p.cargo("build"),
execs().with_status(0));
}<|fim▁end|> |
Package::new("bar", "0.1.0").publish(); |
<|file_name|>dnsutils.py<|end_file_name|><|fim▁begin|># Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Author: Endre Karlson <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import random
import socket
import base64
import time
from threading import Lock
import six
import dns
import dns.exception
import dns.zone
import eventlet
from dns import rdatatype
from oslo_log import log as logging
from oslo_config import cfg
from designate import context
from designate import exceptions
from designate import objects
from designate.i18n import _LE
from designate.i18n import _LI
LOG = logging.getLogger(__name__)
util_opts = [
cfg.IntOpt('xfr_timeout', help="Timeout in seconds for XFR's.", default=10)<|fim▁hole|>]
class DNSMiddleware(object):
"""Base DNS Middleware class with some utility methods"""
def __init__(self, application):
self.application = application
def process_request(self, request):
"""Called on each request.
If this returns None, the next application down the stack will be
executed. If it returns a response then that response will be returned
and execution will stop here.
"""
return None
def process_response(self, response):
"""Do whatever you'd like to the response."""
return response
def __call__(self, request):
response = self.process_request(request)
if response:
return response
response = self.application(request)
return self.process_response(response)
def _build_error_response(self):
response = dns.message.make_response(
dns.message.make_query('unknown', dns.rdatatype.A))
response.set_rcode(dns.rcode.FORMERR)
return response
class SerializationMiddleware(DNSMiddleware):
"""DNS Middleware to serialize/deserialize DNS Packets"""
def __init__(self, application, tsig_keyring=None):
self.application = application
self.tsig_keyring = tsig_keyring
def __call__(self, request):
# Generate the initial context. This may be updated by other middleware
# as we learn more information about the Request.
ctxt = context.DesignateContext.get_admin_context(all_tenants=True)
try:
message = dns.message.from_wire(request['payload'],
self.tsig_keyring)
if message.had_tsig:
LOG.debug('Request signed with TSIG key: %s', message.keyname)
# Create + Attach the initial "environ" dict. This is similar to
# the environ dict used in typical WSGI middleware.
message.environ = {
'context': ctxt,
'addr': request['addr'],
}
except dns.message.UnknownTSIGKey:
LOG.error(_LE("Unknown TSIG key from %(host)s:"
"%(port)d") % {'host': request['addr'][0],
'port': request['addr'][1]})
response = self._build_error_response()
except dns.tsig.BadSignature:
LOG.error(_LE("Invalid TSIG signature from %(host)s:"
"%(port)d") % {'host': request['addr'][0],
'port': request['addr'][1]})
response = self._build_error_response()
except dns.exception.DNSException:
LOG.error(_LE("Failed to deserialize packet from %(host)s:"
"%(port)d") % {'host': request['addr'][0],
'port': request['addr'][1]})
response = self._build_error_response()
except Exception:
LOG.exception(_LE("Unknown exception deserializing packet "
"from %(host)s %(port)d") %
{'host': request['addr'][0],
'port': request['addr'][1]})
response = self._build_error_response()
else:
# Hand the Deserialized packet onto the Application
for response in self.application(message):
# Serialize and return the response if present
if isinstance(response, dns.message.Message):
yield response.to_wire(max_size=65535)
elif isinstance(response, dns.renderer.Renderer):
yield response.get_wire()
class TsigInfoMiddleware(DNSMiddleware):
"""Middleware which looks up the information available for a TsigKey"""
def __init__(self, application, storage):
super(TsigInfoMiddleware, self).__init__(application)
self.storage = storage
def process_request(self, request):
if not request.had_tsig:
return None
try:
criterion = {'name': request.keyname.to_text(True)}
tsigkey = self.storage.find_tsigkey(
context.get_current(), criterion)
request.environ['tsigkey'] = tsigkey
request.environ['context'].tsigkey_id = tsigkey.id
except exceptions.TsigKeyNotFound:
            # This should never happen, as we just validated the key...
            # except for race conditions.
return self._build_error_response()
return None
class TsigKeyring(object):
"""Implements the DNSPython KeyRing API, backed by the Designate DB"""
def __init__(self, storage):
self.storage = storage
def __getitem__(self, key):
return self.get(key)
def get(self, key, default=None):
try:
criterion = {'name': key.to_text(True)}
tsigkey = self.storage.find_tsigkey(
context.get_current(), criterion)
return base64.decodestring(tsigkey.secret)
except exceptions.TsigKeyNotFound:
return default
class ZoneLock(object):
"""A Lock across all zones that enforces a rate limit on NOTIFYs"""
def __init__(self, delay):
self.lock = Lock()
self.data = {}
self.delay = delay
def acquire(self, zone):
with self.lock:
# If no one holds the lock for the zone, grant it
if zone not in self.data:
self.data[zone] = time.time()
return True
# Otherwise, get the time that it was locked
locktime = self.data[zone]
now = time.time()
period = now - locktime
# If it has been locked for longer than the allowed period
# give the lock to the new requester
if period > self.delay:
self.data[zone] = now
return True
            LOG.debug('Lock for %(zone)s can\'t be released for %(period)s'
                      ' seconds' % {'zone': zone,
'period': str(self.delay - period)})
# Don't grant the lock for the zone
return False
def release(self, zone):
# Release the lock
with self.lock:
try:
self.data.pop(zone)
except KeyError:
pass
class LimitNotifyMiddleware(DNSMiddleware):
"""Middleware that rate limits NOTIFYs to the Agent"""
def __init__(self, application):
super(LimitNotifyMiddleware, self).__init__(application)
self.delay = cfg.CONF['service:agent'].notify_delay
self.locker = ZoneLock(self.delay)
def process_request(self, request):
opcode = request.opcode()
if opcode != dns.opcode.NOTIFY:
return None
zone_name = request.question[0].name.to_text()
if self.locker.acquire(zone_name):
time.sleep(self.delay)
self.locker.release(zone_name)
return None
else:
LOG.debug('Threw away NOTIFY for %(zone)s, already '
'working on an update.' % {'zone': zone_name})
response = dns.message.make_response(request)
# Provide an authoritative answer
response.flags |= dns.flags.AA
return (response,)
def from_dnspython_zone(dnspython_zone):
# dnspython never builds a zone with more than one SOA, even if we give
# it a zonefile that contains more than one
soa = dnspython_zone.get_rdataset(dnspython_zone.origin, 'SOA')
if soa is None:
raise exceptions.BadRequest('An SOA record is required')
email = soa[0].rname.to_text().rstrip('.')
email = email.replace('.', '@', 1)
values = {
'name': dnspython_zone.origin.to_text(),
'email': email,
'ttl': soa.ttl,
'serial': soa[0].serial,
'retry': soa[0].retry,
'expire': soa[0].expire
}
zone = objects.Domain(**values)
rrsets = dnspyrecords_to_recordsetlist(dnspython_zone.nodes)
zone.recordsets = rrsets
return zone
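# Example (sketch): turning the result of an AXFR into a Designate domain object:
#     raw_zone = do_axfr('example.com.', servers)
#     zone = from_dnspython_zone(raw_zone)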
def dnspyrecords_to_recordsetlist(dnspython_records):
rrsets = objects.RecordList()
for rname in six.iterkeys(dnspython_records):
for rdataset in dnspython_records[rname]:
rrset = dnspythonrecord_to_recordset(rname, rdataset)
if rrset is None:
continue
rrsets.append(rrset)
return rrsets
def dnspythonrecord_to_recordset(rname, rdataset):
record_type = rdatatype.to_text(rdataset.rdtype)
# Create the other recordsets
values = {
'name': rname.to_text(),
'type': record_type
}
if rdataset.ttl != 0:
values['ttl'] = rdataset.ttl
rrset = objects.RecordSet(**values)
rrset.records = objects.RecordList()
for rdata in rdataset:
rr = objects.Record(data=rdata.to_text())
rrset.records.append(rr)
return rrset
def bind_tcp(host, port, tcp_backlog):
# Bind to the TCP port
LOG.info(_LI('Opening TCP Listening Socket on %(host)s:%(port)d') %
{'host': host, 'port': port})
sock_tcp = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock_tcp.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock_tcp.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
# NOTE: Linux supports socket.SO_REUSEPORT only in 3.9 and later releases.
try:
sock_tcp.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
except Exception:
pass
sock_tcp.setblocking(True)
sock_tcp.bind((host, port))
sock_tcp.listen(tcp_backlog)
return sock_tcp
def bind_udp(host, port):
# Bind to the UDP port
LOG.info(_LI('Opening UDP Listening Socket on %(host)s:%(port)d') %
{'host': host, 'port': port})
sock_udp = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock_udp.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
# NOTE: Linux supports socket.SO_REUSEPORT only in 3.9 and later releases.
try:
sock_udp.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
except Exception:
pass
sock_udp.setblocking(True)
sock_udp.bind((host, port))
return sock_udp
def do_axfr(zone_name, servers, timeout=None, source=None):
"""
Performs an AXFR for a given zone name
"""
random.shuffle(servers)
timeout = timeout or cfg.CONF["service:mdns"].xfr_timeout
xfr = None
for srv in servers:
to = eventlet.Timeout(timeout)
log_info = {'name': zone_name, 'host': srv}
try:
LOG.info(_LI("Doing AXFR for %(name)s from %(host)s") % log_info)
xfr = dns.query.xfr(srv['host'], zone_name, relativize=False,
timeout=1, port=srv['port'], source=source)
raw_zone = dns.zone.from_xfr(xfr, relativize=False)
break
except eventlet.Timeout as t:
if t == to:
msg = _LE("AXFR timed out for %(name)s from %(host)s")
LOG.error(msg % log_info)
continue
except dns.exception.FormError:
            msg = _LE("Domain %(name)s is not present on %(host)s. "
                      "Trying next server.")
LOG.error(msg % log_info)
except socket.error:
msg = _LE("Connection error when doing AXFR for %(name)s from "
"%(host)s")
LOG.error(msg % log_info)
except Exception:
msg = _LE("Problem doing AXFR %(name)s from %(host)s. "
"Trying next server.")
LOG.exception(msg % log_info)
finally:
to.cancel()
continue
else:
        msg = _LE("XFR failed for %(name)s. None of the servers in "
                  "%(servers)s could be reached.")
raise exceptions.XFRFailure(
msg % {"name": zone_name, "servers": servers})
LOG.debug("AXFR Successful for %s" % raw_zone.origin.to_text())
return raw_zone<|fim▁end|> | |
<|file_name|>obj-hash.js<|end_file_name|><|fim▁begin|>import Ember from 'ember';
export default Ember.Object.extend({
content: {},
contentLength: 0,
add: function(obj) {
var id = this.generateId();
this.get('content')[id] = obj;
this.incrementProperty("contentLength");
return id;
},
getObj: function(key) {
var res = this.get('content')[key];
if (!res) {<|fim▁hole|> throw "no obj for key "+key;
}
return res;
},
generateId: function() {
var num = Math.random() * 1000000000000.0;
num = parseInt(num);
num = ""+num;
return num;
},
keys: function() {
var res = [];
for (var key in this.get('content')) {
res.push(key);
}
return Ember.A(res);
},
lengthBinding: "contentLength"
});<|fim▁end|> | |
<|file_name|>union_with_anon_unnamed_union_1_0.rs<|end_file_name|><|fim▁begin|>#![allow(
dead_code,
non_snake_case,
non_camel_case_types,
non_upper_case_globals
)]
#[repr(C)]
pub struct __BindgenUnionField<T>(::std::marker::PhantomData<T>);
impl<T> __BindgenUnionField<T> {
#[inline]
pub fn new() -> Self {
__BindgenUnionField(::std::marker::PhantomData)
}
#[inline]
pub unsafe fn as_ref(&self) -> &T {
::std::mem::transmute(self)
}
#[inline]
pub unsafe fn as_mut(&mut self) -> &mut T {
::std::mem::transmute(self)
}
}
impl<T> ::std::default::Default for __BindgenUnionField<T> {
#[inline]
fn default() -> Self {
Self::new()
}
}
impl<T> ::std::clone::Clone for __BindgenUnionField<T> {
#[inline]
fn clone(&self) -> Self {
Self::new()
}
}
impl<T> ::std::marker::Copy for __BindgenUnionField<T> {}
impl<T> ::std::fmt::Debug for __BindgenUnionField<T> {
fn fmt(&self, fmt: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
fmt.write_str("__BindgenUnionField")
}
}
impl<T> ::std::hash::Hash for __BindgenUnionField<T> {
fn hash<H: ::std::hash::Hasher>(&self, _state: &mut H) {}
}
impl<T> ::std::cmp::PartialEq for __BindgenUnionField<T> {
fn eq(&self, _other: &__BindgenUnionField<T>) -> bool {
true
}
}
impl<T> ::std::cmp::Eq for __BindgenUnionField<T> {}
#[repr(C)]
#[derive(Debug, Default, Copy, Hash, PartialEq, Eq)]
pub struct foo {
pub a: __BindgenUnionField<::std::os::raw::c_uint>,
pub __bindgen_anon_1: __BindgenUnionField<foo__bindgen_ty_1>,
pub bindgen_union_field: u32,
}
#[repr(C)]
#[derive(Debug, Default, Copy, Hash, PartialEq, Eq)]
pub struct foo__bindgen_ty_1 {
pub b: __BindgenUnionField<::std::os::raw::c_ushort>,
pub c: __BindgenUnionField<::std::os::raw::c_uchar>,
pub bindgen_union_field: u16,
}
#[test]
fn bindgen_test_layout_foo__bindgen_ty_1() {
assert_eq!(
::std::mem::size_of::<foo__bindgen_ty_1>(),
2usize,
concat!("Size of: ", stringify!(foo__bindgen_ty_1))
);
assert_eq!(
::std::mem::align_of::<foo__bindgen_ty_1>(),
2usize,
concat!("Alignment of ", stringify!(foo__bindgen_ty_1))
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<foo__bindgen_ty_1>())).b as *const _ as usize
},
0usize,
concat!(
"Offset of field: ",
stringify!(foo__bindgen_ty_1),
"::",
stringify!(b)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<foo__bindgen_ty_1>())).c as *const _ as usize
},
0usize,
concat!(
"Offset of field: ",
stringify!(foo__bindgen_ty_1),<|fim▁hole|> stringify!(c)
)
);
}
impl Clone for foo__bindgen_ty_1 {
fn clone(&self) -> Self {
*self
}
}
#[test]
fn bindgen_test_layout_foo() {
assert_eq!(
::std::mem::size_of::<foo>(),
4usize,
concat!("Size of: ", stringify!(foo))
);
assert_eq!(
::std::mem::align_of::<foo>(),
4usize,
concat!("Alignment of ", stringify!(foo))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<foo>())).a as *const _ as usize },
0usize,
concat!("Offset of field: ", stringify!(foo), "::", stringify!(a))
);
}
impl Clone for foo {
fn clone(&self) -> Self {
*self
}
}<|fim▁end|> | "::", |
<|file_name|>about.js<|end_file_name|><|fim▁begin|><|fim▁hole|>
$(d.body).html(repl("<div>\
<p>"+__("Open Source Applications for the Web")+"</p> \
<p><i class='fa fa-globe fa-fw'></i>\
Website: <a href='https://frappe.io' target='_blank'>https://frappe.io</a></p>\
<p><i class='fa fa-github fa-fw'></i>\
Source: <a href='https://github.com/frappe' target='_blank'>https://github.com/frappe</a></p>\
<hr>\
<h4>Installed Apps</h4>\
<div id='about-app-versions'>Loading versions...</div>\
<hr>\
<p class='text-muted'>© Frappe Technologies Pvt. Ltd and contributors </p> \
</div>", frappe.app));
frappe.ui.misc.about_dialog = d;
frappe.ui.misc.about_dialog.on_page_show = function() {
if(!frappe.versions) {
frappe.call({
method: "frappe.utils.change_log.get_versions",
callback: function(r) {
show_versions(r.message);
}
})
}
};
var show_versions = function(versions) {
var $wrap = $("#about-app-versions").empty();
$.each(Object.keys(versions).sort(), function(i, key) {
var v = versions[key];
if(v.branch) {
var text = $.format('<p><b>{0}:</b> v{1} ({2})<br></p>',
[v.title, v.branch_version || v.version, v.branch])
} else {
var text = $.format('<p><b>{0}:</b> v{1}<br></p>',
[v.title, v.version])
}
$(text).appendTo($wrap);
});
frappe.versions = versions;
}
}
frappe.ui.misc.about_dialog.show();
}<|fim▁end|> | frappe.provide('frappe.ui.misc');
frappe.ui.misc.about = function() {
if(!frappe.ui.misc.about_dialog) {
var d = new frappe.ui.Dialog({title: __('Frappe Framework')}); |
<|file_name|>MovingObject.py<|end_file_name|><|fim▁begin|>__author__ = 'jmoran'
from Asteroids import Object
class MovingObject(Object):
def __init__(self, window, game, init_point, slope):
Object.__init__(self, window, game)<|fim▁hole|> self.point = init_point
self.slope = slope<|fim▁end|> | |
<|file_name|>register.go<|end_file_name|><|fim▁begin|>package v1
import (<|fim▁hole|>)
var (
GroupName = "operator.openshift.io"
GroupVersion = schema.GroupVersion{Group: GroupName, Version: "v1"}
schemeBuilder = runtime.NewSchemeBuilder(addKnownTypes, configv1.Install)
// Install is a function which adds this version to a scheme
Install = schemeBuilder.AddToScheme
// SchemeGroupVersion generated code relies on this name
// Deprecated
SchemeGroupVersion = GroupVersion
// AddToScheme exists solely to keep the old generators creating valid code
// DEPRECATED
AddToScheme = schemeBuilder.AddToScheme
)
// Resource generated code relies on this being here, but it logically belongs to the group
// DEPRECATED
func Resource(resource string) schema.GroupResource {
return schema.GroupResource{Group: GroupName, Resource: resource}
}
func addKnownTypes(scheme *runtime.Scheme) error {
metav1.AddToGroupVersion(scheme, GroupVersion)
scheme.AddKnownTypes(GroupVersion,
&Authentication{},
&AuthenticationList{},
&DNS{},
&DNSList{},
&CloudCredential{},
&CloudCredentialList{},
&ClusterCSIDriver{},
&ClusterCSIDriverList{},
&Console{},
&ConsoleList{},
&CSISnapshotController{},
&CSISnapshotControllerList{},
&Etcd{},
&EtcdList{},
&KubeAPIServer{},
&KubeAPIServerList{},
&KubeControllerManager{},
&KubeControllerManagerList{},
&KubeScheduler{},
&KubeSchedulerList{},
&KubeStorageVersionMigrator{},
&KubeStorageVersionMigratorList{},
&Network{},
&NetworkList{},
&OpenShiftAPIServer{},
&OpenShiftAPIServerList{},
&OpenShiftControllerManager{},
&OpenShiftControllerManagerList{},
&ServiceCA{},
&ServiceCAList{},
&ServiceCatalogAPIServer{},
&ServiceCatalogAPIServerList{},
&ServiceCatalogControllerManager{},
&ServiceCatalogControllerManagerList{},
&IngressController{},
&IngressControllerList{},
&Storage{},
&StorageList{},
)
return nil
}<|fim▁end|> | configv1 "github.com/openshift/api/config/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/runtime/schema" |
<|file_name|>request_utils.py<|end_file_name|><|fim▁begin|>import uuid
from flask import g
# Utility method to get source_ip from a request - first checks headers for forwarded IP, then uses remote_addr if not<|fim▁hole|>def get_source_ip(my_request):
try:
# First check for an X-Forwarded-For header provided by a proxy / router e.g. on Heroku
source_ip = my_request.headers['X-Forwarded-For']
except KeyError:
try:
            # Next, check for the X-Client-IP header, which some proxies / routers set instead
source_ip = my_request.headers['X-Client-IP']
except KeyError:
# If that header is not present, attempt to get the Source IP address from the request itself
source_ip = my_request.remote_addr
g.source_ip = source_ip
return source_ip
# Utility method to get the request_id from the X-Request-Id header, and if not present generate one
def get_request_id(my_request):
try:
request_id = my_request.headers['X-Request-Id']
except KeyError:
request_id = str(uuid.uuid4())
g.request_id = request_id
return request_id
# Utility method which takes a dict of request parameters and writes them out as pipe-delimited key=value pairs
def dict_to_piped_kv_pairs(dict_for_conversion):
output_string = ""
for key, value in sorted(dict_for_conversion.items()):
output_string += "{0}={1}|".format(key, value)
return output_string<|fim▁end|> | |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use chrono::{offset::Utc, DateTime};
use diesel::{self, pg::PgConnection};
use serde_json::Value;
use sql_types::{FollowPolicy, Url};
pub mod follow_request;
pub mod follower;
pub mod group;
pub mod persona;
use self::follower::Follower;
use schema::base_actors;
use user::UserLike;
#[derive(Debug, AsChangeset)]
#[table_name = "base_actors"]
pub struct ModifiedBaseActor {
id: i32,
display_name: String,
profile_url: Url,
inbox_url: Url,
outbox_url: Url,
follow_policy: FollowPolicy,
original_json: Value,
}
impl ModifiedBaseActor {
pub fn set_display_name(&mut self, display_name: String) {
self.display_name = display_name;
}
pub fn set_profile_url<U: Into<Url>>(&mut self, profile_url: U) {
self.profile_url = profile_url.into();<|fim▁hole|> pub fn set_inbox_url<U: Into<Url>>(&mut self, inbox_url: U) {
self.inbox_url = inbox_url.into();
}
pub fn set_outbox_url<U: Into<Url>>(&mut self, outbox_url: U) {
self.outbox_url = outbox_url.into();
}
pub fn set_follow_policy(&mut self, follow_policy: FollowPolicy) {
self.follow_policy = follow_policy;
}
pub fn save_changes(self, conn: &PgConnection) -> Result<BaseActor, diesel::result::Error> {
use diesel::prelude::*;
diesel::update(base_actors::table)
.set(&self)
.get_result(conn)
}
}
#[derive(Debug, Queryable, QueryableByName)]
#[table_name = "base_actors"]
pub struct BaseActor {
id: i32,
display_name: String, // max_length: 80
profile_url: Url, // max_length: 2048
inbox_url: Url, // max_length: 2048
outbox_url: Url, // max_length: 2048
local_user: Option<i32>, // foreign key to User
follow_policy: FollowPolicy, // max_length: 8
original_json: Value, // original json
created_at: DateTime<Utc>,
updated_at: DateTime<Utc>,
}
impl BaseActor {
pub fn id(&self) -> i32 {
self.id
}
pub fn modify(self) -> ModifiedBaseActor {
ModifiedBaseActor {
id: self.id,
display_name: self.display_name,
profile_url: self.profile_url,
inbox_url: self.inbox_url,
outbox_url: self.outbox_url,
follow_policy: self.follow_policy,
original_json: self.original_json,
}
}
pub fn is_following(
&self,
follows: &BaseActor,
conn: &PgConnection,
) -> Result<bool, diesel::result::Error> {
self.is_following_id(follows.id, conn)
}
pub fn is_following_id(
&self,
follows: i32,
conn: &PgConnection,
) -> Result<bool, diesel::result::Error> {
use diesel::prelude::*;
use schema::followers;
followers::table
.filter(followers::dsl::follower.eq(self.id))
.filter(followers::dsl::follows.eq(follows))
.get_result(conn)
.map(|_: Follower| true)
.or_else(|e| match e {
diesel::result::Error::NotFound => Ok(false),
e => Err(e),
})
}
pub fn display_name(&self) -> &str {
&self.display_name
}
pub fn profile_url(&self) -> &Url {
&self.profile_url
}
pub fn inbox_url(&self) -> &Url {
&self.inbox_url
}
pub fn outbox_url(&self) -> &Url {
&self.outbox_url
}
pub fn local_user(&self) -> Option<i32> {
self.local_user
}
pub fn follow_policy(&self) -> FollowPolicy {
self.follow_policy
}
pub fn original_json(&self) -> &Value {
&self.original_json
}
}
#[derive(Insertable)]
#[table_name = "base_actors"]
pub struct NewBaseActor {
display_name: String,
profile_url: Url,
inbox_url: Url,
outbox_url: Url,
local_user: Option<i32>,
follow_policy: FollowPolicy,
original_json: Value,
}
impl NewBaseActor {
pub fn insert(self, conn: &PgConnection) -> Result<BaseActor, diesel::result::Error> {
use diesel::prelude::*;
diesel::insert_into(base_actors::table)
.values(&self)
.get_result(conn)
}
pub fn new<U: UserLike>(
display_name: String,
profile_url: Url,
inbox_url: Url,
outbox_url: Url,
local_user: Option<&U>,
follow_policy: FollowPolicy,
original_json: Value,
) -> Self {
NewBaseActor {
display_name,
profile_url: profile_url.into(),
inbox_url: inbox_url.into(),
outbox_url: outbox_url.into(),
local_user: local_user.map(|lu| lu.id()),
follow_policy,
original_json,
}
}
}
#[cfg(test)]
mod tests {
use test_helper::*;
#[test]
fn create_base_actor() {
with_connection(|conn| with_base_actor(conn, |_| Ok(())))
}
}<|fim▁end|> | }
|
<|file_name|>test_bing_search.py<|end_file_name|><|fim▁begin|>from bing_search_api import BingSearchAPI <|fim▁hole|>my_key = "MEL5FOrb1H5G1E78YY8N5mkfcvUK2hNBYsZl1aAEEbE"
def query(query_string):
bing = BingSearchAPI(my_key)
params = {'ImageFilters':'"Face:Face"',
'$format': 'json',
'$top': 10,
'$skip': 0}
results = bing.search('web',query_string,params).json() # requests 1.0+
return [result['Url'] for result in results['d']['results'][0]['Web']]
if __name__ == "__main__":
query_string = "Your Query"
print query(query_string)<|fim▁end|> | |
<|file_name|>__openerp__.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
##############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or<|fim▁hole|>#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
##############################################################################
{
"name": "MRP - Product variants",
"version": "1.0",
"depends": [
"product",
"mrp",
"product_variants_no_automatic_creation",
"mrp_production_editable_scheduled_products",
],
"author": "OdooMRP team,"
"AvanzOSC,"
"Serv. Tecnol. Avanzados - Pedro M. Baeza",
"contributors": [
"Oihane Crucelaegui <[email protected]>",
"Pedro M. Baeza <[email protected]>",
"Ana Juaristi <[email protected]>",
],
"category": "Manufacturing",
"website": "http://www.odoomrp.com",
"summary": "Customized product in manufacturing",
"data": [
"security/ir.model.access.csv",
"views/mrp_production_view.xml",
"views/product_attribute_view.xml",
],
"installable": True,
"post_init_hook": "assign_product_template",
}<|fim▁end|> | # (at your option) any later version. |
<|file_name|>icons.component.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>@Component({
templateUrl: './icons.component.html',
styles: [`
.icons-list {
text-align: center;
}
.icons-list i {
font-size: 2em;
}
.icons-list .ui-md-2 {
padding-bottom: 2em;
}
`]
})
export class IconsComponent {
}<|fim▁end|> | import {Component} from '@angular/core';
|
<|file_name|>pvrow.py<|end_file_name|><|fim▁begin|>"""Module with classes related to PV row geometries"""
import numpy as np
from pvfactors.config import COLOR_DIC
from pvfactors.geometry.base import \
BaseSide, _coords_from_center_tilt_length, PVSegment
from shapely.geometry import GeometryCollection, LineString
from pvfactors.geometry.timeseries import \
TsShadeCollection, TsLineCoords, TsSurface
from pvlib.tools import cosd, sind
class TsPVRow(object):
"""Timeseries PV row class: this class is a vectorized version of the
PV row geometries. The coordinates and attributes (front and back sides)
are all vectorized."""
def __init__(self, ts_front_side, ts_back_side, xy_center, index=None,
full_pvrow_coords=None):
"""Initialize timeseries PV row with its front and back sides.
Parameters
----------
ts_front_side : :py:class:`~pvfactors.geometry.pvrow.TsSide`
Timeseries front side of the PV row
ts_back_side : :py:class:`~pvfactors.geometry.pvrow.TsSide`
Timeseries back side of the PV row
xy_center : tuple of float
x and y coordinates of the PV row center point (invariant)
index : int, optional
index of the PV row (Default = None)
full_pvrow_coords : \
:py:class:`~pvfactors.geometry.timeseries.TsLineCoords`, optional
Timeseries coordinates of the full PV row, end to end
(Default = None)
"""
self.front = ts_front_side
self.back = ts_back_side
self.xy_center = xy_center
self.index = index
self.full_pvrow_coords = full_pvrow_coords
@classmethod
def from_raw_inputs(cls, xy_center, width, rotation_vec,
cut, shaded_length_front, shaded_length_back,
index=None, param_names=None):
"""Create timeseries PV row using raw inputs.
Note: shading will always be zero when pv rows are flat.
Parameters
----------
xy_center : tuple of float
x and y coordinates of the PV row center point (invariant)
width : float
width of the PV rows [m]
rotation_vec : np.ndarray
Timeseries rotation values of the PV row [deg]
cut : dict
Discretization scheme of the PV row. Eg {'front': 2, 'back': 4}.
Will create segments of equal length on the designated sides.
shaded_length_front : np.ndarray
Timeseries values of front side shaded length [m]
shaded_length_back : np.ndarray
Timeseries values of back side shaded length [m]
index : int, optional
Index of the pv row (default = None)
param_names : list of str, optional
List of names of surface parameters to use when creating geometries
(Default = None)
Returns
-------
New timeseries PV row object
"""
# Calculate full pvrow coords
pvrow_coords = TsPVRow._calculate_full_coords(
xy_center, width, rotation_vec)
# Calculate normal vectors
dx = pvrow_coords.b2.x - pvrow_coords.b1.x
dy = pvrow_coords.b2.y - pvrow_coords.b1.y
normal_vec_front = np.array([-dy, dx])
# Calculate front side coords
ts_front = TsSide.from_raw_inputs(
xy_center, width, rotation_vec, cut.get('front', 1),
shaded_length_front, n_vector=normal_vec_front,
param_names=param_names)
# Calculate back side coords
ts_back = TsSide.from_raw_inputs(
xy_center, width, rotation_vec, cut.get('back', 1),
shaded_length_back, n_vector=-normal_vec_front,
param_names=param_names)
return cls(ts_front, ts_back, xy_center, index=index,
full_pvrow_coords=pvrow_coords)
@staticmethod
def _calculate_full_coords(xy_center, width, rotation):
"""Method to calculate the full PV row coordinaltes.
Parameters
----------
xy_center : tuple of float
x and y coordinates of the PV row center point (invariant)
width : float
width of the PV rows [m]
rotation : np.ndarray
Timeseries rotation values of the PV row [deg]
Returns
-------
coords: :py:class:`~pvfactors.geometry.timeseries.TsLineCoords`
Timeseries coordinates of full PV row
"""
x_center, y_center = xy_center
radius = width / 2.
# Calculate coords
x1 = radius * cosd(rotation + 180.) + x_center
y1 = radius * sind(rotation + 180.) + y_center
x2 = radius * cosd(rotation) + x_center
y2 = radius * sind(rotation) + y_center
coords = TsLineCoords.from_array(np.array([[x1, y1], [x2, y2]]))
return coords
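# Worked example added for illustration (not in the original module): with a
# hypothetical center point (0., 1.5), width 2. (radius 1.) and a rotation of
# 30 deg, the formulas above give approximately
#   b1 = (cos(210 deg), 1.5 + sin(210 deg)) ~ (-0.87, 1.0)
#   b2 = (cos(30 deg), 1.5 + sin(30 deg)) ~ (0.87, 2.0)
# i.e. the two ends of the tilted PV row, symmetric about its center point.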
def surfaces_at_idx(self, idx):
"""Get all PV surface geometries in timeseries PV row for a certain
index.
Parameters
----------
idx : int
Index to use to generate PV surface geometries
Returns
-------
list of :py:class:`~pvfactors.geometry.base.PVSurface` objects
List of PV surfaces
"""
pvrow = self.at(idx)
return pvrow.all_surfaces
def plot_at_idx(self, idx, ax, color_shaded=COLOR_DIC['pvrow_shaded'],
color_illum=COLOR_DIC['pvrow_illum'],
with_surface_index=False):
"""Plot timeseries PV row at a certain index.
Parameters
----------
idx : int
Index to use to plot timeseries PV rows
ax : :py:class:`matplotlib.pyplot.axes` object
Axes for plotting
color_shaded : str, optional
Color to use for plotting the shaded surfaces (Default =
COLOR_DIC['pvrow_shaded'])
color_illum : str, optional
Color to use for plotting the illuminated surfaces (Default =
COLOR_DIC['pvrow_illum'])
with_surface_index : bool, optional
Plot the surfaces with their index values (Default = False)
"""
pvrow = self.at(idx)
pvrow.plot(ax, color_shaded=color_shaded,
color_illum=color_illum, with_index=with_surface_index)
def at(self, idx):
"""Generate a PV row geometry for the desired index.
Parameters
----------
idx : int
Index to use to generate PV row geometry
Returns
-------
pvrow : :py:class:`~pvfactors.geometry.pvrow.PVRow`
"""
front_geom = self.front.at(idx)
back_geom = self.back.at(idx)
original_line = LineString(
self.full_pvrow_coords.as_array[:, :, idx])
pvrow = PVRow(front_side=front_geom, back_side=back_geom,
index=self.index, original_linestring=original_line)
return pvrow
def update_params(self, new_dict):
"""Update timeseries surface parameters of the PV row.
Parameters
----------
new_dict : dict
Parameters to add or update for the surfaces
"""
self.front.update_params(new_dict)
self.back.update_params(new_dict)
@property
def n_ts_surfaces(self):
"""Number of timeseries surfaces in the ts PV row"""
return self.front.n_ts_surfaces + self.back.n_ts_surfaces
@property
def all_ts_surfaces(self):
"""List of all timeseries surfaces"""
return self.front.all_ts_surfaces + self.back.all_ts_surfaces
@property
def centroid(self):
"""Centroid point of the timeseries pv row"""
centroid = (self.full_pvrow_coords.centroid
if self.full_pvrow_coords is not None else None)
return centroid
@property
def length(self):
"""Length of both sides of the timeseries PV row"""
return self.front.length + self.back.length
@property
def highest_point(self):
"""Timeseries point coordinates of highest point of PV row"""
high_pt = (self.full_pvrow_coords.highest_point
if self.full_pvrow_coords is not None else None)
return high_pt
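# Illustrative usage sketch (added, not part of the original module). The
# argument values below are hypothetical and only show the typical flow of
# building a timeseries PV row and extracting a static geometry at one step:
#
#   ts_pvrow = TsPVRow.from_raw_inputs(
#       xy_center=(0., 1.5), width=2.,
#       rotation_vec=np.array([20., -30.]),
#       cut={'front': 1, 'back': 2},
#       shaded_length_front=np.array([0., 0.4]),
#       shaded_length_back=np.array([0.3, 0.]))
#   pvrow_at_first_step = ts_pvrow.at(0)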
class TsSide(object):
"""Timeseries side class: this class is a vectorized version of the
BaseSide geometries. The coordinates and attributes (list of segments,
normal vector) are all vectorized."""
def __init__(self, segments, n_vector=None):
"""Initialize timeseries side using list of timeseries segments.
Parameters
----------
segments : list of :py:class:`~pvfactors.geometry.pvrow.TsSegment`
List of timeseries segments of the side
n_vector : np.ndarray, optional
Timeseries normal vectors of the side (Default = None)
"""
self.list_segments = segments
self.n_vector = n_vector
@classmethod
def from_raw_inputs(cls, xy_center, width, rotation_vec, cut,
shaded_length, n_vector=None, param_names=None):
"""Create timeseries side using raw PV row inputs.
Note: shading will always be zero when PV rows are flat.
Parameters
----------
xy_center : tuple of float
x and y coordinates of the PV row center point (invariant)
width : float
width of the PV rows [m]
rotation_vec : np.ndarray
Timeseries rotation values of the PV row [deg]
cut : int
Discretization scheme of the PV side.
Will create segments of equal length.
shaded_length : np.ndarray
Timeseries values of side shaded length from lowest point [m]
n_vector : np.ndarray, optional
Timeseries normal vectors of the side
param_names : list of str, optional
List of names of surface parameters to use when creating geometries
(Default = None)
Returns
-------
New timeseries side object
"""
mask_tilted_to_left = rotation_vec >= 0
# Create Ts segments
x_center, y_center = xy_center
radius = width / 2.
segment_length = width / cut
is_not_flat = rotation_vec != 0.
# Calculate coords of shading point
r_shade = radius - shaded_length
x_sh = np.where(
mask_tilted_to_left,
r_shade * cosd(rotation_vec + 180.) + x_center,
r_shade * cosd(rotation_vec) + x_center)
y_sh = np.where(
mask_tilted_to_left,
r_shade * sind(rotation_vec + 180.) + y_center,
r_shade * sind(rotation_vec) + y_center)
# Calculate coords
list_segments = []
for i in range(cut):
# Calculate segment coords
r1 = radius - i * segment_length
r2 = radius - (i + 1) * segment_length
x1 = r1 * cosd(rotation_vec + 180.) + x_center
y1 = r1 * sind(rotation_vec + 180.) + y_center
x2 = r2 * cosd(rotation_vec + 180) + x_center
y2 = r2 * sind(rotation_vec + 180) + y_center
segment_coords = TsLineCoords.from_array(
np.array([[x1, y1], [x2, y2]]))
# Determine lowest and highest points of segment
x_highest = np.where(mask_tilted_to_left, x2, x1)
y_highest = np.where(mask_tilted_to_left, y2, y1)
x_lowest = np.where(mask_tilted_to_left, x1, x2)
y_lowest = np.where(mask_tilted_to_left, y1, y2)
# Calculate illum and shaded coords
x2_illum, y2_illum = x_highest, y_highest
x1_shaded, y1_shaded, x2_shaded, y2_shaded = \
x_lowest, y_lowest, x_lowest, y_lowest
mask_all_shaded = (y_sh > y_highest) & (is_not_flat)
mask_partial_shaded = (y_sh > y_lowest) & (~ mask_all_shaded) \
& (is_not_flat)
# Calculate second boundary point of shade
x2_shaded = np.where(mask_all_shaded, x_highest, x2_shaded)
x2_shaded = np.where(mask_partial_shaded, x_sh, x2_shaded)
y2_shaded = np.where(mask_all_shaded, y_highest, y2_shaded)
y2_shaded = np.where(mask_partial_shaded, y_sh, y2_shaded)
x1_illum = x2_shaded
y1_illum = y2_shaded
illum_coords = TsLineCoords.from_array(
np.array([[x1_illum, y1_illum], [x2_illum, y2_illum]]))
shaded_coords = TsLineCoords.from_array(
np.array([[x1_shaded, y1_shaded], [x2_shaded, y2_shaded]]))
# Create illuminated and shaded collections
is_shaded = False
illum = TsShadeCollection(
[TsSurface(illum_coords, n_vector=n_vector,
param_names=param_names, shaded=is_shaded)],
is_shaded)
is_shaded = True
shaded = TsShadeCollection(
[TsSurface(shaded_coords, n_vector=n_vector,
param_names=param_names, shaded=is_shaded)],
is_shaded)
# Create segment
segment = TsSegment(segment_coords, illum, shaded,
n_vector=n_vector, index=i)
list_segments.append(segment)
return cls(list_segments, n_vector=n_vector)
def surfaces_at_idx(self, idx):
"""Get all PV surface geometries in timeseries side for a certain
index.
Parameters
----------
idx : int
Index to use to generate PV surface geometries
Returns
-------
list of :py:class:`~pvfactors.geometry.base.PVSurface` objects
List of PV surfaces
"""
side_geom = self.at(idx)
return side_geom.all_surfaces
def at(self, idx):
"""Generate a side geometry for the desired index.
Parameters
----------
idx : int
Index to use to generate side geometry
Returns
-------
side : :py:class:`~pvfactors.geometry.base.BaseSide`
"""
list_geom_segments = []
for ts_seg in self.list_segments:
list_geom_segments.append(ts_seg.at(idx))
side = BaseSide(list_geom_segments)
return side
def plot_at_idx(self, idx, ax, color_shaded=COLOR_DIC['pvrow_shaded'],
color_illum=COLOR_DIC['pvrow_illum']):
"""Plot timeseries side at a certain index.
Parameters
----------
idx : int
Index to use to plot timeseries side
ax : :py:class:`matplotlib.pyplot.axes` object
Axes for plotting
color_shaded : str, optional
Color to use for plotting the shaded surfaces (Default =
COLOR_DIC['pvrow_shaded'])
color_illum : str, optional
Color to use for plotting the illuminated surfaces (Default =
COLOR_DIC['pvrow_illum'])
"""
side_geom = self.at(idx)
side_geom.plot(ax, color_shaded=color_shaded, color_illum=color_illum,
with_index=False)
@property
def shaded_length(self):
"""Timeseries shaded length of the side."""
length = 0.
for seg in self.list_segments:
length += seg.shaded.length
return length
@property
def length(self):
"""Timeseries length of side."""
length = 0.
for seg in self.list_segments:
length += seg.length
return length
def get_param_weighted(self, param):
"""Get timeseries parameter for the side, after weighting by
surface length.
Parameters
----------
param : str
Name of parameter
Returns
-------
np.ndarray
Weighted parameter values
"""
return self.get_param_ww(param) / self.length
def get_param_ww(self, param):
"""Get timeseries parameter from the side's surfaces with weight, i.e.
after multiplying by the surface lengths.
Parameters
----------
param: str
Surface parameter to return
Returns
-------
np.ndarray
Timeseries parameter values multiplied by weights
Raises
------
KeyError
if parameter name not in a surface parameters
"""
value = 0.
for seg in self.list_segments:
value += seg.get_param_ww(param)
return value
def update_params(self, new_dict):
"""Update timeseries surface parameters of the side.
Parameters
----------
new_dict : dict
Parameters to add or update for the surfaces
"""
for seg in self.list_segments:
seg.update_params(new_dict)
@property
def n_ts_surfaces(self):
"""Number of timeseries surfaces in the ts side"""
n_ts_surfaces = 0
for ts_segment in self.list_segments:
n_ts_surfaces += ts_segment.n_ts_surfaces
return n_ts_surfaces
@property
def all_ts_surfaces(self):
"""List of all timeseries surfaces"""
all_ts_surfaces = []
for ts_segment in self.list_segments:
all_ts_surfaces += ts_segment.all_ts_surfaces
return all_ts_surfaces
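# Note added for illustration (not in the original module): for a hypothetical
# surface parameter such as 'qinc', `get_param_ww('qinc')` returns the sum of
# value * surface length over the side, while `get_param_weighted('qinc')`
# divides that sum by the total side length, i.e. a length-weighted average.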
class TsSegment(object):
"""A TsSegment is a timeseries segment that has a timeseries shaded
collection and a timeseries illuminated collection."""
def __init__(self, coords, illum_collection, shaded_collection,
index=None, n_vector=None):
"""Initialize timeseries segment using segment coordinates and
timeseries illuminated and shaded surfaces.
Parameters
----------
coords : :py:class:`~pvfactors.geometry.timeseries.TsLineCoords`
Timeseries coordinates of full segment
illum_collection : \
:py:class:`~pvfactors.geometry.timeseries.TsShadeCollection`
Timeseries collection for illuminated part of segment
shaded_collection : \
:py:class:`~pvfactors.geometry.timeseries.TsShadeCollection`
Timeseries collection for shaded part of segment
index : int, optional
Index of segment (Default = None)
n_vector : np.ndarray, optional
Timeseries normal vectors of the side (Default = None)
"""
self.coords = coords
self.illum = illum_collection
self.shaded = shaded_collection
self.index = index
self.n_vector = n_vector
def surfaces_at_idx(self, idx):
"""Get all PV surface geometries in timeseries segment for a certain
index.
Parameters
----------
idx : int
Index to use to generate PV surface geometries
Returns
-------
list of :py:class:`~pvfactors.geometry.base.PVSurface` objects
List of PV surfaces
"""
segment = self.at(idx)
return segment.all_surfaces
def plot_at_idx(self, idx, ax, color_shaded=COLOR_DIC['pvrow_shaded'],
color_illum=COLOR_DIC['pvrow_illum']):
"""Plot timeseries segment at a certain index.
Parameters
----------
idx : int
Index to use to plot timeseries segment
ax : :py:class:`matplotlib.pyplot.axes` object
Axes for plotting
color_shaded : str, optional
Color to use for plotting the shaded surfaces (Default =
COLOR_DIC['pvrow_shaded'])
color_illum : str, optional
Color to use for plotting the illuminated surfaces (Default =
COLOR_DIC['pvrow_illum'])
"""
segment = self.at(idx)
segment.plot(ax, color_shaded=color_shaded, color_illum=color_illum,
with_index=False)
def at(self, idx):
"""Generate a PV segment geometry for the desired index.
Parameters
----------
idx : int
Index to use to generate PV segment geometry
Returns
-------
segment : :py:class:`~pvfactors.geometry.base.PVSegment`
"""
# Create illum collection
illum_collection = self.illum.at(idx)
# Create shaded collection
shaded_collection = self.shaded.at(idx)
# Create PV segment
segment = PVSegment(illum_collection=illum_collection,
shaded_collection=shaded_collection,
index=self.index)
return segment
@property
def length(self):
"""Timeseries length of segment."""
return self.illum.length + self.shaded.length
@property
def shaded_length(self):
"""Timeseries length of shaded part of segment."""
return self.shaded.length
@property
def centroid(self):
"""Timeseries point coordinates of the segment's centroid"""
return self.coords.centroid
def get_param_weighted(self, param):
"""Get timeseries parameter for the segment, after weighting by
surface length.
Parameters
----------
param : str
Name of parameter
Returns
-------
np.ndarray
Weighted parameter values
"""
return self.get_param_ww(param) / self.length
def get_param_ww(self, param):
"""Get timeseries parameter from the segment's surfaces with weight,
i.e. after multiplying by the surface lengths.
Parameters
----------
param: str
Surface parameter to return
Returns
-------
np.ndarray
Timeseries parameter values multiplied by weights
"""
return self.illum.get_param_ww(param) + self.shaded.get_param_ww(param)
def update_params(self, new_dict):
"""Update timeseries surface parameters of the segment.
Parameters
----------
new_dict : dict
Parameters to add or update for the surfaces
"""
self.illum.update_params(new_dict)
self.shaded.update_params(new_dict)
@property
def highest_point(self):
"""Timeseries point coordinates of highest point of segment"""
return self.coords.highest_point
@property
def lowest_point(self):
"""Timeseries point coordinates of lowest point of segment"""
return self.coords.lowest_point
@property
def all_ts_surfaces(self):
"""List of all timeseries surfaces in segment"""
return self.illum.list_ts_surfaces + self.shaded.list_ts_surfaces
@property
def n_ts_surfaces(self):
"""Number of timeseries surfaces in the segment"""
return self.illum.n_ts_surfaces + self.shaded.n_ts_surfaces
class PVRowSide(BaseSide):
"""A PV row side represents the whole surface of one side of a PV row.
At its core it will contain a fixed number of
:py:class:`~pvfactors.geometry.base.PVSegment` objects that will together
constitute one side of a PV row: a PV row side can also be
"discretized" into multiple segments"""
def __init__(self, list_segments=[]):
"""Initialize PVRowSide using its base class
:py:class:`pvfactors.geometry.base.BaseSide`
Parameters
----------
list_segments : list of :py:class:`~pvfactors.geometry.base.PVSegment`
List of PV segments for PV row side.
"""
super(PVRowSide, self).__init__(list_segments)
class PVRow(GeometryCollection):
"""A PV row is made of two PV row sides, a front and a back one."""
def __init__(self, front_side=PVRowSide(), back_side=PVRowSide(),
index=None, original_linestring=None):
"""Initialize PV row.
Parameters
----------
front_side : :py:class:`~pvfactors.geometry.pvrow.PVRowSide`, optional
Front side of the PV Row (Default = Empty PVRowSide)
back_side : :py:class:`~pvfactors.geometry.pvrow.PVRowSide`, optional
Back side of the PV Row (Default = Empty PVRowSide)
index : int, optional
Index of PV row (Default = None)
original_linestring : :py:class:`shapely.geometry.LineString`, optional
Full continuous linestring that the PV row will be made of
(Default = None)
"""
self.front = front_side
self.back = back_side
self.index = index
self.original_linestring = original_linestring
self._all_surfaces = None
super(PVRow, self).__init__([self.front, self.back])
@classmethod
def from_linestring_coords(cls, coords, shaded=False, normal_vector=None,
index=None, cut={}, param_names=[]):
"""Create a PV row with a single PV surface and using linestring
coordinates.
Parameters
----------
coords : list
List of linestring coordinates for the surface
shaded : bool, optional
Shading status desired for the PVRow sides (Default = False)
normal_vector : list, optional
Normal vector for the surface (Default = None)
index : int, optional
Index of PV row (Default = None)
cut : dict, optional
Scheme to decide how many segments to create on each side.
Eg {'front': 3, 'back': 2} will lead to 3 segments on front side
and 2 segments on back side. (Default = {})
param_names : list of str, optional
Names of the surface parameters, eg reflectivity, total incident
irradiance, temperature, etc. (Default = [])
Returns
-------
:py:class:`~pvfactors.geometry.pvrow.PVRow` object
"""
index_single_segment = 0
front_side = PVRowSide.from_linestring_coords(
coords, shaded=shaded, normal_vector=normal_vector,
index=index_single_segment, n_segments=cut.get('front', 1),
param_names=param_names)
if normal_vector is not None:
back_n_vec = - np.array(normal_vector)
else:
back_n_vec = - front_side.n_vector
back_side = PVRowSide.from_linestring_coords(
coords, shaded=shaded, normal_vector=back_n_vec,
index=index_single_segment, n_segments=cut.get('back', 1),
param_names=param_names)
return cls(front_side=front_side, back_side=back_side, index=index,
original_linestring=LineString(coords))
@classmethod
def from_center_tilt_width(cls, xy_center, tilt, width, surface_azimuth,
axis_azimuth, shaded=False, normal_vector=None,
index=None, cut={}, param_names=[]):
"""Create a PV row using mainly the coordinates of the line center,
a tilt angle, and its width.
Parameters
----------
xy_center : tuple
x, y coordinates of center point of desired linestring
tilt : float
surface tilt angle desired [deg]
width : float
desired width of the PV row [m]
surface_azimuth : float
Surface azimuth of PV surface [deg]
axis_azimuth : float
Axis azimuth of the PV surface, i.e. direction of axis of rotation
[deg]
shaded : bool, optional
Shading status desired for the PVRow sides (Default = False)
normal_vector : list, optional
Normal vector for the surface (Default = None)
index : int, optional
Index of PV row (Default = None)
cut : dict, optional
Scheme to decide how many segments to create on each side.
Eg {'front': 3, 'back': 2} will lead to 3 segments on front side
and 2 segments on back side. (Default = {})
param_names : list of str, optional
Names of the surface parameters, eg reflectivity, total incident
irradiance, temperature, etc. (Default = [])
Returns
-------
:py:class:`~pvfactors.geometry.pvrow.PVRow` object
"""
coords = _coords_from_center_tilt_length(xy_center, tilt, width,
surface_azimuth, axis_azimuth)
return cls.from_linestring_coords(coords, shaded=shaded,
normal_vector=normal_vector,
index=index, cut=cut,
param_names=param_names)
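# Illustrative sketch (added, not part of the original module); the argument
# values below are hypothetical:
#
#   pvrow = PVRow.from_center_tilt_width(
#       xy_center=(0., 1.5), tilt=20., width=2.,
#       surface_azimuth=180., axis_azimuth=90.,
#       cut={'front': 3, 'back': 2})
#   n_front_segments = len(pvrow.front.list_segments)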
def plot(self, ax, color_shaded=COLOR_DIC['pvrow_shaded'],
color_illum=COLOR_DIC['pvrow_illum'], with_index=False):
"""Plot the surfaces of the PV Row.
Parameters
----------
ax : :py:class:`matplotlib.pyplot.axes` object
Axes for plotting
color_shaded : str, optional
Color to use for plotting the shaded surfaces (Default =
COLOR_DIC['pvrow_shaded'])
color_illum : str, optional
Color to use for plotting the illuminated surfaces (Default =
COLOR_DIC['pvrow_illum'])
with_index : bool
Flag to annotate surfaces with their indices (Default = False)
"""
self.front.plot(ax, color_shaded=color_shaded, color_illum=color_illum,
with_index=with_index)
self.back.plot(ax, color_shaded=color_shaded, color_illum=color_illum,
with_index=with_index)
@property
def boundary(self):
"""Boundaries of the PV Row's orginal linestring."""
return self.original_linestring.boundary
@property
def highest_point(self):
"""Highest point of the PV Row."""
b1, b2 = self.boundary
highest_point = b1 if b1.y > b2.y else b2
return highest_point
@property
def lowest_point(self):
"""Lowest point of the PV Row."""
b1, b2 = self.boundary
lowest_point = b1 if b1.y < b2.y else b2
return lowest_point
@property
def all_surfaces(self):
"""List of all the surfaces in the PV row."""
if self._all_surfaces is None:
self._all_surfaces = []
self._all_surfaces += self.front.all_surfaces
self._all_surfaces += self.back.all_surfaces
return self._all_surfaces
@property
def surface_indices(self):
"""List of all surface indices in the PV Row."""
list_indices = []
list_indices += self.front.surface_indices
list_indices += self.back.surface_indices
return list_indices
def update_params(self, new_dict):
"""Update surface parameters for both front and back sides.
<|fim▁hole|> ----------
new_dict : dict
Parameters to add or update for the surface
"""
self.front.update_params(new_dict)
self.back.update_params(new_dict)<|fim▁end|> | Parameters |
<|file_name|>rendertiles.py<|end_file_name|><|fim▁begin|>import mapnik
import subprocess,PIL.Image,cStringIO as StringIO
import time,sys,os
ew = 20037508.3428
tz = 8
def make_mapnik(fn, tabpp = None, scale=None, srs=None, mp=None, avoidEdges=False, abspath=True):
cc=[l for l in subprocess.check_output(['carto',fn]).split("\n") if not l.startswith('[millstone')]
if scale!=None:
for i,c in enumerate(cc):
if 'ScaleDenominator' in c:
sd=c.strip()[21:-22]
nsd=str(int(sd)*scale)
#print i,sd,"=>",nsd,
c=c.replace(sd, nsd)
#print c
cc[i]=c
bsp=''
if abspath:
a,b=os.path.split(fn)
if a:
bsp=a
#for i,c in enumerate(cc):
# if 'file' in c:
# if 'file=' in c:
# cc[i] = c.replace('file="','file="'+a+'/')
# elif 'name="file"><![CDATA[' in c:
# cc[i] = c.replace('CDATA[','CDATA['+a+'/')
if avoidEdges:
for i,c in enumerate(cc):
if '<ShieldSymbolizer size' in c:
cs = c.replace("ShieldSymbolizer size", "ShieldSymbolizer avoid-edges=\"true\" size")
cc[i]=cs
if tabpp != None:
cc=[l.replace("planet_osm",tabpp) for l in cc]
#cc2=[c.replace("clip=\"false","clip=\"true") for c in cc]
#cc3=[c.replace("file=\"symbols", "file=\""+root+"/symbols") for c in cc2]
#cc4=[c.replace("CDATA[data", "CDATA["+root+"/data") for c in cc3]
if mp==None:
mp = mapnik.Map(256*tz,256*tz)
mapnik.load_map_from_string(mp,"\n".join(cc),False,bsp)
if srs!=None:
mp.srs=srs
#mp.buffer_size=128
return mp
def tilebound(z,x,y,tzp):
zz = 1<<(z-1)
ss = ew/zz * tzp
xx = x / tzp
yy = y / tzp
bx = mapnik.Box2d(-ew + ss*xx, ew-ss*(yy+1), -ew+ss*(xx+1), ew-ss*yy)
mm = "%d %d %d {%d %d %d %f} => %s" % (z,x,y,zz,xx,yy,ss,bx)
return xx,yy,mm,bx
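# Illustrative note (added, not part of the original script): tilebound()
# converts a slippy-map tile index (z, x, y) into web-mercator bounds, where
# tzp is the number of 256px tiles per rendered meta-tile side. A hypothetical
# call such as
#   xx, yy, msg, bbox = tilebound(12, 2048, 1362, 1)
# returns the mapnik.Box2d covering that single z12 tile, together with the
# meta-tile column/row (xx, yy) that will actually be rendered.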
def render_im(mp,bx,width,height=None, scale_factor=1.0, buffer_size=256):
if height==None:
height=width
mp.resize(width,height)
mp.zoom_to_box(bx)
mp.buffer_size = buffer_size
im=mapnik.Image(mp.width,mp.height)
mapnik.render(mp,im, scale_factor)
return PIL.Image.frombytes('RGBA',(mp.width,mp.height),im.tostring())
def render_tile(mp,z,x,y):
st=time.time()
tzp = 1
if z==13: tzp=2
if z==14: tzp=4
if z>=15: tzp=8
#tzp = tz if z>10 else 1
xx,yy,mm,bx=tilebound(z,x,y,tzp)
print mm,
sys.stdout.flush()
pim = render_im(mp,bx,tzp*256)
print "%-8.1fs" % (time.time()-st,)
<|fim▁hole|> return iter_subtiles(pim,xx,yy,z,tzp)
def iter_subtiles(pim, xx,yy,z,tzp,ts=256):
for i in xrange(tzp):
for j in xrange(tzp):
xp = xx*tzp+i
yp = yy*tzp+j
pp = pim.crop([i*ts,j*ts,(i+1)*ts,(j+1)*ts])
#return pim.tostring('png')
ss=StringIO.StringIO()
pp.save(ss,format='PNG')
yield (z,xp,yp),ss.getvalue()<|fim▁end|> | |
<|file_name|>ReferenceType.java<|end_file_name|><|fim▁begin|>//
// This file was generated by the JavaTM Architecture for XML Binding (JAXB) Reference Implementation, v2.3.2
// See <a href="https://javaee.github.io/jaxb-v2/">https://javaee.github.io/jaxb-v2/</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2020.03.13 at 12:48:52 PM CET
//
package net.opengis.ows._1;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlSeeAlso;
import javax.xml.bind.annotation.XmlType;
/**
* Complete reference to a remote or local resource, allowing including metadata about that resource.
*
* <p>Java class for ReferenceType complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="ReferenceType">
* <complexContent>
* <extension base="{http://www.opengis.net/ows/1.1}AbstractReferenceBaseType">
* <sequence>
* <element ref="{http://www.opengis.net/ows/1.1}Identifier" minOccurs="0"/>
* <element ref="{http://www.opengis.net/ows/1.1}Abstract" maxOccurs="unbounded" minOccurs="0"/>
* <element name="Format" type="{http://www.opengis.net/ows/1.1}MimeType" minOccurs="0"/>
* <element ref="{http://www.opengis.net/ows/1.1}Metadata" maxOccurs="unbounded" minOccurs="0"/>
* </sequence>
* </extension>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "ReferenceType", propOrder = {
"identifier",
"_abstract",
"format",
"metadata"
})
@XmlSeeAlso({
ServiceReferenceType.class
})
public class ReferenceType
extends AbstractReferenceBaseType
{
@XmlElement(name = "Identifier")
protected CodeType identifier;
@XmlElement(name = "Abstract")
protected List<LanguageStringType> _abstract;
@XmlElement(name = "Format")
protected String format;
@XmlElement(name = "Metadata")
protected List<MetadataType> metadata;
/**
* Optional unique identifier of the referenced resource.
*
* @return
* possible object is
* {@link CodeType }
*
*/
public CodeType getIdentifier() {
return identifier;
}
/**
* Sets the value of the identifier property.
*
* @param value<|fim▁hole|> *
*/
public void setIdentifier(CodeType value) {
this.identifier = value;
}
public boolean isSetIdentifier() {
return (this.identifier!= null);
}
/**
* Gets the value of the abstract property.
*
* <p>
* This accessor method returns a reference to the live list,
* not a snapshot. Therefore any modification you make to the
* returned list will be present inside the JAXB object.
* This is why there is not a <CODE>set</CODE> method for the abstract property.
*
* <p>
* For example, to add a new item, do as follows:
* <pre>
* getAbstract().add(newItem);
* </pre>
*
*
* <p>
* Objects of the following type(s) are allowed in the list
* {@link LanguageStringType }
*
*
*/
public List<LanguageStringType> getAbstract() {
if (_abstract == null) {
_abstract = new ArrayList<LanguageStringType>();
}
return this._abstract;
}
public boolean isSetAbstract() {
return ((this._abstract!= null)&&(!this._abstract.isEmpty()));
}
public void unsetAbstract() {
this._abstract = null;
}
/**
* Gets the value of the format property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getFormat() {
return format;
}
/**
* Sets the value of the format property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setFormat(String value) {
this.format = value;
}
public boolean isSetFormat() {
return (this.format!= null);
}
/**
* Optional unordered list of additional metadata about this resource. A list of optional metadata elements for this ReferenceType could be specified in the Implementation Specification for each use of this type in a specific OWS. Gets the value of the metadata property.
*
* <p>
* This accessor method returns a reference to the live list,
* not a snapshot. Therefore any modification you make to the
* returned list will be present inside the JAXB object.
* This is why there is not a <CODE>set</CODE> method for the metadata property.
*
* <p>
* For example, to add a new item, do as follows:
* <pre>
* getMetadata().add(newItem);
* </pre>
*
*
* <p>
* Objects of the following type(s) are allowed in the list
* {@link MetadataType }
*
*
*/
public List<MetadataType> getMetadata() {
if (metadata == null) {
metadata = new ArrayList<MetadataType>();
}
return this.metadata;
}
public boolean isSetMetadata() {
return ((this.metadata!= null)&&(!this.metadata.isEmpty()));
}
public void unsetMetadata() {
this.metadata = null;
}
public void setAbstract(List<LanguageStringType> value) {
this._abstract = value;
}
public void setMetadata(List<MetadataType> value) {
this.metadata = value;
}
}<|fim▁end|> | * allowed object is
* {@link CodeType } |
<|file_name|>break-value.rs<|end_file_name|><|fim▁begin|>// run-pass
#![allow(unreachable_code)]
// pretty-expanded FIXME #23616
fn int_id(x: isize) -> isize { return x; }<|fim▁hole|><|fim▁end|> |
pub fn main() { loop { int_id(break); } } |
<|file_name|>test_serializers.py<|end_file_name|><|fim▁begin|>"""
Test cases to cover Accounts-related serializers of the User API application
"""
import logging
from django.test import TestCase
from django.test.client import RequestFactory
from testfixtures import LogCapture
from openedx.core.djangoapps.user_api.accounts.serializers import UserReadOnlySerializer<|fim▁hole|>
class UserReadOnlySerializerTest(TestCase): # lint-amnesty, pylint: disable=missing-class-docstring
def setUp(self):
super(UserReadOnlySerializerTest, self).setUp() # lint-amnesty, pylint: disable=super-with-arguments
request_factory = RequestFactory()
self.request = request_factory.get('/api/user/v1/accounts/')
self.user = UserFactory.build(username='test_user', email='[email protected]')
self.user.save()
self.config = {
"default_visibility": "public",
"public_fields": [
'email', 'name', 'username'
],
}
def test_serializer_data(self):
"""
Test serializer return data properly.
"""
UserProfile.objects.create(user=self.user, name='test name')
data = UserReadOnlySerializer(self.user, configuration=self.config, context={'request': self.request}).data
assert data['username'] == self.user.username
assert data['name'] == 'test name'
assert data['email'] == self.user.email
def test_user_no_profile(self):
"""
Test serializer return data properly when user does not have profile.
"""
with LogCapture(LOGGER_NAME, level=logging.DEBUG) as logger:
data = UserReadOnlySerializer(self.user, configuration=self.config, context={'request': self.request}).data
logger.check(
(LOGGER_NAME, 'WARNING', 'user profile for the user [test_user] does not exist')
)
assert data['username'] == self.user.username
assert data['name'] is None<|fim▁end|> | from common.djangoapps.student.models import UserProfile
from common.djangoapps.student.tests.factories import UserFactory
LOGGER_NAME = "openedx.core.djangoapps.user_api.accounts.serializers" |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! A Collection of Header implementations for common HTTP Headers.
//!
//! ## Mime
//!
//! Several header fields use MIME values for their contents. Keeping with the
//! strongly-typed theme, the [mime](http://seanmonstar.github.io/mime.rs) crate
//! is used, such as `ContentType(pub Mime)`.
pub use self::accept::Accept;
pub use self::access_control_allow_credentials::AccessControlAllowCredentials;
pub use self::access_control_allow_headers::AccessControlAllowHeaders;
pub use self::access_control_allow_methods::AccessControlAllowMethods;
pub use self::access_control_allow_origin::AccessControlAllowOrigin;
pub use self::access_control_expose_headers::AccessControlExposeHeaders;
pub use self::access_control_max_age::AccessControlMaxAge;
pub use self::access_control_request_headers::AccessControlRequestHeaders;
pub use self::access_control_request_method::AccessControlRequestMethod;
pub use self::accept_charset::AcceptCharset;
pub use self::accept_encoding::AcceptEncoding;
pub use self::accept_language::AcceptLanguage;
pub use self::accept_ranges::{AcceptRanges, RangeUnit};
pub use self::allow::Allow;
pub use self::authorization::{Authorization, Scheme, Basic, Bearer};
pub use self::cache_control::{CacheControl, CacheDirective};
pub use self::connection::{Connection, ConnectionOption};
pub use self::content_disposition::{ContentDisposition, DispositionType, DispositionParam};
pub use self::content_length::ContentLength;
pub use self::content_encoding::ContentEncoding;
pub use self::content_language::ContentLanguage;
pub use self::content_range::{ContentRange, ContentRangeSpec};
pub use self::content_type::ContentType;
pub use self::cookie::Cookie;
pub use self::date::Date;
pub use self::etag::ETag;
pub use self::expect::Expect;
pub use self::expires::Expires;
pub use self::from::From;
pub use self::host::Host;
pub use self::if_match::IfMatch;
pub use self::if_modified_since::IfModifiedSince;
pub use self::if_none_match::IfNoneMatch;
pub use self::if_unmodified_since::IfUnmodifiedSince;
pub use self::if_range::IfRange;
pub use self::last_modified::LastModified;
pub use self::location::Location;
pub use self::pragma::Pragma;
pub use self::range::{Range, ByteRangeSpec};
pub use self::referer::Referer;
pub use self::server::Server;
pub use self::set_cookie::SetCookie;
pub use self::strict_transport_security::StrictTransportSecurity;
pub use self::transfer_encoding::TransferEncoding;
pub use self::upgrade::{Upgrade, Protocol, ProtocolName};
pub use self::user_agent::UserAgent;
pub use self::vary::Vary;
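// Illustrative sketch (added, not part of the original module): the `header!`
// macro defined further down in this file can generate a complete typed
// header. A hypothetical single-value header could be declared as
//
//     header! { (XRequestId, "x-request-id") => [String] }
//
// which, per the `[$value:ty]` arm of the macro, expands to the tuple struct
// plus the `Header`, `HeaderFormat` and `Display` implementations for it.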
#[macro_export]
macro_rules! bench_header(
($name:ident, $ty:ty, $value:expr) => {
#[cfg(test)]
#[cfg(feature = "nightly")]
mod $name {
use test::Bencher;
use super::*;
use header::{Header, HeaderFormatter};
#[bench]
fn bench_parse(b: &mut Bencher) {
let val = $value;
b.iter(|| {
let _: $ty = Header::parse_header(&val[..]).unwrap();
});
}
#[bench]<|fim▁hole|> let val: $ty = Header::parse_header(&$value[..]).unwrap();
let fmt = HeaderFormatter(&val);
b.iter(|| {
format!("{}", fmt);
});
}
}
}
);
#[macro_export]
macro_rules! __hyper__deref {
($from:ty => $to:ty) => {
impl ::std::ops::Deref for $from {
type Target = $to;
fn deref(&self) -> &$to {
&self.0
}
}
impl ::std::ops::DerefMut for $from {
fn deref_mut(&mut self) -> &mut $to {
&mut self.0
}
}
}
}
#[macro_export]
macro_rules! __hyper__tm {
($id:ident, $tm:ident{$($tf:item)*}) => {
#[allow(unused_imports)]
#[cfg(test)]
mod $tm{
use std::str;
use $crate::header::*;
use $crate::mime::*;
use $crate::language_tags::*;
use $crate::method::Method;
use super::$id as HeaderField;
$($tf)*
}
}
}
#[macro_export]
macro_rules! test_header {
($id:ident, $raw:expr) => {
#[test]
fn $id() {
use std::ascii::AsciiExt;
let raw = $raw;
let a: Vec<Vec<u8>> = raw.iter().map(|x| x.to_vec()).collect();
let value = HeaderField::parse_header(&a[..]);
let result = format!("{}", value.unwrap());
let expected = String::from_utf8(raw[0].to_vec()).unwrap();
let result_cmp: Vec<String> = result
.to_ascii_lowercase()
.split(' ')
.map(|x| x.to_owned())
.collect();
let expected_cmp: Vec<String> = expected
.to_ascii_lowercase()
.split(' ')
.map(|x| x.to_owned())
.collect();
assert_eq!(result_cmp.concat(), expected_cmp.concat());
}
};
($id:ident, $raw:expr, $typed:expr) => {
#[test]
fn $id() {
let a: Vec<Vec<u8>> = $raw.iter().map(|x| x.to_vec()).collect();
let val = HeaderField::parse_header(&a[..]);
let typed: Option<HeaderField> = $typed;
// Test parsing
assert_eq!(val.ok(), typed);
// Test formatting
if typed.is_some() {
let res: &str = str::from_utf8($raw[0]).unwrap();
assert_eq!(format!("{}", typed.unwrap()), res);
}
}
}
}
#[macro_export]
macro_rules! __hyper_generate_header_serialization {
($id:ident) => {
#[cfg(feature = "serde-serialization")]
impl ::serde::Serialize for $id {
fn serialize<S>(&self, serializer: &mut S) -> Result<(), S::Error>
where S: ::serde::Serializer {
format!("{}", self).serialize(serializer)
}
}
#[cfg(feature = "serde-serialization")]
impl ::serde::Deserialize for $id {
fn deserialize<D>(deserializer: &mut D) -> Result<$id, D::Error>
where D: ::serde::Deserializer {
let string_representation: String =
try!(::serde::Deserialize::deserialize(deserializer));
Ok($crate::header::Header::parse_header(&[
string_representation.into_bytes()
]).unwrap())
}
}
}
}
#[macro_export]
macro_rules! header {
// $a:meta: Attributes associated with the header item (usually docs)
// $id:ident: Identifier of the header
// $n:expr: Lowercase name of the header
// $nn:expr: Nice name of the header
// List header, zero or more items
($(#[$a:meta])*($id:ident, $n:expr) => ($item:ty)*) => {
$(#[$a])*
#[derive(Clone, Debug, PartialEq)]
pub struct $id(pub Vec<$item>);
__hyper__deref!($id => Vec<$item>);
impl $crate::header::Header for $id {
fn header_name() -> &'static str {
$n
}
fn parse_header(raw: &[Vec<u8>]) -> $crate::Result<Self> {
$crate::header::parsing::from_comma_delimited(raw).map($id)
}
}
impl $crate::header::HeaderFormat for $id {
fn fmt_header(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
$crate::header::parsing::fmt_comma_delimited(f, &self.0[..])
}
}
impl ::std::fmt::Display for $id {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
use $crate::header::HeaderFormat;
self.fmt_header(f)
}
}
__hyper_generate_header_serialization!($id);
};
// List header, one or more items
($(#[$a:meta])*($id:ident, $n:expr) => ($item:ty)+) => {
$(#[$a])*
#[derive(Clone, Debug, PartialEq)]
pub struct $id(pub Vec<$item>);
__hyper__deref!($id => Vec<$item>);
impl $crate::header::Header for $id {
fn header_name() -> &'static str {
$n
}
fn parse_header(raw: &[Vec<u8>]) -> $crate::Result<Self> {
$crate::header::parsing::from_comma_delimited(raw).map($id)
}
}
impl $crate::header::HeaderFormat for $id {
fn fmt_header(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
$crate::header::parsing::fmt_comma_delimited(f, &self.0[..])
}
}
impl ::std::fmt::Display for $id {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
use $crate::header::HeaderFormat;
self.fmt_header(f)
}
}
__hyper_generate_header_serialization!($id);
};
// Single value header
($(#[$a:meta])*($id:ident, $n:expr) => [$value:ty]) => {
$(#[$a])*
#[derive(Clone, Debug, PartialEq)]
pub struct $id(pub $value);
__hyper__deref!($id => $value);
impl $crate::header::Header for $id {
fn header_name() -> &'static str {
$n
}
fn parse_header(raw: &[Vec<u8>]) -> $crate::Result<Self> {
$crate::header::parsing::from_one_raw_str(raw).map($id)
}
}
impl $crate::header::HeaderFormat for $id {
fn fmt_header(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
::std::fmt::Display::fmt(&**self, f)
}
}
impl ::std::fmt::Display for $id {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
::std::fmt::Display::fmt(&**self, f)
}
}
__hyper_generate_header_serialization!($id);
};
// List header, one or more items with "*" option
($(#[$a:meta])*($id:ident, $n:expr) => {Any / ($item:ty)+}) => {
$(#[$a])*
#[derive(Clone, Debug, PartialEq)]
pub enum $id {
/// Any value is a match
Any,
/// Only the listed items are a match
Items(Vec<$item>),
}
impl $crate::header::Header for $id {
fn header_name() -> &'static str {
$n
}
fn parse_header(raw: &[Vec<u8>]) -> $crate::Result<Self> {
// FIXME: Return None if no item is in $id::Only
if raw.len() == 1 {
if raw[0] == b"*" {
return Ok($id::Any)
}
}
$crate::header::parsing::from_comma_delimited(raw).map($id::Items)
}
}
impl $crate::header::HeaderFormat for $id {
fn fmt_header(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
match *self {
$id::Any => f.write_str("*"),
$id::Items(ref fields) => $crate::header::parsing::fmt_comma_delimited(
f, &fields[..])
}
}
}
impl ::std::fmt::Display for $id {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
use $crate::header::HeaderFormat;
self.fmt_header(f)
}
}
__hyper_generate_header_serialization!($id);
};
// optional test module
($(#[$a:meta])*($id:ident, $n:expr) => ($item:ty)* $tm:ident{$($tf:item)*}) => {
header! {
$(#[$a])*
($id, $n) => ($item)*
}
__hyper__tm! { $id, $tm { $($tf)* }}
};
($(#[$a:meta])*($id:ident, $n:expr) => ($item:ty)+ $tm:ident{$($tf:item)*}) => {
header! {
$(#[$a])*
($id, $n) => ($item)+
}
__hyper__tm! { $id, $tm { $($tf)* }}
};
($(#[$a:meta])*($id:ident, $n:expr) => [$item:ty] $tm:ident{$($tf:item)*}) => {
header! {
$(#[$a])*
($id, $n) => [$item]
}
__hyper__tm! { $id, $tm { $($tf)* }}
};
($(#[$a:meta])*($id:ident, $n:expr) => {Any / ($item:ty)+} $tm:ident{$($tf:item)*}) => {
header! {
$(#[$a])*
($id, $n) => {Any / ($item)+}
}
__hyper__tm! { $id, $tm { $($tf)* }}
};
}
mod accept;
mod access_control_allow_credentials;
mod access_control_allow_headers;
mod access_control_allow_methods;
mod access_control_allow_origin;
mod access_control_expose_headers;
mod access_control_max_age;
mod access_control_request_headers;
mod access_control_request_method;
mod accept_charset;
mod accept_encoding;
mod accept_language;
mod accept_ranges;
mod allow;
mod authorization;
mod cache_control;
mod cookie;
mod connection;
mod content_disposition;
mod content_encoding;
mod content_language;
mod content_length;
mod content_range;
mod content_type;
mod date;
mod etag;
mod expect;
mod expires;
mod from;
mod host;
mod if_match;
mod if_modified_since;
mod if_none_match;
mod if_range;
mod if_unmodified_since;
mod last_modified;
mod location;
mod pragma;
mod range;
mod referer;
mod server;
mod set_cookie;
mod strict_transport_security;
mod transfer_encoding;
mod upgrade;
mod user_agent;
mod vary;<|fim▁end|> | fn bench_format(b: &mut Bencher) { |
<|file_name|>48.time.go<|end_file_name|><|fim▁begin|>package main
import (
"fmt"<|fim▁hole|>
func main() {
p := fmt.Println
// We’ll start by getting the current time.
now := time.Now()
p(now)
// You can build a time struct by providing the year, month, day, etc.
// Times are always associated with a Location, i.e. time zone.
then := time.Date(2009, 11, 17, 20, 34, 58, 651387237, time.UTC)
p(then)
// You can extract the various components of the time value as expected.
p(then.Year())
p(then.Month())
p(then.Day())
p(then.Hour())
p(then.Minute())
p(then.Second())
p(then.Nanosecond())
// The Monday-Sunday Weekday is also available.
p(then.Weekday())
// These methods compare two times, testing if the first occurs before, after,
// or at the same time as the second, respectively.
p(then.Before(now))
p(then.After(now))
p(then.Equal(now))
// The Sub methods returns a Duration representing the interval between two times.
diff := now.Sub(then)
p(diff)
// We can compute the length of the duration in various units.
p(diff.Hours())
p(diff.Minutes())
p(diff.Seconds())
p(diff.Nanoseconds())
// You can use Add to advance a time by a given duration,
// or with a - to move backwards by a duration.
p(then.Add(diff))
p(then.Add(-diff))
}<|fim▁end|> | "time"
) |
<|file_name|>configurable.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import argparse
from configparser import SafeConfigParser
class Configurable(object):
"""
Configuration processing for the network
"""
def __init__(self, *args, **kwargs):
self._name = kwargs.pop("name", "Unknown")
if args and kwargs:
raise TypeError('Configurable must take either a config parser or keyword args')
if len(args) > 1:
raise TypeError('Configurable takes at most one argument')
if args:
self._config = args[0]
else:
self._config = self._configure(**kwargs)
return
@property
def name(self):
return self._name
def _configure(self, **kwargs):
config = SafeConfigParser()
config_file = kwargs.pop("config_file", "")
config.read(config_file)
# Override the config settings with any (option, value) pairs supplied as keyword arguments
for option, value in kwargs.items():
assigned = False
for section in config.sections():
if option in config.options(section):
config.set(section, option, str(value))
assigned = True
break
if not assigned:
raise ValueError("%s is not a valid option" % option)
return config
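# Illustrative usage sketch (added, not part of the original file). The file
# path and option names below are hypothetical; overrides are only accepted
# for options that already exist in some section of the config file:
#
#   net_config = Configurable(config_file="config/default.cfg",
#                             learning_rate=0.01,
#                             train_batch_size=32)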
argparser = argparse.ArgumentParser()<|fim▁hole|> @property
def model_type(self):
return self._config.get('OS', 'model_type')
argparser.add_argument('--model_type')
@property
def mode(self):
return self._config.get('OS', 'mode')
argparser.add_argument('--mode')
@property
def save_dir(self):
return self._config.get('OS', 'save_dir')
argparser.add_argument('--save_dir')
@property
def word_file(self):
return self._config.get('OS', 'word_file')
argparser.add_argument('--word_file')
@property
def target_file(self):
return self._config.get('OS', 'target_file')
argparser.add_argument('--target_file')
@property
def train_file(self):
return self._config.get('OS', 'train_file')
argparser.add_argument('--train_file')
@property
def valid_file(self):
return self._config.get('OS', 'valid_file')
argparser.add_argument('--valid_file')
@property
def test_file(self):
return self._config.get('OS', 'test_file')
argparser.add_argument('--test_file')
@property
def save_model_file(self):
return self._config.get('OS', 'save_model_file')
argparser.add_argument('--save_model_file')
@property
def restore_from(self):
return self._config.get('OS', 'restore_from')
argparser.add_argument('--restore_from')
@property
def embed_file(self):
return self._config.get('OS', 'embed_file')
argparser.add_argument('--embed_file')
@property
def use_gpu(self):
return self._config.getboolean('OS', 'use_gpu')
argparser.add_argument('--use_gpu')
# [Dataset]
@property
def n_bkts(self):
return self._config.getint('Dataset', 'n_bkts')
argparser.add_argument('--n_bkts')
@property
def n_valid_bkts(self):
return self._config.getint('Dataset', 'n_valid_bkts')
argparser.add_argument('--n_valid_bkts')
@property
def dataset_type(self):
return self._config.get('Dataset', 'dataset_type')
argparser.add_argument('--dataset_type')
@property
def min_occur_count(self):
return self._config.getint('Dataset', 'min_occur_count')
argparser.add_argument('--min_occur_count')
# [Learning rate]
@property
def learning_rate(self):
return self._config.getfloat('Learning rate', 'learning_rate')
argparser.add_argument('--learning_rate')
@property
def epoch_decay(self):
return self._config.getint('Learning rate', 'epoch_decay')
argparser.add_argument('--epoch_decay')
@property
def dropout(self):
return self._config.getfloat('Learning rate', 'dropout')
argparser.add_argument('--dropout')
# [Sizes]
@property
def words_dim(self):
return self._config.getint('Sizes', 'words_dim')
argparser.add_argument('--words_dim')
# [Training]
@property
def log_interval(self):
return self._config.getint('Training', 'log_interval')
argparser.add_argument('--log_interval')
@property
def valid_interval(self):
return self._config.getint('Training', 'valid_interval')
argparser.add_argument('--valid_interval')
@property
def train_batch_size(self):
return self._config.getint('Training', 'train_batch_size')
argparser.add_argument('--train_batch_size')
@property
def test_batch_size(self):
return self._config.getint('Training', 'test_batch_size')
argparser.add_argument('--test_batch_size')<|fim▁end|> | argparser.add_argument('--config_file')
# ======
# [OS] |
<|file_name|>utils.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015-2016 Tigera, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import os
import re
import socket
import sys
import tempfile
from datetime import datetime
from subprocess import CalledProcessError
from subprocess import check_output, STDOUT
import termios
import json
import logging
from pprint import pformat
import yaml
from deepdiff import DeepDiff
LOCAL_IP_ENV = "MY_IP"
LOCAL_IPv6_ENV = "MY_IPv6"
logger = logging.getLogger(__name__)
ETCD_SCHEME = os.environ.get("ETCD_SCHEME", "http")
ETCD_CA = os.environ.get("ETCD_CA_CERT_FILE", "")
ETCD_CERT = os.environ.get("ETCD_CERT_FILE", "")
ETCD_KEY = os.environ.get("ETCD_KEY_FILE", "")
ETCD_HOSTNAME_SSL = "etcd-authority-ssl"
KUBECONFIG = "/home/user/certs/kubeconfig"
API_VERSION = 'projectcalico.org/v3'
ERROR_CONFLICT = "update conflict"
NOT_FOUND = "resource does not exist"
NOT_NAMESPACED = "is not namespaced"
SET_DEFAULT = "Cannot set"
NOT_SUPPORTED = "is not supported on"
KUBERNETES_NP = "kubernetes network policies must be managed through the kubernetes API"
NOT_LOCKED = "Datastore is not locked. Run the `calicoctl datastore migrate lock` command in order to begin migration."
NOT_KUBERNETES = "Invalid datastore type: etcdv3 to import to for datastore migration. Datastore type must be kubernetes"
NO_IPAM = "No IPAM resources specified in file"
class CalicoctlOutput:
"""
CalicoctlOutput contains the output from running a calicoctl command using
the calicoctl function below.
This class contains the command, output and error code (if it failed)
along with YAML/JSON decoded output if the output could be decoded.
"""
def __init__(self, command, output, error=None):
self.command = command
self.output = output
self.error = error
# Attempt to decode the output and store the output format.
self.decoded, self.decoded_format = decode_json_yaml(self.output)
def assert_data(self, data, format="yaml", text=None):
"""
Assert the decoded output from the calicoctl command matches the
supplied data and the expected decoder format.
Args:
data: The data to compare
format: The expected output format of the data.
text: (optional) Expected text in the command output.
"""
self.assert_no_error(text)
assert self.decoded is not None, "No value was decoded from calicoctl response."
if isinstance(data, str):
data, _ = decode_json_yaml(data)
assert data is not None, "String data did not decode"
if format is not None:
assert format == self.decoded_format, "Decoded format is different. " \
"expect %s; got %s" % (format, self.decoded_format)
# Copy and clean the decoded data to allow it to be comparable.
cleaned = clean_calico_data(self.decoded)
assert cmp(cleaned, data) == 0, \
"Items are not the same. Difference is:\n %s" % \
pformat(DeepDiff(cleaned, data), indent=2)
def assert_empty_list(self, kind, format="yaml", text=None):
"""
Assert the calicoctl command output an empty list of the specified
kind.
Args:
kind: The resource kind.
format: The expected output format of the data.
text: (optional) Expected text in the command output.
Returns:
"""
data = make_list(kind, [])
self.assert_data(data, format=format, text=text)
def assert_list(self, kind, items, format="yaml", text=None):
"""
Assert the calicoctl command output a list of the specified
kind.
Args:
kind: The resource kind.
items: A list of the items in the list.
format: The expected output format of the data.
text: (optional) Expected text in the command output.
Returns:
"""
data = make_list(kind, items)
self.assert_data(data, format=format, text=text)
def assert_error(self, text=None):
"""
Assert the calicoctl command exited with an error and did not panic
Args:
text: (optional) Expected text in the command output.
"""
assert self.error, "Expected error running command; \n" \
"command=" + self.command + "\noutput=" + self.output
assert not "panic" in self.output, "Exited with an error due to a panic"
self.assert_output_contains(text)
def assert_no_error(self, text=None):
"""
Assert the calicoctl command did not exit with an error code.
Args:
text: (optional) Expected text in the command output.
"""
assert not self.error, "Expected no error running command; \n" \
"command=" + self.command + "\noutput=" + self.output
# If text is supplied, assert it appears in the output
if text:
self.assert_output_contains(text)
def assert_output_equals(self, text):
"""
Assert the calicoctl command output is exactly the supplied text.
Args:
text: Expected text in the command output.
"""
if not text:
return
assert text == self.output, "Expected output to exactly match; \n" + \
"command=" + self.command + "\noutput=\n" + self.output + \
"\nexpected=\n" + text
def assert_output_equals_ignore_res_version(self, text):
"""
Assert the calicoctl command output is exactly the supplied text.
Args:
text: Expected text in the command output.
"""
if not text:
return
text = re.sub('resourceVersion: ".*?"', 'resourceVersion: "<ignored>"', text)
out = re.sub('resourceVersion: ".*?"', 'resourceVersion: "<ignored>"', self.output)
assert text == out, "Expected output to match after ignoring resource version; \n" + \
"command=" + self.command + "\noutput=\n" + out + \
"\nexpected=\n" + text
def assert_output_contains(self, text):
"""
Assert the calicoctl command output contains the supplied text.
Args:
text: Expected text in the command output.
"""
if not text:
return
assert text in self.output, "Expected text in output; \n" + \
"command=" + self.command + "\noutput=\n" + self.output + \
"\nexpected=\n" + text
def assert_output_not_contains(self, text):
"""
Assert the calicoctl command output does not contain the supplied text.
Args:
text: Expected text in the command output.
"""
if not text:
return
assert text not in self.output, "Unexpected text in output; \n" + \
"command=" + self.command + "\noutput=\n" + self.output + \
"\nunexpected=\n" + text
def calicoctl(command, data=None, load_as_stdin=False, format="yaml", only_stdout=False, no_config=False, kdd=False, allowVersionMismatch=True):
"""
Convenience function for abstracting away calling the calicoctl
command.
:param command: The calicoctl command line parameters as a single string.
:param data: Input data either as a string or a JSON serializable Python
object.
:param load_as_stdin: Load the input data through stdin rather than by
loading from file.
:param format: Specify the format for loading the data.
:param only_stdout: Return only the stdout
:return: The output from the command with leading and trailing
whitespace removed.
"""
# If input data is specified, save it to file in the required format.
if isinstance(data, str):
data, _ = decode_json_yaml(data)
assert data is not None, "String data did not decode"
if data is not None:
if format == "yaml":
writeyaml("/tmp/input-data", data)
else:
writejson("/tmp/input-data", data)
stdin = ''
option_file = ''
if data and load_as_stdin:
stdin = 'cat /tmp/input-data | '
option_file = ' -f -'
elif data and not load_as_stdin:
option_file = ' -f /tmp/input-data'
calicoctl_bin = os.environ.get("CALICOCTL", "/code/bin/calicoctl-linux-amd64")
if allowVersionMismatch:
calicoctl_bin += " --allow-version-mismatch"
if ETCD_SCHEME == "https":
etcd_auth = "%s:2379" % ETCD_HOSTNAME_SSL
else:
etcd_auth = "%s:2379" % get_ip()
# Export the environment, in case the command has multiple parts, e.g.
# use of | or ;
#
# Pass in all etcd params, the values will be empty if not set anyway
calicoctl_env_cmd = "export ETCD_ENDPOINTS=%s; " \
"export ETCD_CA_CERT_FILE=%s; " \
"export ETCD_CERT_FILE=%s; " \
"export ETCD_KEY_FILE=%s; " \
"export DATASTORE_TYPE=%s; %s %s" % \
(ETCD_SCHEME+"://"+etcd_auth, ETCD_CA, ETCD_CERT, ETCD_KEY,
"etcdv3", stdin, calicoctl_bin)
if kdd:
calicoctl_env_cmd = "export DATASTORE_TYPE=kubernetes; " \
"export KUBECONFIG=%s; %s %s" % \
(KUBECONFIG, stdin, calicoctl_bin)
    if no_config:
calicoctl_env_cmd = calicoctl_bin
full_cmd = calicoctl_env_cmd + " " + command + option_file
try:
output = log_and_run(full_cmd, stderr=(None if only_stdout else STDOUT))
return CalicoctlOutput(full_cmd, output)
except CalledProcessError as e:
return CalicoctlOutput(full_cmd, e.output, error=e.returncode)
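# Illustrative usage of the calicoctl() helper (the resource values below are
# examples only and are not taken from this module):
#   rc = calicoctl("create", data={
#       'apiVersion': API_VERSION,
#       'kind': 'IPPool',
#       'metadata': {'name': 'ippool-1'},
#       'spec': {'cidr': '10.1.0.0/16'},
#   })
#   rc.assert_no_error()
#   calicoctl("get ippool ippool-1 -o yaml").assert_no_error()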
def clean_calico_data(data, extra_keys_to_remove=None):
"""
Clean the data returned from a calicoctl get command to remove empty
structs, null values and non-configurable fields. This makes comparison<|fim▁hole|>
Args:
data: The data to clean.
extra_keys_to_remove: more keys to remove if needed.
Returns: The cleaned data.
"""
new = copy.deepcopy(data)
# Recursively delete empty structs / nil values and non-configurable
# fields.
def clean_elem(elem, extra_keys):
if isinstance(elem, list):
# Loop through each element in the list
for i in elem:
clean_elem(i, extra_keys)
if isinstance(elem, dict):
# Remove non-settable fields, and recursively clean each value of
# the dictionary, removing nil values or values that are empty
# dicts after cleaning.
del_keys = ['creationTimestamp', 'resourceVersion', 'uid']
if extra_keys is not None:
for extra_key in extra_keys:
del_keys.append(extra_key)
for k, v in elem.iteritems():
clean_elem(v, extra_keys)
if v is None or v == {}:
del_keys.append(k)
for k in del_keys:
if k in elem:
del(elem[k])
clean_elem(new, extra_keys_to_remove)
return new
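# Example of the cleaning behaviour (hypothetical input): read-only keys and
# empty/None values are stripped recursively, e.g.
#   clean_calico_data({'metadata': {'name': 'p1', 'uid': 'x', 'labels': {}},
#                      'spec': {'cidr': '10.1.0.0/16', 'natOutgoing': None}})
#   -> {'metadata': {'name': 'p1'}, 'spec': {'cidr': '10.1.0.0/16'}}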
def decode_json_yaml(value):
try:
decoded = json.loads(value)
# fix the python datetime back into isoformat with empty timezone information
decoded = find_and_format_creation_timestamp(decoded)
return decoded, "json"
except ValueError:
pass
try:
decoded = yaml.safe_load(value)
# fix the python datetime back into isoformat with empty timezone information
decoded = find_and_format_creation_timestamp(decoded)
return decoded, "yaml"
except yaml.YAMLError:
pass
return None, None
def find_and_format_creation_timestamp(decoded):
if decoded:
if 'items' in decoded:
for i in xrange(len(decoded['items'])):
decoded['items'][i] = format_creation_timestamp(decoded['items'][i])
else:
decoded = format_creation_timestamp(decoded)
return decoded
def format_creation_timestamp(decoded):
if isinstance(decoded, dict) and 'metadata' in decoded and 'creationTimestamp' in decoded['metadata']:
if isinstance(decoded['metadata']['creationTimestamp'], datetime):
decoded['metadata']['creationTimestamp'] = decoded.get('metadata', {}). \
get('creationTimestamp', datetime.utcnow()).isoformat() + 'Z'
return decoded
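# For example (illustrative): a decoded resource whose metadata.creationTimestamp
# is datetime(2019, 1, 1, 12, 0) comes back with the string '2019-01-01T12:00:00Z'.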
def writeyaml(filename, data):
"""
Converts a python dict to yaml and outputs to a file.
:param filename: filename to write
:param data: dictionary to write out as yaml
"""
with open(filename, 'w') as f:
text = yaml.dump(data, default_flow_style=False)
logger.debug("Writing %s: \n%s" % (filename, truncate_for_log(text, 4000)))
f.write(text)
def writejson(filename, data):
"""
Converts a python dict to json and outputs to a file.
:param filename: filename to write
:param data: dictionary to write out as json
"""
with open(filename, 'w') as f:
text = json.dumps(data,
sort_keys=True,
indent=2,
separators=(',', ': '))
logger.debug("Writing %s: \n%s" % (filename, truncate_for_log(text, 4000)))
f.write(text)
def truncate_for_log(text, length):
    if len(text) <= length:
return text
return text[:length] + "... <truncated>"
def get_ip(v6=False):
"""
Return a string of the IP of the hosts interface.
Try to get the local IP from the environment variables. This allows
testers to specify the IP address in cases where there is more than one
configured IP address for the test system.
"""
env = LOCAL_IPv6_ENV if v6 else LOCAL_IP_ENV
ip = os.environ.get(env)
if not ip:
logger.debug("%s not set; try to auto detect IP.", env)
socket_type = socket.AF_INET6 if v6 else socket.AF_INET
s = socket.socket(socket_type, socket.SOCK_DGRAM)
remote_ip = "2001:4860:4860::8888" if v6 else "8.8.8.8"
s.connect((remote_ip, 0))
ip = s.getsockname()[0]
s.close()
else:
logger.debug("Got local IP from %s=%s", env, ip)
return ip
# Some of the commands we execute like to mess with the TTY configuration,
# which can break the output formatting. As a workaround, save off the
# terminal settings and restore them after each command.
_term_settings = termios.tcgetattr(sys.stdin.fileno())
def log_and_run(command, raise_exception_on_failure=True, stderr=STDOUT):
def log_output(results):
        if results is None:
            logger.info(" # <no output>")
            return
        lines = results.split("\n")
for line in lines:
logger.info(" # %s", line.rstrip())
try:
logger.info("%s", command)
try:
results = check_output(command, shell=True, stderr=stderr).rstrip()
finally:
# Restore terminal settings in case the command we ran manipulated
# them. Note: under concurrent access, this is still not a perfect
# solution since another thread's child process may break the
# settings again before we log below.
termios.tcsetattr(sys.stdin.fileno(), termios.TCSADRAIN, _term_settings)
log_output(results)
return results
except CalledProcessError as e:
# Wrap the original exception with one that gives a better error
# message (including command output).
logger.info(" # Return code: %s", e.returncode)
log_output(e.output)
if raise_exception_on_failure:
raise e
def curl_etcd(path, options=None, recursive=True, ip=None):
"""
Perform a curl to etcd, returning JSON decoded response.
:param path: The key path to query
:param options: Additional options to include in the curl
:param recursive: Whether we want recursive query or not
:return: The JSON decoded response.
"""
if options is None:
options = []
if ETCD_SCHEME == "https":
# Etcd is running with SSL/TLS, require key/certificates
rc = check_output(
"curl --cacert %s --cert %s --key %s "
"-sL https://%s:2379/v2/keys/%s?recursive=%s %s"
% (ETCD_CA, ETCD_CERT, ETCD_KEY, ETCD_HOSTNAME_SSL,
path, str(recursive).lower(), " ".join(options)),
shell=True)
else:
rc = check_output(
"curl -sL http://%s:2379/v2/keys/%s?recursive=%s %s"
% (ip, path, str(recursive).lower(), " ".join(options)),
shell=True)
logger.info("etcd RC: %s" % rc.strip())
return json.loads(rc.strip())
def wipe_etcd(ip):
# Delete /calico if it exists. This ensures each test has an empty data
# store at start of day.
curl_etcd("calico", options=["-XDELETE"], ip=ip)
# Disable Usage Reporting to usage.projectcalico.org
# We want to avoid polluting analytics data with unit test noise
curl_etcd("calico/v1/config/UsageReportingEnabled",
options=["-XPUT -d value=False"], ip=ip)
etcd_container_name = "calico-etcd"
tls_vars = ""
if ETCD_SCHEME == "https":
# Etcd is running with SSL/TLS, require key/certificates
etcd_container_name = "calico-etcd-ssl"
tls_vars = ("ETCDCTL_CACERT=/etc/calico/certs/ca.pem " +
"ETCDCTL_CERT=/etc/calico/certs/client.pem " +
"ETCDCTL_KEY=/etc/calico/certs/client-key.pem ")
check_output("docker exec " + etcd_container_name + " sh -c '" + tls_vars +
"ETCDCTL_API=3 etcdctl del --prefix /calico" +
"'", shell=True)
def make_list(kind, items):
"""
Convert the list of resources into a single List resource type.
Args:
        kind: The resource kind (a "List" suffix is appended if not already present).
        items: A list of the resources in the List object.
    Returns:
        A dict representing the single List resource.
"""
assert isinstance(items, list)
if "List" not in kind:
kind = kind + "List"
return {
'kind': kind,
'apiVersion': API_VERSION,
'items': items,
}
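# For example (illustrative), make_list("IPPool", [pool]) returns:
#   {'kind': 'IPPoolList', 'apiVersion': API_VERSION, 'items': [pool]}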
def name(data):
"""
Returns the name of the resource in the supplied data
Args:
data: A dictionary containing the resource.
Returns: The resource name.
"""
return data['metadata']['name']
def namespace(data):
"""
Returns the namespace of the resource in the supplied data
Args:
data: A dictionary containing the resource.
Returns: The resource name.
"""
return data['metadata']['namespace']
def set_cluster_version(calico_version="", kdd=False):
"""
Set Calico version in ClusterInformation using the calico_version_helper go app.
Args:
calico_version: string with version to set
kdd: optional bool to indicate use of kubernetes datastore (default False)
Returns: The command output
"""
if ETCD_SCHEME == "https":
etcd_auth = "%s:2379" % ETCD_HOSTNAME_SSL
else:
etcd_auth = "%s:2379" % get_ip()
calico_helper_bin = "/code/tests/fv/helper/bin/calico_version_helper"
full_cmd = "export ETCD_ENDPOINTS=%s; " \
"export ETCD_CA_CERT_FILE=%s; " \
"export ETCD_CERT_FILE=%s; " \
"export ETCD_KEY_FILE=%s; " \
"export DATASTORE_TYPE=%s; %s" % \
(ETCD_SCHEME+"://"+etcd_auth, ETCD_CA, ETCD_CERT, ETCD_KEY,
"etcdv3", calico_helper_bin)
if kdd:
full_cmd = "export DATASTORE_TYPE=kubernetes; " \
"export KUBECONFIG=%s; %s" % \
(KUBECONFIG, calico_helper_bin)
if calico_version:
full_cmd += " -v " + calico_version
try:
output = log_and_run(full_cmd, stderr=STDOUT)
return CalicoctlOutput(full_cmd, output)
except CalledProcessError as e:
return CalicoctlOutput(full_cmd, e.output, error=e.returncode)<|fim▁end|> | with the input data much simpler. |
<|file_name|>test_country_field.py<|end_file_name|><|fim▁begin|>import sqlalchemy_utils
from babel import Locale
from wtforms import Form
from tests import MultiDict
from wtforms_alchemy import CountryField
sqlalchemy_utils.i18n.get_locale = lambda: Locale('en')
class TestCountryField(object):
field_class = CountryField
def init_form(self, **kwargs):
class TestForm(Form):
test_field = self.field_class(**kwargs)
self.form_class = TestForm<|fim▁hole|> def setup_method(self, method):
self.valid_countries = [
'US',
'SA',
'FI'
]
self.invalid_countries = [
'unknown',
]
def test_valid_countries(self):
form_class = self.init_form()
for country in self.valid_countries:
form = form_class(MultiDict(test_field=country))
form.validate()
assert len(form.errors) == 0
def test_invalid_countries(self):
form_class = self.init_form()
for country in self.invalid_countries:
form = form_class(MultiDict(test_field=country))
form.validate()
assert len(form.errors['test_field']) == 2<|fim▁end|> | return self.form_class
|
<|file_name|>npairsloss.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
from __future__ import division<|fim▁hole|>
import paddle.fluid as fluid
from utility import get_gpu_num
class NpairsLoss():
def __init__(self,
train_batch_size = 160,
samples_each_class=2,
reg_lambda=0.01):
self.samples_each_class = samples_each_class
assert(self.samples_each_class == 2)
self.train_batch_size = train_batch_size
num_gpus = get_gpu_num()
assert(train_batch_size % num_gpus == 0)
self.cal_loss_batch_size = train_batch_size // num_gpus
assert(self.cal_loss_batch_size % samples_each_class == 0)
self.reg_lambda = reg_lambda
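    # loss() below implements the standard N-pairs objective: softmax
    # cross-entropy over the anchor/positive similarity matrix, plus an L2
    # embedding penalty of 0.5 * reg_lambda * mean(sum(f**2)).
    # Illustrative wiring (the variable names here are assumed, not defined above):
    #   npairs = NpairsLoss(train_batch_size=160, samples_each_class=2)
    #   avg_loss = npairs.loss(embeddings, labels)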
def loss(self, input, label=None):
reg_lambda = self.reg_lambda
samples_each_class = self.samples_each_class
batch_size = self.cal_loss_batch_size
num_class = batch_size // samples_each_class
fea_dim = input.shape[1]
input = fluid.layers.reshape(input, shape = [-1, fea_dim])
feature = fluid.layers.reshape(input, shape = [-1, samples_each_class, fea_dim])
label = fluid.layers.reshape(label, shape = [-1, samples_each_class])
label = fluid.layers.cast(label, dtype='float32')
if samples_each_class == 2:
anchor_fea, positive_fea = fluid.layers.split(feature, num_or_sections = 2, dim = 1)
anchor_lab, positive_lab = fluid.layers.split(label, num_or_sections = 2, dim = 1)
else:
anchor_fea, positive_fea = fluid.layers.split(feature, num_or_sections = [1, samples_each_class-1], dim = 1)
anchor_lab, positive_lab = fluid.layers.split(label, num_or_sections = [1, samples_each_class-1], dim = 1)
anchor_fea = fluid.layers.reshape(anchor_fea, shape = [-1, fea_dim])
positive_fea = fluid.layers.reshape(positive_fea, shape = [-1, fea_dim])
positive_fea_trans = fluid.layers.transpose(positive_fea, perm = [1, 0])
similarity_matrix = fluid.layers.mul(anchor_fea, positive_fea_trans)
anchor_lab = fluid.layers.expand(x=anchor_lab, expand_times=[1, batch_size-num_class])
positive_lab_tran = fluid.layers.transpose(positive_lab, perm = [1, 0])
positive_lab_tran = fluid.layers.expand(x=positive_lab_tran, expand_times=[num_class, 1])
label_remapped = fluid.layers.equal(anchor_lab, positive_lab_tran)
label_remapped = fluid.layers.cast(label_remapped, dtype='float32') / (samples_each_class-1)
label_remapped.stop_gradient = True
out = fluid.layers.softmax(input=similarity_matrix, use_cudnn=False)
xentloss = fluid.layers.cross_entropy(input=out, label=label_remapped, soft_label=True)
xentloss = fluid.layers.mean(x=xentloss)
reg = fluid.layers.reduce_mean(fluid.layers.reduce_sum(fluid.layers.square(input), dim=1))
l2loss = 0.5 * reg_lambda * reg
return xentloss + l2loss<|fim▁end|> | from __future__ import print_function |
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-06-06 02:47
from __future__ import unicode_literals
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Car',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('car_model', models.CharField(max_length=20)),
('color', models.CharField(max_length=20)),
('year', models.SmallIntegerField(help_text='Use year as YYYY.', validators=[django.core.validators.RegexValidator('^[0-9]{4}$', 'Year in invalid format!', 'invalid')])),
('mileage', models.IntegerField(default=0, help_text='Or your car is brand new or it have some mileage traveled', validators=[django.core.validators.MinValueValidator(0)])),
],
),
migrations.CreateModel(
name='OilChange',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.DateTimeField(verbose_name='date changed')),
('mileage', models.IntegerField(default=0, validators=[django.core.validators.MinValueValidator(0)])),
('car', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='car.Car')),
],
),
migrations.CreateModel(
name='Refuel',
fields=[<|fim▁hole|> ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.DateTimeField(verbose_name='date refueled')),
('liters', models.DecimalField(decimal_places=3, max_digits=7)),
('fuel_price', models.DecimalField(decimal_places=2, max_digits=4)),
('mileage', models.IntegerField(default=0, validators=[django.core.validators.MinValueValidator(0)])),
('fuel_type', models.CharField(choices=[('Regular gas', 'Regular gas'), ('Premium gas', 'Premium gas'), ('Alcohol', 'Alcohol'), ('Diesel', 'Diesel')], default='Regular gas', max_length=20)),
('car', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='car.Car')),
],
),
]<|fim▁end|> | |
<|file_name|>test_validate.py<|end_file_name|><|fim▁begin|># THIS FILE IS PART OF THE CYLC WORKFLOW ENGINE.
# Copyright (C) NIWA & British Crown (Met Office) & Contributors.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Unit Tests for cylc.flow.parsec.validate.ParsecValidator.coerce methods."""
from typing import List
import pytest
from pytest import approx
from cylc.flow.parsec.config import ConfigNode as Conf
from cylc.flow.parsec.OrderedDict import OrderedDictWithDefaults
from cylc.flow.parsec.exceptions import IllegalValueError
from cylc.flow.parsec.validate import (
CylcConfigValidator as VDR,
DurationFloat,
ListValueError,
IllegalItemError,
ParsecValidator,
parsec_validate
)
@pytest.fixture
def sample_spec():
with Conf('myconf') as myconf:
with Conf('section1'):
Conf('value1', default='')
Conf('value2', default='what?')
with Conf('section2'):
Conf('enabled', VDR.V_BOOLEAN)
with Conf('section3'):
Conf('title', default='default', options=['1', '2'])
Conf(
'amounts',
VDR.V_INTEGER_LIST,
default=[1, 2, 3],
# options=[[1, 2, 3]]
)
with Conf('entries'):
Conf('key')
Conf('value')
with Conf('<whatever>'):
Conf('section300000', default='')
Conf('ids', VDR.V_INTEGER_LIST)
return myconf
@pytest.fixture
def validator_invalid_values():
"""
    Data provider of invalid values for the parsec validator. All values must
    not be null (covered elsewhere), and must not be dicts.
Possible invalid scenarios must include:
- cfg[key] is a list AND a value is not in list of the possible values
- OR
- cfg[key] is not a list AND cfg[key] not in the list of possible values
:return: a list with sets of tuples for the test parameters
:rtype: list
"""
values = []
# variables reused throughout
spec = None
msg = None
# set 1 (t, f, f, t)
with Conf('base') as spec:
Conf('value', VDR.V_INTEGER_LIST, default=1, options=[1, 2, 3, 4])
cfg = OrderedDictWithDefaults()
cfg['value'] = "1, 2, 3"
msg = None
values.append((spec, cfg, msg))
# set 2 (t, t, f, t)
with Conf('base') as spec:
Conf('value', VDR.V_INTEGER_LIST, default=1, options=[1, 2, 3, 4])
cfg = OrderedDictWithDefaults()
cfg['value'] = "1, 2, 5"
msg = '(type=option) value = [1, 2, 5]'
values.append((spec, cfg, msg))
# set 3 (f, f, t, f)
with Conf('base') as spec:
Conf('value', VDR.V_INTEGER, default=1, options=[2, 3, 4])
cfg = OrderedDictWithDefaults()
cfg['value'] = "2"
msg = None
values.append((spec, cfg, msg))
# set 4 (f, f, t, t)
with Conf('base') as spec:
Conf('value', VDR.V_INTEGER, default=1, options=[1, 2, 3, 4])
cfg = OrderedDictWithDefaults()
cfg['value'] = "5"
msg = '(type=option) value = 5'
values.append((spec, cfg, msg))
return values
@pytest.fixture
def strip_and_unquote_list():
return [
[
'"a,b", c, "d e"', # input
["a,b", "c", "d e"] # expected
],
[
'foo bar baz', # input
["foo bar baz"] # expected
],
[
'"a", \'b\', c', # input
["a", "b", "c"] # expected
],
[
'a b c, d e f', # input
["a b c", "d e f"] # expected
],
]
def test_list_value_error():
keys = ['a,', 'b', 'c']
value = 'a sample value'
error = ListValueError(keys, value, "who cares")
output = str(error)
expected = '(type=list) [a,][b]c = a sample value - (who cares)'
assert expected == output
def test_list_value_error_with_exception():
keys = ['a,', 'b', 'c']
value = 'a sample value'
exc = Exception('test')
error = ListValueError(keys, value, "who cares", exc)
output = str(error)
expected = '(type=list) [a,][b]c = a sample value - (test: who cares)'
assert expected == output
def test_illegal_value_error():
value_type = 'ClassA'
keys = ['a,', 'b', 'c']
value = 'a sample value'
error = IllegalValueError(value_type, keys, value)
output = str(error)
expected = "(type=ClassA) [a,][b]c = a sample value"
assert expected == output
def test_illegal_value_error_with_exception():
value_type = 'ClassA'
keys = ['a,', 'b', 'c']
value = 'a sample value'
exc = Exception('test')
error = IllegalValueError(value_type, keys, value, exc)
output = str(error)
expected = "(type=ClassA) [a,][b]c = a sample value - (test)"
assert expected == output
def test_illegal_item_error():
keys = ['a,', 'b', 'c']
key = 'a sample value'
error = IllegalItemError(keys, key)
output = str(error)
expected = "[a,][b][c]a sample value"
assert expected == output
def test_illegal_item_error_message():
keys = ['a,', 'b', 'c']
key = 'a sample value'
message = "invalid"
error = IllegalItemError(keys, key, message)
output = str(error)
expected = "[a,][b][c]a sample value - (invalid)"
assert expected == output
def test_parsec_validator_invalid_key(sample_spec):
parsec_validator = ParsecValidator()
cfg = OrderedDictWithDefaults()
cfg['section1'] = OrderedDictWithDefaults()
cfg['section1']['value1'] = '1'
cfg['section1']['value2'] = '2'
cfg['section22'] = 'abc'
with pytest.raises(IllegalItemError):
parsec_validator.validate(cfg, sample_spec)
def test_parsec_validator_invalid_key_no_spec(sample_spec):
parsec_validator = ParsecValidator()
cfg = OrderedDictWithDefaults()
cfg['section1'] = OrderedDictWithDefaults()
cfg['section1']['value1'] = '1'
cfg['section1']['value2'] = '2'
cfg['section22'] = 'abc'
# remove the user-defined section from the spec
sample_spec._children = {
key: value
for key, value in sample_spec._children.items()
if key != '__MANY__'
}
with pytest.raises(IllegalItemError):
parsec_validator.validate(cfg, sample_spec)
def test_parsec_validator_invalid_key_with_many_spaces(sample_spec):
parsec_validator = ParsecValidator()
cfg = OrderedDictWithDefaults()
cfg['section1'] = OrderedDictWithDefaults()
cfg['section1']['value1'] = '1'
cfg['section1']['value2'] = '2'
cfg['section 3000000'] = 'test'
with pytest.raises(IllegalItemError) as cm:
parsec_validator.validate(cfg, sample_spec)
    assert str(cm.value) == "section 3000000 - (consecutive spaces)"
def test_parsec_validator_invalid_key_with_many_invalid_values(
validator_invalid_values
):
for spec, cfg, msg in validator_invalid_values:
parsec_validator = ParsecValidator()
if msg is not None:
with pytest.raises(IllegalValueError) as cm:
parsec_validator.validate(cfg, spec)
assert msg == str(cm.value)
else:
# cylc.flow.parsec_validator.validate(cfg, spec)
# let's use the alias `parsec_validate` here
parsec_validate(cfg, spec)
# TBD assertIsNotNone when 2.6+
assert parsec_validator is not None
def test_parsec_validator_invalid_key_with_many_1(sample_spec):
parsec_validator = ParsecValidator()
cfg = OrderedDictWithDefaults()
cfg['section1'] = OrderedDictWithDefaults()
cfg['section1']['value1'] = '1'
cfg['section1']['value2'] = '2'
cfg['section3000000'] = OrderedDictWithDefaults()
parsec_validator.validate(cfg, sample_spec)
# TBD assertIsNotNone when 2.6+
assert parsec_validator is not None
def test_parsec_validator_invalid_key_with_many_2(sample_spec):
parsec_validator = ParsecValidator()
cfg = OrderedDictWithDefaults()
cfg['section3'] = OrderedDictWithDefaults()
cfg['section3']['title'] = '1'
cfg['section3']['entries'] = OrderedDictWithDefaults()
cfg['section3']['entries']['key'] = 'name'
cfg['section3']['entries']['value'] = "1, 2, 3, 4"
parsec_validator.validate(cfg, sample_spec)
# TBD assertIsNotNone when 2.6+
assert parsec_validator is not None
def test_parsec_validator(sample_spec):
parsec_validator = ParsecValidator()
cfg = OrderedDictWithDefaults()
cfg['section1'] = OrderedDictWithDefaults()
cfg['section1']['value1'] = '1'
cfg['section1']['value2'] = '2'
cfg['section3'] = OrderedDictWithDefaults()
cfg['section3']['title'] = None
parsec_validator.validate(cfg, sample_spec)
# TBD assertIsNotNone when 2.6+
assert parsec_validator is not None
# --- static methods
def test_coerce_none_fails():
with pytest.raises(AttributeError):
ParsecValidator.coerce_boolean(None, [])
with pytest.raises(AttributeError):
ParsecValidator.coerce_float(None, [])
with pytest.raises(AttributeError):
ParsecValidator.coerce_int(None, [])
def test_coerce_boolean():
"""Test coerce_boolean."""
validator = ParsecValidator()
# The good
for value, result in [
('True', True),
(' True ', True),
('"True"', True),
("'True'", True),
('true', True),
(' true ', True),
('"true"', True),
("'true'", True),
('False', False),
(' False ', False),
('"False"', False),
("'False'", False),
('false', False),
(' false ', False),
('"false"', False),
("'false'", False),
('', None),
(' ', None)
]:
assert validator.coerce_boolean(value, ['whatever']) == result
# The bad
for value in [
'None', ' Who cares? ', '3.14', '[]', '[True]', 'True, False'
]:
with pytest.raises(IllegalValueError):
validator.coerce_boolean(value, ['whatever'])
@pytest.mark.parametrize(
'value, expected',
[
('3', 3.0),
('9.80', 9.80),
('3.141592654', 3.141592654),
('"3.141592654"', 3.141592654),
("'3.141592654'", 3.141592654),
('-3', -3.0),
('-3.1', -3.1),
('0', 0.0),
('-0', -0.0),
('0.0', 0.0),
('1e20', 1.0e20),
('6.02e23', 6.02e23),
('-1.6021765e-19', -1.6021765e-19),
('6.62607004e-34', 6.62607004e-34),
]
)
def test_coerce_float(value: str, expected: float):
"""Test coerce_float."""
assert (
ParsecValidator.coerce_float(value, ['whatever']) == approx(expected)
)
def test_coerce_float__empty():
# not a number
assert ParsecValidator.coerce_float('', ['whatever']) is None
@pytest.mark.parametrize(
'value',
['None', ' Who cares? ', 'True', '[]', '[3.14]', '3.14, 2.72']
)
def test_coerce_float__bad(value: str):
with pytest.raises(IllegalValueError):
ParsecValidator.coerce_float(value, ['whatever'])
@pytest.mark.parametrize(
'value, expected',
[
('', []),
('3', [3.0]),
('2*3.141592654', [3.141592654, 3.141592654]),
('12*8, 8*12.0', [8.0] * 12 + [12.0] * 8),
('-3, -2, -1, -0.0, 1.0', [-3.0, -2.0, -1.0, -0.0, 1.0]),
('6.02e23, -1.6021765e-19, 6.62607004e-34',
[6.02e23, -1.6021765e-19, 6.62607004e-34]),
]
)
def test_coerce_float_list(value: str, expected: List[float]):
"""Test coerce_float_list."""
items = ParsecValidator.coerce_float_list(value, ['whatever'])
assert items == approx(expected)
@pytest.mark.parametrize(
'value',
['None', 'e, i, e, i, o', '[]', '[3.14]', 'pi, 2.72', '2*True']
)
def test_coerce_float_list__bad(value: str):
with pytest.raises(IllegalValueError):
ParsecValidator.coerce_float_list(value, ['whatever'])
@pytest.mark.parametrize(
'value, expected',
[
('0', 0),
('3', 3),
('-3', -3),
('-0', -0),
('653456', 653456),
('-8362583645365', -8362583645365)
]
)
def test_coerce_int(value: str, expected: int):
"""Test coerce_int."""
assert ParsecValidator.coerce_int(value, ['whatever']) == expected
def test_coerce_int__empty():
assert ParsecValidator.coerce_int('', ['whatever']) is None # not a number
@pytest.mark.parametrize(
'value',
['None', ' Who cares? ', 'True', '4.8', '[]', '[3]', '60*60']
)
def test_coerce_int__bad(value: str):
with pytest.raises(IllegalValueError):
ParsecValidator.coerce_int(value, ['whatever'])
def test_coerce_int_list():
"""Test coerce_int_list."""
validator = ParsecValidator()
# The good
for value, results in [
('', []),
('3', [3]),
('1..10, 11..20..2',
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19]),
('18 .. 24', [18, 19, 20, 21, 22, 23, 24]),
('18 .. 24 .. 3', [18, 21, 24]),
('-10..10..3', [-10, -7, -4, -1, 2, 5, 8]),
('10*3, 4*-6', [3] * 10 + [-6] * 4),
('10*128, -78..-72, 2048',
[128] * 10 + [-78, -77, -76, -75, -74, -73, -72, 2048])
]:
assert validator.coerce_int_list(value, ['whatever']) == results
# The bad
for value in [
'None', 'e, i, e, i, o', '[]', '1..3, x', 'one..ten'
]:
with pytest.raises(IllegalValueError):
validator.coerce_int_list(value, ['whatever'])
def test_coerce_str():
"""Test coerce_str."""
validator = ParsecValidator()
# The good
for value, result in [
('', ''),
('Hello World!', 'Hello World!'),
('"Hello World!"', 'Hello World!'),
('"Hello Cylc\'s World!"', 'Hello Cylc\'s World!'),
("'Hello World!'", 'Hello World!'),
('0', '0'),
('My list is:\nfoo, bar, baz\n', 'My list is:\nfoo, bar, baz'),
(' Hello:\n foo\n bar\n baz\n',
'Hello:\nfoo\nbar\nbaz'),
(' Hello:\n foo\n Greet\n baz\n',
'Hello:\n foo\nGreet\n baz'),
('False', 'False'),
('None', 'None'),
(['a', 'b'], 'a\nb')
]:
assert validator.coerce_str(value, ['whatever']) == result
def test_coerce_str_list():
"""Test coerce_str_list."""
validator = ParsecValidator()
# The good
for value, results in [
('', []),
('Hello', ['Hello']),
('"Hello"', ['Hello']),
('1', ['1']),
('Mercury, Venus, Earth, Mars',
['Mercury', 'Venus', 'Earth', 'Mars']),
('Mercury, Venus, Earth, Mars,\n"Jupiter",\n"Saturn"\n',
['Mercury', 'Venus', 'Earth', 'Mars', 'Jupiter', 'Saturn']),
('New Zealand, United Kingdom',
['New Zealand', 'United Kingdom'])
]:
assert validator.coerce_str_list(value, ['whatever']) == results
def test_strip_and_unquote():
with pytest.raises(IllegalValueError):
ParsecValidator.strip_and_unquote(['a'], '"""')
def test_strip_and_unquote_list_parsec():
"""Test strip_and_unquote_list using ParsecValidator."""
for value, results in [
('"a"\n"b"', ['a', 'b']),
('"a", "b"', ['a', 'b']),
('"a", "b"', ['a', 'b']),
('"c" # d', ['c']),
('"a", "b", "c" # d', ['a', 'b', 'c']),
('"a"\n"b"\n"c" # d', ['a', 'b', 'c']),
("'a', 'b'", ['a', 'b']),
("'c' #d", ['c']),
("'a', 'b', 'c' # d", ['a', 'b', 'c']),
("'a'\n'b'\n'c' # d", ['a', 'b', 'c']),
('a, b, c,', ['a', 'b', 'c']),
('a, b, c # d', ['a', 'b', 'c']),
('a, b, c\n"d"', ['a', 'b', 'd']),
('a, b, c\n"d" # e', ['a', 'b', '"d"'])
]:
assert results == ParsecValidator.strip_and_unquote_list(
['a'], value)
def test_strip_and_unquote_list_cylc(strip_and_unquote_list):
"""Test strip_and_unquote_list using CylcConfigValidator."""
validator = VDR()
for values in strip_and_unquote_list:
value = values[0]
expected = values[1]
output = validator.strip_and_unquote_list(keys=[], value=value)
assert expected == output
def test_strip_and_unquote_list_multiparam():
with pytest.raises(ListValueError):
ParsecValidator.strip_and_unquote_list(
['a'], 'a, b, c<a,b>'
)
def test_coerce_cycle_point():
"""Test coerce_cycle_point."""
validator = VDR()
# The good
for value, result in [
('', None),
('3', '3'),
('2018', '2018'),
('20181225T12Z', '20181225T12Z'),
('2018-12-25T12:00+11:00', '2018-12-25T12:00+11:00')]:
assert validator.coerce_cycle_point(value, ['whatever']) == result
# The bad
for value in [
'None', ' Who cares? ', 'True', '1, 2', '20781340E10']:
with pytest.raises(IllegalValueError):
validator.coerce_cycle_point(value, ['whatever'])
def test_coerce_cycle_point_format():
"""Test coerce_cycle_point_format."""
validator = VDR()
# The good
for value, result in [
('', None),
('%Y%m%dT%H%M%z', '%Y%m%dT%H%M%z'),
('CCYYMMDDThhmmZ', 'CCYYMMDDThhmmZ'),
('XCCYYMMDDThhmmZ', 'XCCYYMMDDThhmmZ')]:
assert (
validator.coerce_cycle_point_format(value, ['whatever'])
== result
)
# The bad
# '/' and ':' not allowed in cylc cycle points (they are used in paths).
for value in ['%i%j', 'Y/M/D', '%Y-%m-%dT%H:%MZ']:
with pytest.raises(IllegalValueError):
validator.coerce_cycle_point_format(value, ['whatever'])
def test_coerce_cycle_point_time_zone():
"""Test coerce_cycle_point_time_zone."""
validator = VDR()
# The good
for value, result in [
('', None),
('Z', 'Z'),
('+0000', '+0000'),
('+0100', '+0100'),
('+1300', '+1300'),
('-0630', '-0630')]:
assert (
validator.coerce_cycle_point_time_zone(value, ['whatever'])
== result
)
# The bad
for value in ['None', 'Big Bang Time', 'Standard Galaxy Time']:
with pytest.raises(IllegalValueError):
validator.coerce_cycle_point_time_zone(value, ['whatever'])
def test_coerce_interval():
"""Test coerce_interval."""
validator = VDR()
# The good
for value, result in [
('', None),
('P3D', DurationFloat(259200)),<|fim▁hole|> for value in ['None', '5 days', '20', '-12']:
with pytest.raises(IllegalValueError):
validator.coerce_interval(value, ['whatever'])
@pytest.mark.parametrize(
'value, expected',
[
('', []),
('P3D', [DurationFloat(259200)]),
('P3D, PT10M10S', [DurationFloat(259200), DurationFloat(610)]),
('25*PT30M,10*PT1H',
[DurationFloat(1800)] * 25 + [DurationFloat(3600)] * 10)
]
)
def test_coerce_interval_list(value: str, expected: List[DurationFloat]):
"""Test coerce_interval_list."""
assert VDR.coerce_interval_list(value, ['whatever']) == approx(expected)
@pytest.mark.parametrize(
'value',
['None', '5 days', '20', 'PT10S, -12']
)
def test_coerce_interval_list__bad(value: str):
with pytest.raises(IllegalValueError):
VDR.coerce_interval_list(value, ['whatever'])
def test_coerce_parameter_list():
"""Test coerce_parameter_list."""
validator = VDR()
# The good
for value, result in [
('', []),
('planet', ['planet']),
('planet, star, galaxy', ['planet', 'star', 'galaxy']),
('1..5, 21..25', [1, 2, 3, 4, 5, 21, 22, 23, 24, 25]),
('-15, -10, -5, -1..1', [-15, -10, -5, -1, 0, 1])]:
assert validator.coerce_parameter_list(value, ['whatever']) == result
# The bad
for value in ['foo/bar', 'p1, 1..10', '2..3, 4, p']:
with pytest.raises(IllegalValueError):
validator.coerce_parameter_list(value, ['whatever'])
def test_coerce_xtrigger():
"""Test coerce_xtrigger."""
validator = VDR()
# The good
for value, result in [
('foo(x="bar")', 'foo(x=bar)'),
('foo(x, y, z="zebra")', 'foo(x, y, z=zebra)')]:
assert (
validator.coerce_xtrigger(value, ['whatever']).get_signature()
== result
)
# The bad
for value in [
'', 'foo(', 'foo)', 'foo,bar']:
with pytest.raises(IllegalValueError):
validator.coerce_xtrigger(value, ['whatever'])
def test_type_help_examples():
types = {
**ParsecValidator.V_TYPE_HELP,
**VDR.V_TYPE_HELP
}
validator = VDR()
for vdr, info in types.items():
coercer = validator.coercers[vdr]
if len(info) > 2:
for example in info[2]:
try:
coercer(example, [None])
except Exception:
raise Exception(
f'Example "{example}" failed for type "{vdr}"')<|fim▁end|> | ('PT10M10S', DurationFloat(610))]:
assert validator.coerce_interval(value, ['whatever']) == result
# The bad |
<|file_name|>OFShell.py<|end_file_name|><|fim▁begin|>import threading
import time
import re
from openflow.optin_manager.sfa.openflow_utils.CreateOFSliver import CreateOFSliver
from openflow.optin_manager.sfa.openflow_utils.sliver_status import get_sliver_status
from openflow.optin_manager.sfa.openflow_utils.delete_slice import delete_slice
from openflow.optin_manager.sfa.openflow_utils.rspec3_to_expedient import get_fs_from_group
from openflow.optin_manager.sfa.util.xrn import Xrn
from openflow.optin_manager.opts.models import Experiment, ExperimentFLowSpace
from openflow.optin_manager.xmlrpc_server.models import CallBackServerProxy, FVServerProxy
#TODO: Uncomment when merge
#from expedient.common.utils.mail import send_mail
from django.conf import settings
from openflow.optin_manager.sfa.openflow_utils.ServiceThread import ServiceThread
from openflow.optin_manager.sfa.models import ExpiringComponents
from openflow.optin_manager.sfa.openflow_utils.federationlinkmanager import FederationLinkManager
#XXX TEST
from openflow.optin_manager.sfa.tests.data_example import test_switches, test_links
class OFShell:
def __init__(self):
pass
@staticmethod
def get_switches(used_switches=[]):
complete_list = []
switches = OFShell().get_raw_switches()
for switch in switches:
if len(used_switches)>0:
if not switch[0] in used_switches:
continue
if int(switch[1]['nPorts']) == 0:
#TODO: Uncomment when merge with ofelia.development
#send_mail('SFA OptinManager Error', 'There are some errors related with switches: GetSwitches() returned 0 ports.',settings.ROOT_EMAIL, [settings.ROOT_EMAIL])
raise Exception("The switch with dpid:%s has a connection problem and the OCF Island Manager has already been informed. Please try again later." % str(switch[0]))
#TODO: Send Mail to the Island Manager Here.
port_list = switch[1]['portNames'].split(',')
ports = list()
for port in port_list:
match = re.match(r'[\s]*(.*)\((.*)\)', port)
ports.append({'port_name':match.group(1), 'port_num':match.group(2)})
complete_list.append({'dpid':switch[0], 'ports':ports})
return complete_list
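    # Shape of the returned structure (illustrative values only):
    #   [{'dpid': '00:10:00:00:00:00:00:01',
    #     'ports': [{'port_name': 'eth1', 'port_num': '1'}, ...]}, ...]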
@staticmethod
def get_links():
links = OFShell().get_raw_links()
link_list = list()
for link in links:
link_list.append({'src':{ 'dpid':link[0],'port':link[1]}, 'dst':{'dpid':link[2], 'port':link[3]}})
#for link in FederationLinkManager.get_federated_links():
# link_list.append({'src':{'dpid':link['src_id'], 'port':link['src_port']}, 'dst':{'dpid':link['dst_id'],'port':link['dst_port']}})
return link_list
@staticmethod
def get_federation_links():
link_list = list()
for link in FederationLinkManager.get_federated_links():
link_list.append({'src':{'dpid':link['src_id'], 'port':link['src_port']}, 'dst':{'dpid':link['dst_id'],'port':link['dst_port']}})
return link_list
def GetNodes(self,slice_urn=None,authority=None):
if not slice_urn:
switch_list = self.get_switches()
link_list = self.get_links()
federated_links = self.get_federation_links()
return {'switches':switch_list, 'links':link_list, 'federation_links':federated_links}
else:
nodes = list()
experiments = Experiment.objects.filter(slice_id=slice_urn)
for experiment in experiments:
expfss = ExperimentFLowSpace.objects.filter(exp = experiment.id)
for expfs in expfss:
if not expfs.dpid in nodes:
nodes.append(expfs.dpid)
switches = self.get_switches(nodes)
return {'switches':switches, 'links':[]}
#def GetSlice(self,slicename,authority):
#
# name = slicename
# nodes = self.GetNodes()
# slices = dict()
# List = list()
# return slices
def StartSlice(self, slice_urn):
#Look if the slice exists and return True or RecordNotFound
experiments = Experiment.objects.filter(slice_id=str(slice_urn))
if len(experiments) > 0:
return True
else:
raise ""
def StopSlice(self, slice_urn):
#Look if the slice exists and return True or RecordNotFound
experiments = Experiment.objects.filter(slice_id=slice_urn)
if len(experiments) > 0:
return True
else:
raise ""
def RebootSlice(self, slice_urn):
return self.StartSlice(slice_urn)
def DeleteSlice(self, slice_urn):
try:
delete_slice(slice_urn)
return 1
except Exception as e:
print e
raise ""
def CreateSliver(self, requested_attributes, slice_urn, authority,expiration):
project_description = 'SFA Project from %s' %authority
slice_id = slice_urn
for rspec_attrs in requested_attributes:
switch_slivers = get_fs_from_group(rspec_attrs['match'], rspec_attrs['group'])
controller = rspec_attrs['controller'][0]['url']
email = rspec_attrs['email']
email_pass = ''
slice_description = rspec_attrs['description']
if not self.check_req_switches(switch_slivers):
raise Exception("The Requested OF Switches on the RSpec do not match with the available OF switches of this island. Please check the datapath IDs of your Request RSpec.")
CreateOFSliver(slice_id, authority, project_description, slice_urn, slice_description, controller, email, email_pass, switch_slivers)
if expiration:
#Since there is a synchronous connection, expiring_components table is easier to fill than VTAM
#ExpiringComponents.objects.create(slice=slice_urn, authority=authority, expires=expiration)
pass
return 1
def SliverStatus(self, slice_urn):
try:
print "-----------------------------------------------------------SliverStatus"
sliver_status = get_sliver_status(slice_urn)
print sliver_status
if len(sliver_status) == 0:
xrn = Xrn(slice_urn, 'slice')
slice_leaf = xrn.get_leaf()
                sliver_status = ['The requested flowspace for slice %s is still pending approval' % slice_leaf]
granted_fs = {'granted_flowspaces':get_sliver_status(slice_urn)}
return [granted_fs]
except Exception as e:
import traceback
print traceback.print_exc()
raise e
def check_req_switches(self, switch_slivers):
available_switches = self.get_raw_switches()
for sliver in switch_slivers:
found = False
for switch in available_switches:<|fim▁hole|> return False
return True
def get_raw_switches(self):
try:
#raise Exception("")
fv = FVServerProxy.objects.all()[0]
switches = fv.get_switches()
except Exception as e:
switches = test_switches
#raise e
return switches
def get_raw_links(self):
try:
#raise Exception("")
fv = FVServerProxy.objects.all()[0]
links = fv.get_links()
except Exception as e:
links = test_links
#raise e
return links<|fim▁end|> | if str(sliver['datapath_id']) == str(switch[0]): #Avoiding Unicodes
found = True
break
if found == False: |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | pub mod posts; |
<|file_name|>host_mock.py<|end_file_name|><|fim▁begin|># Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.<|fim▁hole|># copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from webkitpy.common.checkout.scm.scm_mock import MockSCM
from webkitpy.common.net.buildbot.buildbot_mock import MockBuildBot
from webkitpy.common.net.web_mock import MockWeb
from webkitpy.common.system.systemhost_mock import MockSystemHost
# New-style ports need to move down into webkitpy.common.
from webkitpy.layout_tests.port.factory import PortFactory
from webkitpy.layout_tests.port.test import add_unit_tests_to_mock_filesystem
class MockHost(MockSystemHost):
def __init__(self, log_executive=False, executive_throws_when_run=None, initialize_scm_by_default=True, web=None, scm=None):
MockSystemHost.__init__(self, log_executive, executive_throws_when_run)
add_unit_tests_to_mock_filesystem(self.filesystem)
self.web = web or MockWeb()
self._scm = scm
# FIXME: we should never initialize the SCM by default, since the real
# object doesn't either. This has caused at least one bug (see bug 89498).
if initialize_scm_by_default:
self.initialize_scm()
self.buildbot = MockBuildBot()
# Note: We're using a real PortFactory here. Tests which don't wish to depend
# on the list of known ports should override this with a MockPortFactory.
self.port_factory = PortFactory(self)
def initialize_scm(self, patch_directories=None):
if not self._scm:
self._scm = MockSCM(filesystem=self.filesystem, executive=self.executive)
# Various pieces of code (wrongly) call filesystem.chdir(checkout_root).
# Making the checkout_root exist in the mock filesystem makes that chdir not raise.
self.filesystem.maybe_make_directory(self._scm.checkout_root)
def scm(self):
return self._scm
def scm_for_path(self, path):
# FIXME: consider supporting more than one SCM so that we can do more comprehensive testing.
self.initialize_scm()
return self._scm
def checkout(self):
return self._checkout<|fim▁end|> | # * Redistributions in binary form must reproduce the above |
<|file_name|>gem.py<|end_file_name|><|fim▁begin|>'''
Manage Ruby gem packages. (see https://rubygems.org/ )
'''
from pyinfra.api import operation
from pyinfra.facts.gem import GemPackages
from .util.packaging import ensure_packages
@operation
def packages(packages=None, present=True, latest=False, state=None, host=None):
'''
Add/remove/update gem packages.
+ packages: list of packages to ensure
+ present: whether the packages should be installed
+ latest: whether to upgrade packages without a specified version
Versions:
Package versions can be pinned like gem: ``<pkg>:<version>``.
Example:
.. code:: python<|fim▁hole|> name='Install rspec',
packages=['rspec'],
)
'''
yield ensure_packages(
host, packages, host.get_fact(GemPackages), present,
install_command='gem install',
uninstall_command='gem uninstall',
upgrade_command='gem update',
version_join=':',
latest=latest,
)<|fim▁end|> |
# Note: Assumes that 'gem' is installed.
gem.packages( |
<|file_name|>parser.js<|end_file_name|><|fim▁begin|>//process.argv.forEach(function (val, index, array) { console.log(index + ': ' + val); });
var fs = require('fs');
(function () {
function slugify(text) {
text = text.replace(/[^-a-zA-Z0-9,&\s]+/ig, '');
text = text.replace(/-/gi, "_");
text = text.replace(/\s/gi, "-");
return text;
}
var DocGen = {
filesArr: null,
files: {},
functions: [],
nbLoaded: 0,
init: function (files) {
this.filesArr = files;
},
start: function () {
for (var i=0, len=this.filesArr.length; i<len; i++) {
var file = this.filesArr[i];
this.processFile(file);
}
},
fileLoaded: function() {
this.nbLoaded++;
if (this.nbLoaded == this.filesArr.length) {
this.exportHtml();
}
},
getSignatures: function (m) {
var sig = null;
var signatures = [];
var rSig = /\\*\s?(@sig\s.*)\n/gi;
while (sig = rSig.exec(m)) {
var params = [];
var rParam = /(\w+):(\w+)/gi;
while (param = rParam.exec(sig[1])) {
var name = param[1];
var type = param[2];
params.push({ name: name, type: type });
}
if (params.length >= 1) {
ret = params.pop();
}
signatures.push({ params: params, ret: ret});
}
return signatures;
},
extractInfos: function (m) {
var self = this;
var fun = m[2];
var rFun = /['|"]?([a-zA-Z0-9._-]+)['|"]?\s?:\s?function\s?\(.*\)\s?{/gi;
var isFun = rFun.exec(fun);
if (!isFun) {
rFun = /socket\.on\(['|"]([a-zA-Z0-9._-]+)['|"]\s?,\s?function\s?\(.*\)\s?{/gi;
isFun = rFun.exec(fun);
}
if (isFun) {
var comment = m[1];
var name = isFun[1];
var sigs = self.getSignatures(comment);
var desc = (/\*\s(.*?)\n/gi).exec(m[1])[1];
var f = { name: name, description: desc, sigs: sigs };
return f;
}
return null;
},
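        // Illustrative example (made-up names) of a comment block that
        // getSignatures/extractInfos above are written to match; the last
        // name:type pair on an @sig line is treated as the return value:
        //   /**
        //    * Add a user to a room
        //    * @sig userId:Number roomId:Number success:Boolean
        //    */
        //   'addUser': function (userId, roomId) { ... },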
processFile: function (file) {
var self = this;
// Get the file in a buffer
fs.readFile(file, function(err, data) {
var buf = data.toString('binary');
var functions = [];
// Get all long comment ( /** )
var rgx = new RegExp("/\\*\\*\n([a-zA-Z0-9 -_\n\t]*)\\*/\n(.*)\n", "gi");
while (m = rgx.exec(buf)) {
info = self.extractInfos(m);
if (info) {
functions.push(info);
}
}
self.files[file] = { functions: functions };
self.fileLoaded();
});
},
sortFunctions: function (fun1, fun2) {
var name1 = fun1.name.toLowerCase();
var name2 = fun2.name.toLowerCase();
if (name1 < name2) { return -1; }
else if (name1 > name2) { return 1; }
else { return 0; }
},
exportHtml: function() {
for (var fileName in this.files) {
var file = this.files[fileName];
file.functions.sort(this.sortFunctions);
console.log(fileName, file.functions.length);
var html = '<!DOCTYPE html>\n' +
'<html>\n' +
'<head>\n' +
' <title></title>\n' +
' <link rel="stylesheet" href="css/reset.css" type="text/css" media="screen" charset="utf-8" />\n' +
' <link rel="stylesheet" href="css/style.css" type="text/css" media="screen" charset="utf-8" />\n' +
//' <script src="js/scripts.js" type="text/javascript"></script>' +
'</head>\n' +
'<body>\n' +
'\n' +
'<div class="menu" id="menu">\n' +
' <h1>Files</h1>\n' +
' <ul>\n';
for (var f in this.files) {
html += ' <li><a href="'+f+'.html">'+f+'</a></li>\n';
}
html += ' </ul>\n' +
' <h1>Functions</h1>\n' +
' <ul>\n';
for (var i=0, len=file.functions.length; i<len; i++) {
html += ' <li><a href="#'+slugify(file.functions[i].name)+'">'+file.functions[i].name+'</a></li>\n';
}
html += ' </ul>\n'
html += '</div>\n' +
'<div id="page">\n' +
' <div class="content">\n';
for (var i=0, len=file.functions.length; i<len; i++) {
var fn = file.functions[i];
if (fn.sigs.length > 0) {
html += '<h3><a name="'+slugify(fn.name)+'">'+fn.name+'</a></h3>\n';
html += '<span class="signature">\n';
for (var s=0, len2=fn.sigs.length; s<len2; s++) {
var sig = fn.sigs[s];
html += '<span class="name">'+fn.name+'</span> ( ';
for (var p=0, len3=sig.params.length; p<len3; p++) {
var param = sig.params[p];
html += '<span class="param">'+param.name+'</span>:<span class="type">'+param.type+'</span>, ';
}
html = html.substr(0, html.length-2);
html += ' ) : <span class="param">'+sig.ret.name+'</span>:<span class="type">'+sig.ret.type+'</span><br />';
}
html = html.substr(0, html.length-6);
html += '</span>\n';
html += '<p>'+fn.description+'</p>';
}
}
html += ' </div>\n' +
'</div>\n' +
'\n' +
                        '</body>\n' +
'</html>';
fs.writeFile('doc/'+fileName+'.html', html);
}
}
};
var files = ['sockets.js', 'database_operations.js'];
DocGen.init(files);<|fim▁hole|><|fim▁end|> | DocGen.start();
})(); |
<|file_name|>types.py<|end_file_name|><|fim▁begin|>from iota.crypto import FRAGMENT_LENGTH
from iota.exceptions import with_context
from iota.types import Hash, TryteString, TrytesCompatible
__all__ = [
'BundleHash',
'Fragment',
'TransactionHash',
'TransactionTrytes',
'Nonce'
]
class BundleHash(Hash):
"""
    A :py:class:`TryteString` (:py:class:`Hash`) that acts as a bundle hash.
"""
pass
class TransactionHash(Hash):
"""
    A :py:class:`TryteString` (:py:class:`Hash`) that acts as a transaction hash.
"""
pass
class Fragment(TryteString):
"""
    A :py:class:`TryteString` representation of a signature/message fragment
in a transaction.
:raises ValueError: if ``trytes`` is longer than 2187 trytes in length.
"""
LEN = FRAGMENT_LENGTH
"""
Length of a fragment in trytes.
"""
def __init__(self, trytes: TrytesCompatible) -> None:
super(Fragment, self).__init__(trytes, pad=self.LEN)
if len(self._trytes) > self.LEN:
raise with_context(
exc=ValueError('{cls} values must be {len} trytes long.'.format(
cls=type(self).__name__,
len=self.LEN
)),
context={
'trytes': trytes,
},
)
class TransactionTrytes(TryteString):
"""
    A :py:class:`TryteString` representation of a Transaction.
:raises ValueError: if ``trytes`` is longer than 2673 trytes in length.
"""
LEN = 2673
"""
Length of a transaction in trytes.
"""
def __init__(self, trytes: TrytesCompatible) -> None:
super(TransactionTrytes, self).__init__(trytes, pad=self.LEN)
if len(self._trytes) > self.LEN:
raise with_context(
exc=ValueError('{cls} values must be {len} trytes long.'.format(
cls=type(self).__name__,
len=self.LEN
)),
context={
'trytes': trytes,
},
)
class Nonce(TryteString):
"""
    A :py:class:`TryteString` that acts as a transaction nonce.
:raises ValueError: if ``trytes`` is longer than 27 trytes in length.
"""
LEN = 27
"""<|fim▁hole|> Length of a nonce in trytes.
"""
def __init__(self, trytes: TrytesCompatible) -> None:
super(Nonce, self).__init__(trytes, pad=self.LEN)
if len(self._trytes) > self.LEN:
raise with_context(
exc=ValueError('{cls} values must be {len} trytes long.'.format(
cls=type(self).__name__,
len=self.LEN
)),
context={
'trytes': trytes,
},
)<|fim▁end|> | |
<|file_name|>podsecuritypolicytemplate.go<|end_file_name|><|fim▁begin|>/*
Copyright 2022 Rancher Labs, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by main. DO NOT EDIT.
package v3
import (
"context"
"time"
"github.com/rancher/lasso/pkg/client"
"github.com/rancher/lasso/pkg/controller"
v3 "github.com/rancher/rancher/pkg/apis/management.cattle.io/v3"
"github.com/rancher/wrangler/pkg/generic"
"k8s.io/apimachinery/pkg/api/equality"
"k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/labels"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/runtime/schema"
"k8s.io/apimachinery/pkg/types"
utilruntime "k8s.io/apimachinery/pkg/util/runtime"
"k8s.io/apimachinery/pkg/watch"
"k8s.io/client-go/tools/cache"
)
type PodSecurityPolicyTemplateHandler func(string, *v3.PodSecurityPolicyTemplate) (*v3.PodSecurityPolicyTemplate, error)
type PodSecurityPolicyTemplateController interface {
generic.ControllerMeta
PodSecurityPolicyTemplateClient
OnChange(ctx context.Context, name string, sync PodSecurityPolicyTemplateHandler)
OnRemove(ctx context.Context, name string, sync PodSecurityPolicyTemplateHandler)
Enqueue(name string)
EnqueueAfter(name string, duration time.Duration)
Cache() PodSecurityPolicyTemplateCache
}
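// Illustrative registration of a change handler against this interface
// (the handler name and body are examples only, not part of the generated code):
//
//	controller.OnChange(ctx, "psp-template-handler",
//		func(key string, obj *v3.PodSecurityPolicyTemplate) (*v3.PodSecurityPolicyTemplate, error) {
//			// reconcile the object here
//			return obj, nil
//		})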
type PodSecurityPolicyTemplateClient interface {
Create(*v3.PodSecurityPolicyTemplate) (*v3.PodSecurityPolicyTemplate, error)
Update(*v3.PodSecurityPolicyTemplate) (*v3.PodSecurityPolicyTemplate, error)
Delete(name string, options *metav1.DeleteOptions) error
Get(name string, options metav1.GetOptions) (*v3.PodSecurityPolicyTemplate, error)
List(opts metav1.ListOptions) (*v3.PodSecurityPolicyTemplateList, error)
Watch(opts metav1.ListOptions) (watch.Interface, error)
Patch(name string, pt types.PatchType, data []byte, subresources ...string) (result *v3.PodSecurityPolicyTemplate, err error)
}
type PodSecurityPolicyTemplateCache interface {
Get(name string) (*v3.PodSecurityPolicyTemplate, error)
List(selector labels.Selector) ([]*v3.PodSecurityPolicyTemplate, error)
AddIndexer(indexName string, indexer PodSecurityPolicyTemplateIndexer)
GetByIndex(indexName, key string) ([]*v3.PodSecurityPolicyTemplate, error)
}
type PodSecurityPolicyTemplateIndexer func(obj *v3.PodSecurityPolicyTemplate) ([]string, error)
type podSecurityPolicyTemplateController struct {
controller controller.SharedController
client *client.Client
gvk schema.GroupVersionKind
groupResource schema.GroupResource
}
func NewPodSecurityPolicyTemplateController(gvk schema.GroupVersionKind, resource string, namespaced bool, controller controller.SharedControllerFactory) PodSecurityPolicyTemplateController {
c := controller.ForResourceKind(gvk.GroupVersion().WithResource(resource), gvk.Kind, namespaced)
return &podSecurityPolicyTemplateController{
controller: c,
client: c.Client(),
gvk: gvk,
groupResource: schema.GroupResource{
Group: gvk.Group,
Resource: resource,
},
}
}
func FromPodSecurityPolicyTemplateHandlerToHandler(sync PodSecurityPolicyTemplateHandler) generic.Handler {
return func(key string, obj runtime.Object) (ret runtime.Object, err error) {
var v *v3.PodSecurityPolicyTemplate
if obj == nil {
v, err = sync(key, nil)
} else {
v, err = sync(key, obj.(*v3.PodSecurityPolicyTemplate))
}
if v == nil {
return nil, err
}
return v, err
}
}
func (c *podSecurityPolicyTemplateController) Updater() generic.Updater {
return func(obj runtime.Object) (runtime.Object, error) {
newObj, err := c.Update(obj.(*v3.PodSecurityPolicyTemplate))
if newObj == nil {
return nil, err
}
return newObj, err
}
}
func UpdatePodSecurityPolicyTemplateDeepCopyOnChange(client PodSecurityPolicyTemplateClient, obj *v3.PodSecurityPolicyTemplate, handler func(obj *v3.PodSecurityPolicyTemplate) (*v3.PodSecurityPolicyTemplate, error)) (*v3.PodSecurityPolicyTemplate, error) {
if obj == nil {
return obj, nil
}
copyObj := obj.DeepCopy()
newObj, err := handler(copyObj)
if newObj != nil {
copyObj = newObj
}
if obj.ResourceVersion == copyObj.ResourceVersion && !equality.Semantic.DeepEqual(obj, copyObj) {
return client.Update(copyObj)
}
return copyObj, err
}
func (c *podSecurityPolicyTemplateController) AddGenericHandler(ctx context.Context, name string, handler generic.Handler) {
c.controller.RegisterHandler(ctx, name, controller.SharedControllerHandlerFunc(handler))
}
func (c *podSecurityPolicyTemplateController) AddGenericRemoveHandler(ctx context.Context, name string, handler generic.Handler) {
c.AddGenericHandler(ctx, name, generic.NewRemoveHandler(name, c.Updater(), handler))
}
func (c *podSecurityPolicyTemplateController) OnChange(ctx context.Context, name string, sync PodSecurityPolicyTemplateHandler) {
c.AddGenericHandler(ctx, name, FromPodSecurityPolicyTemplateHandlerToHandler(sync))
}
func (c *podSecurityPolicyTemplateController) OnRemove(ctx context.Context, name string, sync PodSecurityPolicyTemplateHandler) {
c.AddGenericHandler(ctx, name, generic.NewRemoveHandler(name, c.Updater(), FromPodSecurityPolicyTemplateHandlerToHandler(sync)))
}
func (c *podSecurityPolicyTemplateController) Enqueue(name string) {
c.controller.Enqueue("", name)
}
func (c *podSecurityPolicyTemplateController) EnqueueAfter(name string, duration time.Duration) {
c.controller.EnqueueAfter("", name, duration)
}
func (c *podSecurityPolicyTemplateController) Informer() cache.SharedIndexInformer {
return c.controller.Informer()
}
func (c *podSecurityPolicyTemplateController) GroupVersionKind() schema.GroupVersionKind {
return c.gvk
}
func (c *podSecurityPolicyTemplateController) Cache() PodSecurityPolicyTemplateCache {
return &podSecurityPolicyTemplateCache{
indexer: c.Informer().GetIndexer(),
resource: c.groupResource,
}
}
func (c *podSecurityPolicyTemplateController) Create(obj *v3.PodSecurityPolicyTemplate) (*v3.PodSecurityPolicyTemplate, error) {
result := &v3.PodSecurityPolicyTemplate{}
return result, c.client.Create(context.TODO(), "", obj, result, metav1.CreateOptions{})
}
func (c *podSecurityPolicyTemplateController) Update(obj *v3.PodSecurityPolicyTemplate) (*v3.PodSecurityPolicyTemplate, error) {
result := &v3.PodSecurityPolicyTemplate{}
return result, c.client.Update(context.TODO(), "", obj, result, metav1.UpdateOptions{})
}
func (c *podSecurityPolicyTemplateController) Delete(name string, options *metav1.DeleteOptions) error {
if options == nil {
options = &metav1.DeleteOptions{}
}
return c.client.Delete(context.TODO(), "", name, *options)
}
func (c *podSecurityPolicyTemplateController) Get(name string, options metav1.GetOptions) (*v3.PodSecurityPolicyTemplate, error) {
result := &v3.PodSecurityPolicyTemplate{}
return result, c.client.Get(context.TODO(), "", name, result, options)
}
func (c *podSecurityPolicyTemplateController) List(opts metav1.ListOptions) (*v3.PodSecurityPolicyTemplateList, error) {
result := &v3.PodSecurityPolicyTemplateList{}
return result, c.client.List(context.TODO(), "", result, opts)
}
func (c *podSecurityPolicyTemplateController) Watch(opts metav1.ListOptions) (watch.Interface, error) {
return c.client.Watch(context.TODO(), "", opts)
}
func (c *podSecurityPolicyTemplateController) Patch(name string, pt types.PatchType, data []byte, subresources ...string) (*v3.PodSecurityPolicyTemplate, error) {
result := &v3.PodSecurityPolicyTemplate{}
return result, c.client.Patch(context.TODO(), "", name, pt, data, result, metav1.PatchOptions{}, subresources...)
}
type podSecurityPolicyTemplateCache struct {
indexer cache.Indexer<|fim▁hole|>}
func (c *podSecurityPolicyTemplateCache) Get(name string) (*v3.PodSecurityPolicyTemplate, error) {
obj, exists, err := c.indexer.GetByKey(name)
if err != nil {
return nil, err
}
if !exists {
return nil, errors.NewNotFound(c.resource, name)
}
return obj.(*v3.PodSecurityPolicyTemplate), nil
}
func (c *podSecurityPolicyTemplateCache) List(selector labels.Selector) (ret []*v3.PodSecurityPolicyTemplate, err error) {
err = cache.ListAll(c.indexer, selector, func(m interface{}) {
ret = append(ret, m.(*v3.PodSecurityPolicyTemplate))
})
return ret, err
}
func (c *podSecurityPolicyTemplateCache) AddIndexer(indexName string, indexer PodSecurityPolicyTemplateIndexer) {
utilruntime.Must(c.indexer.AddIndexers(map[string]cache.IndexFunc{
indexName: func(obj interface{}) (strings []string, e error) {
return indexer(obj.(*v3.PodSecurityPolicyTemplate))
},
}))
}
func (c *podSecurityPolicyTemplateCache) GetByIndex(indexName, key string) (result []*v3.PodSecurityPolicyTemplate, err error) {
objs, err := c.indexer.ByIndex(indexName, key)
if err != nil {
return nil, err
}
result = make([]*v3.PodSecurityPolicyTemplate, 0, len(objs))
for _, obj := range objs {
result = append(result, obj.(*v3.PodSecurityPolicyTemplate))
}
return result, nil
}<|fim▁end|> | resource schema.GroupResource |
<|file_name|>Flat.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.governator.lifecycle.warmup;
import com.google.inject.Singleton;
import com.netflix.governator.annotations.WarmUp;
public class Flat
{
/*
Root classes without dependencies
*/
@Singleton
public static class A
{
public volatile Recorder recorder;
@WarmUp
public void warmUp() throws InterruptedException
{
recorder.record("A");
}
}
@Singleton
public static class B<|fim▁hole|>
@WarmUp
public void warmUp() throws InterruptedException
{
recorder.record("B");
}
}
}<|fim▁end|> | {
public volatile Recorder recorder; |
<|file_name|>MultilineChartOutlined.js<|end_file_name|><|fim▁begin|>"use strict";
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");<|fim▁hole|>
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
var _createSvgIcon = _interopRequireDefault(require("./utils/createSvgIcon"));
var _jsxRuntime = require("react/jsx-runtime");
var _default = (0, _createSvgIcon.default)( /*#__PURE__*/(0, _jsxRuntime.jsx)("path", {
d: "m22 6.92-1.41-1.41-2.85 3.21C15.68 6.4 12.83 5 9.61 5 6.72 5 4.07 6.16 2 8l1.42 1.42C5.12 7.93 7.27 7 9.61 7c2.74 0 5.09 1.26 6.77 3.24l-2.88 3.24-4-4L2 16.99l1.5 1.5 6-6.01 4 4 4.05-4.55c.75 1.35 1.25 2.9 1.44 4.55H21c-.22-2.3-.95-4.39-2.04-6.14L22 6.92z"
}), 'MultilineChartOutlined');
exports.default = _default;<|fim▁end|> | |
<|file_name|>converter_sosi2tsv.cpp<|end_file_name|><|fim▁begin|>/*
* This file is part of the command-line tool sosicon.
* Copyright (C) 2014 Espen Andersen, Norwegian Broadcast Corporation (NRK)
*
* This is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by<|fim▁hole|> * but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include "converter_sosi2tsv.h"
void sosicon::ConverterSosi2tsv::
run( bool* ) {
}<|fim▁end|> | * the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, |
<|file_name|>razor.test.ts<|end_file_name|><|fim▁begin|>/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import 'vs/languages/razor/common/razor.contribution';
import modesUtil = require('vs/editor/test/common/modesUtil');
import Modes = require('vs/editor/common/modes');
import razorTokenTypes = require('vs/languages/razor/common/razorTokenTypes');
import {htmlTokenTypes} from 'vs/languages/html/common/html';
suite('Syntax Highlighting - Razor', () => {
var tokenizationSupport: Modes.ITokenizationSupport;
setup((done) => {
modesUtil.load('razor').then(mode => {
tokenizationSupport = mode.tokenizationSupport;
done();
});
});
test('', () => {
modesUtil.executeTests(tokenizationSupport,[
// Embedding - embedded html
[{
line: '@{ var x; <b>x</b> }',
tokens: [
{ startIndex: 0, type: razorTokenTypes.EMBED_CS },
{ startIndex: 2, type: '' },
{ startIndex: 3, type: 'keyword.cs' },
{ startIndex: 6, type: '' },
{ startIndex: 7, type: 'ident.cs' },
{ startIndex: 8, type: 'punctuation.cs' },
{ startIndex: 9, type: '' },
{ startIndex: 10, type: htmlTokenTypes.DELIM_START },
{ startIndex: 11, type: htmlTokenTypes.getTag('b') },
{ startIndex: 12, type: htmlTokenTypes.DELIM_START },
{ startIndex: 13, type: 'ident.cs' },
{ startIndex: 14, type: htmlTokenTypes.DELIM_END },
{ startIndex: 16, type: htmlTokenTypes.getTag('b') },
{ startIndex: 17, type: htmlTokenTypes.DELIM_END },
{ startIndex: 18, type: '' },
{ startIndex: 19, type: razorTokenTypes.EMBED_CS }
]}],
// Comments - razor comment inside csharp
[{
line: '@{ var x; @* comment *@ x= 0; }',
tokens: [
{ startIndex: 0, type: razorTokenTypes.EMBED_CS },
{ startIndex: 2, type: '' },
{ startIndex: 3, type: 'keyword.cs' },
{ startIndex: 6, type: '' },<|fim▁hole|> { startIndex: 7, type: 'ident.cs' },
{ startIndex: 8, type: 'punctuation.cs' },
{ startIndex: 9, type: '' },
{ startIndex: 10, type: 'comment.cs' },
{ startIndex: 23, type: '' },
{ startIndex: 24, type: 'ident.cs' },
{ startIndex: 25, type: 'punctuation.cs' },
{ startIndex: 26, type: '' },
{ startIndex: 27, type: 'number.cs' },
{ startIndex: 28, type: 'punctuation.cs' },
{ startIndex: 29, type: '' },
{ startIndex: 30, type: razorTokenTypes.EMBED_CS }
]}],
// Blocks - simple
[{
line: '@{ var total = 0; }',
tokens: [
{ startIndex: 0, type: razorTokenTypes.EMBED_CS },
{ startIndex: 2, type: '' },
{ startIndex: 3, type: 'keyword.cs' },
{ startIndex: 6, type: '' },
{ startIndex: 7, type: 'ident.cs' },
{ startIndex: 12, type: '' },
{ startIndex: 13, type: 'punctuation.cs' },
{ startIndex: 14, type: '' },
{ startIndex: 15, type: 'number.cs' },
{ startIndex: 16, type: 'punctuation.cs' },
{ startIndex: 17, type: '' },
{ startIndex: 18, type: razorTokenTypes.EMBED_CS }
]}],
[{
line: '@if(true){ var total = 0; }',
tokens: [
{ startIndex: 0, type: razorTokenTypes.EMBED_CS },
{ startIndex: 1, type: 'keyword.cs' },
{ startIndex: 3, type: 'punctuation.parenthesis.cs' },
{ startIndex: 4, type: 'keyword.cs' },
{ startIndex: 8, type: 'punctuation.parenthesis.cs' },
{ startIndex: 9, type: razorTokenTypes.EMBED_CS },
{ startIndex: 10, type: '' },
{ startIndex: 11, type: 'keyword.cs' },
{ startIndex: 14, type: '' },
{ startIndex: 15, type: 'ident.cs' },
{ startIndex: 20, type: '' },
{ startIndex: 21, type: 'punctuation.cs' },
{ startIndex: 22, type: '' },
{ startIndex: 23, type: 'number.cs' },
{ startIndex: 24, type: 'punctuation.cs' },
{ startIndex: 25, type: '' },
{ startIndex: 26, type: razorTokenTypes.EMBED_CS }
]}],
// Expressions - csharp expressions in html
[{
line: 'test@xyz<br>',
tokens: [
{ startIndex:0, type: '' },
{ startIndex:4, type: razorTokenTypes.EMBED_CS },
{ startIndex:5, type: 'ident.cs' },
{ startIndex:8, type: htmlTokenTypes.DELIM_START },
{ startIndex:9, type: htmlTokenTypes.getTag('br') },
{ startIndex:11, type: htmlTokenTypes.DELIM_START }
]}],
[{
line: 'test@xyz',
tokens: [
{ startIndex:0, type: '' },
{ startIndex:4, type: razorTokenTypes.EMBED_CS },
{ startIndex:5, type: 'ident.cs' }
]}],
[{
line: 'test @ xyz',
tokens: [
{ startIndex: 0, type: '' },
{ startIndex: 5, type: razorTokenTypes.EMBED_CS },
{ startIndex: 6, type: '' },
{ startIndex: 7, type: 'ident.cs' }
]}],
[{
line: 'test @(foo) xyz',
tokens: [
{ startIndex:0, type: '' },
{ startIndex:5, type: razorTokenTypes.EMBED_CS },
{ startIndex:7, type: 'ident.cs' },
{ startIndex:10, type: razorTokenTypes.EMBED_CS },
{ startIndex:11, type: '' }
]}],
[{
line: 'test @(foo(\")\")) xyz',
tokens: [
{ startIndex:0, type: '' },
{ startIndex:5, type: razorTokenTypes.EMBED_CS },
{ startIndex:7, type: 'ident.cs' },
{ startIndex:10, type: 'punctuation.parenthesis.cs' },
{ startIndex:11, type: 'string.cs' },
{ startIndex:14, type: 'punctuation.parenthesis.cs' },
{ startIndex:15, type: razorTokenTypes.EMBED_CS },
{ startIndex:16, type: '' }
]}],
// Escaping - escaped at character
[{
line: 'test@@xyz',
tokens: [
{ startIndex:0, type: '' }
]}]
]);
});
});<|fim▁end|> | |
<|file_name|>sessions.js<|end_file_name|><|fim▁begin|>/**
* Copyright 2009 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import("dateutils");
import("execution");
import("fastJSON");
import("fileutils");
import("jsutils.{eachProperty,keys}");
import("stringutils.{randomHash,startsWith,endsWith}");
import("sync");
jimport("net.appjet.common.util.ExpiringMapping");
jimport("net.spy.memcached.MemcachedClient");
jimport("java.net.InetSocketAddress");
import("etherpad.log");
//----------------------------------------------------------------
var _DEFAULT_COOKIE_NAME = "SessionID";
var _DEFAULT_SERVER_EXPIRATION = 3*24*60*60*1000; // 72 hours
var _WRITE_SESSIONS_TO_DISK_INTERVAL = 10*60*1000; // 10 minutes
var _BUFFER_SIZE = 10 * 1024 * 1024; // 10 MB
function getSessionId(cookieName, createIfNotPresent, domain) {
if (request.isComet || request.isCron || !request.isDefined) {
return null;
}
if (request.cookies[cookieName]) {
return request.cookies[cookieName];
}
if (!createIfNotPresent) {
return null;
}
// Keep sessionId in requestCache so this function can be called multiple
// times per request without multiple calls to setCookie().
if (!appjet.requestCache.sessionId) {
var sessionId = randomHash(16);
response.setCookie({
name: cookieName,
value: sessionId,
path: "/",
domain: (domain || undefined),
secure: appjet.config.useHttpsUrls,
httpOnly: true /* disallow client js access */
});
appjet.requestCache.sessionId = sessionId;
}
return appjet.requestCache.sessionId;
}
function getSessionIdSubdomains(sessionId) {
var map = _getCachedDb().map;
if (map) {
return map.get(sessionId) || {};
}
return {};
}
function _getExpiringSessionMap(db) {
sync.callsyncIfTrue(db,
function() { return (!db.map); },
function() { db.map = new ExpiringMapping(_DEFAULT_SERVER_EXPIRATION); });
return db.map;
}
function _getCachedDb() {
return appjet.cacheRoot("net.appjet.ajstdlib.session");
}
function _getMemcachedClient() {
var mc = appjet.cache['memcache-client'];
if (!mc) {
mc = new MemcachedClient(new InetSocketAddress(appjet.config.memcached, 11211));
appjet.cache['memcache-client'] = mc;
// store existing sessions
var map = _getCachedDb().map;
if (map) {
var keyIterator = map.listAllKeys().iterator();
while (keyIterator.hasNext()) {
var key = keyIterator.next();
var session = map.get(key);
if (keys(session).length == 0) { continue; }
var json = fastJSON.stringify(session);
mc.set("sessions." + key, _DEFAULT_SERVER_EXPIRATION / 1000, json);
}
}
}
return mc;
}<|fim▁hole|>function _getSessionDataKey(opts) {
// Session options.
if (!opts) { opts = {}; }
var cookieName = opts.cookieName || _DEFAULT_COOKIE_NAME;
// get cookie ID (sets response cookie if necessary)
var sessionId = getSessionId(cookieName, true, opts.domain);
if (!sessionId) { return null; }
// get session data object
var domainKey = "." + request.domain;
return [sessionId, domainKey];
}
//----------------------------------------------------------------
function getSession(opts) {
var dataKey = _getSessionDataKey(opts);
if (!dataKey) { return null; }
if (appjet.requestCache.sessionDomains) {
return appjet.requestCache.sessionDomains[dataKey[1]];
}
if (appjet.config.memcached) {
var json = _getMemcachedClient().get("sessions." + dataKey[0]);
var sessionData = json ? fastJSON.parse(json) : {};
//log.info("MEMCACHE GOT SESSION:" + dataKey+ " VAL:" + json);
appjet.requestCache.sessionDomains = sessionData;
return sessionData[dataKey[1]];
} else {
// get expiring session map
var db = _getCachedDb();
var map = _getExpiringSessionMap(db);
var sessionData = map.get(dataKey[0]) || {};
if (!sessionData[dataKey[1]]) {
sessionData[dataKey[1]] = {};
map.put(dataKey[0], sessionData);
} else {
map.touch(dataKey[0]);
}
appjet.requestCache.sessionDomains = sessionData;
return sessionData[dataKey[1]];
}
}
function saveSession(opts) {
if (!appjet.config.memcached) { return; }
if (!appjet.requestCache.sessionDomains) { return; }
var json = fastJSON.stringify(appjet.requestCache.sessionDomains);
if (json == "{}") { return; }
var dataKey = _getSessionDataKey(opts);
_getMemcachedClient().set("sessions." + dataKey[0], _DEFAULT_SERVER_EXPIRATION / 1000, json);
//log.info("MEMCACHE SAVED SESSION:" + dataKey+ " VAL:" + json);
}
function destroySession(opts) {
var dataKey = _getSessionDataKey(opts);
if (!dataKey) { return null; }
if (appjet.config.memcached) {
// todo: delete from memcache?
} else {
// get expiring session map
var db = _getCachedDb();
var map = _getExpiringSessionMap(db);
map.remove(dataKey[0]);
appjet.requestCache.sessionDomains = null;
}
}
function writeSessionsToDisk() {
try {
var dateString = dateutils.dateFormat(new Date(), "yyyy-MM-dd");
var dataFile = new Packages.java.io.File(appjet.config.sessionStoreDir+"/sessions-"+dateString+".jslog");
var tmpFile = new Packages.java.io.File(dataFile.toString() + ".tmp");
dataFile.getParentFile().mkdirs();
var writer = new java.io.BufferedWriter(new java.io.FileWriter(tmpFile), _BUFFER_SIZE);
var map = _getCachedDb().map;
if (! map) { return; }
var keyIterator = map.listAllKeys().iterator();
while (keyIterator.hasNext()) {
var key = keyIterator.next();
var session = map.get(key);
if (!session) {
continue;
}
// don't write sessions that don't have accounts
// they're ok to lose on restart
var hasAccount = false;
for (domain in session) {
if ('proAccount' in session[domain]) {
hasAccount = true;
break;
}
}
if (!hasAccount) {
continue;
}
if (keys(session).length == 0) { continue; }
var obj = { key: key, session: session };
var json = fastJSON.stringify(obj);
writer.write(json);
writer.write("\n");
}
writer.flush();
writer.close();
tmpFile.renameTo(dataFile);
} finally {
_scheduleWriteToDisk();
}
}
function cleanUpSessions(shouldDiscardSession) {
var map = _getCachedDb().map;
if (! map) { return; }
var keyIterator = map.listAllKeys().iterator();
var keysToDelete = [];
while (keyIterator.hasNext()) {
var key = keyIterator.next();
var session = map.get(key);
if (!session) {
continue;
}
for (domain in session) {
if (shouldDiscardSession(session[domain])) {
keysToDelete.push(key);
break;
}
}
}
keysToDelete.forEach(function(key) {
map.remove(key);
})
return keysToDelete.length;
}
function _extractDate(fname) {
var datePart = fname.substr("sessions-".length, "2009-09-24".length);
return Number(datePart.split("-").join(""));
}
function readLatestSessionsFromDisk() {
var dir = new Packages.java.io.File(appjet.config.sessionStoreDir);
if (! dir.exists()) { return; }
var files = dir.listFiles(new Packages.java.io.FilenameFilter({
accept: function(dir, name) {
return startsWith(name, "sessions") && endsWith(name, ".jslog")
}
}));
if (files.length == 0) { return; }
var latestFile = files[0];
for (var i = 1; i < files.length; ++i) {
if (_extractDate(files[i].getName()) > _extractDate(latestFile.getName())) {
latestFile = files[i];
}
}
var map = _getExpiringSessionMap(_getCachedDb());
fileutils.eachFileLine(latestFile, function(json) {
try {
var obj = fastJSON.parse(json, true /* parseDate */);
var key = obj.key;
var session = obj.session;
map.put(key, session);
} catch (err) {
Packages.java.lang.System.out.println("Error reading sessions file on line '"+json+"': "+String(err));
}
});
latestFile.renameTo(new Packages.java.io.File(latestFile.getParent()+"/used-"+latestFile.getName()));
execution.initTaskThreadPool('sessions', 1);
_scheduleWriteToDisk();
}
function _scheduleWriteToDisk() {
if (appjet.cache.shutdownHandlerIsRunning) { return; }
execution.scheduleTask('sessions', 'sessionsWriteToDisk', _WRITE_SESSIONS_TO_DISK_INTERVAL, []);
}<|fim▁end|> | |
<|file_name|>longzhu.go<|end_file_name|><|fim▁begin|>package getters
import (
"errors"
"fmt"
"regexp"
"strings"
)
//longzhu Longzhu live streaming (龙珠直播)
type longzhu struct{}
//Site implements the interface
func (i *longzhu) Site() string { return "龙珠直播" }
//SiteURL implements the interface
<|fim▁hole|>}
//SiteIcon implements the interface
func (i *longzhu) SiteIcon() string {
return i.SiteURL() + "/favicon.ico"
}
//FileExt implements the interface
func (i *longzhu) FileExt() string {
return "flv"
}
//NeedFFMpeg implements the interface
func (i *longzhu) NeedFFMpeg() bool {
return false
}
//GetRoomInfo implements the interface
func (i *longzhu) GetRoomInfo(url string) (id string, live bool, err error) {
defer func() {
if recover() != nil {
err = errors.New("fail get data")
}
}()
url = strings.ToLower(url)
reg, _ := regexp.Compile("longzhu\\.com/(\\w+)")
id = reg.FindStringSubmatch(url)[1]
if id != "" {
url := "http://roomapicdn.plu.cn/room/RoomAppStatusV2?domain=" + id
var tmp string
tmp, err = httpGet(url)
if err == nil {
if strings.Contains(tmp, "IsBroadcasting") {
live = strings.Contains(tmp, "\"IsBroadcasting\":true")
} else {
id = ""
}
}
}
if id == "" {
err = errors.New("fail get data")
}
return
}
//GetLiveInfo implements the interface
func (i *longzhu) GetLiveInfo(id string) (live LiveInfo, err error) {
defer func() {
if recover() != nil {
err = errors.New("fail get data")
}
}()
live = LiveInfo{}
url := "http://roomapicdn.plu.cn/room/RoomAppStatusV2?domain=" + id
tmp, err := httpGet(url)
json := *(pruseJSON(tmp).JToken("BaseRoomInfo"))
nick := json["Name"].(string)
title := json["BoardCastTitle"].(string)
details := json["Desc"].(string)
_id := json["Id"]
live.RoomID = fmt.Sprintf("%.f", _id)
url = "http://livestream.plu.cn/live/getlivePlayurl?roomId=" + live.RoomID
tmp, err = httpGet(url)
json = *(pruseJSON(tmp).JTokens("playLines")[0].JTokens("urls")[0])
video := json["securityUrl"].(string)
live.LiveNick = nick
live.RoomTitle = title
live.RoomDetails = details
live.LivingIMG = ""
live.VideoURL = video
if video == "" {
err = errors.New("fail get data")
}
return
}<|fim▁end|> | func (i *longzhu) SiteURL() string {
return "http://www.longzhu.com"
|
<|file_name|>groupSelection.js<|end_file_name|><|fim▁begin|>var module = angular.module("example", ["agGrid"]);
module.controller("exampleCtrl", function($scope, $http) {
var columnDefs = [
{headerName: "Gold", field: "gold", width: 100},
{headerName: "Silver", field: "silver", width: 100},
{headerName: "Bronze", field: "bronze", width: 100},
{headerName: "Total", field: "total", width: 100},
{headerName: "Age", field: "age", width: 90, checkboxSelection: true},
{headerName: "Country", field: "country", width: 120, rowGroupIndex: 0},
{headerName: "Year", field: "year", width: 90},
{headerName: "Date", field: "date", width: 110},
{headerName: "Sport", field: "sport", width: 110, rowGroupIndex: 1}
];
$scope.gridOptions = {
columnDefs: columnDefs,
rowData: null,
rowSelection: 'multiple',
groupAggFunction: groupAggFunction,
groupSelectsChildren: true,
suppressRowClickSelection: true,
groupColumnDef: {headerName: "Athlete", field: "athlete", width: 200,
cellRenderer: {
renderer: "group",
checkbox: true
}}
};
<|fim▁hole|> silver: 0,
bronze: 0,
total: 0
};
rows.forEach(function(row) {
var data = row.data;
sums.gold += data.gold;
sums.silver += data.silver;
sums.bronze += data.bronze;
sums.total += data.total;
});
return sums;
}
$http.get("../olympicWinners.json")
.then(function(res){
$scope.gridOptions.api.setRowData(res.data);
});
});<|fim▁end|> | function groupAggFunction(rows) {
var sums = {
gold: 0, |
<|file_name|>app.js<|end_file_name|><|fim▁begin|>var Handler, MiniEventEmitter;
Handler = require("./handler");
MiniEventEmitter = (function() {
function MiniEventEmitter(obj) {
var handler;
handler = new Handler(this, obj);
this.on = handler.on;
this.off = handler.off;
this.emit = handler.emit;
this.emitIf = handler.emitIf;
this.trigger = handler.emit;
this.triggerIf = handler.emitIf;
}
MiniEventEmitter.prototype.listen = function(type, event, args) {};
return MiniEventEmitter;
<|fim▁hole|>
module.exports = MiniEventEmitter;<|fim▁end|> | })(); |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># ==================================================================================================
# Copyright 2013 Twitter, Inc.
# --------------------------------------------------------------------------------------------------
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this work except in compliance with the License.
# You may obtain a copy of the License in the LICENSE file, or at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================================<|fim▁hole|> TFinagleProtocol,
TFinagleProtocolWithClientId)
__all__ = [
'TFinagleProtocol',
'TFinagleProtocolWithClientId'
]<|fim▁end|> |
from twitter.common.rpc.finagle.protocol import ( |
<|file_name|>LL.py<|end_file_name|><|fim▁begin|>import os
import sys
import math
import errno
import subprocess
import tkMessageBox
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from PIL import Image
from matplotlib import style
from datetime import datetime
from matplotlib.widgets import RadioButtons, Button
# Update the version of the program here:
version = "2.2a"
# 'state' is used to keep track of weather the graph has been paused or not
state = 0
# Global arrays that keep the data for plotting the graphs
ltimes = []
wtimes = []
btimes = []
lpings = []
wpings = []
bpings = []
avg_lis = []
top = []
bot = []
# Global variables
sd = 0
avg = 0
num_to = 0 # number of timeout errors
num_un = 0 # number of unreachable errors
sum_ping = 0
min_ping = float('+inf')
max_ping = float('-inf')
count_na = 0
sum_ping_na = 0
sum_sq_dif_na = 0
min_ping_na = float('+inf')
max_ping_na = float('-inf')
count_lan = 0
sum_ping_lan = 0
sum_sq_dif_lan = 0
min_ping_lan = float('+inf')
max_ping_lan = float('-inf')
start = datetime.now()
sq_dif_ar = []
servers = {"NA": "104.160.131.3", "LAN": "104.160.136.3"}
# matplotlib related variable initialization
style.use('seaborn-darkgrid')
fig = plt.figure(figsize=(16, 9))
ax1 = fig.add_subplot(1, 1, 1)
pp_img = Image.open(os.path.dirname(__file__) + '/static/buttons/pp_button.png')
dec_img = Image.open(os.path.dirname(__file__) + '/static/buttons/dec.png')
inc_img = Image.open(os.path.dirname(__file__) + '/static/buttons/inc.png')
null_img = Image.open(os.path.dirname(__file__) + '/static/buttons/null.png')
stgd_img = Image.open(os.path.dirname(__file__) + '/static/buttons/stgd.png')
stwr_img = Image.open(os.path.dirname(__file__) + '/static/buttons/stwr.png')
stbd_img = Image.open(os.path.dirname(__file__) + '/static/buttons/stbd.png')
unstgd_img = Image.open(os.path.dirname(__file__) + '/static/buttons/unstgd.png')
unstwr_img = Image.open(os.path.dirname(__file__) + '/static/buttons/unstwr.png')
unstbd_img = Image.open(os.path.dirname(__file__) + '/static/buttons/unstbd.png')
unstlgd_img = Image.open(os.path.dirname(__file__) + '/static/buttons/unstlgd.png')
unstlwr_img = Image.open(os.path.dirname(__file__) + '/static/buttons/unstlwr.png')
unstlbd_img = Image.open(os.path.dirname(__file__) + '/static/buttons/unstlbd.png')
vunstgd_img = Image.open(os.path.dirname(__file__) + '/static/buttons/vunstgd.png')
vunstwr_img = Image.open(os.path.dirname(__file__) + '/static/buttons/vunstwr.png')
vunstbd_img = Image.open(os.path.dirname(__file__) + '/static/buttons/vunstbd.png')
pp_img.thumbnail((64, 64), Image.ANTIALIAS)
dec_img.thumbnail((16, 16), Image.ANTIALIAS)
inc_img.thumbnail((16, 16), Image.ANTIALIAS)
stgd_img.thumbnail((16, 16), Image.ANTIALIAS)
stwr_img.thumbnail((16, 16), Image.ANTIALIAS)
stbd_img.thumbnail((16, 16), Image.ANTIALIAS)
unstgd_img.thumbnail((16, 16), Image.ANTIALIAS)
unstwr_img.thumbnail((16, 16), Image.ANTIALIAS)
unstbd_img.thumbnail((16, 16), Image.ANTIALIAS)
unstlgd_img.thumbnail((16, 16), Image.ANTIALIAS)
unstlwr_img.thumbnail((16, 16), Image.ANTIALIAS)
unstlbd_img.thumbnail((16, 16), Image.ANTIALIAS)
vunstgd_img.thumbnail((16, 16), Image.ANTIALIAS)
vunstwr_img.thumbnail((16, 16), Image.ANTIALIAS)
vunstbd_img.thumbnail((16, 16), Image.ANTIALIAS)
icon_manager = mpl.pyplot.get_current_fig_manager()
icon_manager.window.wm_iconbitmap(os.path.dirname(__file__) + '/static/icons/icon.ico')
rax = plt.axes([0.881, 0.535, 0.089, 0.089], aspect='equal', frameon=True, axisbg='white')
radio = RadioButtons(rax, servers.keys())
radio_value = radio.value_selected
class ButtonHandler(object):
"""
Class created to handle button functionality via .on_clicked()
"""
ind = 0
def quit(self, event):
self.ind += 1<|fim▁hole|>
def pause(self, event):
global state
self.ind -= 1
state += 1
plt.draw()
def make_databox(vpos, hpos, alpha, fc, ec):
"""
Creates a box of all equal dimensions to hold the text data at the side of the graph - uniformity!
vpos: vertical position float
hpos: horizontal position float
alpha: strength of the colour float
colour: colour of the box string
"""
return ax1.text(vpos, hpos, '______________.', transform=ax1.transAxes, alpha=0,
bbox={'alpha': alpha,
'pad': 5,
"fc": fc,
"ec": ec,
"lw": 2})
def close_handler(event):
"""
Safely shutdown all processes of this program whenever the window is closed by user.
"""
sys.exit()
def spperr_handler(err):
"""
Sub-process ping error handler
    Handles common 'errors' we can expect from Windows' ping.exe, which is accessed through a subprocess.
'errors' refer to unsuccessful pings.
"""
err_dict = {'Destination host unreachable': 'The destination was unreachable!\nPlease check your internet '
'connection and press Retry.',
'Request timed out': 'The destination took too long to respond!\nPlease check your internet connection '
'and press Retry.'
}
try:
if tkMessageBox.askretrycancel(err, err_dict[err]):
upd_data()
else:
sys.exit()
# This should never occur - this handles errors not in the err_dict (the expected errors).
# Could be useful if a very powerful err_handler was coded, where every line is passed through here.
except KeyError:
if tkMessageBox.showerror('Unknown Error', 'The condition under which this error occurred was unexpected!'):
sys.exit()
def set_savdir(sav_dir='Screenshots'):
"""
Configures the default mpl save directory for screenshots.
Checks if there is a folder named 'Screenshots' in root folder.
If there is no folder there named 'Screenshots', it creates the directory.
"""
if not os.path.isdir(os.path.join(os.path.dirname(__file__), sav_dir).replace('\\', '/')):
try:
os.makedirs(os.path.join(os.path.dirname(__file__), sav_dir).replace('\\', '/'))
except OSError as exc:
if not (exc.errno == errno.EEXIST and os.path.isdir(os.path.join(os.path.dirname(__file__),
sav_dir).replace('\\', '/'))):
raise
# Now that the directory for 'Screenshots' surely exists, set it as default directory.
mpl.rcParams["savefig.directory"] = os.path.join(os.path.dirname(__file__), sav_dir).replace('\\', '/')
def draw_ping(vpos, hpos, ping, up_bound, lo_bound, stdv, vpos_tb, hpos_tb, a_yellow, a_green, a_red):
"""
A powerful function that performs:
1- The specification of the databox which holds the ping data:
a. Inner (face) colour represents the ping range
b. Outer (edge) colour represents the ping state (spiked, below lo_bound etc.)
2- Drawing the circle that summarizes the state of the ping
vpos: the vertical position of the button it draws the ping circle in
hpos: the horizontal position of the button it draws the ping circle in
ping: the value of the current ping
used in data analysis and is a key factor to decide the state of the ping
up_bound: represents the ping + standard deviation
lo_bound: represents the ping - standard deviation
stdv: the standard deviation calculated in upd_data(), passed from animate(i)
vpos_tb: the vertical position of the databox which holds the ping data
hpos_tb: the horizontal position of the databox which holds the ping data
a_yellow: the strength of the databox colour for yellow
a_green: the strength of the databox colour for green
a_red: the strength of the databox colour for red
"""
global avg
# Ping is 'good'
if 0 <= ping <= 199:
# Ping is very unstable - has very large and frequent spikes
if stdv * 2 >= 0.3 * avg:
make_databox(vpos=vpos_tb, hpos=hpos_tb, alpha=a_green, fc="green", ec="red")
return Button(plt.axes([hpos, vpos, 0.02, 0.02]), '', image=vunstgd_img, color='None')
# Ping is unstable - has a few frequent medium spikes causing the range to go over 15% current average ping
elif stdv * 2 >= 0.15 * avg:
make_databox(vpos=vpos_tb, hpos=hpos_tb, alpha=a_green, fc="green", ec="gold")
return Button(plt.axes([hpos, vpos, 0.02, 0.02]), '', image=unstgd_img, color='None')
# Ping is stable
elif lo_bound <= ping <= up_bound:
make_databox(vpos=vpos_tb, hpos=hpos_tb, alpha=a_green, fc="green", ec="green")
return Button(plt.axes([hpos, vpos, 0.02, 0.02]), '', image=stgd_img, color='None')
# Ping is out of bounds (unstable)
else:
# If ping is lower than lower bound, then all conditions tend toward a better ping - colour this as blue
if ping <= lo_bound:
make_databox(vpos=vpos_tb, hpos=hpos_tb, alpha=a_green, fc="green", ec="blue")
return Button(plt.axes([hpos, vpos, 0.02, 0.02]), '', image=unstlgd_img, color='None')
# Else it is simply just unstable
else:
make_databox(vpos=vpos_tb, hpos=hpos_tb, alpha=a_green, fc="green", ec="gold")
return Button(plt.axes([hpos, vpos, 0.02, 0.02]), '', image=unstgd_img, color='None')
# Ping is 'not good'
elif 200 <= ping <= 499:
if stdv * 2 >= 0.3 * avg:
make_databox(vpos=vpos_tb, hpos=hpos_tb, alpha=a_yellow, fc="yellow", ec="red")
return Button(plt.axes([hpos, vpos, 0.02, 0.02]), '', image=vunstwr_img, color='None')
elif stdv * 2 >= 0.15 * avg:
make_databox(vpos=vpos_tb, hpos=hpos_tb, alpha=a_yellow, fc="yellow", ec="gold")
return Button(plt.axes([hpos, vpos, 0.02, 0.02]), '', image=unstgd_img, color='None')
elif lo_bound <= ping <= up_bound:
make_databox(vpos=vpos_tb, hpos=hpos_tb, alpha=a_yellow, fc="yellow", ec="green")
return Button(plt.axes([hpos, vpos, 0.02, 0.02]), '', image=stwr_img, color='None')
else:
if ping <= lo_bound:
make_databox(vpos=vpos_tb, hpos=hpos_tb, alpha=a_yellow, fc="yellow", ec="blue")
return Button(plt.axes([hpos, vpos, 0.02, 0.02]), '', image=unstlwr_img, color='None')
else:
make_databox(vpos=vpos_tb, hpos=hpos_tb, alpha=a_yellow, fc="yellow", ec="gold")
return Button(plt.axes([hpos, vpos, 0.02, 0.02]), '', image=unstwr_img, color='None')
# Ping is 'bad'
elif ping > 500:
if stdv * 2 >= 0.3 * avg:
make_databox(vpos=vpos_tb, hpos=hpos_tb, alpha=a_red, fc="red", ec="black")
return Button(plt.axes([hpos, vpos, 0.02, 0.02]), '', image=vunstbd_img, color='None')
elif stdv * 2 >= 0.15 * avg:
make_databox(vpos=vpos_tb, hpos=hpos_tb, alpha=a_red, fc="red", ec="gold")
return Button(plt.axes([hpos, vpos, 0.02, 0.02]), '', image=unstgd_img, color='None')
elif lo_bound <= ping <= up_bound:
make_databox(vpos=vpos_tb, hpos=hpos_tb, alpha=a_red, fc="red", ec="green")
return Button(plt.axes([hpos, vpos, 0.02, 0.02]), '', image=stbd_img, color='None')
else:
if ping <= lo_bound:
make_databox(vpos=vpos_tb, hpos=hpos_tb, alpha=a_red, fc="red", ec="blue")
return Button(plt.axes([hpos, vpos, 0.02, 0.02]), '', image=unstlbd_img, color='None')
else:
make_databox(vpos=vpos_tb, hpos=hpos_tb, alpha=a_red, fc="red", ec="gold")
return Button(plt.axes([hpos, vpos, 0.02, 0.02]), '', image=unstbd_img, color='None')
def upd_data():
"""
This function performs a Windows ping function and updates:
1- lping : which is stored in global data array lpings each instance
: if lping >= 200 or <= 499, it is stored in global data array wpings each instance
: if lping >= 500, it is stored in global data array bpings each instance
2- ltime : which is stored in global data array ltimes each instance
: is stored in global data array wtimes each instance wpings has a new value
: is stored in global data array btimes each instance wpings has a new value
3- avg : hence also count and sum_ping (based on radio_value's "NA" or "LAN")
: which is stored in global array avg_lis each instance
4- max_ping: (based on radio_value's "NA" or "LAN")
5- min_ping: (based on radio_value's "NA" or "LAN")
6- sd : the standard deviation (lping-avg)^2/count (based on radio_value's "NA" or "LAN")
: used to calculate top (upper bound = avg + sd) and bot (lower bound = avg - sd)
: top and bot are global data arrays
Notes:
1- creationflags=0x08000000 (for subprocess) forces Windows cmd to not generate a window.
"""
global lpings, ltimes, sum_ping, servers, avg, avg_lis, radio_value, num_un, num_to, top, bot, sd, wtimes, wpings, \
bpings, btimes
global sum_ping_na, count_na, max_ping_na, min_ping_na, sum_sq_dif_na
global sum_ping_lan, count_lan, max_ping_lan, min_ping_lan, sum_sq_dif_lan
# Recheck the radio button value so as to ping to the selected server
radio_value = radio.value_selected
sp = subprocess.Popen(["ping.exe", servers[radio_value], "-n", "1", "-l", "500"],
stdout=subprocess.PIPE,
stdin=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=False,
creationflags=0x08000000)
# For instantaneous interpretation of output from subprocess
while sp.poll() is None:
line = sp.stdout.readline()
# Data is updated in here from the newest subprocess ping
if "time=" in line:
lping = float(line[line.find("time=")+5:line.find("ms")])
if radio_value == "NA":
sum_ping_na += lping
count_na += 1
avg = sum_ping_na / count_na
sq_dif = (lping - avg)*(lping - avg)
sum_sq_dif_na += sq_dif
sd = math.sqrt(sum_sq_dif_na / count_na)
if lping > max_ping_na:
max_ping_na = lping
if min_ping_na > lping:
min_ping_na = lping
if radio_value == "LAN":
sum_ping_lan += lping
count_lan += 1
avg = sum_ping_lan / count_lan
sq_dif = (lping - avg) * (lping - avg)
sum_sq_dif_lan += sq_dif
sd = math.sqrt(sum_sq_dif_lan / count_lan)
if lping > max_ping_lan:
max_ping_lan = lping
if min_ping_lan > lping:
min_ping_lan = lping
top += [avg + sd]
bot += [avg - sd]
avg_lis += [avg]
interval = datetime.now() - start
ltime = interval.total_seconds()
ltimes += [ltime]
lpings += [lping]
if 200 <= lping <= 499:
wpings += [lping]
wtimes += [ltime]
elif lping >= 500:
bpings += [lping]
btimes += [ltime]
elif "Destination host unreachable" in line:
num_un += 1
spperr_handler("Destination host unreachable")
elif "Request timed out" in line:
num_to += 1
spperr_handler("Request timed out")
def animate(i):
"""
Performs the 'graphical updating' based on the newly updated data from upd_date()
"""
global max_ping, min_ping, ltimes, lpings, radio_value, servers, avg, avg_lis, num_to, num_un, top, bot, wtimes,\
wpings, btimes
global sum_ping_na, count_na, max_ping_na, min_ping_na
global sum_ping_lan, count_lan, max_ping_lan, min_ping_lan
if radio_value == "NA":
max_ping = max_ping_na
min_ping = min_ping_na
if radio_value == "LAN":
max_ping = max_ping_lan
min_ping = min_ping_lan
pingar = np.array(lpings)
timear = np.array(ltimes)
w_pingar = np.array(wpings)
w_timear = np.array(wtimes)
b_pingar = np.array(bpings)
b_timear = np.array(btimes)
avgar = np.array(avg_lis)
topar = np.array(top)
botar = np.array(bot)
ax1.clear()
ax1.text(0.999, 1.02, 'by Ryan Chin Sang', ha='right', va='top', color='0.85', size='small',
transform=ax1.transAxes)
# Positions of the first textbox to display data
vpos_tb = 1.01
hpos_tb = 0.973
hpos_img = 0.88
vpos_img = 0.8325
a_red = 0.3
a_grey = 0.2
a_blue = 0.14
a_green = 0.23
a_yellow = 0.17
# Ping data
ax1.text(vpos_tb, hpos_tb, "Ping: " + str(lpings[-1]) + " ms", transform=ax1.transAxes)
draw_ping(vpos=vpos_img + 0.0385, hpos=hpos_img, ping=lpings[-1], up_bound=top[-1], lo_bound=bot[-1], stdv=sd,
vpos_tb=vpos_tb, hpos_tb=hpos_tb, a_green=a_green, a_red=a_red, a_yellow=a_yellow)
# Average ping
if lpings[-1] < avg:
make_databox(vpos=vpos_tb, hpos=hpos_tb - 0.05, alpha=a_green, fc="green", ec="green")
ax1.text(vpos_tb, hpos_tb-0.05, "Avg: " + format(avg, '.3f') + " ms", transform=ax1.transAxes)
Button(plt.axes([hpos_img, vpos_img, 0.02, 0.02]), '', image=dec_img, color='None')
elif lpings[-1] > avg:
make_databox(vpos=vpos_tb, hpos=hpos_tb - 0.05, alpha=a_red, fc="red", ec="black")
ax1.text(vpos_tb, hpos_tb - 0.05, "Avg: " + format(avg, '.3f') + " ms", transform=ax1.transAxes)
Button(plt.axes([hpos_img, vpos_img, 0.02, 0.02]), '', image=inc_img, color='None')
else:
make_databox(vpos=vpos_tb, hpos=hpos_tb - 0.05, alpha=a_blue, fc="blue", ec="blue")
ax1.text(vpos_tb, hpos_tb - 0.05, "Avg: " + format(avg, '.3f') + " ms", transform=ax1.transAxes)
Button(plt.axes([hpos_img, vpos_img, 0.02, 0.02]), '', image=null_img, color='None')
# Time data
make_databox(vpos=vpos_tb, hpos=hpos_tb - 0.1, alpha=a_blue, fc="blue", ec="blue")
ax1.text(vpos_tb, hpos_tb-0.1, "Time: " + str(ltimes[-1]) + " s", transform=ax1.transAxes)
# Maximum Ping data
make_databox(vpos=vpos_tb, hpos=hpos_tb - 0.15, alpha=a_blue, fc="blue", ec="blue")
ax1.text(vpos_tb, hpos_tb-0.15, "Max: " + str(max_ping) + " ms", transform=ax1.transAxes)
# Minimum Ping data
make_databox(vpos=vpos_tb, hpos=hpos_tb - 0.2, alpha=a_blue, fc="blue", ec="blue")
ax1.text(vpos_tb, hpos_tb-0.2, "Min: " + str(min_ping) + " ms", transform=ax1.transAxes)
# No. of timeouts
make_databox(vpos=vpos_tb, hpos=hpos_tb - 0.25, alpha=a_grey, fc="grey", ec="black")
ax1.text(vpos_tb, hpos_tb-0.25, "# Timeout: " + str(num_to), transform=ax1.transAxes)
# No. of unreachables
make_databox(vpos=vpos_tb, hpos=hpos_tb - 0.3, alpha=a_grey, fc="grey", ec="black")
ax1.text(vpos_tb, hpos_tb-0.3, "# Unreachable: " + str(num_un), transform=ax1.transAxes)
# Shows state of the animated graph
ax1.text(0.92, -0.0925, 'box', transform=ax1.transAxes, fontsize=22, zorder=0, alpha=0,
bbox={'alpha': a_grey, 'pad': 5, "fc": "white", "ec": "black", "lw": 2})
ax1.text(0.92, -0.087, ' Play' if state % 2 else 'Pause', transform=ax1.transAxes, zorder=1)
ax1.set_ylabel('Ping /ms', size='large')
ax1.set_xlabel('Time /s', size='large')
ax1.set_title('Ping to League of Legends [' + radio_value + '] Server (' + servers[radio_value] + ')', fontsize=16,
fontweight='bold')
ax1.plot(timear, pingar, linewidth=1.0, label="Ping")
ax1.plot(timear, avgar, linewidth=0.6, label="Average Ping")
# Draws a yellow graph when ping goes over 200 ms and is less than 499 ms
ax1.plot(w_timear, w_pingar, linewidth=1.5, color='yellow', zorder=1)
# Draws a red graph when ping goes over 500 ms
ax1.plot(b_timear, b_pingar, linewidth=1.5, color='red', zorder=1)
ax1.plot(timear, topar, linewidth=0.3)
ax1.plot(timear, botar, linewidth=0.3)
ax1.fill_between(timear, botar, topar, facecolor='green', interpolate=True, alpha=0.0375)
ax1.legend(loc='upper left')
# Only update the data if state indicates 'play' (opposite of button logic)
if state % 2 == 0:
upd_data()
def set_frame():
"""
    Sets the initial frame of the window, which is then animated through animate(i)
"""
global state
fig.canvas.mpl_connect('close_event', close_handler)
fig.canvas.set_window_title('League Latency v' + version)
ani = animation.FuncAnimation(fig, animate, frames=120)
# [(-=left, +=right), (-=up, +=down), (-=thin, +=wide), (-=thin, +=thick)]
quit_b = Button(plt.axes([0.905, 0.01, 0.089, 0.05]), 'Quit')
quit_b.on_clicked(ButtonHandler().quit)
pp_b = Button(plt.axes([0.835, 0.01, 0.1, 0.05]), '', image=pp_img)
pp_b.on_clicked(ButtonHandler().pause)
plt.show()
set_savdir()
upd_data()
set_frame()<|fim▁end|> | close_handler(event)
plt.draw() |
<|file_name|>results.py<|end_file_name|><|fim▁begin|>from collections import defaultdict
from django import template
from django.utils.safestring import mark_safe
from censusreporter.apps.census.utils import parse_table_id, generic_table_description, table_link
register = template.Library()
@register.filter
def format_subtables_for_results(table_ids):
parts = []
deferred_racials = defaultdict(list)
deferred_pr = []
for table in table_ids:
parsed = parse_table_id(table)
if parsed['racial']:
key = parsed['table_type']
if parsed['puerto_rico']:
key += 'PR'
deferred_racials[key].append(parsed)
elif parsed['puerto_rico']:
deferred_pr.append(table)
else:
parts.append(table_link(table, generic_table_description(table)))
for table in deferred_pr:<|fim▁hole|> parts.append(table_link(table, generic_table_description(table)))
racial_label_tests = [
('B', 'Detailed (by race)'),
('C', 'Simplified (by race)'),
('BPR', 'Detailed (by race) for Puerto Rico'),
('CPR', 'Simplified (by race) for Puerto Rico'),
]
for test, label in racial_label_tests:
try:
iteration_parts = []
for table_dict in deferred_racials[test]:
iteration_parts.append(table_link(table_dict['table_id'], table_dict['race']))
group_table_id = table_dict['table_id']
if iteration_parts:
contents = ' / '.join(iteration_parts)
iter_wrapper = """
<a class="toggler" data-id="{}">{}</a>
<span data-id="{}" class='racial-iteration'>{}</span>
""".format(group_table_id, label, group_table_id, contents)
parts.append(iter_wrapper)
except Exception as e:
parts.append(e.message)
return mark_safe(', '.join(parts))<|fim▁end|> | |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
#![deny(warnings)]
use ascii::AsciiString;
use mercurial_types::{
blobs::HgBlobChangeset, HgBlob, HgChangesetId, HgFileNodeId, HgManifestId, HgNodeHash,
HgParents, MPath, RepoPath, Type,
};
use mononoke_types::{hash::Sha256, ChangesetId, FileType};
use std::fmt;
use thiserror::Error;
#[derive(Debug)]
pub enum StateOpenError {
Heads,
Bookmarks,
Changesets,
Filenodes,
BonsaiGitMapping,
BonsaiGlobalrevMapping,<|fim▁hole|> SegmentedChangelog,
}
impl fmt::Display for StateOpenError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
StateOpenError::Heads => write!(f, "heads"),
StateOpenError::Bookmarks => write!(f, "bookmarks"),
StateOpenError::Changesets => write!(f, "changesets"),
StateOpenError::Filenodes => write!(f, "filenodes"),
StateOpenError::BonsaiGitMapping => write!(f, "bonsai_git_mapping"),
StateOpenError::BonsaiGlobalrevMapping => write!(f, "bonsai_globalrev_mapping"),
StateOpenError::BonsaiSvnrevMapping => write!(f, "bonsai_svnrev_mapping"),
StateOpenError::BonsaiHgMapping => write!(f, "bonsai_hg_mapping"),
StateOpenError::Phases => write!(f, "phases"),
StateOpenError::HgMutationStore => write!(f, "hg_mutation_store"),
StateOpenError::SegmentedChangelog => write!(f, "segmented_changelog"),
}
}
}
#[derive(Debug, Error)]
pub enum ErrorKind {
#[error("Error while opening state for {0}")]
StateOpen(StateOpenError),
#[error("Node id {0} is missing")]
NodeMissing(HgNodeHash),
#[error("Content missing nodeid {0}")]
ContentMissing(HgNodeHash),
#[error("Error while deserializing file contents retrieved from key '{0}'")]
FileContentsDeserializeFailed(String),
#[error("Content blob missing for id: {0}")]
ContentBlobByAliasMissing(Sha256),
#[error("Uploaded blob is incomplete {0:?}")]
BadUploadBlob(HgBlob),
#[error("HgParents are not in blob store {0:?}")]
ParentsUnknown(HgParents),
#[error("Serialization of node failed {0} ({1})")]
SerializationFailed(HgNodeHash, bincode::Error),
#[error("Root manifest is not a manifest (type {0})")]
BadRootManifest(HgManifestId),
#[error("Manifest type {0} does not match uploaded type {1}")]
ManifestTypeMismatch(Type, Type),
#[error("Node generation failed for unknown reason")]
NodeGenerationFailed,
#[error("Path {0} appears multiple times in manifests")]
DuplicateEntry(RepoPath),
#[error("Duplicate manifest hash {0}")]
DuplicateManifest(HgNodeHash),
#[error("Missing entries in new changeset {0}")]
MissingEntries(HgNodeHash),
#[error("Filenode is missing: {0} {1}")]
MissingFilenode(RepoPath, HgFileNodeId),
#[error("Some manifests do not exist")]
MissingManifests,
#[error("Expected {0} to be a manifest, found a {1} instead")]
NotAManifest(HgNodeHash, Type),
#[error(
"Inconsistent node hash for changeset: provided: {0}, \
computed: {1} for blob: {2:#?}"
)]
InconsistentChangesetHash(HgNodeHash, HgNodeHash, HgBlobChangeset),
#[error("Bookmark {0} does not exist")]
BookmarkNotFound(AsciiString),
#[error("Unresolved conflict at {0} with parents: {1:?}")]
UnresolvedConflicts(MPath, Vec<(FileType, HgFileNodeId)>),
#[error("Manifest without parents did not get changed by a BonsaiChangeset")]
UnchangedManifest,
#[error("Saving empty manifest which is not a root: {0}")]
SavingHgEmptyManifest(RepoPath),
#[error("Trying to merge a manifest with two existing parents p1 {0} and p2 {1}")]
ManifestAlreadyAMerge(HgNodeHash, HgNodeHash),
#[error("Path not found: {0}")]
PathNotFound(MPath),
#[error("Remove called on non-directory")]
NotADirectory,
#[error("Empty file path")]
EmptyFilePath,
#[error("Memory manifest conflict can not contain single entry")]
SingleEntryConflict,
#[error("Cannot find cache pool {0}")]
MissingCachePool(String),
#[error("Bonsai cs {0} not found")]
BonsaiNotFound(ChangesetId),
#[error("Bonsai changeset not found for hg changeset {0}")]
BonsaiMappingNotFound(HgChangesetId),
#[error("Root path wasn't expected at this context")]
UnexpectedRootPath,
#[error(
"Incorrect copy info: not found a file version {from_path} {from_node} the file {to_path} {to_node} was copied from"
)]
IncorrectCopyInfo {
from_path: MPath,
from_node: HgFileNodeId,
to_path: MPath,
to_node: HgFileNodeId,
},
#[error(
"CaseConflict: the changes introduced by this commit have conflicting case. The first offending path is '{0}', and conflicted with '{1}'. Resolve the conflict."
)]
InternalCaseConflict(MPath, MPath),
#[error(
"CaseConflict: the changes introduced by this commit conflict with existing files in the repository. The first conflicting path in this commit was '{0}', and conflicted with '{1}' in the repository. Resolve the conflict."
)]
ExternalCaseConflict(MPath, MPath),
}<|fim▁end|> | BonsaiSvnrevMapping,
BonsaiHgMapping,
Phases,
HgMutationStore, |
<|file_name|>service.go<|end_file_name|><|fim▁begin|>package docker
import (
"fmt"
"io"
"io/ioutil"
"os"
"sort"
"docker.io/go-docker/api/types"
"docker.io/go-docker/api/types/filters"
"docker.io/go-docker/api/types/swarm"
"github.com/appcelerator/amp/docker/cli/cli/command"
"github.com/appcelerator/amp/docker/cli/cli/service/progress"
"github.com/appcelerator/amp/docker/docker/pkg/jsonmessage"
log "github.com/sirupsen/logrus"
"golang.org/x/net/context"
)
type ServiceStatus struct {
RunningTasks int32
CompletedTasks int32
FailedTasks int32
TotalTasks int32
Status string
}
// ServiceInspect inspects a service
func (d *Docker) ServiceInspect(ctx context.Context, service string) (swarm.Service, error) {
serviceEntity, _, err := d.client.ServiceInspectWithRaw(ctx, service, types.ServiceInspectOptions{InsertDefaults: true})
if err != nil {
return swarm.Service{}, err
}
return serviceEntity, nil
}<|fim▁hole|> serviceEntity, err := d.ServiceInspect(ctx, service)
if err != nil {
return err
}
serviceMode := &serviceEntity.Spec.Mode
if serviceMode.Replicated == nil {
return fmt.Errorf("scale can only be used with replicated mode")
}
serviceMode.Replicated.Replicas = &scale
response, err := d.client.ServiceUpdate(ctx, serviceEntity.ID, serviceEntity.Version, serviceEntity.Spec, types.ServiceUpdateOptions{})
if err != nil {
return err
}
for _, warning := range response.Warnings {
log.Warnln(warning)
}
log.Infof("service %s scaled to %d\n", service, scale)
return nil
}
// ServiceStatus returns service status
func (d *Docker) ServiceStatus(ctx context.Context, service *swarm.Service) (*ServiceStatus, error) {
// Get expected number of tasks for the service
expectedTaskCount, err := d.ExpectedNumberOfTasks(ctx, service.ID)
if err != nil {
return nil, err
}
if expectedTaskCount == 0 {
return &ServiceStatus{
RunningTasks: 0,
TotalTasks: 0,
Status: StateNoMatchingNode,
}, nil
}
// List all tasks of service
args := filters.NewArgs()
args.Add("service", service.ID)
tasks, err := d.TaskList(ctx, types.TaskListOptions{Filters: args})
if err != nil {
return nil, err
}
// Sort tasks by slot, then by most recent
sort.Stable(TasksBySlot(tasks))
// Build a map with only the most recent task per slot
mostRecentTasks := map[int]swarm.Task{}
for _, task := range tasks {
if _, exists := mostRecentTasks[task.Slot]; !exists {
mostRecentTasks[task.Slot] = task
}
}
// Computing service status based on task status
taskMap := map[string]int32{}
for _, task := range mostRecentTasks {
switch task.Status.State {
case swarm.TaskStatePreparing:
taskMap[StatePreparing]++
case swarm.TaskStateReady:
taskMap[StateReady]++
case swarm.TaskStateStarting:
taskMap[StateStarting]++
case swarm.TaskStateRunning:
taskMap[StateRunning]++
case swarm.TaskStateComplete:
taskMap[StateComplete]++
case swarm.TaskStateFailed, swarm.TaskStateRejected:
taskMap[StateError]++
}
}
// If any task has an ERROR status, the service status is ERROR
if taskMap[StateError] > 0 {
return &ServiceStatus{
RunningTasks: taskMap[StateRunning],
TotalTasks: expectedTaskCount,
Status: StateError,
}, nil
}
// If all tasks are PREPARING, the service status is PREPARING
if taskMap[StatePreparing] == expectedTaskCount {
return &ServiceStatus{
RunningTasks: taskMap[StateRunning],
TotalTasks: expectedTaskCount,
Status: StatePreparing,
}, nil
}
// If all tasks are READY, the service status is READY
if taskMap[StateReady] == expectedTaskCount {
return &ServiceStatus{
RunningTasks: taskMap[StateRunning],
TotalTasks: expectedTaskCount,
Status: StateReady,
}, nil
}
// If all tasks are RUNNING, the service status is RUNNING
if taskMap[StateRunning] == expectedTaskCount {
return &ServiceStatus{
RunningTasks: taskMap[StateRunning],
TotalTasks: expectedTaskCount,
Status: StateRunning,
}, nil
}
// If all tasks are COMPLETE, the service status is COMPLETE
if taskMap[StateComplete] == expectedTaskCount {
return &ServiceStatus{
RunningTasks: taskMap[StateRunning],
TotalTasks: expectedTaskCount,
Status: StateComplete,
}, nil
}
// Else the service status is STARTING
return &ServiceStatus{
RunningTasks: taskMap[StateRunning],
TotalTasks: expectedTaskCount,
Status: StateStarting,
}, nil
}
// ServicesList lists the services
func (d *Docker) ServicesList(ctx context.Context, options types.ServiceListOptions) ([]swarm.Service, error) {
return d.client.ServiceList(ctx, options)
}
// WaitOnService waits for the service to converge. It outputs a progress bar.
func (d *Docker) WaitOnService(ctx context.Context, serviceID string, quiet bool) error {
errChan := make(chan error, 1)
pipeReader, pipeWriter := io.Pipe()
go func() {
errChan <- progress.ServiceProgress(ctx, d.client, serviceID, pipeWriter)
}()
if quiet {
go io.Copy(ioutil.Discard, pipeReader)
return <-errChan
}
err := jsonmessage.DisplayJSONMessagesToStream(pipeReader, command.NewOutStream(os.Stdout), nil)
if err == nil {
err = <-errChan
}
return err
}<|fim▁end|> |
// ServiceScale scales a service
func (d *Docker) ServiceScale(ctx context.Context, service string, scale uint64) error { |
<|file_name|>scope_enclosed.ts<|end_file_name|><|fim▁begin|>function topfun() {<|fim▁hole|> let a = 12
}
let a = 13
}<|fim▁end|> | function subfun() { |
<|file_name|>implement.ctor.nseal.alias.js<|end_file_name|><|fim▁begin|>var Type = require("@kaoscript/runtime").Type;
module.exports = function(expect) {
class Shape {<|fim▁hole|> __ks_init_0() {
this._color = "";
}
__ks_init() {
Shape.prototype.__ks_init_0.call(this);
}
__ks_cons_0(color) {
if(arguments.length < 1) {
throw new SyntaxError("Wrong number of arguments (" + arguments.length + " for 1)");
}
if(color === void 0 || color === null) {
throw new TypeError("'color' is not nullable");
}
else if(!Type.isString(color)) {
throw new TypeError("'color' is not of type 'String'");
}
this._color = color;
}
__ks_cons(args) {
if(args.length === 1) {
Shape.prototype.__ks_cons_0.apply(this, args);
}
else {
throw new SyntaxError("Wrong number of arguments");
}
}
__ks_func_color_0() {
return this._color;
}
color() {
if(arguments.length === 0) {
return Shape.prototype.__ks_func_color_0.apply(this);
}
throw new SyntaxError("Wrong number of arguments");
}
__ks_func_draw_0() {
return "I'm drawing a " + this._color + " rectangle.";
}
draw() {
if(arguments.length === 0) {
return Shape.prototype.__ks_func_draw_0.apply(this);
}
throw new SyntaxError("Wrong number of arguments");
}
}
Shape.prototype.__ks_cons_1 = function() {
this._color = "red";
};
Shape.prototype.__ks_cons = function(args) {
if(args.length === 0) {
Shape.prototype.__ks_cons_1.apply(this);
}
else if(args.length === 1) {
Shape.prototype.__ks_cons_0.apply(this, args);
}
else {
throw new SyntaxError("Wrong number of arguments");
}
}
let shape = new Shape();
expect(shape.draw()).to.equals("I'm drawing a red rectangle.");
};<|fim▁end|> | constructor() {
this.__ks_init();
this.__ks_cons(arguments);
} |
<|file_name|>crime.client.routes.js<|end_file_name|><|fim▁begin|>(function () {
'use strict';
angular
.module('crimes.routes')
.config(routeConfig);
routeConfig.$inject = ['$stateProvider'];
function routeConfig($stateProvider) {
$stateProvider
.state('crimes', {
abstract: true,
url: '/crimes',
template: '<ui-view/>'
})
.state('crimes.list', {
url: '',
templateUrl: '/modules/crimes/client/views/list-crimes.client.view.html',
controller: 'CrimesListController',
controllerAs: 'vm',
data: {
pageTitle: 'Crimes List'
}
})
.state('crimes.view', {
url: '/:crimeId',
templateUrl: '/modules/crimes/client/views/view-crimes.client.view.html',
controller: 'CrimesController',
controllerAs: 'vm',
resolve: {
crimeResolve: getCrime
},<|fim▁hole|> pageTitle: 'Crimes {{ crimeResolve.title }}'
}
});
}
getCrime.$inject = ['$stateParams', 'CrimesService'];
function getCrime($stateParams, CrimesService) {
return CrimesService.get({
crimeId: $stateParams.crimeId
}).$promise;
}
}());<|fim▁end|> | data: { |
<|file_name|>localstorage.js<|end_file_name|><|fim▁begin|>angular.module('appTesting').service("LoginLocalStorage", function () {
"use strict";
var STORE_NAME = "login";
var setUser = function setUser(user) {
localStorage.setItem(STORE_NAME, JSON.stringify(user));
}
var getUser = function getUser() {
var storedTasks = localStorage.getItem(STORE_NAME);
if (storedTasks) {
return JSON.parse(storedTasks);
}
return {};
}
return {
setUser: setUser,<|fim▁hole|> }
});<|fim▁end|> | getUser: getUser |
<|file_name|>test_autoupdate.py<|end_file_name|><|fim▁begin|># vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2015-2020 Florian Bruhin (The Compiler) <[email protected]>
# Copyright 2015-2018 Alexander Cogneau (acogneau) <[email protected]>:
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Tests for qutebrowser.misc.autoupdate."""
import pytest
from PyQt5.QtCore import QUrl
from qutebrowser.misc import autoupdate, httpclient
INVALID_JSON = ['{"invalid": { "json"}', '{"wrong": "keys"}']
class HTTPGetStub(httpclient.HTTPClient):
"""A stub class for HTTPClient.
Attributes:
url: the last url used by get()
_success: Whether get() will emit a success signal.
"""
def __init__(self, success=True, json=None):
super().__init__()
self.url = None
self._success = success
if json:
self._json = json
else:
self._json = '{"info": {"version": "test"}}'
def get(self, url):
self.url = url
if self._success:
self.success.emit(self._json)
else:
self.error.emit("error")
<|fim▁hole|>
def test_get_version_success(qtbot):
"""Test get_version() when success is emitted."""
http_stub = HTTPGetStub(success=True)
client = autoupdate.PyPIVersionClient(client=http_stub)
with qtbot.assertNotEmitted(client.error):
with qtbot.waitSignal(client.success):
client.get_version('test')
assert http_stub.url == QUrl(client.API_URL.format('test'))
def test_get_version_error(qtbot):
"""Test get_version() when error is emitted."""
http_stub = HTTPGetStub(success=False)
client = autoupdate.PyPIVersionClient(client=http_stub)
with qtbot.assertNotEmitted(client.success):
with qtbot.waitSignal(client.error):
client.get_version('test')
@pytest.mark.parametrize('json', INVALID_JSON)
def test_invalid_json(qtbot, json):
"""Test on_client_success() with invalid JSON."""
http_stub = HTTPGetStub(json=json)
client = autoupdate.PyPIVersionClient(client=http_stub)
client.get_version('test')
with qtbot.assertNotEmitted(client.success):
with qtbot.waitSignal(client.error):
client.get_version('test')<|fim▁end|> | def test_constructor(qapp):
client = autoupdate.PyPIVersionClient()
assert isinstance(client._client, httpclient.HTTPClient)
|
<|file_name|>page.spec.ts<|end_file_name|><|fim▁begin|>import { ChatPage } from './page';
declare var describe, beforeEach, it, expect;
describe("ChatPage", () => {
<|fim▁hole|>
it("should contain 'Hello world!' in 'content' property", () => {
expect(page.Text.PAGE_HOME_CONTENT).toEqual("Hello world!");
});
});<|fim▁end|> | let page: ChatPage;
beforeEach(() => page = new ChatPage(null)); |
<|file_name|>server.js<|end_file_name|><|fim▁begin|>'use strict';
module.exports = function (grunt) {
grunt.registerTask('server', [
'clean:server',
'configureProxies:livereload',
'concurrent:server',
'autoprefixer',
'connect:livereload',
'watch'
]);
grunt.registerTask('server:open', [
'clean:server',
'configureProxies:livereload',
'concurrent:server',<|fim▁hole|> 'connect:livereload',
'open:server',
'watch'
]);
grunt.registerTask('server:dist', [
'connect:dist',
'open:dist',
'watch'
]);
grunt.registerTask('server:doc', [
'connect:doc',
'open:doc',
'watch:doc'
]);
};<|fim▁end|> | 'autoprefixer', |
<|file_name|>use.py<|end_file_name|><|fim▁begin|>##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#<|fim▁hole|># LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import argparse
from spack.cmd.common import print_module_placeholder_help
description = "add package to environment using dotkit"
section = "environment"
level = "long"
def setup_parser(subparser):
"""Parser is only constructed so that this prints a nice help
message with -h. """
subparser.add_argument(
'spec', nargs=argparse.REMAINDER,
help='spec of package to use with dotkit')
def use(parser, args):
print_module_placeholder_help()<|fim▁end|> | # This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved. |
<|file_name|>default_ty_param_dependent_defaults.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//
#![feature(default_type_parameter_fallback)]
use std::marker::PhantomData;
struct Foo<T,U=T> { t: T, data: PhantomData<U> }
fn main() {
let foo = Foo { t: 'a', data: PhantomData };
}<|fim▁end|> | |
<|file_name|>CommCalls.cc<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 1996-2016 The Squid Software Foundation and contributors
*
* Squid software is distributed under GPLv2+ license and includes
* contributions from numerous individuals and organizations.
* Please see the COPYING and CONTRIBUTORS files for details.
*/
#include "squid.h"
#include "anyp/PortCfg.h"
#include "comm/Connection.h"
#include "CommCalls.h"
#include "fde.h"
#include "globals.h"
/* CommCommonCbParams */
CommCommonCbParams::CommCommonCbParams(void *aData):
data(cbdataReference(aData)), conn(), flag(Comm::OK), xerrno(0), fd(-1)
{
}
CommCommonCbParams::CommCommonCbParams(const CommCommonCbParams &p):
data(cbdataReference(p.data)), conn(p.conn), flag(p.flag), xerrno(p.xerrno), fd(p.fd)
{
}
CommCommonCbParams::~CommCommonCbParams()
{
cbdataReferenceDone(data);
}
void
CommCommonCbParams::print(std::ostream &os) const
{
if (conn != NULL)
os << conn;
else
os << "FD " << fd;
if (xerrno)
os << ", errno=" << xerrno;
if (flag != Comm::OK)
os << ", flag=" << flag;
if (data)
os << ", data=" << data;
}
/* CommAcceptCbParams */
CommAcceptCbParams::CommAcceptCbParams(void *aData):
CommCommonCbParams(aData), xaction()
{
}
void
CommAcceptCbParams::print(std::ostream &os) const
{
CommCommonCbParams::print(os);
if (xaction != NULL)
os << ", " << xaction->id;
}
/* CommConnectCbParams */
CommConnectCbParams::CommConnectCbParams(void *aData):
CommCommonCbParams(aData)
{
}
bool
CommConnectCbParams::syncWithComm()
{
// drop the call if the call was scheduled before comm_close but
// is being fired after comm_close
if (fd >= 0 && fd_table[fd].closing()) {
debugs(5, 3, HERE << "dropping late connect call: FD " << fd);
return false;
}
return true; // now we are in sync and can handle the call
}
/* CommIoCbParams */
CommIoCbParams::CommIoCbParams(void *aData): CommCommonCbParams(aData),
buf(NULL), size(0)
{
}
bool
CommIoCbParams::syncWithComm()
{
// change parameters if the call was scheduled before comm_close but
// is being fired after comm_close
if ((conn->fd < 0 || fd_table[conn->fd].closing()) && flag != Comm::ERR_CLOSING) {
debugs(5, 3, HERE << "converting late call to Comm::ERR_CLOSING: " << conn);
flag = Comm::ERR_CLOSING;
}
return true; // now we are in sync and can handle the call
}
void
CommIoCbParams::print(std::ostream &os) const
{
CommCommonCbParams::print(os);
if (buf) {
os << ", size=" << size;
os << ", buf=" << (void*)buf;
}
}
/* CommCloseCbParams */
CommCloseCbParams::CommCloseCbParams(void *aData):
CommCommonCbParams(aData)
{
}
/* CommTimeoutCbParams */
CommTimeoutCbParams::CommTimeoutCbParams(void *aData):
CommCommonCbParams(aData)
{
}
/* FdeCbParams */
FdeCbParams::FdeCbParams(void *aData):
CommCommonCbParams(aData)
{
}
/* CommAcceptCbPtrFun */
CommAcceptCbPtrFun::CommAcceptCbPtrFun(IOACB *aHandler,
const CommAcceptCbParams &aParams):
CommDialerParamsT<CommAcceptCbParams>(aParams),
handler(aHandler)
{
}
CommAcceptCbPtrFun::CommAcceptCbPtrFun(const CommAcceptCbPtrFun &o):
CommDialerParamsT<CommAcceptCbParams>(o.params),
handler(o.handler)
{
}
void
CommAcceptCbPtrFun::dial()
{
handler(params);
}
void
CommAcceptCbPtrFun::print(std::ostream &os) const
{
os << '(';
params.print(os);
os << ')';
}
/* CommConnectCbPtrFun */
CommConnectCbPtrFun::CommConnectCbPtrFun(CNCB *aHandler,
const CommConnectCbParams &aParams):
CommDialerParamsT<CommConnectCbParams>(aParams),
handler(aHandler)
{
}
void
CommConnectCbPtrFun::dial()
{
handler(params.conn, params.flag, params.xerrno, params.data);
}
void
CommConnectCbPtrFun::print(std::ostream &os) const
{
os << '(';
params.print(os);
os << ')';
}
/* CommIoCbPtrFun */
CommIoCbPtrFun::CommIoCbPtrFun(IOCB *aHandler, const CommIoCbParams &aParams):
CommDialerParamsT<CommIoCbParams>(aParams),
handler(aHandler)
{
}
void
CommIoCbPtrFun::dial()
{
handler(params.conn, params.buf, params.size, params.flag, params.xerrno, params.data);
}
void
CommIoCbPtrFun::print(std::ostream &os) const
{
os << '(';
params.print(os);
os << ')';
}
/* CommCloseCbPtrFun */
CommCloseCbPtrFun::CommCloseCbPtrFun(CLCB *aHandler,
const CommCloseCbParams &aParams):
CommDialerParamsT<CommCloseCbParams>(aParams),
handler(aHandler)
{
}
void
CommCloseCbPtrFun::dial()
{
handler(params);
}
void
CommCloseCbPtrFun::print(std::ostream &os) const
{
os << '(';
params.print(os);
os << ')';
}
/* CommTimeoutCbPtrFun */
CommTimeoutCbPtrFun::CommTimeoutCbPtrFun(CTCB *aHandler,<|fim▁hole|> CommDialerParamsT<CommTimeoutCbParams>(aParams),
handler(aHandler)
{
}
void
CommTimeoutCbPtrFun::dial()
{
handler(params);
}
void
CommTimeoutCbPtrFun::print(std::ostream &os) const
{
os << '(';
params.print(os);
os << ')';
}
/* FdeCbPtrFun */
FdeCbPtrFun::FdeCbPtrFun(FDECB *aHandler, const FdeCbParams &aParams) :
CommDialerParamsT<FdeCbParams>(aParams),
handler(aHandler)
{
}
void
FdeCbPtrFun::dial()
{
handler(params);
}
void
FdeCbPtrFun::print(std::ostream &os) const
{
os << '(';
params.print(os);
os << ')';
}<|fim▁end|> | const CommTimeoutCbParams &aParams): |
<|file_name|>lastilePro.py<|end_file_name|><|fim▁begin|>#
# lastilePro.py
#
# (c) 2013, martin isenburg - http://rapidlasso.com
# rapidlasso GmbH - fast tools to catch reality
#
# uses lastile.exe to compute a tiling for a folder
# worth of LiDAR files with a user-specified tile
# size (and an optional buffer)
#
# LiDAR input: LAS/LAZ/BIN/TXT/SHP/BIL/ASC/DTM
# LiDAR output: LAS/LAZ/BIN/TXT
#
# for licensing see http://lastools.org/LICENSE.txt
#
import sys, os, arcgisscripting, subprocess
def check_output(command,console):
if console == True:
process = subprocess.Popen(command)
else:
process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True)
output,error = process.communicate()
returncode = process.poll()
return returncode,output
### create the geoprocessor object
gp = arcgisscripting.create(9.3)
### report that something is happening
gp.AddMessage("Starting lastile production ...")
### get number of arguments
argc = len(sys.argv)
### report arguments (for debug)
#gp.AddMessage("Arguments:")
#for i in range(0, argc):
# gp.AddMessage("[" + str(i) + "]" + sys.argv[i])
### get the path to LAStools
lastools_path = os.path.dirname(os.path.dirname(os.path.dirname(sys.argv[0])))
### make sure the path does not contain spaces
if lastools_path.count(" ") > 0:
gp.AddMessage("Error. Path to .\\lastools installation contains spaces.")
gp.AddMessage("This does not work: " + lastools_path)
gp.AddMessage("This would work: C:\\software\\lastools")
sys.exit(1)
### complete the path to where the LAStools executables are
lastools_path = lastools_path + "\\bin"
<|fim▁hole|> gp.AddMessage("Cannot find .\\lastools\\bin at " + lastools_path)
sys.exit(1)
else:
gp.AddMessage("Found " + lastools_path + " ...")
### create the full path to the lastile executable
lastile_path = lastools_path+"\\lastile.exe"
### check if executable exists
if os.path.exists(lastools_path) == False:
gp.AddMessage("Cannot find lastile.exe at " + lastile_path)
sys.exit(1)
else:
gp.AddMessage("Found " + lastile_path + " ...")
### create the command string for lastile.exe
command = ['"'+lastile_path+'"']
### maybe use '-verbose' option
if sys.argv[argc-1] == "true":
command.append("-v")
### counting up the arguments
c = 1
### add input LiDAR
wildcards = sys.argv[c+1].split()
for wildcard in wildcards:
command.append("-i")
command.append('"' + sys.argv[c] + "\\" + wildcard + '"')
c = c + 2
### maybe the input files are flightlines
if sys.argv[c] == "true":
command.append("-files_are_flightlines")
c = c + 1
### maybe use a user-defined tile size
if sys.argv[c] != "1000":
command.append("-tile_size")
command.append(sys.argv[c].replace(",","."))
c = c + 1
### maybe create a buffer around the tiles
if sys.argv[c] != "0":
command.append("-buffer")
command.append(sys.argv[c].replace(",","."))
c = c + 1
### maybe the output will be over 2000 tiles
if sys.argv[c] == "true":
command.append("-extra_pass")
c = c + 1
### maybe an output format was selected
if sys.argv[c] != "#":
if sys.argv[c] == "las":
command.append("-olas")
elif sys.argv[c] == "laz":
command.append("-olaz")
elif sys.argv[c] == "bin":
command.append("-obin")
elif sys.argv[c] == "txt":
command.append("-otxt")
elif sys.argv[c] == "xyzi":
command.append("-otxt")
command.append("-oparse")
command.append("xyzi")
elif sys.argv[c] == "txyzi":
command.append("-otxt")
command.append("-oparse")
command.append("txyzi")
c = c + 1
### maybe an output file name was selected
if sys.argv[c] != "#":
command.append("-o")
command.append('"'+sys.argv[c]+'"')
c = c + 1
### maybe an output directory was selected
if sys.argv[c] != "#":
command.append("-odir")
command.append('"'+sys.argv[c]+'"')
c = c + 1
### maybe there are additional input options
if sys.argv[c] != "#":
additional_options = sys.argv[c].split()
for option in additional_options:
command.append(option)
### report command string
gp.AddMessage("LAStools command line:")
command_length = len(command)
command_string = str(command[0])
command[0] = command[0].strip('"')
for i in range(1, command_length):
command_string = command_string + " " + str(command[i])
command[i] = command[i].strip('"')
gp.AddMessage(command_string)
### run command
returncode,output = check_output(command, False)
### report output of lastile
gp.AddMessage(str(output))
### check return code
if returncode != 0:
gp.AddMessage("Error. lastile failed.")
sys.exit(1)
### report happy end
gp.AddMessage("Success. lastile done.")<|fim▁end|> |
### check if path exists
if os.path.exists(lastools_path) == False:
|
<|file_name|>agent_connect_dlg.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'agent_connect_dlg.ui'<|fim▁hole|>#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName("Dialog")
Dialog.resize(253, 111)
self.gridLayout = QtGui.QGridLayout(Dialog)
self.gridLayout.setObjectName("gridLayout")
self.label = QtGui.QLabel(Dialog)
self.label.setObjectName("label")
self.gridLayout.addWidget(self.label, 0, 0, 1, 1)
self.agent_addrinfo = QtGui.QComboBox(Dialog)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.agent_addrinfo.sizePolicy().hasHeightForWidth())
self.agent_addrinfo.setSizePolicy(sizePolicy)
self.agent_addrinfo.setEditable(True)
self.agent_addrinfo.setObjectName("agent_addrinfo")
self.gridLayout.addWidget(self.agent_addrinfo, 0, 1, 1, 1)
self.disconnect_from_server = QtGui.QCheckBox(Dialog)
self.disconnect_from_server.setObjectName("disconnect_from_server")
self.gridLayout.addWidget(self.disconnect_from_server, 1, 0, 1, 2)
self.buttonBox = QtGui.QDialogButtonBox(Dialog)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Ok)
self.buttonBox.setObjectName("buttonBox")
self.gridLayout.addWidget(self.buttonBox, 2, 0, 1, 2)
self.retranslateUi(Dialog)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL("accepted()"), Dialog.accept)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL("rejected()"), Dialog.reject)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
Dialog.setWindowTitle(QtGui.QApplication.translate("Dialog", "Dialog", None, QtGui.QApplication.UnicodeUTF8))
self.label.setText(QtGui.QApplication.translate("Dialog", "Agent", None, QtGui.QApplication.UnicodeUTF8))
self.disconnect_from_server.setText(QtGui.QApplication.translate("Dialog", "Disconnect Clients from server", None, QtGui.QApplication.UnicodeUTF8))
if __name__ == "__main__":
import sys
app = QtGui.QApplication(sys.argv)
Dialog = QtGui.QDialog()
ui = Ui_Dialog()
ui.setupUi(Dialog)
Dialog.show()
sys.exit(app.exec_())<|fim▁end|> | #
# Created: Tue Oct 12 14:22:17 2010
# by: PyQt4 UI code generator 4.7.3 |
<|file_name|>methods_fixable.rs<|end_file_name|><|fim▁begin|>// run-rustfix
#![warn(clippy::filter_next)]
/// Checks implementation of `FILTER_NEXT` lint.
fn main() {
let v = vec![3, 2, 1, 0, -1, -2, -3];
// Single-line case.<|fim▁hole|><|fim▁end|> | let _ = v.iter().filter(|&x| *x < 0).next();
} |
<|file_name|>lint-unused-mut-variables.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Exercise the unused_mut attribute in some positive and negative cases
#[allow(dead_assignment)];
#[allow(unused_variable)];
#[allow(dead_code)];
#[deny(unused_mut)];
fn main() {
// negative cases
let mut a = 3; //~ ERROR: variable does not need to be mutable
let mut a = 2; //~ ERROR: variable does not need to be mutable
let mut b = 3; //~ ERROR: variable does not need to be mutable
let mut a = ~[3]; //~ ERROR: variable does not need to be mutable
let (mut a, b) = (1, 2); //~ ERROR: variable does not need to be mutable<|fim▁hole|>
let x = |mut y: int| 10; //~ ERROR: variable does not need to be mutable
fn what(mut foo: int) {} //~ ERROR: variable does not need to be mutable
// positive cases
let mut a = 2;
a = 3;
let mut a = ~[];
a.push(3);
let mut a = ~[];
callback(|| {
a.push(3);
});
let (mut a, b) = (1, 2);
a = 34;
match 30 {
mut x => {
x = 21;
}
}
let x = |mut y: int| y = 32;
fn nothing(mut foo: int) { foo = 37; }
// leading underscore should avoid the warning, just like the
// unused variable lint.
let mut _allowed = 1;
}
fn callback(f: ||) {}
// make sure the lint attribute can be turned off
#[allow(unused_mut)]
fn foo(mut a: int) {
let mut a = 3;
let mut b = ~[2];
}<|fim▁end|> |
match 30 {
mut x => {} //~ ERROR: variable does not need to be mutable
} |
<|file_name|>metadata.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2016 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
"""Utilities for managing GeoNode resource metadata
"""
# Standard Modules
import logging
import datetime
from lxml import etree
# Geonode functionality
from geonode import GeoNodeException
# OWSLib functionality
from owslib.csw import CswRecord
from owslib.iso import MD_Metadata
from owslib.fgdc import Metadata
from django.utils import timezone
LOGGER = logging.getLogger(__name__)
def set_metadata(xml):
"""Generate dict of model properties based on XML metadata"""
# check if document is XML
try:
exml = etree.fromstring(xml)
except Exception as err:
raise GeoNodeException(
'Uploaded XML document is not XML: %s' % str(err))
# check if document is an accepted XML metadata format
tagname = get_tagname(exml)
if tagname == 'GetRecordByIdResponse': # strip CSW element
LOGGER.info('stripping CSW root element')
exml = exml.getchildren()[0]
tagname = get_tagname(exml)
if tagname == 'MD_Metadata': # ISO
identifier, vals, regions, keywords = iso2dict(exml)
elif tagname == 'metadata': # FGDC
identifier, vals, regions, keywords = fgdc2dict(exml)
elif tagname == 'Record': # Dublin Core
identifier, vals, regions, keywords = dc2dict(exml)
else:
raise RuntimeError('Unsupported metadata format')
if not vals.get("date"):
vals["date"] = datetime.datetime.now(timezone.get_current_timezone()).strftime("%Y-%m-%dT%H:%M:%S")
return [identifier, vals, regions, keywords]
def iso2dict(exml):
"""generate dict of properties from gmd:MD_Metadata"""
vals = {}
regions = []
keywords = []
mdata = MD_Metadata(exml)
identifier = mdata.identifier
vals['language'] = mdata.language or mdata.languagecode or 'eng'
vals['spatial_representation_type'] = mdata.hierarchy
vals['date'] = sniff_date(mdata.datestamp)
if hasattr(mdata, 'identification'):
vals['title'] = mdata.identification.title
vals['abstract'] = mdata.identification.abstract
vals['purpose'] = mdata.identification.purpose
if mdata.identification.supplementalinformation is not None:
vals['supplemental_information'] = \
mdata.identification.supplementalinformation
vals['temporal_extent_start'] = \
mdata.identification.temporalextent_start
vals['temporal_extent_end'] = \
mdata.identification.temporalextent_end
if len(mdata.identification.topiccategory) > 0:
vals['topic_category'] = mdata.identification.topiccategory[0]
if (hasattr(mdata.identification, 'keywords') and
len(mdata.identification.keywords) > 0):
for kw in mdata.identification.keywords:
if kw['type'] == "place":
regions.extend(kw['keywords'])
else:
keywords.extend(kw['keywords'])
if len(mdata.identification.otherconstraints) > 0:
vals['constraints_other'] = \
mdata.identification.otherconstraints[0]
vals['purpose'] = mdata.identification.purpose
if mdata.dataquality is not None:
vals['data_quality_statement'] = mdata.dataquality.lineage
return [identifier, vals, regions, keywords]
def fgdc2dict(exml):
"""generate dict of properties from FGDC metadata"""
vals = {}
regions = []
keywords = []
mdata = Metadata(exml)
identifier = mdata.idinfo.datasetid
if hasattr(mdata.idinfo, 'citation'):
if hasattr(mdata.idinfo.citation, 'citeinfo'):
vals['spatial_representation_type'] = \
mdata.idinfo.citation.citeinfo['geoform']
vals['title'] = mdata.idinfo.citation.citeinfo['title']
if hasattr(mdata.idinfo, 'descript'):
vals['abstract'] = mdata.idinfo.descript.abstract
vals['purpose'] = mdata.idinfo.descript.purpose
if mdata.idinfo.descript.supplinf is not None:
vals['supplemental_information'] = mdata.idinfo.descript.supplinf
if hasattr(mdata.idinfo, 'keywords'):
if mdata.idinfo.keywords.theme:
for theme in mdata.idinfo.keywords.theme:
if theme['themekt'] is not None:
lowered_themekt = theme['themekt'].lower()
# Owslib doesn't support extracting the Topic Category
# from FGDC. So we add support here.
# http://www.fgdc.gov/metadata/geospatial-metadata-standards
if all(
ss in lowered_themekt for ss in [
'iso',
'19115',
'topic']) and any(
ss in lowered_themekt for ss in [
'category',
'categories']):<|fim▁hole|> if mdata.idinfo.keywords.place:
for place in mdata.idinfo.keywords.place:
if 'placekey' in place:
regions.extend(place['placekey'])
if hasattr(mdata.idinfo.timeperd, 'timeinfo'):
if hasattr(mdata.idinfo.timeperd.timeinfo, 'rngdates'):
vals['temporal_extent_start'] = \
sniff_date(mdata.idinfo.timeperd.timeinfo.rngdates.begdate)
vals['temporal_extent_end'] = \
sniff_date(mdata.idinfo.timeperd.timeinfo.rngdates.enddate)
vals['constraints_other'] = mdata.idinfo.useconst
raw_date = mdata.metainfo.metd
if raw_date is not None:
vals['date'] = sniff_date(raw_date)
return [identifier, vals, regions, keywords]
def dc2dict(exml):
"""generate dict of properties from csw:Record"""
vals = {}
regions = []
keywords = []
mdata = CswRecord(exml)
identifier = mdata.identifier
vals['language'] = mdata.language
vals['spatial_representation_type'] = mdata.type
keywords = mdata.subjects
regions = [mdata.spatial]
vals['temporal_extent_start'] = mdata.temporal
vals['temporal_extent_end'] = mdata.temporal
vals['constraints_other'] = mdata.license
vals['date'] = sniff_date(mdata.modified)
vals['title'] = mdata.title
vals['abstract'] = mdata.abstract
return [identifier, vals, regions, keywords]
def sniff_date(datestr):
"""
Attempt to parse date into datetime.datetime object
Possible inputs:
'20001122'
'2000-11-22'
'2000-11-22T11:11:11Z'
'2000-11-22T'
"""
dateformats = ('%Y%m%d', '%Y-%m-%d', '%Y-%m-%dT%H:%M:%SZ',
'%Y-%m-%dT', '%Y/%m/%d')
for dfmt in dateformats:
try:
return datetime.datetime.strptime(datestr.strip(), dfmt)
except (ValueError, AttributeError):
pass
return ""
def get_tagname(element):
"""get tagname without namespace"""
try:
tagname = element.tag.split('}')[1]
except IndexError:
tagname = element.tag
return tagname<|fim▁end|> | vals['topic_category'] = theme['themekey'][0]
keywords.extend(theme['themekey'])
|
<|file_name|>config_installer.py<|end_file_name|><|fim▁begin|>import os
from conans.tools import unzip
import shutil
from conans.util.files import rmdir, mkdir
from conans.client.remote_registry import RemoteRegistry
from conans import tools
from conans.errors import ConanException
def _handle_remotes(registry_path, remote_file, output):
registry = RemoteRegistry(registry_path, output)
new_registry = RemoteRegistry(remote_file, output)
registry.define_remotes(new_registry.remotes)
def _handle_profiles(source_folder, target_folder, output):
mkdir(target_folder)
for root, _, files in os.walk(source_folder):
relative_path = os.path.relpath(root, source_folder)
if relative_path == ".":
relative_path = ""
for f in files:
profile = os.path.join(relative_path, f)
output.info(" Installing profile %s" % profile)
shutil.copy(os.path.join(root, f), os.path.join(target_folder, profile))
def _process_git_repo(repo_url, client_cache, output, runner, tmp_folder):
output.info("Trying to clone repo %s" % repo_url)
with tools.chdir(tmp_folder):
runner('git clone "%s" config' % repo_url, output=output)
tmp_folder = os.path.join(tmp_folder, "config")
_process_folder(tmp_folder, client_cache, output)
def _process_zip_file(zippath, client_cache, output, tmp_folder, remove=False):
unzip(zippath, tmp_folder)
if remove:
os.unlink(zippath)
_process_folder(tmp_folder, client_cache, output)
def _handle_conan_conf(current_conan_conf, new_conan_conf_path):
current_conan_conf.read(new_conan_conf_path)
with open(current_conan_conf.filename, "w") as f:
current_conan_conf.write(f)<|fim▁hole|>def _process_folder(folder, client_cache, output):
for root, dirs, files in os.walk(folder):
for f in files:
if f == "settings.yml":
output.info("Installing settings.yml")
settings_path = client_cache.settings_path
shutil.copy(os.path.join(root, f), settings_path)
elif f == "conan.conf":
output.info("Processing conan.conf")
conan_conf = client_cache.conan_config
_handle_conan_conf(conan_conf, os.path.join(root, f))
elif f == "remotes.txt":
output.info("Defining remotes")
registry_path = client_cache.registry
_handle_remotes(registry_path, os.path.join(root, f), output)
else:
output.info("Copying file %s to %s" % (f, client_cache.conan_folder))
shutil.copy(os.path.join(root, f), client_cache.conan_folder)
for d in dirs:
if d == "profiles":
output.info("Installing profiles")
profiles_path = client_cache.profiles_path
_handle_profiles(os.path.join(root, d), profiles_path, output)
break
dirs[:] = [d for d in dirs if d not in ("profiles", ".git")]
def _process_download(item, client_cache, output, tmp_folder):
output.info("Trying to download %s" % item)
zippath = os.path.join(tmp_folder, "config.zip")
tools.download(item, zippath, out=output)
_process_zip_file(zippath, client_cache, output, tmp_folder, remove=True)
def configuration_install(item, client_cache, output, runner):
tmp_folder = os.path.join(client_cache.conan_folder, "tmp_config_install")
# necessary for Mac OSX, where the temp folders in /var/ are symlinks to /private/var/
tmp_folder = os.path.realpath(tmp_folder)
mkdir(tmp_folder)
try:
if item is None:
try:
item = client_cache.conan_config.get_item("general.config_install")
except ConanException:
raise ConanException("Called config install without arguments and "
"'general.config_install' not defined in conan.conf")
if item.endswith(".git"):
_process_git_repo(item, client_cache, output, runner, tmp_folder)
elif os.path.exists(item):
# is a local file
_process_zip_file(item, client_cache, output, tmp_folder)
elif item.startswith("http"):
_process_download(item, client_cache, output, tmp_folder)
else:
raise ConanException("I don't know how to process %s" % item)
finally:
if item:
client_cache.conan_config.set_item("general.config_install", item)
rmdir(tmp_folder)<|fim▁end|> | |
<|file_name|>anoncreds.rs<|end_file_name|><|fim▁begin|>extern crate libc;
extern crate serde_json;
use api::{ErrorCode, IndyHandle, CommandHandle, WalletHandle, SearchHandle};
use errors::prelude::*;
use commands::{Command, CommandExecutor};
use commands::anoncreds::AnoncredsCommand;
use commands::anoncreds::issuer::IssuerCommand;
use commands::anoncreds::prover::ProverCommand;
use commands::anoncreds::verifier::VerifierCommand;
use domain::anoncreds::schema::{Schema, AttributeNames};
use domain::anoncreds::credential_definition::{CredentialDefinition, CredentialDefinitionConfig};
use domain::anoncreds::credential_offer::CredentialOffer;
use domain::anoncreds::credential_request::{CredentialRequest, CredentialRequestMetadata};
use domain::anoncreds::credential::{Credential, AttributeValues};
use domain::anoncreds::revocation_registry_definition::{RevocationRegistryConfig, RevocationRegistryDefinition};
use domain::anoncreds::revocation_registry_delta::RevocationRegistryDelta;
use domain::anoncreds::proof::Proof;
use domain::anoncreds::proof_request::{ProofRequest, ProofRequestExtraQuery};
use domain::anoncreds::requested_credential::RequestedCredentials;
use domain::anoncreds::revocation_registry::RevocationRegistry;
use domain::anoncreds::revocation_state::RevocationState;
use utils::ctypes;
use self::libc::c_char;
use std::ptr;
use std::collections::HashMap;
/// Create credential schema entity that describes credential attributes list and allows credentials
/// interoperability.
///
/// Schema is public and intended to be shared with all anoncreds workflow actors usually by publishing SCHEMA transaction
/// to Indy distributed ledger.
///
/// It is IMPORTANT for current version POST Schema in Ledger and after that GET it from Ledger
/// with correct seq_no to save compatibility with Ledger.
/// After that can call indy_issuer_create_and_store_credential_def to build corresponding Credential Definition.
///
/// #Params
/// command_handle: command handle to map callback to user context
/// issuer_did: DID of schema issuer
/// name: a name the schema
/// version: a version of the schema
/// attrs: a list of schema attributes descriptions (the number of attributes should be less or equal than 125)
/// cb: Callback that takes command result as parameter
///
/// #Returns
/// schema_id: identifier of created schema
/// schema_json: schema as json
///
/// #Errors
/// Common*
/// Anoncreds*
#[no_mangle]
pub extern fn indy_issuer_create_schema(command_handle: CommandHandle,
issuer_did: *const c_char,
name: *const c_char,
version: *const c_char,
attrs: *const c_char,
cb: Option<extern fn(command_handle_: CommandHandle, err: ErrorCode,
schema_id: *const c_char, schema_json: *const c_char)>) -> ErrorCode {
trace!("indy_issuer_create_schema: >>> issuer_did: {:?}, name: {:?}, version: {:?}, attrs: {:?}", issuer_did, name, version, attrs);
check_useful_c_str!(issuer_did, ErrorCode::CommonInvalidParam2);
check_useful_c_str!(name, ErrorCode::CommonInvalidParam3);
check_useful_c_str!(version, ErrorCode::CommonInvalidParam4);
check_useful_json!(attrs, ErrorCode::CommonInvalidParam5, AttributeNames);
check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam6);
trace!("indy_issuer_create_schema: entity >>> issuer_did: {:?}, name: {:?}, version: {:?}, attrs: {:?}", issuer_did, name, version, attrs);
if attrs.is_empty() {
return err_msg(IndyErrorKind::InvalidStructure, "Empty list of Schema attributes has been passed").into();
}
let result = CommandExecutor::instance()
.send(Command::Anoncreds(
AnoncredsCommand::Issuer(
IssuerCommand::CreateSchema(
issuer_did,
name,
version,
attrs,
Box::new(move |result| {
let (err, id, schema_json) = prepare_result_2!(result, String::new(), String::new());
trace!("indy_crypto_cl_credential_public_key_to_json: id: {:?}, schema_json: {:?}", id, schema_json);
let id = ctypes::string_to_cstring(id);
let schema_json = ctypes::string_to_cstring(schema_json);
cb(command_handle, err, id.as_ptr(), schema_json.as_ptr())
})
))));
let res = prepare_result!(result);
trace!("indy_issuer_create_schema: <<< res: {:?}", res);
res
}
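// Illustrative caller sketch (an assumption, not part of the libindy FFI surface above): one
// hypothetical way a Rust consumer could drive indy_issuer_create_schema. The DID, schema
// name/version, attrs JSON and callback name are placeholders; real callers usually block on
// the callback (e.g. via a channel) before using the returned schema_id / schema_json.
//
//     use std::ffi::{CStr, CString};
//
//     extern fn on_schema(_cmd: CommandHandle, err: ErrorCode,
//                         schema_id: *const c_char, schema_json: *const c_char) {
//         if err == ErrorCode::Success {
//             // Copy the returned C strings before the callback returns.
//             let id = unsafe { CStr::from_ptr(schema_id) }.to_string_lossy();
//             let json = unsafe { CStr::from_ptr(schema_json) }.to_string_lossy();
//             println!("schema {} -> {}", id, json);
//         }
//     }
//
//     let did = CString::new("NcYxiDXkpYi6ov5FcYDi1e").unwrap();  // placeholder issuer DID
//     let name = CString::new("gvt").unwrap();
//     let version = CString::new("1.0").unwrap();
//     let attrs = CString::new(r#"["name", "age"]"#).unwrap();    // AttributeNames as a JSON list
//     let rc = indy_issuer_create_schema(1, did.as_ptr(), name.as_ptr(),
//                                        version.as_ptr(), attrs.as_ptr(), Some(on_schema));
//     // rc == ErrorCode::Success only means the command was queued; results arrive in on_schema.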
/// Create credential definition entity that encapsulates credentials issuer DID, credential schema, secrets used for signing credentials
/// and secrets used for credentials revocation.
///
/// Credential definition entity contains private and public parts. Private part will be stored in the wallet. Public part
/// will be returned as json intended to be shared with all anoncreds workflow actors usually by publishing CRED_DEF transaction
/// to Indy distributed ledger.
///
/// It is IMPORTANT for current version GET Schema from Ledger with correct seq_no to save compatibility with Ledger.
///
/// #Params
/// wallet_handle: wallet handler (created by open_wallet).
/// command_handle: command handle to map callback to user context.
/// issuer_did: a DID of the issuer signing cred_def transaction to the Ledger
/// schema_json: credential schema as a json
/// tag: allows to distinct between credential definitions for the same issuer and schema
/// signature_type: credential definition type (optional, 'CL' by default) that defines credentials signature and revocation math. Supported types are:
/// - 'CL': Camenisch-Lysyanskaya credential signature type
/// config_json: (optional) type-specific configuration of credential definition as json:
/// - 'CL':
/// - support_revocation: whether to request non-revocation credential (optional, default false)
/// cb: Callback that takes command result as parameter.
///
/// #Returns
/// cred_def_id: identifier of created credential definition
/// cred_def_json: public part of created credential definition
///
/// #Errors
/// Common*
/// Wallet*
/// Anoncreds*
#[no_mangle]
pub extern fn indy_issuer_create_and_store_credential_def(command_handle: CommandHandle,
wallet_handle: WalletHandle,
issuer_did: *const c_char,
schema_json: *const c_char,
tag: *const c_char,
signature_type: *const c_char,
config_json: *const c_char,
cb: Option<extern fn(command_handle_: CommandHandle, err: ErrorCode,
cred_def_id: *const c_char,
cred_def_json: *const c_char)>) -> ErrorCode {
trace!("indy_issuer_create_and_store_credential_def: >>> wallet_handle: {:?}, issuer_did: {:?}, schema_json: {:?}, tag: {:?}, \
signature_type: {:?}, config_json: {:?}", wallet_handle, issuer_did, schema_json, tag, signature_type, config_json);
check_useful_c_str!(issuer_did, ErrorCode::CommonInvalidParam3);
check_useful_json!(schema_json, ErrorCode::CommonInvalidParam4, Schema);
check_useful_c_str!(tag, ErrorCode::CommonInvalidParam5);
check_useful_opt_c_str!(signature_type, ErrorCode::CommonInvalidParam6);
check_useful_opt_json!(config_json, ErrorCode::CommonInvalidParam7, CredentialDefinitionConfig);
check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam8);
trace!("indy_issuer_create_and_store_credential_def: entities >>> wallet_handle: {:?}, issuer_did: {:?}, schema_json: {:?}, tag: {:?}, \
signature_type: {:?}, config_json: {:?}", wallet_handle, issuer_did, schema_json, tag, signature_type, config_json);
let result = CommandExecutor::instance()
.send(Command::Anoncreds(
AnoncredsCommand::Issuer(
IssuerCommand::CreateAndStoreCredentialDefinition(
wallet_handle,
issuer_did,
schema_json,
tag,
signature_type,
config_json,
Box::new(move |result| {
let (err, cred_def_id, cred_def_json) = prepare_result_2!(result, String::new(), String::new());
trace!("indy_issuer_create_and_store_credential_def: cred_def_id: {:?}, cred_def_json: {:?}", cred_def_id, cred_def_json);
let cred_def_id = ctypes::string_to_cstring(cred_def_id);
let cred_def_json = ctypes::string_to_cstring(cred_def_json);
cb(command_handle, err, cred_def_id.as_ptr(), cred_def_json.as_ptr())
})
))));
let res = prepare_result!(result);
trace!("indy_issuer_create_and_store_credential_def: <<< res: {:?}", res);
res
}
/// Create a new revocation registry for the given credential definition as tuple of entities
/// - Revocation registry definition that encapsulates credentials definition reference, revocation type specific configuration and
/// secrets used for credentials revocation
/// - Revocation registry state that stores the information about revoked entities in a non-disclosing way. The state can be
/// represented as ordered list of revocation registry entries were each entry represents the list of revocation or issuance operations.
///
/// Revocation registry definition entity contains private and public parts. Private part will be stored in the wallet. Public part
/// will be returned as json intended to be shared with all anoncreds workflow actors usually by publishing REVOC_REG_DEF transaction
/// to Indy distributed ledger.
///
/// Revocation registry state is stored on the wallet and also intended to be shared as the ordered list of REVOC_REG_ENTRY transactions.
/// This call initializes the state in the wallet and returns the initial entry.
///
/// Some revocation registry types (for example, 'CL_ACCUM') can require generation of binary blob called tails used to hide information about revoked credentials in public
/// revocation registry and intended to be distributed out of leger (REVOC_REG_DEF transaction will still contain uri and hash of tails).
/// This call requires access to pre-configured blob storage writer instance handle that will allow to write generated tails.
///
/// #Params
/// wallet_handle: wallet handler (created by open_wallet).
/// command_handle: command handle to map callback to user context.
/// issuer_did: a DID of the issuer signing transaction to the Ledger
/// revoc_def_type: revocation registry type (optional, default value depends on credential definition type). Supported types are:
/// - 'CL_ACCUM': Type-3 pairing based accumulator. Default for 'CL' credential definition type
/// tag: allows to distinct between revocation registries for the same issuer and credential definition
/// cred_def_id: id of stored in ledger credential definition
/// config_json: type-specific configuration of revocation registry as json:
/// - 'CL_ACCUM': {
/// "issuance_type": (optional) type of issuance. Currently supported:
/// 1) ISSUANCE_BY_DEFAULT: all indices are assumed to be issued and initial accumulator is calculated over all indices;
/// Revocation Registry is updated only during revocation.
/// 2) ISSUANCE_ON_DEMAND: nothing is issued initially accumulator is 1 (used by default);
/// "max_cred_num": maximum number of credentials the new registry can process (optional, default 100000)
/// }
/// tails_writer_handle: handle of blob storage to store tails
/// cb: Callback that takes command result as parameter.
///
/// #Returns
/// revoc_reg_id: identifier of created revocation registry definition
/// revoc_reg_def_json: public part of revocation registry definition
/// revoc_reg_entry_json: revocation registry entry that defines initial state of revocation registry
///
/// #Errors
/// Common*
/// Wallet*
/// Anoncreds*
#[no_mangle]
pub extern fn indy_issuer_create_and_store_revoc_reg(command_handle: CommandHandle,
wallet_handle: WalletHandle,
issuer_did: *const c_char,
revoc_def_type: *const c_char,
tag: *const c_char,
cred_def_id: *const c_char,
config_json: *const c_char,
tails_writer_handle: IndyHandle,
cb: Option<extern fn(command_handle_: CommandHandle, err: ErrorCode,
revoc_reg_id: *const c_char,
revoc_reg_def_json: *const c_char,
revoc_reg_entry_json: *const c_char)>) -> ErrorCode {
trace!("indy_issuer_create_and_store_credential_def: >>> wallet_handle: {:?}, issuer_did: {:?}, revoc_def_type: {:?}, tag: {:?}, \
cred_def_id: {:?}, config_json: {:?}, tails_writer_handle: {:?}", wallet_handle, issuer_did, revoc_def_type, tag, cred_def_id, config_json, tails_writer_handle);
check_useful_c_str!(issuer_did, ErrorCode::CommonInvalidParam3);
check_useful_opt_c_str!(revoc_def_type, ErrorCode::CommonInvalidParam4);
check_useful_c_str!(tag, ErrorCode::CommonInvalidParam5);
check_useful_c_str!(cred_def_id, ErrorCode::CommonInvalidParam6);
check_useful_json!(config_json, ErrorCode::CommonInvalidParam7, RevocationRegistryConfig);
check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam9);
trace!("indy_issuer_create_and_store_credential_def: entities >>> wallet_handle: {:?}, issuer_did: {:?}, revoc_def_type: {:?}, tag: {:?}, \
cred_def_id: {:?}, config_json: {:?}, tails_writer_handle: {:?}", wallet_handle, issuer_did, revoc_def_type, tag, cred_def_id, config_json, tails_writer_handle);
let result = CommandExecutor::instance()
.send(Command::Anoncreds(
AnoncredsCommand::Issuer(
IssuerCommand::CreateAndStoreRevocationRegistry(
wallet_handle,
issuer_did,
revoc_def_type,
tag,
cred_def_id,
config_json,
tails_writer_handle,
Box::new(move |result| {
let (err, revoc_reg_id, revoc_reg_def_json, revoc_reg_json) = prepare_result_3!(result, String::new(), String::new(), String::new());
trace!("indy_issuer_create_and_store_credential_def: revoc_reg_id: {:?}, revoc_reg_def_json: {:?}, revoc_reg_json: {:?}",
revoc_reg_id, revoc_reg_def_json, revoc_reg_json);
let revoc_reg_id = ctypes::string_to_cstring(revoc_reg_id);
let revoc_reg_def_json = ctypes::string_to_cstring(revoc_reg_def_json);
let revoc_reg_json = ctypes::string_to_cstring(revoc_reg_json);
cb(command_handle, err, revoc_reg_id.as_ptr(), revoc_reg_def_json.as_ptr(), revoc_reg_json.as_ptr())
})
))));
let res = prepare_result!(result);
trace!("indy_issuer_create_and_store_credential_def: <<< res: {:?}", res);
res
}
/// Create credential offer that will be used by Prover for
/// credential request creation. Offer includes nonce and key correctness proof
/// for authentication between protocol steps and integrity checking.
///
/// #Params
/// command_handle: command handle to map callback to user context
/// wallet_handle: wallet handler (created by open_wallet)
/// cred_def_id: id of credential definition stored in the wallet
/// cb: Callback that takes command result as parameter
///
/// #Returns
/// credential offer json:
/// {
/// "schema_id": string,
/// "cred_def_id": string,
/// // Fields below can depend on Cred Def type
/// "nonce": string,
/// "key_correctness_proof" : <key_correctness_proof>
/// }
///
/// #Errors
/// Common*
/// Wallet*
/// Anoncreds*
#[no_mangle]
pub extern fn indy_issuer_create_credential_offer(command_handle: CommandHandle,
wallet_handle: WalletHandle,
cred_def_id: *const c_char,
cb: Option<extern fn(command_handle_: CommandHandle, err: ErrorCode,
cred_offer_json: *const c_char)>) -> ErrorCode {
trace!("indy_issuer_create_credential_offer: >>> wallet_handle: {:?}, cred_def_id: {:?}", wallet_handle, cred_def_id);
check_useful_c_str!(cred_def_id, ErrorCode::CommonInvalidParam3);
check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam4);
trace!("indy_issuer_create_credential_offer: entities >>> wallet_handle: {:?}, cred_def_id: {:?}", wallet_handle, cred_def_id);
let result = CommandExecutor::instance()
.send(Command::Anoncreds(
AnoncredsCommand::Issuer(
IssuerCommand::CreateCredentialOffer(
wallet_handle,
cred_def_id,
Box::new(move |result| {
let (err, cred_offer_json) = prepare_result_1!(result, String::new());
trace!("indy_issuer_create_credential_offer: cred_offer_json: {:?}", cred_offer_json);
let cred_offer_json = ctypes::string_to_cstring(cred_offer_json);
cb(command_handle, err, cred_offer_json.as_ptr())
})
))));
let res = prepare_result!(result);
trace!("indy_issuer_create_credential_offer: <<< res: {:?}", res);
res
}
/// Check Cred Request for the given Cred Offer and issue Credential for the given Cred Request.
///
/// Cred Request must match Cred Offer. The credential definition and revocation registry definition
/// referenced in Cred Offer and Cred Request must be already created and stored into the wallet.
///
/// Information for this credential revocation will be store in the wallet as part of revocation registry under
/// generated cred_revoc_id local for this wallet.
///
/// This call returns revoc registry delta as json file intended to be shared as REVOC_REG_ENTRY transaction.
/// Note that it is possible to accumulate deltas to reduce ledger load.
///
/// #Params
/// wallet_handle: wallet handler (created by open_wallet).
/// command_handle: command handle to map callback to user context.
/// cred_offer_json: a cred offer created by indy_issuer_create_credential_offer
/// cred_req_json: a credential request created by indy_prover_create_credential_req
/// cred_values_json: a credential containing attribute values for each of requested attribute names.
/// Example:
/// {
/// "attr1" : {"raw": "value1", "encoded": "value1_as_int" },
/// "attr2" : {"raw": "value1", "encoded": "value1_as_int" }
/// }
/// rev_reg_id: id of revocation registry stored in the wallet
/// blob_storage_reader_handle: configuration of blob storage reader handle that will allow to read revocation tails
/// cb: Callback that takes command result as parameter.
///
/// #Returns
/// cred_json: Credential json containing signed credential values
/// {
/// "schema_id": string,
/// "cred_def_id": string,
/// "rev_reg_def_id", Optional<string>,
/// "values": <see cred_values_json above>,
/// // Fields below can depend on Cred Def type
/// "signature": <signature>,
/// "signature_correctness_proof": <signature_correctness_proof>
/// }
/// cred_revoc_id: local id for revocation info (Can be used for revocation of this credential)
/// revoc_reg_delta_json: Revocation registry delta json with a newly issued credential
///
/// #Errors
/// Anoncreds*
/// Common*
/// Wallet*
#[no_mangle]
pub extern fn indy_issuer_create_credential(command_handle: CommandHandle,
wallet_handle: WalletHandle,
cred_offer_json: *const c_char,
cred_req_json: *const c_char,
cred_values_json: *const c_char,
rev_reg_id: *const c_char,
blob_storage_reader_handle: IndyHandle,
cb: Option<extern fn(command_handle_: CommandHandle, err: ErrorCode,
cred_json: *const c_char,
cred_revoc_id: *const c_char,
revoc_reg_delta_json: *const c_char)>) -> ErrorCode {
trace!("indy_issuer_create_credential: >>> wallet_handle: {:?}, cred_offer_json: {:?}, cred_req_json: {:?}, cred_values_json: {:?}, rev_reg_id: {:?}, \
blob_storage_reader_handle: {:?}", wallet_handle, cred_offer_json, cred_req_json, cred_values_json, rev_reg_id, blob_storage_reader_handle);
check_useful_json!(cred_offer_json, ErrorCode::CommonInvalidParam3, CredentialOffer);
check_useful_json!(cred_req_json, ErrorCode::CommonInvalidParam4, CredentialRequest);
check_useful_json!(cred_values_json, ErrorCode::CommonInvalidParam5, HashMap<String, AttributeValues>);
check_useful_opt_c_str!(rev_reg_id, ErrorCode::CommonInvalidParam6);
check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam8);
let blob_storage_reader_handle = if blob_storage_reader_handle != -1 { Some(blob_storage_reader_handle) } else { None };
trace!("indy_issuer_create_credential: entities >>> wallet_handle: {:?}, cred_offer_json: {:?}, cred_req_json: {:?}, cred_values_json: {:?}, rev_reg_id: {:?}, \
blob_storage_reader_handle: {:?}", wallet_handle, cred_offer_json, secret!(&cred_req_json), secret!(&cred_values_json), secret!(&rev_reg_id), blob_storage_reader_handle);
let result = CommandExecutor::instance()
.send(Command::Anoncreds(
AnoncredsCommand::Issuer(
IssuerCommand::CreateCredential(
wallet_handle,
cred_offer_json,
cred_req_json,
cred_values_json,
rev_reg_id,
blob_storage_reader_handle,
Box::new(move |result| {
let (err, cred_json, revoc_id, revoc_reg_delta_json) = prepare_result_3!(result, String::new(), None, None);
trace!("indy_issuer_create_credential: cred_json: {:?}, revoc_id: {:?}, revoc_reg_delta_json: {:?}",
secret!(cred_json.as_str()), secret!(&revoc_id), revoc_reg_delta_json);
let cred_json = ctypes::string_to_cstring(cred_json);
let revoc_id = revoc_id.map(ctypes::string_to_cstring);
let revoc_reg_delta_json = revoc_reg_delta_json.map(ctypes::string_to_cstring);
cb(command_handle, err, cred_json.as_ptr(),
revoc_id.as_ref().map(|id| id.as_ptr()).unwrap_or(ptr::null()),
revoc_reg_delta_json.as_ref().map(|delta| delta.as_ptr()).unwrap_or(ptr::null()))
})
))));
let res = prepare_result!(result);
trace!("indy_issuer_create_credential: <<< res: {:?}", res);
res
}
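// Illustrative ordering sketch (an assumption drawn from the doc comments above, not upstream
// code): the usual issuance round-trip these calls imply. Handles and JSON literals are
// placeholders; indy_prover_create_credential_req belongs to the prover part of this API.
//
//     1. issuer:  indy_issuer_create_credential_offer(wallet, cred_def_id)      -> cred_offer_json
//     2. prover:  indy_prover_create_credential_req(..., cred_offer_json, ...)  -> cred_req_json + metadata
//     3. issuer:  indy_issuer_create_credential(wallet, cred_offer_json, cred_req_json,
//                     r#"{"name": {"raw": "Alex", "encoded": "1234"}}"#,         // cred_values_json
//                     rev_reg_id /* may be null */, blob_storage_reader_handle /* -1 if unused */,
//                     cb)                                                        -> cred_json
//
// The -1 sentinel for blob_storage_reader_handle matches the `!= -1` check in the wrapper above.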
/// Revoke a credential identified by a cred_revoc_id (returned by indy_issuer_create_credential).
///
/// The corresponding credential definition and revocation registry must be already
/// created an stored into the wallet.
///
/// This call returns revoc registry delta as json file intended to be shared as REVOC_REG_ENTRY transaction.
/// Note that it is possible to accumulate deltas to reduce ledger load.
///
/// #Params
/// command_handle: command handle to map callback to user context.
/// wallet_handle: wallet handler (created by open_wallet).
/// blob_storage_reader_cfg_handle: configuration of blob storage reader handle that will allow to read revocation tails
/// rev_reg_id: id of revocation registry stored in wallet
/// cred_revoc_id: local id for revocation info
/// cb: Callback that takes command result as parameter.
///
/// #Returns
/// revoc_reg_delta_json: Revocation registry delta json with a revoked credential
///
/// #Errors
/// Anoncreds*
/// Common*
/// Wallet*
#[no_mangle]
pub extern fn indy_issuer_revoke_credential(command_handle: CommandHandle,
wallet_handle: WalletHandle,
blob_storage_reader_cfg_handle: IndyHandle,
rev_reg_id: *const c_char,
cred_revoc_id: *const c_char,
cb: Option<extern fn(command_handle_: CommandHandle, err: ErrorCode,
revoc_reg_delta_json: *const c_char)>) -> ErrorCode {
trace!("indy_issuer_revoke_credential: >>> wallet_handle: {:?}, blob_storage_reader_cfg_handle: {:?}, rev_reg_id: {:?}, cred_revoc_id: {:?}",
wallet_handle, blob_storage_reader_cfg_handle, rev_reg_id, cred_revoc_id);
check_useful_c_str!(rev_reg_id, ErrorCode::CommonInvalidParam4);
check_useful_c_str!(cred_revoc_id, ErrorCode::CommonInvalidParam5);
check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam6);
trace!("indy_issuer_revoke_credential: entities >>> wallet_handle: {:?}, blob_storage_reader_cfg_handle: {:?}, rev_reg_id: {:?}, cred_revoc_id: {:?}",
wallet_handle, blob_storage_reader_cfg_handle, rev_reg_id, secret!(cred_revoc_id.as_str()));
let result = CommandExecutor::instance()
.send(Command::Anoncreds(
AnoncredsCommand::Issuer(
IssuerCommand::RevokeCredential(
wallet_handle,
blob_storage_reader_cfg_handle,
rev_reg_id,
cred_revoc_id,
Box::new(move |result| {
let (err, revoc_reg_update_json) = prepare_result_1!(result, String::new());
trace!("indy_issuer_revoke_credential: revoc_reg_update_json: {:?}", revoc_reg_update_json);
let revoc_reg_update_json = ctypes::string_to_cstring(revoc_reg_update_json);
cb(command_handle, err, revoc_reg_update_json.as_ptr())
})
))));
let res = prepare_result!(result);
trace!("indy_issuer_revoke_credential: <<< res: {:?}", res);
res
}
/*/// Recover a credential identified by a cred_revoc_id (returned by indy_issuer_create_credential).
///
/// The corresponding credential definition and revocation registry must be already
/// created an stored into the wallet.
///
/// This call returns revoc registry delta as json file intended to be shared as REVOC_REG_ENTRY transaction.
/// Note that it is possible to accumulate deltas to reduce ledger load.
///
/// #Params
/// command_handle: command handle to map callback to user context.
/// wallet_handle: wallet handler (created by open_wallet).
/// blob_storage_reader_cfg_handle: configuration of blob storage reader handle that will allow to read revocation tails
/// rev_reg_id: id of revocation registry stored in wallet
/// cred_revoc_id: local id for revocation info
/// cb: Callback that takes command result as parameter.
///
/// #Returns
/// revoc_reg_delta_json: Revocation registry delta json with a recovered credential
///
/// #Errors
/// Annoncreds*
/// Common*
/// Wallet*
#[no_mangle]
pub extern fn indy_issuer_recover_credential(command_handle: CommandHandle,
wallet_handle: WalletHandle,
blob_storage_reader_cfg_handle: IndyHandle,
rev_reg_id: *const c_char,
cred_revoc_id: *const c_char,
cb: Option<extern fn(command_handle_: CommandHandle, err: ErrorCode,
revoc_reg_delta_json: *const c_char,
)>) -> ErrorCode {
check_useful_c_str!(rev_reg_id, ErrorCode::CommonInvalidParam4);
check_useful_c_str!(cred_revoc_id, ErrorCode::CommonInvalidParam5);
check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam6);
let result = CommandExecutor::instance()
.send(Command::Anoncreds(
AnoncredsCommand::Issuer(
IssuerCommand::RecoverCredential(
wallet_handle,
blob_storage_reader_cfg_handle,
rev_reg_id,
cred_revoc_id,
Box::new(move |result| {
let (err, revoc_reg_update_json) = prepare_result_1!(result, String::new());
let revoc_reg_update_json = ctypes::string_to_cstring(revoc_reg_update_json);
cb(command_handle, err, revoc_reg_update_json.as_ptr())
})
))));
prepare_result!(result)
}*/
/// Merge two revocation registry deltas (returned by indy_issuer_create_credential or indy_issuer_revoke_credential) to accumulate common delta.
/// Send common delta to ledger to reduce the load.
///
/// #Params
/// command_handle: command handle to map callback to user context.
/// rev_reg_delta_json: revocation registry delta.
/// other_rev_reg_delta_json: revocation registry delta for which PrevAccum value is equal to current accum value of rev_reg_delta_json.
/// cb: Callback that takes command result as parameter.
///
/// #Returns
/// merged_rev_reg_delta: Merged revocation registry delta
///
/// #Errors
/// Anoncreds*
/// Common*
/// Wallet*
#[no_mangle]
pub extern fn indy_issuer_merge_revocation_registry_deltas(command_handle: CommandHandle,
rev_reg_delta_json: *const c_char,
other_rev_reg_delta_json: *const c_char,
cb: Option<extern fn(command_handle_: CommandHandle, err: ErrorCode,
merged_rev_reg_delta: *const c_char)>) -> ErrorCode {
trace!("indy_issuer_merge_revocation_registry_deltas: >>> rev_reg_delta_json: {:?}, other_rev_reg_delta_json: {:?}",
rev_reg_delta_json, other_rev_reg_delta_json);
check_useful_json!(rev_reg_delta_json, ErrorCode::CommonInvalidParam2, RevocationRegistryDelta);
check_useful_json!(other_rev_reg_delta_json, ErrorCode::CommonInvalidParam3, RevocationRegistryDelta);
check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam4);
trace!("indy_issuer_merge_revocation_registry_deltas: entities >>> rev_reg_delta_json: {:?}, other_rev_reg_delta_json: {:?}",
rev_reg_delta_json, other_rev_reg_delta_json);
let result = CommandExecutor::instance()
.send(Command::Anoncreds(
AnoncredsCommand::Issuer(
IssuerCommand::MergeRevocationRegistryDeltas(
rev_reg_delta_json,
other_rev_reg_delta_json,
Box::new(move |result| {
let (err, merged_rev_reg_delta) = prepare_result_1!(result, String::new());
trace!("indy_issuer_merge_revocation_registry_deltas: merged_rev_reg_delta: {:?}", merged_rev_reg_delta);
let merged_rev_reg_delta = ctypes::string_to_cstring(merged_rev_reg_delta);
cb(command_handle, err, merged_rev_reg_delta.as_ptr())
})
))));
let res = prepare_result!(result);
trace!("indy_issuer_merge_revocation_registry_deltas: <<< res: {:?}", res);
res
}
/// Creates a master secret with a given id and stores it in the wallet.
/// The id must be unique.
///
/// #Params
/// wallet_handle: wallet handler (created by open_wallet).
/// command_handle: command handle to map callback to user context.
/// master_secret_id: (optional; if not present, a random one will be generated) new master secret id
///
/// #Returns
/// out_master_secret_id: Id of generated master secret
///
/// #Errors
/// Anoncreds*
/// Common*
/// Wallet*
#[no_mangle]
pub extern fn indy_prover_create_master_secret(command_handle: CommandHandle,
wallet_handle: WalletHandle,
master_secret_id: *const c_char,
cb: Option<extern fn(command_handle_: CommandHandle, err: ErrorCode,
out_master_secret_id: *const c_char)>) -> ErrorCode {
trace!("indy_prover_create_master_secret: >>> wallet_handle: {:?}, master_secret_id: {:?}", wallet_handle, master_secret_id);
check_useful_opt_c_str!(master_secret_id, ErrorCode::CommonInvalidParam3);
check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam4);
trace!("indy_prover_create_master_secret: entities >>> wallet_handle: {:?}, master_secret_id: {:?}", wallet_handle, master_secret_id);
let result = CommandExecutor::instance()
.send(Command::Anoncreds(
AnoncredsCommand::Prover(
ProverCommand::CreateMasterSecret(
wallet_handle,
master_secret_id,
Box::new(move |result| {
let (err, out_master_secret_id) = prepare_result_1!(result, String::new());
trace!("indy_prover_create_master_secret: out_master_secret_id: {:?}", out_master_secret_id);
let out_master_secret_id = ctypes::string_to_cstring(out_master_secret_id);
cb(command_handle, err, out_master_secret_id.as_ptr())
})
))));
let res = prepare_result!(result);
trace!("indy_prover_create_master_secret: <<< res: {:?}", res);
res
}
/// Creates a credential request for the given credential offer.
///
/// The method creates a blinded master secret for a master secret identified by a provided name.
/// The master secret identified by the name must be already stored in the secure wallet (see prover_create_master_secret)
/// The blinded master secret is a part of the credential request.
///
/// #Params
/// command_handle: command handle to map callback to user context
/// wallet_handle: wallet handler (created by open_wallet)
/// prover_did: a DID of the prover
/// cred_offer_json: credential offer as a json containing information about the issuer and a credential
/// cred_def_json: credential definition json related to <cred_def_id> in <cred_offer_json>
/// master_secret_id: the id of the master secret stored in the wallet
/// cb: Callback that takes command result as parameter.
///
/// #Returns
/// cred_req_json: Credential request json for creation of credential by Issuer
/// {
/// "prover_did" : string,
/// "cred_def_id" : string,
/// // Fields below can depend on Cred Def type
/// "blinded_ms" : <blinded_master_secret>,
/// "blinded_ms_correctness_proof" : <blinded_ms_correctness_proof>,
/// "nonce": string
/// }
/// cred_req_metadata_json: Credential request metadata json for further processing of the credential received from the Issuer.
/// Note: cred_req_metadata_json mustn't be shared with Issuer.
///
/// #Errors
/// Anoncreds*
/// Common*
/// Wallet*
#[no_mangle]
pub extern fn indy_prover_create_credential_req(command_handle: CommandHandle,
wallet_handle: WalletHandle,
prover_did: *const c_char,
cred_offer_json: *const c_char,
cred_def_json: *const c_char,
master_secret_id: *const c_char,
cb: Option<extern fn(command_handle_: CommandHandle, err: ErrorCode,
cred_req_json: *const c_char,
cred_req_metadata_json: *const c_char)>) -> ErrorCode {
trace!("indy_prover_create_credential_req: >>> wallet_handle: {:?}, prover_did: {:?}, cred_offer_json: {:?}, cred_def_json: {:?}, master_secret_id: {:?}",
wallet_handle, prover_did, cred_offer_json, cred_def_json, master_secret_id);
check_useful_c_str!(prover_did, ErrorCode::CommonInvalidParam3);
check_useful_json!(cred_offer_json, ErrorCode::CommonInvalidParam4, CredentialOffer);
check_useful_json!(cred_def_json, ErrorCode::CommonInvalidParam5, CredentialDefinition);
check_useful_c_str!(master_secret_id, ErrorCode::CommonInvalidParam6);
check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam7);
trace!("indy_prover_create_credential_req: entities >>> wallet_handle: {:?}, prover_did: {:?}, cred_offer_json: {:?}, cred_def_json: {:?}, master_secret_id: {:?}",
wallet_handle, prover_did, cred_offer_json, cred_def_json, master_secret_id);
let result = CommandExecutor::instance()
.send(Command::Anoncreds(
AnoncredsCommand::Prover(
ProverCommand::CreateCredentialRequest(
wallet_handle,
prover_did,
cred_offer_json,
cred_def_json,
master_secret_id,
Box::new(move |result| {
let (err, cred_req_json, cred_req_metadata_json) = prepare_result_2!(result, String::new(), String::new());
trace!("indy_prover_create_credential_req: cred_req_json: {:?}, cred_req_metadata_json: {:?}", cred_req_json, cred_req_metadata_json);
let cred_req_json = ctypes::string_to_cstring(cred_req_json);
let cred_req_metadata_json = ctypes::string_to_cstring(cred_req_metadata_json);
cb(command_handle, err, cred_req_json.as_ptr(), cred_req_metadata_json.as_ptr())
})
))));
let res = prepare_result!(result);
trace!("indy_prover_create_credential_req: <<< res: {:?}", res);
res
}
/// Check credential provided by Issuer for the given credential request,
/// updates the credential by a master secret and stores in a secure wallet.
///
/// To support efficient and flexible search the following tags will be created for stored credential:
/// {
/// "schema_id": <credential schema id>,
/// "schema_issuer_did": <credential schema issuer did>,
/// "schema_name": <credential schema name>,
/// "schema_version": <credential schema version>,
/// "issuer_did": <credential issuer did>,
/// "cred_def_id": <credential definition id>,
/// "rev_reg_id": <credential revocation registry id>, // "None" as string if not present
/// // for every attribute in <credential values>
/// "attr::<attribute name>::marker": "1",
/// "attr::<attribute name>::value": <attribute raw value>,
/// }
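///     (for example, a credential attribute "age" with raw value "27" is stored with the tags
///      "attr::age::marker": "1" and "attr::age::value": "27")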
///
/// #Params
/// command_handle: command handle to map callback to user context.
/// wallet_handle: wallet handler (created by open_wallet).
/// cred_id: (optional, default is a random one) identifier by which credential will be stored in the wallet
/// cred_req_metadata_json: a credential request metadata created by indy_prover_create_credential_req
/// cred_json: credential json received from issuer
/// cred_def_json: credential definition json related to <cred_def_id> in <cred_json>
/// rev_reg_def_json: revocation registry definition json related to <rev_reg_def_id> in <cred_json>
/// cb: Callback that takes command result as parameter.
///
/// #Returns
/// out_cred_id: identifier by which credential is stored in the wallet
///
/// #Errors
/// Anoncreds*
/// Common*
/// Wallet*
#[no_mangle]
pub extern fn indy_prover_store_credential(command_handle: CommandHandle,
wallet_handle: WalletHandle,
cred_id: *const c_char,
cred_req_metadata_json: *const c_char,
cred_json: *const c_char,
cred_def_json: *const c_char,
rev_reg_def_json: *const c_char,
cb: Option<extern fn(command_handle_: CommandHandle, err: ErrorCode,
out_cred_id: *const c_char)>) -> ErrorCode {
trace!("indy_prover_store_credential: >>> wallet_handle: {:?}, cred_id: {:?}, cred_req_metadata_json: {:?}, cred_json: {:?}, cred_def_json: {:?}, \
cred_def_json: {:?}", wallet_handle, cred_id, cred_req_metadata_json, cred_json, cred_def_json, rev_reg_def_json);
check_useful_opt_c_str!(cred_id, ErrorCode::CommonInvalidParam3);
check_useful_json!(cred_req_metadata_json, ErrorCode::CommonInvalidParam4, CredentialRequestMetadata);
check_useful_json!(cred_json, ErrorCode::CommonInvalidParam5, Credential);
check_useful_json!(cred_def_json, ErrorCode::CommonInvalidParam6, CredentialDefinition);
check_useful_opt_json!(rev_reg_def_json, ErrorCode::CommonInvalidParam7, RevocationRegistryDefinition);
check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam8);
trace!("indy_prover_store_credential: entities >>> wallet_handle: {:?}, cred_id: {:?}, cred_req_metadata_json: {:?}, cred_json: {:?}, cred_def_json: {:?}, \
rev_reg_def_json: {:?}", wallet_handle, cred_id, cred_req_metadata_json, cred_json, cred_def_json, rev_reg_def_json);
let result = CommandExecutor::instance()
.send(Command::Anoncreds(
AnoncredsCommand::Prover(
ProverCommand::StoreCredential(
wallet_handle,
cred_id,
cred_req_metadata_json,
cred_json,
cred_def_json,
rev_reg_def_json,
Box::new(move |result| {
let (err, out_cred_id) = prepare_result_1!(result, String::new());
trace!("indy_prover_store_credential: out_cred_id: {:?}", out_cred_id);
let out_cred_id = ctypes::string_to_cstring(out_cred_id);
cb(command_handle, err, out_cred_id.as_ptr())
})
))));
let res = prepare_result!(result);
trace!("indy_prover_store_credential: <<< res: {:?}", res);
res
}
/// Gets human readable credential by the given id.
///
/// #Params
/// wallet_handle: wallet handler (created by open_wallet).
/// cred_id: Identifier by which requested credential is stored in the wallet
/// cb: Callback that takes command result as parameter.
///
/// #Returns
/// credential json:
/// {
/// "referent": string, // cred_id in the wallet
/// "attrs": {"key1":"raw_value1", "key2":"raw_value2"},
/// "schema_id": string,
/// "cred_def_id": string,
/// "rev_reg_id": Optional<string>,
/// "cred_rev_id": Optional<string>
/// }
///
/// #Errors
/// Anoncreds*
/// Common*
/// Wallet*
#[no_mangle]
pub extern fn indy_prover_get_credential(command_handle: CommandHandle,
wallet_handle: WalletHandle,<|fim▁hole|> credential_json: *const c_char)>) -> ErrorCode {
trace!("indy_prover_get_credential: >>> wallet_handle: {:?}, cred_id: {:?}", wallet_handle, cred_id);
check_useful_c_str!(cred_id, ErrorCode::CommonInvalidParam3);
check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam4);
trace!("indy_prover_get_credential: entities >>> wallet_handle: {:?}, cred_id: {:?}", cred_id, cred_id);
let result = CommandExecutor::instance()
.send(Command::Anoncreds(
AnoncredsCommand::Prover(
ProverCommand::GetCredential(
wallet_handle,
cred_id,
Box::new(move |result| {
let (err, credential_json) = prepare_result_1!(result, String::new());
trace!("indy_prover_get_credential: credential_json: {:?}", credential_json);
let credential_json = ctypes::string_to_cstring(credential_json);
cb(command_handle, err, credential_json.as_ptr())
})
))));
let res = prepare_result!(result);
trace!("indy_prover_get_credential: <<< res: {:?}", res);
res
}
/// Gets human readable credentials according to the filter.
/// If filter is NULL, then all credentials are returned.
/// Credentials can be filtered by Issuer, credential_def and/or Schema.
///
/// NOTE: This method is deprecated because it immediately returns all fetched credentials.
/// Use <indy_prover_search_credentials> to fetch records by small batches.
///
/// #Params
/// wallet_handle: wallet handler (created by open_wallet).
/// filter_json: filter for credentials
/// {
/// "schema_id": string, (Optional)
/// "schema_issuer_did": string, (Optional)
/// "schema_name": string, (Optional)
/// "schema_version": string, (Optional)
/// "issuer_did": string, (Optional)
/// "cred_def_id": string, (Optional)
/// }
/// cb: Callback that takes command result as parameter.
///
/// #Returns
/// credentials json
/// [{
/// "referent": string, // cred_id in the wallet
/// "attrs": {"key1":"raw_value1", "key2":"raw_value2"},
/// "schema_id": string,
/// "cred_def_id": string,
/// "rev_reg_id": Optional<string>,
/// "cred_rev_id": Optional<string>
/// }]
///
/// #Errors
/// Anoncreds*
/// Common*
/// Wallet*
#[no_mangle]
#[deprecated(since="1.6.1", note="Please use indy_prover_search_credentials instead!")]
pub extern fn indy_prover_get_credentials(command_handle: CommandHandle,
wallet_handle: WalletHandle,
filter_json: *const c_char,
cb: Option<extern fn(
command_handle_: CommandHandle, err: ErrorCode,
matched_credentials_json: *const c_char)>) -> ErrorCode {
trace!("indy_prover_get_credentials: >>> wallet_handle: {:?}, filter_json: {:?}", wallet_handle, filter_json);
check_useful_opt_c_str!(filter_json, ErrorCode::CommonInvalidParam3);
check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam4);
trace!("indy_prover_get_credentials: entities >>> wallet_handle: {:?}, filter_json: {:?}", wallet_handle, filter_json);
let result = CommandExecutor::instance()
.send(Command::Anoncreds(
AnoncredsCommand::Prover(
ProverCommand::GetCredentials(
wallet_handle,
filter_json,
Box::new(move |result| {
let (err, matched_credentials_json) = prepare_result_1!(result, String::new());
trace!("indy_prover_get_credentials: matched_credentials_json: {:?}", matched_credentials_json);
let matched_credentials_json = ctypes::string_to_cstring(matched_credentials_json);
cb(command_handle, err, matched_credentials_json.as_ptr())
})
))));
let res = prepare_result!(result);
trace!("indy_prover_get_credentials: <<< res: {:?}", res);
res
}
/// Search for credentials stored in wallet.
/// Credentials can be filtered by tags created during saving of credential.
///
/// Instead of immediately returning all fetched credentials,
/// this call returns search_handle that can be used later
/// to fetch records by small batches (with indy_prover_fetch_credentials).
///
/// #Params
/// wallet_handle: wallet handler (created by open_wallet).
/// query_json: Wql query filter for credentials searching based on tags.
/// where query: indy-sdk/docs/design/011-wallet-query-language/README.md
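///              (for example, the query {"attr::age::value": "27"} matches credentials whose
///               "age" attribute has the raw value "27")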
/// cb: Callback that takes command result as parameter.
///
/// #Returns
/// search_handle: Search handle that can be used later to fetch records by small batches (with indy_prover_fetch_credentials)
/// total_count: Total count of records
///
/// #Errors
/// Anoncreds*
/// Common*
/// Wallet*
#[no_mangle]
pub extern fn indy_prover_search_credentials(command_handle: CommandHandle,
wallet_handle: WalletHandle,
query_json: *const c_char,
cb: Option<extern fn(
command_handle_: CommandHandle, err: ErrorCode,
search_handle: SearchHandle,
total_count: usize)>) -> ErrorCode {
trace!("indy_prover_search_credentials: >>> wallet_handle: {:?}, query_json: {:?}", wallet_handle, query_json);
check_useful_opt_c_str!(query_json, ErrorCode::CommonInvalidParam3);
check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam4);
trace!("indy_prover_search_credentials: entities >>> wallet_handle: {:?}, query_json: {:?}", wallet_handle, query_json);
let result = CommandExecutor::instance()
.send(Command::Anoncreds(
AnoncredsCommand::Prover(
ProverCommand::SearchCredentials(
wallet_handle,
query_json,
Box::new(move |result| {
let (err, handle, total_count) = prepare_result_2!(result, 0, 0);
cb(command_handle, err, handle, total_count)
})
))));
let res = prepare_result!(result);
trace!("indy_prover_search_credentials: <<< res: {:?}", res);
res
}
/// Fetch next credentials for search.
///
/// #Params
/// search_handle: Search handle (created by indy_prover_search_credentials)
/// count: Count of credentials to fetch
/// cb: Callback that takes command result as parameter.
///
/// #Returns
/// credentials_json: List of human readable credentials:
/// [{
/// "referent": string, // cred_id in the wallet
/// "attrs": {"key1":"raw_value1", "key2":"raw_value2"},
/// "schema_id": string,
/// "cred_def_id": string,
/// "rev_reg_id": Optional<string>,
/// "cred_rev_id": Optional<string>
/// }]
/// NOTE: A list shorter than the requested count means the credentials search iterator is completed.
///
/// #Errors
/// Anoncreds*
/// Common*
/// Wallet*
#[no_mangle]
pub extern fn indy_prover_fetch_credentials(command_handle: CommandHandle,
search_handle: SearchHandle,
count: usize,
cb: Option<extern fn(command_handle_: CommandHandle, err: ErrorCode,
credentials_json: *const c_char)>) -> ErrorCode {
trace!("indy_prover_fetch_credentials: >>> search_handle: {:?}, count: {:?}", search_handle, count);
check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam4);
trace!("indy_prover_fetch_credentials: entities >>> search_handle: {:?}, count: {:?}", search_handle, count);
let result = CommandExecutor::instance()
.send(Command::Anoncreds(
AnoncredsCommand::Prover(
ProverCommand::FetchCredentials(
search_handle,
count,
Box::new(move |result| {
let (err, credentials_json) = prepare_result_1!(result, String::new());
trace!("indy_prover_fetch_credentials: credentials_json: {:?}", credentials_json);
let credentials_json = ctypes::string_to_cstring(credentials_json);
cb(command_handle, err, credentials_json.as_ptr())
})
))));
let res = prepare_result!(result);
trace!("indy_prover_fetch_credentials: <<< res: {:?}", res);
res
}
/// Close credentials search (make search handle invalid)
///
/// #Params
/// search_handle: Search handle (created by indy_prover_search_credentials)
///
/// #Errors
/// Anoncreds*
/// Common*
/// Wallet*
#[no_mangle]
pub extern fn indy_prover_close_credentials_search(command_handle: CommandHandle,
search_handle: SearchHandle,
cb: Option<extern fn(command_handle_: CommandHandle, err: ErrorCode)>) -> ErrorCode {
trace!("indy_prover_close_credentials_search: >>> search_handle: {:?}", search_handle);
check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam5);
trace!("indy_prover_close_credentials_search: entities >>> search_handle: {:?}", search_handle);
let result = CommandExecutor::instance()
.send(Command::Anoncreds(
AnoncredsCommand::Prover(
ProverCommand::CloseCredentialsSearch(
search_handle,
Box::new(move |result| {
let err = prepare_result!(result);
trace!("indy_prover_close_credentials_search:");
cb(command_handle, err)
})
))));
let res = prepare_result!(result);
trace!("indy_prover_close_credentials_search: <<< res: {:?}", res);
res
}
/// Gets human readable credentials matching the given proof request.
///
/// NOTE: This method is deprecated because it immediately returns all fetched credentials.
/// Use <indy_prover_search_credentials_for_proof_req> to fetch records by small batches.
///
/// #Params
/// wallet_handle: wallet handler (created by open_wallet).
/// proof_request_json: proof request json
/// {
/// "name": string,
/// "version": string,
/// "nonce": string,
/// "requested_attributes": { // set of requested attributes
/// "<attr_referent>": <attr_info>, // see below
/// ...,
/// },
/// "requested_predicates": { // set of requested predicates
/// "<predicate_referent>": <predicate_info>, // see below
/// ...,
/// },
/// "non_revoked": Optional<<non_revoc_interval>>, // see below,
/// // If specified prover must proof non-revocation
/// // for date in this interval for each attribute
/// // (can be overridden on attribute level)
/// }
/// cb: Callback that takes command result as parameter.
///
/// where
/// attr_referent: Proof-request local identifier of requested attribute
/// attr_info: Describes requested attribute
/// {
/// "name": string, // attribute name, (case insensitive and ignore spaces)
/// "restrictions": Optional<filter_json>, // see above
/// "non_revoked": Optional<<non_revoc_interval>>, // see below,
/// // If specified prover must proof non-revocation
/// // for date in this interval this attribute
/// // (overrides proof level interval)
/// }
/// predicate_referent: Proof-request local identifier of requested attribute predicate
/// predicate_info: Describes requested attribute predicate
/// {
/// "name": attribute name, (case insensitive and ignore spaces)
/// "p_type": predicate type (Currently ">=" only)
/// "p_value": int predicate value
/// "restrictions": Optional<filter_json>, // see above
/// "non_revoked": Optional<<non_revoc_interval>>, // see below,
/// // If specified prover must proof non-revocation
/// // for date in this interval this attribute
/// // (overrides proof level interval)
/// }
/// non_revoc_interval: Defines non-revocation interval
/// {
/// "from": Optional<int>, // timestamp of interval beginning
/// "to": Optional<int>, // timestamp of interval ending
/// }
///
/// #Returns
/// credentials_json: json with credentials for the given proof request.
/// {
/// "requested_attrs": {
/// "<attr_referent>": [{ cred_info: <credential_info>, interval: Optional<non_revoc_interval> }],
/// ...,
/// },
/// "requested_predicates": {
/// "requested_predicates": [{ cred_info: <credential_info>, timestamp: Optional<integer> }, { cred_info: <credential_2_info>, timestamp: Optional<integer> }],
/// "requested_predicate_2_referent": [{ cred_info: <credential_2_info>, timestamp: Optional<integer> }]
/// }
/// }, where credential is
/// {
/// "referent": <string>,
/// "attrs": {"attr_name" : "attr_raw_value"},
/// "schema_id": string,
/// "cred_def_id": string,
/// "rev_reg_id": Optional<int>,
/// "cred_rev_id": Optional<int>,
/// }
///
/// #Errors
/// Anoncreds*
/// Common*
/// Wallet*
#[deprecated(since="1.6.1", note="Please use indy_prover_search_credentials_for_proof_req instead!")]
#[no_mangle]
pub extern fn indy_prover_get_credentials_for_proof_req(command_handle: CommandHandle,
wallet_handle: WalletHandle,
proof_request_json: *const c_char,
cb: Option<extern fn(
command_handle_: CommandHandle, err: ErrorCode,
credentials_json: *const c_char)>) -> ErrorCode {
trace!("indy_prover_get_credentials_for_proof_req: >>> wallet_handle: {:?}, proof_request_json: {:?}", wallet_handle, proof_request_json);
check_useful_json!(proof_request_json, ErrorCode::CommonInvalidParam3, ProofRequest);
check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam5);
trace!("indy_prover_get_credentials_for_proof_req: entities >>> wallet_handle: {:?}, proof_request_json: {:?}",
wallet_handle, proof_request_json);
let result = CommandExecutor::instance()
.send(Command::Anoncreds(
AnoncredsCommand::Prover(
ProverCommand::GetCredentialsForProofReq(
wallet_handle,
proof_request_json,
Box::new(move |result| {
let (err, credentials_json) = prepare_result_1!(result, String::new());
trace!("indy_prover_get_credentials_for_proof_req: credentials_json: {:?}", credentials_json);
let credentials_json = ctypes::string_to_cstring(credentials_json);
cb(command_handle, err, credentials_json.as_ptr())
})
))));
let res = prepare_result!(result);
trace!("indy_prover_get_credentials_for_proof_req: <<< res: {:?}", res);
res
}
/// Search for credentials matching the given proof request.
///
/// Instead of immediately returning all fetched credentials,
/// this call returns search_handle that can be used later
/// to fetch records by small batches (with indy_prover_fetch_credentials_for_proof_req).
///
/// #Params
/// wallet_handle: wallet handler (created by open_wallet).
/// proof_request_json: proof request json
/// {
/// "name": string,
/// "version": string,
/// "nonce": string,
/// "requested_attributes": { // set of requested attributes
/// "<attr_referent>": <attr_info>, // see below
/// ...,
/// },
/// "requested_predicates": { // set of requested predicates
/// "<predicate_referent>": <predicate_info>, // see below
/// ...,
/// },
/// "non_revoked": Optional<<non_revoc_interval>>, // see below,
/// // If specified prover must proof non-revocation
/// // for date in this interval for each attribute
/// // (can be overridden on attribute level)
/// }
/// extra_query_json:(Optional) List of extra queries that will be applied to correspondent attribute/predicate:
/// {
/// "<attr_referent>": <wql query>,
/// "<predicate_referent>": <wql query>,
/// }
/// where wql query: indy-sdk/docs/design/011-wallet-query-language/README.md
/// cb: Callback that takes command result as parameter.
///
/// #Returns
/// search_handle: Search handle that can be used later to fetch records by small batches (with indy_prover_fetch_credentials_for_proof_req)
///
/// #Errors
/// Anoncreds*
/// Common*
/// Wallet*
#[no_mangle]
pub extern fn indy_prover_search_credentials_for_proof_req(command_handle: CommandHandle,
wallet_handle: WalletHandle,
proof_request_json: *const c_char,
extra_query_json: *const c_char,
cb: Option<extern fn(
command_handle_: CommandHandle, err: ErrorCode,
search_handle: SearchHandle)>) -> ErrorCode {
trace!("indy_prover_search_credentials_for_proof_req: >>> wallet_handle: {:?}, proof_request_json: {:?}, extra_query_json: {:?}", wallet_handle, proof_request_json, extra_query_json);
check_useful_json!(proof_request_json, ErrorCode::CommonInvalidParam3, ProofRequest);
check_useful_opt_json!(extra_query_json, ErrorCode::CommonInvalidParam4, ProofRequestExtraQuery);
check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam5);
trace!("indy_prover_search_credentials_for_proof_req: entities >>> wallet_handle: {:?}, proof_request_json: {:?}, extra_query_json: {:?}",
wallet_handle, proof_request_json, extra_query_json);
let result = CommandExecutor::instance()
.send(Command::Anoncreds(
AnoncredsCommand::Prover(
ProverCommand::SearchCredentialsForProofReq(
wallet_handle,
proof_request_json,
extra_query_json,
Box::new(move |result| {
let (err, search_handle) = prepare_result_1!(result, 0);
trace!("indy_prover_search_credentials_for_proof_req: search_handle: {:?}", search_handle);
cb(command_handle, err, search_handle)
}),
))));
let res = prepare_result!(result);
trace!("indy_prover_search_credentials_for_proof_req: <<< res: {:?}", res);
res
}
/// Fetch next credentials for the requested item using proof request search
/// handle (created by indy_prover_search_credentials_for_proof_req).
///
/// #Params
/// search_handle: Search handle (created by indy_prover_search_credentials_for_proof_req)
/// item_referent: Referent of attribute/predicate in the proof request
/// count: Count of credentials to fetch
/// cb: Callback that takes command result as parameter.
///
/// #Returns
/// credentials_json: List of credentials for the given proof request.
/// [{
/// cred_info: <credential_info>,
/// interval: Optional<non_revoc_interval>
/// }]
/// where
/// credential_info:
/// {
/// "referent": <string>,
/// "attrs": {"attr_name" : "attr_raw_value"},
/// "schema_id": string,
/// "cred_def_id": string,
/// "rev_reg_id": Optional<int>,
/// "cred_rev_id": Optional<int>,
/// }
/// non_revoc_interval:
/// {
/// "from": Optional<int>, // timestamp of interval beginning
/// "to": Optional<int>, // timestamp of interval ending
/// }
/// NOTE: A list shorter than the requested count means that the search iterator
/// corresponding to the requested <item_referent> is completed.
///
/// #Errors
/// Anoncreds*
/// Common*
/// Wallet*
#[no_mangle]
pub extern fn indy_prover_fetch_credentials_for_proof_req(command_handle: CommandHandle,
search_handle: SearchHandle,
item_referent: *const c_char,
count: usize,
cb: Option<extern fn(command_handle_: CommandHandle, err: ErrorCode,
credentials_json: *const c_char)>) -> ErrorCode {
trace!("indy_prover_fetch_credentials_for_proof_req: >>> search_handle: {:?}, count: {:?}", search_handle, count);
check_useful_c_str!(item_referent, ErrorCode::CommonInvalidParam4);
check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam5);
trace!("indy_prover_fetch_credentials_for_proof_req: entities >>> search_handle: {:?}, count: {:?}", search_handle, count);
let result = CommandExecutor::instance()
.send(Command::Anoncreds(
AnoncredsCommand::Prover(
ProverCommand::FetchCredentialForProofReq(
search_handle,
item_referent,
count,
Box::new(move |result| {
let (err, credentials_json) = prepare_result_1!(result, String::new());
trace!("indy_prover_fetch_credentials_for_proof_request: credentials_json: {:?}", credentials_json);
let credentials_json = ctypes::string_to_cstring(credentials_json);
cb(command_handle, err, credentials_json.as_ptr())
}),
))));
let res = prepare_result!(result);
trace!("indy_prover_fetch_credentials_for_proof_req: <<< res: {:?}", res);
res
}
/// Close credentials search for proof request (make search handle invalid)
///
/// #Params
/// search_handle: Search handle (created by indy_prover_search_credentials_for_proof_req)
///
/// #Errors
/// Anoncreds*
/// Common*
/// Wallet*
#[no_mangle]
pub extern fn indy_prover_close_credentials_search_for_proof_req(command_handle: CommandHandle,
search_handle: SearchHandle,
cb: Option<extern fn(command_handle_: CommandHandle, err: ErrorCode)>) -> ErrorCode {
trace!("indy_prover_close_credentials_search_for_proof_req: >>> search_handle: {:?}", search_handle);
check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam5);
trace!("indy_prover_close_credentials_search_for_proof_req: entities >>> search_handle: {:?}", search_handle);
let result = CommandExecutor::instance()
.send(Command::Anoncreds(
AnoncredsCommand::Prover(
ProverCommand::CloseCredentialsSearchForProofReq(
search_handle,
Box::new(move |result| {
let err = prepare_result!(result);
trace!("indy_prover_close_credentials_search:");
cb(command_handle, err)
}),
))));
let res = prepare_result!(result);
trace!("indy_prover_close_credentials_search_for_proof_req: <<< res: {:?}", res);
res
}
/// Creates a proof according to the given proof request
/// Either a corresponding credential with optionally revealed attributes or self-attested attribute must be provided
/// for each requested attribute (see indy_prover_get_credentials_for_proof_req).
/// A proof request may request multiple credentials from different schemas and different issuers.
/// All required schemas, public keys and revocation registries must be provided.
/// The proof request also contains nonce.
/// The proof contains either proof or self-attested attribute value for each requested attribute.
///
/// #Params
/// wallet_handle: wallet handler (created by open_wallet).
/// command_handle: command handle to map callback to user context.
/// proof_request_json: proof request json
/// {
/// "name": string,
/// "version": string,
/// "nonce": string,
/// "requested_attributes": { // set of requested attributes
/// "<attr_referent>": <attr_info>, // see below
/// ...,
/// },
/// "requested_predicates": { // set of requested predicates
/// "<predicate_referent>": <predicate_info>, // see below
/// ...,
/// },
/// "non_revoked": Optional<<non_revoc_interval>>, // see below,
/// // If specified prover must proof non-revocation
/// // for date in this interval for each attribute
/// // (can be overridden on attribute level)
/// }
/// requested_credentials_json: either a credential or self-attested attribute for each requested attribute
/// {
/// "self_attested_attributes": {
/// "self_attested_attribute_referent": string
/// },
/// "requested_attributes": {
/// "requested_attribute_referent_1": {"cred_id": string, "timestamp": Optional<number>, revealed: <bool> }},
/// "requested_attribute_referent_2": {"cred_id": string, "timestamp": Optional<number>, revealed: <bool> }}
/// },
/// "requested_predicates": {
/// "requested_predicates_referent_1": {"cred_id": string, "timestamp": Optional<number> }},
/// }
/// }
/// master_secret_id: the id of the master secret stored in the wallet
/// schemas_json: all schemas json participating in the proof request
/// {
/// <schema1_id>: <schema1_json>,
/// <schema2_id>: <schema2_json>,
/// <schema3_id>: <schema3_json>,
/// }
/// credential_defs_json: all credential definitions json participating in the proof request
/// {
/// "cred_def1_id": <credential_def1_json>,
/// "cred_def2_id": <credential_def2_json>,
/// "cred_def3_id": <credential_def3_json>,
/// }
/// rev_states_json: all revocation states json participating in the proof request
/// {
/// "rev_reg_def1_id": {
/// "timestamp1": <rev_state1>,
/// "timestamp2": <rev_state2>,
/// },
/// "rev_reg_def2_id": {
/// "timestamp3": <rev_state3>
/// },
/// "rev_reg_def3_id": {
/// "timestamp4": <rev_state4>
/// },
/// }
/// cb: Callback that takes command result as parameter.
///
/// where
/// where wql query: indy-sdk/docs/design/011-wallet-query-language/README.md
/// attr_referent: Proof-request local identifier of requested attribute
/// attr_info: Describes requested attribute
/// {
/// "name": string, // attribute name, (case insensitive and ignore spaces)
/// "restrictions": Optional<wql query>,
/// "non_revoked": Optional<<non_revoc_interval>>, // see below,
/// // If specified prover must proof non-revocation
/// // for date in this interval this attribute
/// // (overrides proof level interval)
/// }
/// predicate_referent: Proof-request local identifier of requested attribute predicate
/// predicate_info: Describes requested attribute predicate
/// {
/// "name": attribute name, (case insensitive and ignore spaces)
/// "p_type": predicate type (Currently >= only)
/// "p_value": predicate value
/// "restrictions": Optional<wql query>,
/// "non_revoked": Optional<<non_revoc_interval>>, // see below,
/// // If specified prover must proof non-revocation
/// // for date in this interval this attribute
/// // (overrides proof level interval)
/// }
/// non_revoc_interval: Defines non-revocation interval
/// {
/// "from": Optional<int>, // timestamp of interval beginning
/// "to": Optional<int>, // timestamp of interval ending
/// }
///
/// #Returns
/// Proof json
/// For each requested attribute either a proof (with optionally revealed attribute value) or
/// self-attested attribute value is provided.
/// Each proof is associated with a credential and corresponding schema_id, cred_def_id, rev_reg_id and timestamp.
/// There is also aggregated proof part common for all credential proofs.
/// {
/// "requested_proof": {
/// "revealed_attrs": {
/// "requested_attr1_id": {sub_proof_index: number, raw: string, encoded: string},
/// "requested_attr4_id": {sub_proof_index: number: string, encoded: string},
/// },
/// "unrevealed_attrs": {
/// "requested_attr3_id": {sub_proof_index: number}
/// },
/// "self_attested_attrs": {
/// "requested_attr2_id": self_attested_value,
/// },
/// "requested_predicates": {
/// "requested_predicate_1_referent": {sub_proof_index: int},
/// "requested_predicate_2_referent": {sub_proof_index: int},
/// }
/// }
/// "proof": {
/// "proofs": [ <credential_proof>, <credential_proof>, <credential_proof> ],
/// "aggregated_proof": <aggregated_proof>
/// }
/// "identifiers": [{schema_id, cred_def_id, Optional<rev_reg_id>, Optional<timestamp>}]
/// }
///
/// #Errors
/// Anoncreds*
/// Common*
/// Wallet*
#[no_mangle]
pub extern fn indy_prover_create_proof(command_handle: CommandHandle,
wallet_handle: WalletHandle,
proof_req_json: *const c_char,
requested_credentials_json: *const c_char,
master_secret_id: *const c_char,
schemas_json: *const c_char,
credential_defs_json: *const c_char,
rev_states_json: *const c_char,
cb: Option<extern fn(command_handle_: CommandHandle, err: ErrorCode,
proof_json: *const c_char)>) -> ErrorCode {
trace!("indy_prover_create_proof: >>> wallet_handle: {:?}, proof_req_json: {:?}, requested_credentials_json: {:?}, master_secret_id: {:?}, \
schemas_json: {:?}, credential_defs_json: {:?}, rev_states_json: {:?}",
wallet_handle, proof_req_json, requested_credentials_json, master_secret_id, schemas_json, credential_defs_json, rev_states_json);
check_useful_json!(proof_req_json, ErrorCode::CommonInvalidParam3, ProofRequest);
check_useful_json!(requested_credentials_json, ErrorCode::CommonInvalidParam4, RequestedCredentials);
check_useful_c_str!(master_secret_id, ErrorCode::CommonInvalidParam5);
check_useful_json!(schemas_json, ErrorCode::CommonInvalidParam6, HashMap<String, Schema>);
check_useful_json!(credential_defs_json, ErrorCode::CommonInvalidParam7, HashMap<String, CredentialDefinition>);
check_useful_json!(rev_states_json, ErrorCode::CommonInvalidParam8, HashMap<String, HashMap<u64, RevocationState>>);
check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam9);
trace!("indy_prover_create_proof: entities >>> wallet_handle: {:?}, proof_req_json: {:?}, requested_credentials_json: {:?}, master_secret_id: {:?}, \
schemas_json: {:?}, credential_defs_json: {:?}, rev_states_json: {:?}",
wallet_handle, proof_req_json, requested_credentials_json, master_secret_id, schemas_json, credential_defs_json, rev_states_json);
let result = CommandExecutor::instance()
.send(Command::Anoncreds(AnoncredsCommand::Prover(ProverCommand::CreateProof(
wallet_handle,
proof_req_json,
requested_credentials_json,
master_secret_id,
schemas_json,
credential_defs_json,
rev_states_json,
Box::new(move |result| {
let (err, proof_json) = prepare_result_1!(result, String::new());
trace!("indy_prover_create_proof: proof_json: {:?}", proof_json);
let proof_json = ctypes::string_to_cstring(proof_json);
cb(command_handle, err, proof_json.as_ptr())
})
))));
let res = prepare_result!(result);
trace!("indy_prover_create_proof: <<< res: {:?}", res);
res
}
/// Verifies a proof (of multiple credentials).
/// All required schemas, public keys and revocation registries must be provided.
///
/// #Params
/// wallet_handle: wallet handler (created by open_wallet).
/// command_handle: command handle to map callback to user context.
/// proof_request_json: proof request json
/// {
/// "name": string,
/// "version": string,
/// "nonce": string,
/// "requested_attributes": { // set of requested attributes
/// "<attr_referent>": <attr_info>, // see below
/// ...,
/// },
/// "requested_predicates": { // set of requested predicates
/// "<predicate_referent>": <predicate_info>, // see below
/// ...,
/// },
/// "non_revoked": Optional<<non_revoc_interval>>, // see below,
/// // If specified prover must proof non-revocation
/// // for date in this interval for each attribute
/// // (can be overridden on attribute level)
/// }
/// proof_json: created for request proof json
/// {
/// "requested_proof": {
/// "revealed_attrs": {
/// "requested_attr1_id": {sub_proof_index: number, raw: string, encoded: string},
/// "requested_attr4_id": {sub_proof_index: number: string, encoded: string},
/// },
/// "unrevealed_attrs": {
/// "requested_attr3_id": {sub_proof_index: number}
/// },
/// "self_attested_attrs": {
/// "requested_attr2_id": self_attested_value,
/// },
/// "requested_predicates": {
/// "requested_predicate_1_referent": {sub_proof_index: int},
/// "requested_predicate_2_referent": {sub_proof_index: int},
/// }
/// }
/// "proof": {
/// "proofs": [ <credential_proof>, <credential_proof>, <credential_proof> ],
/// "aggregated_proof": <aggregated_proof>
/// }
/// "identifiers": [{schema_id, cred_def_id, Optional<rev_reg_id>, Optional<timestamp>}]
/// }
/// schemas_json: all schema jsons participating in the proof
/// {
/// <schema1_id>: <schema1_json>,
/// <schema2_id>: <schema2_json>,
/// <schema3_id>: <schema3_json>,
/// }
/// credential_defs_json: all credential definitions json participating in the proof
/// {
/// "cred_def1_id": <credential_def1_json>,
/// "cred_def2_id": <credential_def2_json>,
/// "cred_def3_id": <credential_def3_json>,
/// }
/// rev_reg_defs_json: all revocation registry definitions json participating in the proof
/// {
/// "rev_reg_def1_id": <rev_reg_def1_json>,
/// "rev_reg_def2_id": <rev_reg_def2_json>,
/// "rev_reg_def3_id": <rev_reg_def3_json>,
/// }
/// rev_regs_json: all revocation registries json participating in the proof
/// {
/// "rev_reg_def1_id": {
/// "timestamp1": <rev_reg1>,
/// "timestamp2": <rev_reg2>,
/// },
/// "rev_reg_def2_id": {
/// "timestamp3": <rev_reg3>
/// },
/// "rev_reg_def3_id": {
/// "timestamp4": <rev_reg4>
/// },
/// }
/// cb: Callback that takes command result as parameter.
///
/// #Returns
/// valid: true - if signature is valid, false - otherwise
///
/// #Errors
/// Anoncreds*
/// Common*
/// Wallet*
#[no_mangle]
pub extern fn indy_verifier_verify_proof(command_handle: CommandHandle,
proof_request_json: *const c_char,
proof_json: *const c_char,
schemas_json: *const c_char,
credential_defs_json: *const c_char,
rev_reg_defs_json: *const c_char,
rev_regs_json: *const c_char,
cb: Option<extern fn(command_handle_: CommandHandle, err: ErrorCode,
valid: bool)>) -> ErrorCode {
trace!("indy_verifier_verify_proof: >>> proof_request_json: {:?}, proof_json: {:?}, schemas_json: {:?}, credential_defs_json: {:?}, \
rev_reg_defs_json: {:?}, rev_regs_json: {:?}", proof_request_json, proof_json, schemas_json, credential_defs_json, rev_reg_defs_json, rev_regs_json);
check_useful_json!(proof_request_json, ErrorCode::CommonInvalidParam2, ProofRequest);
check_useful_json!(proof_json, ErrorCode::CommonInvalidParam3, Proof);
check_useful_json!(schemas_json, ErrorCode::CommonInvalidParam4, HashMap<String, Schema>);
check_useful_json!(credential_defs_json, ErrorCode::CommonInvalidParam5, HashMap<String, CredentialDefinition>);
check_useful_json!(rev_reg_defs_json, ErrorCode::CommonInvalidParam6, HashMap<String, RevocationRegistryDefinition>);
check_useful_json!(rev_regs_json, ErrorCode::CommonInvalidParam7, HashMap<String, HashMap<u64, RevocationRegistry>>);
check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam8);
trace!("indy_verifier_verify_proof: entities >>> proof_request_json: {:?}, proof_json: {:?}, schemas_json: {:?}, credential_defs_json: {:?}, \
rev_reg_defs_json: {:?}, rev_regs_json: {:?}", proof_request_json, proof_json, schemas_json, credential_defs_json, rev_reg_defs_json, rev_regs_json);
let result = CommandExecutor::instance()
.send(Command::Anoncreds(AnoncredsCommand::Verifier(VerifierCommand::VerifyProof(
proof_request_json,
proof_json,
schemas_json,
credential_defs_json,
rev_reg_defs_json,
rev_regs_json,
Box::new(move |result| {
let (err, valid) = prepare_result_1!(result, false);
trace!("indy_verifier_verify_proof: valid: {:?}", valid);
cb(command_handle, err, valid)
})
))));
let res = prepare_result!(result);
trace!("indy_verifier_verify_proof: <<< res: {:?}", res);
res
}
/// Create a revocation state for a credential at a particular moment in time.
///
/// #Params
/// command_handle: command handle to map callback to user context
/// blob_storage_reader_handle: configuration of blob storage reader handle that will allow to read revocation tails
/// rev_reg_def_json: revocation registry definition json
/// rev_reg_delta_json: revocation registry definition delta json
/// timestamp: time represented as a total number of seconds from Unix Epoch
/// cred_rev_id: user credential revocation id in revocation registry
/// cb: Callback that takes command result as parameter
///
/// #Returns
/// revocation state json:
/// {
/// "rev_reg": <revocation registry>,
/// "witness": <witness>,
/// "timestamp" : integer
/// }
///
/// #Errors
/// Common*
/// Wallet*
/// Anoncreds*
#[no_mangle]
pub extern fn indy_create_revocation_state(command_handle: CommandHandle,
blob_storage_reader_handle: IndyHandle,
rev_reg_def_json: *const c_char,
rev_reg_delta_json: *const c_char,
timestamp: u64,
cred_rev_id: *const c_char,
cb: Option<extern fn(
command_handle_: CommandHandle, err: ErrorCode,
rev_state_json: *const c_char)>) -> ErrorCode {
trace!("indy_create_revocation_state: >>> blob_storage_reader_handle: {:?}, rev_reg_def_json: {:?}, rev_reg_delta_json: {:?}, timestamp: {:?}, \
cred_rev_id: {:?}", blob_storage_reader_handle, rev_reg_def_json, rev_reg_delta_json, timestamp, cred_rev_id);
check_useful_json!(rev_reg_def_json, ErrorCode::CommonInvalidParam3, RevocationRegistryDefinition);
check_useful_json!(rev_reg_delta_json, ErrorCode::CommonInvalidParam4, RevocationRegistryDelta);
check_useful_c_str!(cred_rev_id, ErrorCode::CommonInvalidParam6);
check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam7);
trace!("indy_create_revocation_state: entities >>> blob_storage_reader_handle: {:?}, rev_reg_def_json: {:?}, rev_reg_delta_json: {:?}, timestamp: {:?}, \
cred_rev_id: {:?}", blob_storage_reader_handle, rev_reg_def_json, rev_reg_delta_json, timestamp, cred_rev_id);
let result = CommandExecutor::instance()
.send(Command::Anoncreds(AnoncredsCommand::Prover(ProverCommand::CreateRevocationState(
blob_storage_reader_handle,
rev_reg_def_json,
rev_reg_delta_json,
timestamp,
cred_rev_id,
Box::new(move |result| {
let (err, rev_state_json) = prepare_result_1!(result, String::new());
trace!("indy_create_revocation_state: rev_state_json: {:?}", rev_state_json);
let rev_state_json = ctypes::string_to_cstring(rev_state_json);
cb(command_handle, err, rev_state_json.as_ptr())
})
))));
let res = prepare_result!(result);
trace!("indy_create_revocation_state: <<< res: {:?}", res);
res
}
/// Create a new revocation state for a credential based on an existing state
/// at a particular moment in time (to reduce calculation time).
///
/// #Params
/// command_handle: command handle to map callback to user context
/// blob_storage_reader_handle: configuration of blob storage reader handle that will allow to read revocation tails
/// rev_state_json: revocation registry state json
/// rev_reg_def_json: revocation registry definition json
/// rev_reg_delta_json: revocation registry definition delta json
/// timestamp: time represented as a total number of seconds from Unix Epoch
/// cred_rev_id: user credential revocation id in revocation registry
/// cb: Callback that takes command result as parameter
///
/// #Returns
/// revocation state json:
/// {
/// "rev_reg": <revocation registry>,
/// "witness": <witness>,
/// "timestamp" : integer
/// }
///
/// #Errors
/// Common*
/// Wallet*
/// Anoncreds*
#[no_mangle]
pub extern fn indy_update_revocation_state(command_handle: CommandHandle,
blob_storage_reader_handle: IndyHandle,
rev_state_json: *const c_char,
rev_reg_def_json: *const c_char,
rev_reg_delta_json: *const c_char,
timestamp: u64,
cred_rev_id: *const c_char,
cb: Option<extern fn(
command_handle_: CommandHandle, err: ErrorCode,
updated_rev_state_json: *const c_char)>) -> ErrorCode {
trace!("indy_update_revocation_state: >>> blob_storage_reader_handle: {:?}, rev_state_json: {:?}, rev_reg_def_json: {:?}, rev_reg_delta_json: {:?}, \
timestamp: {:?}, cred_rev_id: {:?}", blob_storage_reader_handle, rev_state_json, rev_reg_def_json, rev_reg_delta_json, timestamp, cred_rev_id);
check_useful_json!(rev_state_json, ErrorCode::CommonInvalidParam3, RevocationState);
check_useful_json!(rev_reg_def_json, ErrorCode::CommonInvalidParam4, RevocationRegistryDefinition);
check_useful_json!(rev_reg_delta_json, ErrorCode::CommonInvalidParam5, RevocationRegistryDelta);
check_useful_c_str!(cred_rev_id, ErrorCode::CommonInvalidParam7);
check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam8);
trace!("indy_update_revocation_state: entities >>> blob_storage_reader_handle: {:?}, rev_state_json: {:?}, rev_reg_def_json: {:?}, rev_reg_delta_json: {:?}, \
timestamp: {:?}, cred_rev_id: {:?}", blob_storage_reader_handle, rev_state_json, rev_reg_def_json, rev_reg_delta_json, timestamp, cred_rev_id);
let result = CommandExecutor::instance()
.send(Command::Anoncreds(AnoncredsCommand::Prover(ProverCommand::UpdateRevocationState(
blob_storage_reader_handle,
rev_state_json,
rev_reg_def_json,
rev_reg_delta_json,
timestamp,
cred_rev_id,
Box::new(move |result| {
let (err, updated_rev_info_json) = prepare_result_1!(result, String::new());
trace!("indy_update_revocation_state: updated_rev_info_json: {:?}", updated_rev_info_json);
let updated_rev_info_json = ctypes::string_to_cstring(updated_rev_info_json);
cb(command_handle, err, updated_rev_info_json.as_ptr())
})
))));
let res = prepare_result!(result);
trace!("indy_update_revocation_state: <<< res: {:?}", res);
res
}<|fim▁end|> | cred_id: *const c_char,
cb: Option<extern fn(
command_handle_: CommandHandle, err: ErrorCode, |
<|file_name|>directives.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {Directive} from '@angular/core';
import {_MatMenuContentBase, _MatMenuTriggerBase, MAT_MENU_CONTENT} from '@angular/material/menu';
/** Directive applied to an element that should trigger a `mat-menu`. */
@Directive({
selector: `[mat-menu-trigger-for], [matMenuTriggerFor]`,
host: {
'class': 'mat-mdc-menu-trigger',<|fim▁hole|>
/** Menu content that will be rendered lazily once the menu is opened. */
@Directive({
selector: 'ng-template[matMenuContent]',
providers: [{provide: MAT_MENU_CONTENT, useExisting: MatMenuContent}],
})
export class MatMenuContent extends _MatMenuContentBase {}<|fim▁end|> | },
exportAs: 'matMenuTrigger'
})
export class MatMenuTrigger extends _MatMenuTriggerBase {} |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>###############################################################################
##
## Copyright 2012 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
## Tavendo WebMQ Application Key and Secret for our Web app
APPKEY = 'foobar'
APPSECRET = 'secret'
## The "user database" of our Web app
USERDB = {'joe': 'secret', 'admin': 'hoho'}
import json, uuid, sys
from flask import Flask, url_for, Response, request, session, \
render_template, redirect, escape, flash
app = Flask(__name__)
app.secret_key = str(uuid.uuid4())
<|fim▁hole|>import hmac, hashlib, binascii
def authSignature(authChallenge, authSecret = None):
if authSecret is None:
authSecret = ""
h = hmac.new(authSecret, authChallenge, hashlib.sha256)
sig = binascii.b2a_base64(h.digest()).strip()
return sig
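# Note: authSignature answers the broker's authentication challenge by MACing the raw challenge
# string with HMAC-SHA256 under the application secret and returning the digest base64-encoded
# (a 44-character string for SHA-256), e.g. authSignature(request.data, APPSECRET) below.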
@app.route('/')
def index():
if 'username' in session:
return render_template('index.html',
server = sys.argv[1],
topic = "http://example.com/simple")
else:
return redirect(url_for('login'))
@app.route('/login', methods=['GET', 'POST'])
def login():
error = None
if request.method == 'POST':
username = request.form['username']
if not USERDB.has_key(username) or \
       USERDB[username] != request.form['password']:
error = 'Invalid credentials'
else:
flash("You were successfully logged in as '%s'" % username)
session['username'] = username
return redirect(url_for('index'))
return render_template('login.html', error = error)
@app.route('/logout')
def logout():
session.pop('username', None)
return redirect(url_for('index'))
@app.route('/authsign', methods = ['POST'])
def authsign():
if 'username' in session:
try:
data = json.loads(request.data)
print "Challenge:", data
if data['authkey'] == APPKEY:
sig = authSignature(request.data, APPSECRET)
print "Signature:", sig
return sig
        except Exception, e:
print e
return ""
if __name__ == "__main__":
app.run(host = "0.0.0.0", port = 8000, debug = True)<|fim▁end|> | |
<|file_name|>searchfilter.js<|end_file_name|><|fim▁begin|>//Libraries
const React = require("react");
const _ = require("lodash");
const DataActions = require("../actions/data_actions");
const DataStore = require("../stores/data_store");
const FilterStore = require("../stores/filter_store");
const ColumnsStore = require("../stores/columns_store");
//Mixins
const cssMixins = require("morse-react-mixins").css_mixins;
const textMixins = require("morse-react-mixins").text_mixins;
class SearchFilters extends React.Component{
constructor(props) {
super(props);
this.dropdown = ["input-group-btn", {"open":false}];
this.state = {
dropdown:this.getClasses(this.dropdown),
expanded:"false",
selectedkey:"all",
searchVal:""
};
}
componentDidMount() {
this.quickSearch = (_.isBoolean(this.props.quickSearch)) ? this.props.quickSearch : true;
if(FilterStore.isSelectedKey(this.props.item)){
this.active = [{active:true}];
this.setState({active:this.getClasses(this.active)});
}
this.setState({searchVal:DataStore.getSearchVal()});
// FilterStore.addChangeListener("change_key", this._openDropdown.bind(this));
ColumnsStore.addChangeListener("adding", this._onAdd.bind(this));
}
componentWillUnmount() {
// FilterStore.removeChangeListener("change_key", this._openDropdown);
ColumnsStore.removeChangeListener("adding", this._onAdd);
}
_onAdd(){
this.setState({
keys:ColumnsStore.getSearchable()
});
}
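    // _onChange debounces quick-search input: each keystroke resets a 200ms timer and
    // DataActions.searching() only fires once the query is longer than 2 characters (or has
    // been cleared), so the data store is not re-filtered on every key press.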
_onChange(e){
if(this.quickSearch){
if(this.loop){
window.clearTimeout(this.loop);
}
this.loop = window.setTimeout((val)=>{
if(val.length > 2 || val === ""){
DataActions.searching(val);
}
}, 200, e.target.value);
this.setState({searchVal:e.target.value});
}
// _.defer((val)=>{
// DataActions.searching(val);
// }, e.target.value);
}
// _openDropdown(){<|fim▁hole|> // expanded:expanded,
// selectedkey:FilterStore.getSelectedKey()
// });
// }
_preventSubmit(e){
// console.log("submiting", e);
e.preventDefault();
}
// renderKeys(){
// if(this.state.keys){
// let items = this.state.keys.map(function(k){
// return (<Keys item={k} key={_.uniqueId("key")} />);
// });
// return items;
// }
// }
render() {
return (
<form onSubmit={this._preventSubmit.bind(this)} className="search-filter">
<input alt="Search" type="image" src={this.props.icon} />
<div className="fields-container">
<input type="text" name="querystr" id="querystr" placeholder="Search" value={this.state.searchVal} onChange={this._onChange.bind(this)} />
</div>
</form>
);
}
}
Object.assign(SearchFilters.prototype, cssMixins);
Object.assign(SearchFilters.prototype, textMixins);
module.exports = SearchFilters;<|fim▁end|> | // this.dropdown = this.toggleCss(this.dropdown);
// let expanded = (this.state.expended === "true") ? "false" : "true";
// this.setState({
// dropdown:this.getClasses(this.dropdown), |
<|file_name|>mmerge-sort-for-linked-list.py<|end_file_name|><|fim▁begin|># key point is to find the half node
class Node:
def __init__(self, val):
self.val = val
self.next = None
class LinkList:
def __init__(self):
self.head = None
def push(self, val):
node = Node(val)
if self.head:
node.next = self.head
self.head = node
else:
self.head = node
def printList(self):
p = self.head
while p:
print p.val,
p = p.next
print
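# mergeSort finds the midpoint with the slow/fast pointer technique: fast advances two nodes per
# step and slow one, so when fast runs off the end, slow is the last node of the front half.
# The list is cut there, both halves are sorted recursively and re-linked by sortedMerge.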
def mergeSort(head):
    # base case: empty or single-node lists are already sorted
    if not head or not head.next:
        return head
slow = head
fast = head.next
while fast:
fast = fast.next
if fast:
slow = slow.next
fast = fast.next
# 2 3 20 5 10 15
frontHalf = head
backHalf = slow.next
slow.next = None
    # capture the returned heads -- the recursive calls may change which node comes first
    frontHalf = mergeSort(frontHalf)
    backHalf = mergeSort(backHalf)
head = sortedMerge(frontHalf, backHalf)
return head
def sortedMerge(a, b):
if not a:
return b
elif not b:
return a
temp = None
if a.val <= b.val:
temp = a
a.next = sortedMerge(temp.next, b)
return a
else:
temp = b
b.next = sortedMerge(a, temp.next)
return b
ll = LinkList()<|fim▁hole|>ll.push(10)
ll.push(5)
ll.push(20)
ll.push(3)
ll.push(2)
ll.printList()
ll.head = mergeSort(ll.head)
ll.printList()<|fim▁end|> | ll.push(15) |
<|file_name|>display_common.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# Urwid common display code
# Copyright (C) 2004-2011 Ian Ward
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Urwid web site: http://excess.org/urwid/
from __future__ import division, print_function
import os
import sys
try:
import termios
except ImportError:
pass # windows
from urwid.util import StoppingContext, int_scale
from urwid import signals
from urwid.compat import B, bytes3, xrange, with_metaclass
# for replacing unprintable bytes with '?'
UNPRINTABLE_TRANS_TABLE = B("?") * 32 + bytes3(list(xrange(32,256)))
# signals sent by BaseScreen
UPDATE_PALETTE_ENTRY = "update palette entry"
INPUT_DESCRIPTORS_CHANGED = "input descriptors changed"
# AttrSpec internal values
_BASIC_START = 0 # first index of basic color aliases
_CUBE_START = 16 # first index of color cube
_CUBE_SIZE_256 = 6 # one side of the color cube
_GRAY_SIZE_256 = 24
_GRAY_START_256 = _CUBE_SIZE_256 ** 3 + _CUBE_START
_CUBE_WHITE_256 = _GRAY_START_256 -1
_CUBE_SIZE_88 = 4
_GRAY_SIZE_88 = 8
_GRAY_START_88 = _CUBE_SIZE_88 ** 3 + _CUBE_START
_CUBE_WHITE_88 = _GRAY_START_88 -1
_CUBE_BLACK = _CUBE_START
# values copied from xterm 256colres.h:
_CUBE_STEPS_256 = [0x00, 0x5f, 0x87, 0xaf, 0xd7, 0xff]
_GRAY_STEPS_256 = [0x08, 0x12, 0x1c, 0x26, 0x30, 0x3a, 0x44, 0x4e, 0x58, 0x62,
0x6c, 0x76, 0x80, 0x84, 0x94, 0x9e, 0xa8, 0xb2, 0xbc, 0xc6, 0xd0,
0xda, 0xe4, 0xee]
# values copied from xterm 88colres.h:
_CUBE_STEPS_88 = [0x00, 0x8b, 0xcd, 0xff]
_GRAY_STEPS_88 = [0x2e, 0x5c, 0x73, 0x8b, 0xa2, 0xb9, 0xd0, 0xe7]
# values copied from X11/rgb.txt and XTerm-col.ad:
_BASIC_COLOR_VALUES = [(0,0,0), (205, 0, 0), (0, 205, 0), (205, 205, 0),
(0, 0, 238), (205, 0, 205), (0, 205, 205), (229, 229, 229),
(127, 127, 127), (255, 0, 0), (0, 255, 0), (255, 255, 0),
(0x5c, 0x5c, 0xff), (255, 0, 255), (0, 255, 255), (255, 255, 255)]
_COLOR_VALUES_256 = (_BASIC_COLOR_VALUES +
[(r, g, b) for r in _CUBE_STEPS_256 for g in _CUBE_STEPS_256
for b in _CUBE_STEPS_256] +
[(gr, gr, gr) for gr in _GRAY_STEPS_256])
_COLOR_VALUES_88 = (_BASIC_COLOR_VALUES +
[(r, g, b) for r in _CUBE_STEPS_88 for g in _CUBE_STEPS_88
for b in _CUBE_STEPS_88] +
[(gr, gr, gr) for gr in _GRAY_STEPS_88])
assert len(_COLOR_VALUES_256) == 256
assert len(_COLOR_VALUES_88) == 88
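# An AttrSpec value packs everything into a single integer: the foreground color
# number occupies the low byte, the background color number the next byte
# (shifted by _BG_SHIFT), and the higher bits hold the basic/high-color flags
# and text attributes defined below.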
_FG_COLOR_MASK = 0x000000ff
_BG_COLOR_MASK = 0x0000ff00
_FG_BASIC_COLOR = 0x00010000
_FG_HIGH_COLOR = 0x00020000
_BG_BASIC_COLOR = 0x00040000
_BG_HIGH_COLOR = 0x00080000
_BG_SHIFT = 8
_HIGH_88_COLOR = 0x00100000
_STANDOUT = 0x02000000
_UNDERLINE = 0x04000000
_BOLD = 0x08000000
_BLINK = 0x10000000
_ITALICS = 0x20000000
_STRIKETHROUGH = 0x40000000
_FG_MASK = (_FG_COLOR_MASK | _FG_BASIC_COLOR | _FG_HIGH_COLOR |
_STANDOUT | _UNDERLINE | _BLINK | _BOLD | _ITALICS | _STRIKETHROUGH)
_BG_MASK = _BG_COLOR_MASK | _BG_BASIC_COLOR | _BG_HIGH_COLOR
DEFAULT = 'default'
BLACK = 'black'
DARK_RED = 'dark red'
DARK_GREEN = 'dark green'
BROWN = 'brown'
DARK_BLUE = 'dark blue'
DARK_MAGENTA = 'dark magenta'
DARK_CYAN = 'dark cyan'
LIGHT_GRAY = 'light gray'
DARK_GRAY = 'dark gray'
LIGHT_RED = 'light red'
LIGHT_GREEN = 'light green'
YELLOW = 'yellow'
LIGHT_BLUE = 'light blue'
LIGHT_MAGENTA = 'light magenta'
LIGHT_CYAN = 'light cyan'
WHITE = 'white'
_BASIC_COLORS = [
BLACK,
DARK_RED,
DARK_GREEN,
BROWN,
DARK_BLUE,
DARK_MAGENTA,
DARK_CYAN,
LIGHT_GRAY,
DARK_GRAY,
LIGHT_RED,
LIGHT_GREEN,
YELLOW,
LIGHT_BLUE,
LIGHT_MAGENTA,
LIGHT_CYAN,
WHITE,
]
_ATTRIBUTES = {
'bold': _BOLD,
'italics': _ITALICS,
'underline': _UNDERLINE,
'blink': _BLINK,
'standout': _STANDOUT,
'strikethrough': _STRIKETHROUGH,
}
def _value_lookup_table(values, size):
"""
Generate a lookup table for finding the closest item in values.
Lookup returns (index into values)+1
values -- list of values in ascending order, all < size
size -- size of lookup table and maximum value
>>> _value_lookup_table([0, 7, 9], 10)
[0, 0, 0, 0, 1, 1, 1, 1, 2, 2]
"""
middle_values = [0] + [(values[i] + values[i + 1] + 1) // 2
for i in range(len(values) - 1)] + [size]
lookup_table = []
for i in range(len(middle_values)-1):
count = middle_values[i + 1] - middle_values[i]
lookup_table.extend([i] * count)
return lookup_table
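# precomputed tables mapping an 8-bit component value to the index of the
# nearest color-cube or gray step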
_CUBE_256_LOOKUP = _value_lookup_table(_CUBE_STEPS_256, 256)
_GRAY_256_LOOKUP = _value_lookup_table([0] + _GRAY_STEPS_256 + [0xff], 256)
_CUBE_88_LOOKUP = _value_lookup_table(_CUBE_STEPS_88, 256)
_GRAY_88_LOOKUP = _value_lookup_table([0] + _GRAY_STEPS_88 + [0xff], 256)
# convert steps to values that will be used by string versions of the colors
# 1 hex digit for rgb and 0..100 for grayscale
_CUBE_STEPS_256_16 = [int_scale(n, 0x100, 0x10) for n in _CUBE_STEPS_256]
_GRAY_STEPS_256_101 = [int_scale(n, 0x100, 101) for n in _GRAY_STEPS_256]
_CUBE_STEPS_88_16 = [int_scale(n, 0x100, 0x10) for n in _CUBE_STEPS_88]
_GRAY_STEPS_88_101 = [int_scale(n, 0x100, 101) for n in _GRAY_STEPS_88]
# create lookup tables for 1 hex digit rgb and 0..100 for grayscale values
_CUBE_256_LOOKUP_16 = [_CUBE_256_LOOKUP[int_scale(n, 16, 0x100)]
for n in range(16)]
_GRAY_256_LOOKUP_101 = [_GRAY_256_LOOKUP[int_scale(n, 101, 0x100)]
for n in range(101)]
_CUBE_88_LOOKUP_16 = [_CUBE_88_LOOKUP[int_scale(n, 16, 0x100)]
for n in range(16)]
_GRAY_88_LOOKUP_101 = [_GRAY_88_LOOKUP[int_scale(n, 101, 0x100)]
for n in range(101)]
# The functions _gray_num_256() and _gray_num_88() do not include the gray
# values from the color cube so that the gray steps are an even width.
# The color cube grays are available by using the rgb functions. Pure
# white and black are taken from the color cube, since the gray range does
# not include them, and the basic colors are more likely to have been
# customized by an end-user.
def _gray_num_256(gnum):
"""Return ths color number for gray number gnum.
Color cube black and white are returned for 0 and 25 respectively
since those values aren't included in the gray scale.
"""
# grays start from index 1
gnum -= 1
if gnum < 0:
return _CUBE_BLACK
if gnum >= _GRAY_SIZE_256:
return _CUBE_WHITE_256
return _GRAY_START_256 + gnum
def _gray_num_88(gnum):
"""Return ths color number for gray number gnum.
Color cube black and white are returned for 0 and 9 respectively
since those values aren't included in the gray scale.
"""
# gnums start from index 1
gnum -= 1
if gnum < 0:
return _CUBE_BLACK
if gnum >= _GRAY_SIZE_88:
return _CUBE_WHITE_88
return _GRAY_START_88 + gnum
def _color_desc_256(num):
"""
Return a string description of color number num.
0..15 -> 'h0'..'h15' basic colors (as high-colors)
16..231 -> '#000'..'#fff' color cube colors
232..255 -> 'g3'..'g93' grays
>>> _color_desc_256(15)
'h15'
>>> _color_desc_256(16)
'#000'
>>> _color_desc_256(17)
'#006'
>>> _color_desc_256(230)
'#ffd'
>>> _color_desc_256(233)
'g7'
>>> _color_desc_256(234)
'g11'
"""
assert num >= 0 and num < 256, num
if num < _CUBE_START:
return 'h%d' % num
if num < _GRAY_START_256:
num -= _CUBE_START
b, num = num % _CUBE_SIZE_256, num // _CUBE_SIZE_256
g, num = num % _CUBE_SIZE_256, num // _CUBE_SIZE_256
r = num % _CUBE_SIZE_256
return '#%x%x%x' % (_CUBE_STEPS_256_16[r], _CUBE_STEPS_256_16[g],
_CUBE_STEPS_256_16[b])
return 'g%d' % _GRAY_STEPS_256_101[num - _GRAY_START_256]
def _color_desc_88(num):
"""
Return a string description of color number num.
0..15 -> 'h0'..'h15' basic colors (as high-colors)
16..79 -> '#000'..'#fff' color cube colors
80..87 -> 'g18'..'g90' grays
>>> _color_desc_88(15)
'h15'
>>> _color_desc_88(16)
'#000'
>>> _color_desc_88(17)
'#008'
>>> _color_desc_88(78)
'#ffc'
>>> _color_desc_88(81)
'g36'
>>> _color_desc_88(82)
'g45'
"""
assert num > 0 and num < 88
if num < _CUBE_START:
return 'h%d' % num
if num < _GRAY_START_88:
num -= _CUBE_START
b, num = num % _CUBE_SIZE_88, num // _CUBE_SIZE_88
        g, r = num % _CUBE_SIZE_88, num // _CUBE_SIZE_88
return '#%x%x%x' % (_CUBE_STEPS_88_16[r], _CUBE_STEPS_88_16[g],
_CUBE_STEPS_88_16[b])
return 'g%d' % _GRAY_STEPS_88_101[num - _GRAY_START_88]
def _parse_color_256(desc):
"""
Return a color number for the description desc.
'h0'..'h255' -> 0..255 actual color number
'#000'..'#fff' -> 16..231 color cube colors
'g0'..'g100' -> 16, 232..255, 231 grays and color cube black/white
'g#00'..'g#ff' -> 16, 232...255, 231 gray and color cube black/white
Returns None if desc is invalid.
>>> _parse_color_256('h142')
142
>>> _parse_color_256('#f00')
196
>>> _parse_color_256('g100')
231
>>> _parse_color_256('g#80')
244
"""
if len(desc) > 4:
# keep the length within reason before parsing
return None
try:
if desc.startswith('h'):
# high-color number
num = int(desc[1:], 10)
if num < 0 or num > 255:
return None
return num
if desc.startswith('#') and len(desc) == 4:
# color-cube coordinates
rgb = int(desc[1:], 16)
if rgb < 0:
return None
b, rgb = rgb % 16, rgb // 16
g, r = rgb % 16, rgb // 16
# find the closest rgb values
r = _CUBE_256_LOOKUP_16[r]
g = _CUBE_256_LOOKUP_16[g]
b = _CUBE_256_LOOKUP_16[b]
return _CUBE_START + (r * _CUBE_SIZE_256 + g) * _CUBE_SIZE_256 + b
# Only remaining possibility is gray value
if desc.startswith('g#'):
# hex value 00..ff
gray = int(desc[2:], 16)
if gray < 0 or gray > 255:
return None
gray = _GRAY_256_LOOKUP[gray]
elif desc.startswith('g'):
# decimal value 0..100
gray = int(desc[1:], 10)
if gray < 0 or gray > 100:
return None
gray = _GRAY_256_LOOKUP_101[gray]
else:
return None
if gray == 0:
return _CUBE_BLACK
gray -= 1
if gray == _GRAY_SIZE_256:
return _CUBE_WHITE_256
return _GRAY_START_256 + gray
except ValueError:
return None
def _parse_color_88(desc):
"""
Return a color number for the description desc.
'h0'..'h87' -> 0..87 actual color number
'#000'..'#fff' -> 16..79 color cube colors
'g0'..'g100' -> 16, 80..87, 79 grays and color cube black/white
'g#00'..'g#ff' -> 16, 80...87, 79 gray and color cube black/white
Returns None if desc is invalid.
>>> _parse_color_88('h142')
>>> _parse_color_88('h42')
42
>>> _parse_color_88('#f00')
64
>>> _parse_color_88('g100')
79
>>> _parse_color_88('g#80')
83
"""
if len(desc) > 4:
# keep the length within reason before parsing
return None
try:
if desc.startswith('h'):
# high-color number
num = int(desc[1:], 10)
if num < 0 or num > 87:
return None
return num
if desc.startswith('#') and len(desc) == 4:
# color-cube coordinates
rgb = int(desc[1:], 16)
if rgb < 0:
return None
b, rgb = rgb % 16, rgb // 16
g, r = rgb % 16, rgb // 16
# find the closest rgb values
r = _CUBE_88_LOOKUP_16[r]
g = _CUBE_88_LOOKUP_16[g]
b = _CUBE_88_LOOKUP_16[b]
return _CUBE_START + (r * _CUBE_SIZE_88 + g) * _CUBE_SIZE_88 + b
# Only remaining possibility is gray value
if desc.startswith('g#'):
# hex value 00..ff
gray = int(desc[2:], 16)
if gray < 0 or gray > 255:
return None
gray = _GRAY_88_LOOKUP[gray]
elif desc.startswith('g'):
# decimal value 0..100
gray = int(desc[1:], 10)
if gray < 0 or gray > 100:
return None
gray = _GRAY_88_LOOKUP_101[gray]
else:
return None
if gray == 0:
return _CUBE_BLACK
gray -= 1
if gray == _GRAY_SIZE_88:
return _CUBE_WHITE_88
return _GRAY_START_88 + gray
except ValueError:
return None
class AttrSpecError(Exception):
pass
class AttrSpec(object):
def __init__(self, fg, bg, colors=256):
"""
fg -- a string containing a comma-separated foreground color
and settings
Color values:
'default' (use the terminal's default foreground),
'black', 'dark red', 'dark green', 'brown', 'dark blue',
'dark magenta', 'dark cyan', 'light gray', 'dark gray',
'light red', 'light green', 'yellow', 'light blue',
'light magenta', 'light cyan', 'white'
High-color example values:
            '#009' (0% red, 0% green, 60% blue, like HTML colors)
'#fcc' (100% red, 80% green, 80% blue)
'g40' (40% gray, decimal), 'g#cc' (80% gray, hex),
'#000', 'g0', 'g#00' (black),
'#fff', 'g100', 'g#ff' (white)
'h8' (color number 8), 'h255' (color number 255)
Setting:
'bold', 'italics', 'underline', 'blink', 'standout',
'strikethrough'
Some terminals use 'bold' for bright colors. Most terminals
ignore the 'blink' setting. If the color is not given then
'default' will be assumed.
bg -- a string containing the background color
Color values:
'default' (use the terminal's default background),
'black', 'dark red', 'dark green', 'brown', 'dark blue',
'dark magenta', 'dark cyan', 'light gray'
            High-color examples:
see fg examples above
An empty string will be treated the same as 'default'.
colors -- the maximum colors available for the specification
Valid values include: 1, 16, 88 and 256. High-color
values are only usable with 88 or 256 colors. With
1 color only the foreground settings may be used.
>>> AttrSpec('dark red', 'light gray', 16)
AttrSpec('dark red', 'light gray')
>>> AttrSpec('yellow, underline, bold', 'dark blue')
AttrSpec('yellow,bold,underline', 'dark blue')
>>> AttrSpec('#ddb', '#004', 256) # closest colors will be found
AttrSpec('#dda', '#006')
>>> AttrSpec('#ddb', '#004', 88)
AttrSpec('#ccc', '#000', colors=88)
"""
if colors not in (1, 16, 88, 256):
raise AttrSpecError('invalid number of colors (%d).' % colors)
self._value = 0 | _HIGH_88_COLOR * (colors == 88)
self.foreground = fg
self.background = bg
if self.colors > colors:
raise AttrSpecError(('foreground/background (%s/%s) require ' +
'more colors than have been specified (%d).') %
(repr(fg), repr(bg), colors))
foreground_basic = property(lambda s: s._value & _FG_BASIC_COLOR != 0)
foreground_high = property(lambda s: s._value & _FG_HIGH_COLOR != 0)
foreground_number = property(lambda s: s._value & _FG_COLOR_MASK)
background_basic = property(lambda s: s._value & _BG_BASIC_COLOR != 0)
background_high = property(lambda s: s._value & _BG_HIGH_COLOR != 0)
background_number = property(lambda s: (s._value & _BG_COLOR_MASK)
>> _BG_SHIFT)
italics = property(lambda s: s._value & _ITALICS != 0)
bold = property(lambda s: s._value & _BOLD != 0)
underline = property(lambda s: s._value & _UNDERLINE != 0)
blink = property(lambda s: s._value & _BLINK != 0)
standout = property(lambda s: s._value & _STANDOUT != 0)
strikethrough = property(lambda s: s._value & _STRIKETHROUGH != 0)
def _colors(self):
"""
Return the maximum colors required for this object.
Returns 256, 88, 16 or 1.
"""
if self._value & _HIGH_88_COLOR:
return 88
if self._value & (_BG_HIGH_COLOR | _FG_HIGH_COLOR):
return 256
        if self._value & (_FG_BASIC_COLOR | _BG_BASIC_COLOR):
return 16
return 1
colors = property(_colors)
def __repr__(self):
"""
Return an executable python representation of the AttrSpec
object.
"""
args = "%r, %r" % (self.foreground, self.background)
if self.colors == 88:
# 88-color mode is the only one that is handled differently
args = args + ", colors=88"
return "%s(%s)" % (self.__class__.__name__, args)
def _foreground_color(self):
"""Return only the color component of the foreground."""
if not (self.foreground_basic or self.foreground_high):
return 'default'
if self.foreground_basic:
return _BASIC_COLORS[self.foreground_number]
if self.colors == 88:
return _color_desc_88(self.foreground_number)
return _color_desc_256(self.foreground_number)
def _foreground(self):
return (self._foreground_color() +
',bold' * self.bold + ',italics' * self.italics +
',standout' * self.standout + ',blink' * self.blink +
',underline' * self.underline + ',strikethrough' * self.strikethrough)
def _set_foreground(self, foreground):
color = None
flags = 0
# handle comma-separated foreground
for part in foreground.split(','):
part = part.strip()
if part in _ATTRIBUTES:
# parse and store "settings"/attributes in flags
if flags & _ATTRIBUTES[part]:
raise AttrSpecError(("Setting %s specified more than" +
"once in foreground (%s)") % (repr(part),
repr(foreground)))
flags |= _ATTRIBUTES[part]
continue
# past this point we must be specifying a color
if part in ('', 'default'):
scolor = 0
elif part in _BASIC_COLORS:
scolor = _BASIC_COLORS.index(part)
flags |= _FG_BASIC_COLOR
elif self._value & _HIGH_88_COLOR:
scolor = _parse_color_88(part)
flags |= _FG_HIGH_COLOR
else:
scolor = _parse_color_256(part)
flags |= _FG_HIGH_COLOR
# _parse_color_*() return None for unrecognised colors
if scolor is None:
raise AttrSpecError(("Unrecognised color specification %s " +
"in foreground (%s)") % (repr(part), repr(foreground)))
if color is not None:
raise AttrSpecError(("More than one color given for " +
"foreground (%s)") % (repr(foreground),))
color = scolor
if color is None:
color = 0
self._value = (self._value & ~_FG_MASK) | color | flags
foreground = property(_foreground, _set_foreground)
def _background(self):
"""Return the background color."""
if not (self.background_basic or self.background_high):
return 'default'
if self.background_basic:
return _BASIC_COLORS[self.background_number]
if self._value & _HIGH_88_COLOR:
return _color_desc_88(self.background_number)
return _color_desc_256(self.background_number)
def _set_background(self, background):
flags = 0
if background in ('', 'default'):
color = 0
elif background in _BASIC_COLORS:
color = _BASIC_COLORS.index(background)
flags |= _BG_BASIC_COLOR
elif self._value & _HIGH_88_COLOR:
color = _parse_color_88(background)
flags |= _BG_HIGH_COLOR
else:
color = _parse_color_256(background)
flags |= _BG_HIGH_COLOR
if color is None:
raise AttrSpecError(("Unrecognised color specification " +
"in background (%s)") % (repr(background),))
self._value = (self._value & ~_BG_MASK) | (color << _BG_SHIFT) | flags
background = property(_background, _set_background)
def get_rgb_values(self):
"""
Return (fg_red, fg_green, fg_blue, bg_red, bg_green, bg_blue) color
components. Each component is in the range 0-255. Values are taken
from the XTerm defaults and may not exactly match the user's terminal.
        If the foreground or background is 'default' then all their components
will be returned as None.
>>> AttrSpec('yellow', '#ccf', colors=88).get_rgb_values()
(255, 255, 0, 205, 205, 255)
>>> AttrSpec('default', 'g92').get_rgb_values()
(None, None, None, 238, 238, 238)
"""
if not (self.foreground_basic or self.foreground_high):
vals = (None, None, None)
elif self.colors == 88:
assert self.foreground_number < 88, "Invalid AttrSpec _value"
vals = _COLOR_VALUES_88[self.foreground_number]
else:
vals = _COLOR_VALUES_256[self.foreground_number]
if not (self.background_basic or self.background_high):
return vals + (None, None, None)
elif self.colors == 88:
assert self.background_number < 88, "Invalid AttrSpec _value"
return vals + _COLOR_VALUES_88[self.background_number]
else:
return vals + _COLOR_VALUES_256[self.background_number]
def __eq__(self, other):
return isinstance(other, AttrSpec) and self._value == other._value
def __ne__(self, other):
return not self == other
__hash__ = object.__hash__
class RealTerminal(object):
def __init__(self):
super(RealTerminal,self).__init__()
self._signal_keys_set = False
self._old_signal_keys = None
def tty_signal_keys(self, intr=None, quit=None, start=None,
stop=None, susp=None, fileno=None):
"""
Read and/or set the tty's signal character settings.
This function returns the current settings as a tuple.
Use the string 'undefined' to unmap keys from their signals.
The value None is used when no change is being made.
Setting signal keys is done using the integer ascii
code for the key, eg. 3 for CTRL+C.
If this function is called after start() has been called
then the original settings will be restored when stop()
is called.
"""
if fileno is None:
fileno = sys.stdin.fileno()
if not os.isatty(fileno):
return
tattr = termios.tcgetattr(fileno)
sattr = tattr[6]
skeys = (sattr[termios.VINTR], sattr[termios.VQUIT],
sattr[termios.VSTART], sattr[termios.VSTOP],
sattr[termios.VSUSP])
if intr == 'undefined': intr = 0
if quit == 'undefined': quit = 0
if start == 'undefined': start = 0
if stop == 'undefined': stop = 0
if susp == 'undefined': susp = 0
if intr is not None: tattr[6][termios.VINTR] = intr
if quit is not None: tattr[6][termios.VQUIT] = quit
if start is not None: tattr[6][termios.VSTART] = start
if stop is not None: tattr[6][termios.VSTOP] = stop
if susp is not None: tattr[6][termios.VSUSP] = susp
if intr is not None or quit is not None or \
start is not None or stop is not None or \
susp is not None:
termios.tcsetattr(fileno, termios.TCSADRAIN, tattr)
self._signal_keys_set = True
return skeys
class ScreenError(Exception):
pass
class BaseScreen(with_metaclass(signals.MetaSignals, object)):
"""
Base class for Screen classes (raw_display.Screen, .. etc)
"""
signals = [UPDATE_PALETTE_ENTRY, INPUT_DESCRIPTORS_CHANGED]
def __init__(self):
super(BaseScreen,self).__init__()
self._palette = {}
self._started = False
started = property(lambda self: self._started)
def start(self, *args, **kwargs):
"""Set up the screen. If the screen has already been started, does
nothing.
May be used as a context manager, in which case :meth:`stop` will
automatically be called at the end of the block:
with screen.start():
...
You shouldn't override this method in a subclass; instead, override
:meth:`_start`.
"""
if not self._started:
self._start(*args, **kwargs)
self._started = True
return StoppingContext(self)
def _start(self):
pass
def stop(self):
if self._started:
self._stop()
self._started = False
def _stop(self):
pass
def run_wrapper(self, fn, *args, **kwargs):
"""Start the screen, call a function, then stop the screen. Extra
arguments are passed to `start`.
Deprecated in favor of calling `start` as a context manager.
"""
with self.start(*args, **kwargs):
return fn()
def register_palette(self, palette):
"""Register a set of palette entries.
palette -- a list of (name, like_other_name) or
(name, foreground, background, mono, foreground_high,
background_high) tuples
The (name, like_other_name) format will copy the settings
from the palette entry like_other_name, which must appear
before this tuple in the list.
The mono and foreground/background_high values are
optional ie. the second tuple format may have 3, 4 or 6
values. See register_palette_entry() for a description
of the tuple values.
"""
for item in palette:
if len(item) in (3,4,6):
self.register_palette_entry(*item)
continue
if len(item) != 2:
raise ScreenError("Invalid register_palette entry: %s" %
repr(item))
name, like_name = item
if like_name not in self._palette:
raise ScreenError("palette entry '%s' doesn't exist"%like_name)
self._palette[name] = self._palette[like_name]
def register_palette_entry(self, name, foreground, background,
mono=None, foreground_high=None, background_high=None):
"""Register a single palette entry.
name -- new entry/attribute name
foreground -- a string containing a comma-separated foreground
color and settings
Color values:
'default' (use the terminal's default foreground),
'black', 'dark red', 'dark green', 'brown', 'dark blue',
'dark magenta', 'dark cyan', 'light gray', 'dark gray',
'light red', 'light green', 'yellow', 'light blue',
'light magenta', 'light cyan', 'white'
Settings:
'bold', 'underline', 'blink', 'standout', 'strikethrough'
Some terminals use 'bold' for bright colors. Most terminals
ignore the 'blink' setting. If the color is not given then
'default' will be assumed.
background -- a string containing the background color
Background color values:
'default' (use the terminal's default background),
'black', 'dark red', 'dark green', 'brown', 'dark blue',
'dark magenta', 'dark cyan', 'light gray'
mono -- a comma-separated string containing monochrome terminal
settings (see "Settings" above.)
None = no terminal settings (same as 'default')
foreground_high -- a string containing a comma-separated
foreground color and settings, standard foreground
colors (see "Color values" above) or high-colors may<|fim▁hole|> be used
High-color example values:
            '#009' (0% red, 0% green, 60% blue, like HTML colors)
'#fcc' (100% red, 80% green, 80% blue)
'g40' (40% gray, decimal), 'g#cc' (80% gray, hex),
'#000', 'g0', 'g#00' (black),
'#fff', 'g100', 'g#ff' (white)
'h8' (color number 8), 'h255' (color number 255)
None = use foreground parameter value
background_high -- a string containing the background color,
standard background colors (see "Background colors" above)
or high-colors (see "High-color example values" above)
may be used
None = use background parameter value
"""
basic = AttrSpec(foreground, background, 16)
if type(mono) == tuple:
# old style of specifying mono attributes was to put them
# in a tuple. convert to comma-separated string
mono = ",".join(mono)
if mono is None:
mono = DEFAULT
mono = AttrSpec(mono, DEFAULT, 1)
if foreground_high is None:
foreground_high = foreground
if background_high is None:
background_high = background
high_256 = AttrSpec(foreground_high, background_high, 256)
# 'hX' where X > 15 are different in 88/256 color, use
# basic colors for 88-color mode if high colors are specified
# in this way (also avoids crash when X > 87)
def large_h(desc):
if not desc.startswith('h'):
return False
if ',' in desc:
desc = desc.split(',',1)[0]
num = int(desc[1:], 10)
return num > 15
if large_h(foreground_high) or large_h(background_high):
high_88 = basic
else:
high_88 = AttrSpec(foreground_high, background_high, 88)
signals.emit_signal(self, UPDATE_PALETTE_ENTRY,
name, basic, mono, high_88, high_256)
self._palette[name] = (basic, mono, high_88, high_256)
def _test():
import doctest
doctest.testmod()
if __name__=='__main__':
_test()<|fim▁end|> | |
<|file_name|>debounce.rs<|end_file_name|><|fim▁begin|>#![allow(dead_code)]
extern crate fomo;
extern crate tempdir;
#[macro_use]
mod utils;
use fomo::notify::*;
use std::sync::mpsc;
use std::thread;
use std::time::{Duration, Instant};
use tempdir::TempDir;
use utils::*;
const DELAY_MS: u64 = 1000;
const TIMEOUT_MS: u64 = 1000;
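// recv_events_debounced drains the channel for DELAY_MS + TIMEOUT_MS, which is
// long enough for the debounced watcher to flush the events under test.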
fn recv_events_debounced(rx: &mpsc::Receiver<fomo::RootMessage>) -> Vec<DebouncedEvent> {
let start = Instant::now();
let mut events = Vec::new();
while start.elapsed() < Duration::from_millis(DELAY_MS + TIMEOUT_MS) {
match rx.try_recv() {
Ok(fomo::RootMessage::Event(event)) => events.push(event),
Err(mpsc::TryRecvError::Empty) => (),
Err(e) => panic!("unexpected channel err: {:?}", e),
_ => panic!("got a query?"),
}
thread::sleep(Duration::from_millis(50));
}
events
}
#[test]
fn create_file() {
let tdir = TempDir::new("temp_dir").expect("failed to create temporary directory");
sleep_macos(10);
let (tx, rx) = mpsc::channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(DELAY_MS))
.expect("failed to create debounced watcher");
watcher.watch(tdir.mkpath("."), RecursiveMode::Recursive).expect("failed to watch directory");
tdir.create("file1");
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::Create(tdir.mkpath("file1")) ]);
}
#[test]
fn write_file() {
let tdir = TempDir::new("temp_dir").expect("failed to create temporary directory");
tdir.create_all(vec![ "file1" ]);
sleep_macos(35_000);
let (tx, rx) = mpsc::channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(DELAY_MS))
.expect("failed to create debounced watcher");
watcher.watch(tdir.mkpath("."), RecursiveMode::Recursive).expect("failed to watch directory");
tdir.write("file1");
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::NoticeWrite(tdir.mkpath("file1")),
DebouncedEvent::Write(tdir.mkpath("file1")) ]);
}
#[test]
fn write_long_file() {
let tdir = TempDir::new("temp_dir").expect("failed to create temporary directory");
tdir.create_all(vec![ "file1" ]);
sleep_macos(35_000);
let (tx, rx) = mpsc::channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(DELAY_MS))
.expect("failed to create debounced watcher");
watcher.watch(tdir.mkpath("."), RecursiveMode::Recursive).expect("failed to watch directory");
let wait = Duration::from_millis(DELAY_MS / 2);
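    // writes spaced at half the debounce delay should all collapse into a
    // single NoticeWrite/Write pair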
tdir.write("file1");
thread::sleep(wait);
tdir.write("file1");
thread::sleep(wait);
tdir.write("file1");
thread::sleep(wait);
tdir.write("file1");
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::NoticeWrite(tdir.mkpath("file1")),
DebouncedEvent::Write(tdir.mkpath("file1")) ]);
}
// Linux:
//
// thread 'write_long_file' panicked at 'assertion failed: `(left == right)`
// (left: `[
// NoticeWrite("/tmp/temp_dir.fZov9D5M7lQ6/file1"),
// Write("/tmp/temp_dir.fZov9D5M7lQ6/file1"),
// NoticeWrite("/tmp/temp_dir.fZov9D5M7lQ6/file1"),
// Write("/tmp/temp_dir.fZov9D5M7lQ6/file1")
// ]`,
// right: `[
// NoticeWrite("/tmp/temp_dir.fZov9D5M7lQ6/file1"),
// Write("/tmp/temp_dir.fZov9D5M7lQ6/file1")
// ]`)',
// tests/debounce.rs:100
#[test]
fn modify_file() {
let tdir = TempDir::new("temp_dir").expect("failed to create temporary directory");
tdir.create_all(vec![ "file1" ]);
sleep_macos(35_000);
let (tx, rx) = mpsc::channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(DELAY_MS))
.expect("failed to create debounced watcher");
watcher.watch(tdir.mkpath("."), RecursiveMode::Recursive).expect("failed to watch directory");
tdir.chmod("file1");
if cfg!(target_os = "windows") {
// windows cannot distinguish between chmod and write
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::NoticeWrite(tdir.mkpath("file1")),
DebouncedEvent::Write(tdir.mkpath("file1")) ]);
} else {
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::Chmod(tdir.mkpath("file1")) ]);
}
}
#[test]
fn delete_file() {
let tdir = TempDir::new("temp_dir").expect("failed to create temporary directory");
tdir.create_all(vec![ "file1" ]);
sleep_macos(35_000);
let (tx, rx) = mpsc::channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(DELAY_MS))
.expect("failed to create debounced watcher");
watcher.watch(tdir.mkpath("."), RecursiveMode::Recursive).expect("failed to watch directory");
tdir.remove("file1");
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::NoticeRemove(tdir.mkpath("file1")),
DebouncedEvent::Remove(tdir.mkpath("file1")) ]);
}
#[test]
fn rename_file() {
let tdir = TempDir::new("temp_dir").expect("failed to create temporary directory");
tdir.create_all(vec![ "file1" ]);
sleep_macos(35_000);
let (tx, rx) = mpsc::channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(DELAY_MS))
.expect("failed to create debounced watcher");
watcher.watch(tdir.mkpath("."), RecursiveMode::Recursive).expect("failed to watch directory");
tdir.rename("file1", "file2");
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::NoticeRemove(tdir.mkpath("file1")),
DebouncedEvent::Rename(tdir.mkpath("file1"), tdir.mkpath("file2")) ]);
}
#[test]
fn create_write_modify_file() {
let tdir = TempDir::new("temp_dir").expect("failed to create temporary directory");
sleep_macos(10);
let (tx, rx) = mpsc::channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(DELAY_MS))
.expect("failed to create debounced watcher");
watcher.watch(tdir.mkpath("."), RecursiveMode::Recursive).expect("failed to watch directory");
tdir.create("file1");
tdir.write("file1");
tdir.chmod("file1");
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::Create(tdir.mkpath("file1")) ]);
}
#[test]
fn create_delete_file() {
let tdir = TempDir::new("temp_dir").expect("failed to create temporary directory");
sleep_macos(10);
let (tx, rx) = mpsc::channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(DELAY_MS))
.expect("failed to create debounced watcher");
watcher.watch(tdir.mkpath("."), RecursiveMode::Recursive).expect("failed to watch directory");
tdir.create("file1");
sleep_macos(10);
tdir.remove("file1");
assert_eq!(recv_events_debounced(&rx), vec![]);
}
#[test]
fn delete_create_file() {
let tdir = TempDir::new("temp_dir").expect("failed to create temporary directory");
tdir.create_all(vec![ "file1" ]);
sleep_macos(35_000);
let (tx, rx) = mpsc::channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(DELAY_MS))
.expect("failed to create debounced watcher");
watcher.watch(tdir.mkpath("."), RecursiveMode::Recursive).expect("failed to watch directory");
tdir.remove("file1");
sleep_macos(10);
tdir.create("file1");
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::NoticeRemove(tdir.mkpath("file1")),
DebouncedEvent::Write(tdir.mkpath("file1")) ]);
}
#[test]
fn create_rename_file() {
let tdir = TempDir::new("temp_dir").expect("failed to create temporary directory");
sleep_macos(10);
let (tx, rx) = mpsc::channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(DELAY_MS))
.expect("failed to create debounced watcher");
watcher.watch(tdir.mkpath("."), RecursiveMode::Recursive).expect("failed to watch directory");
tdir.create("file1");
tdir.rename("file1", "file2");
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::Create(tdir.mkpath("file2")) ]);
}
#[test]
fn create_rename_delete_file() {
let tdir = TempDir::new("temp_dir").expect("failed to create temporary directory");
sleep_macos(10);
let (tx, rx) = mpsc::channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(DELAY_MS))
.expect("failed to create debounced watcher");
watcher.watch(tdir.mkpath("."), RecursiveMode::Recursive).expect("failed to watch directory");
tdir.create("file1");
sleep_macos(10);
tdir.rename("file1", "file2");
sleep_macos(10);
tdir.remove("file2");
sleep_macos(DELAY_MS);
assert_eq!(recv_events_debounced(&rx), vec![]);
}
// ---- create_rename_delete_file stdout ----
// Mac OS
//
// thread 'create_rename_delete_file' panicked at 'assertion failed: `(left == right)`
// (left: `[
// NoticeRemove("/private/var/folders/gw/_2jq29095y7b__wtby9dg_5h0000gn/T/temp_dir.
// MJM4fvovN8qg/file2"),
// Remove("/private/var/folders/gw/_2jq29095y7b__wtby9dg_5h0000gn/T/temp_dir.
// MJM4fvovN8qg/file2")
// ]`,
// right: `[]`)',
// tests/debounce.rs:273
#[test]
fn create_rename_overwrite_file() {
let tdir = TempDir::new("temp_dir").expect("failed to create temporary directory");
tdir.create_all(vec![ "file2" ]);
sleep_macos(35_000);
let (tx, rx) = mpsc::channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(DELAY_MS))
.expect("failed to create debounced watcher");
watcher.watch(tdir.mkpath("."), RecursiveMode::Recursive).expect("failed to watch directory");
tdir.create("file1");
tdir.rename("file1", "file2");
if cfg!(target_os = "windows") {
// Windows interprets a move that overwrites a file as a delete of the source file and a
// write to the file that is being overwritten
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::NoticeWrite(tdir.mkpath("file2")),
DebouncedEvent::Write(tdir.mkpath("file2")) ]);
} else if cfg!(target_os = "macos") {
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::NoticeRemove(tdir.mkpath("file2")),
DebouncedEvent::Create(tdir.mkpath("file2")) /* even though the file is
* being overwritten, that
* can't be detected */ ]);
} else {
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::Create(tdir.mkpath("file2")) /* even though the file is
* being overwritten, that
* can't be detected */ ]);
}
}
// https://github.com/passcod/notify/issues/99
#[test]
fn create_rename_write_create() {
// fsevents
let tdir = TempDir::new("temp_dir").expect("failed to create temporary directory");
sleep_macos(10);
let (tx, rx) = mpsc::channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(DELAY_MS))
.expect("failed to create debounced watcher");
watcher.watch(tdir.mkpath("."), RecursiveMode::Recursive).expect("failed to watch directory");
tdir.create("file1");
tdir.rename("file1", "file2");
sleep(10);
tdir.write("file2");
tdir.create("file3");
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::Create(tdir.mkpath("file2")),
DebouncedEvent::Create(tdir.mkpath("file3")) ]);
}
// https://github.com/passcod/notify/issues/100
#[test]
fn create_rename_remove_create() {
// fsevents
let tdir = TempDir::new("temp_dir").expect("failed to create temporary directory");
sleep_macos(10);
let (tx, rx) = mpsc::channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(DELAY_MS))
.expect("failed to create debounced watcher");
watcher.watch(tdir.mkpath("."), RecursiveMode::Recursive).expect("failed to watch directory");
tdir.create("file1");
sleep_macos(35_000);
tdir.rename("file1", "file2");
tdir.remove("file2");
sleep_macos(10);
tdir.create("file3");
if cfg!(target_os = "macos") {
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::Create(tdir.mkpath("file1")),
DebouncedEvent::NoticeRemove(tdir.mkpath("file1")),
DebouncedEvent::NoticeRemove(tdir.mkpath("file2")),
// DebouncedEvent::Remove(tdir.mkpath("file1")), BUG: There should be a
// remove event for file1
DebouncedEvent::Remove(tdir.mkpath("file2")),
DebouncedEvent::Create(tdir.mkpath("file3")) ]);
} else {
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::Create(tdir.mkpath("file3")) ]);
}
}
// https://github.com/passcod/notify/issues/101
#[test]
fn move_out_sleep_move_in() {
// fsevents
let tdir = TempDir::new("temp_dir").expect("failed to create temporary directory");
tdir.create("watch_dir");
sleep_macos(10);
let (tx, rx) = mpsc::channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(DELAY_MS))
.expect("failed to create debounced watcher");
watcher.watch(tdir.mkpath("watch_dir"), RecursiveMode::Recursive)
.expect("failed to watch directory");
tdir.create("watch_dir/file1");
tdir.rename("watch_dir/file1", "file1");
sleep(DELAY_MS + 10);
tdir.rename("file1", "watch_dir/file2");
if cfg!(target_os = "macos") {
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::Create(tdir.mkpath("watch_dir/file1")),
DebouncedEvent::NoticeRemove(tdir.mkpath("watch_dir/file2")),
DebouncedEvent::Create(tdir.mkpath("watch_dir/file2")) ]);
} else {
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::Create(tdir.mkpath("watch_dir/file2")) ]);
}
}
// A stress test that moves files around, trying to trigger possible bugs related
// to moving files.
// For example, with inotify it's possible that two connected move events are split
// between two mio polls. This doesn't happen often, though.
#[test]
#[ignore]
fn move_repeatedly() {
let tdir = TempDir::new("temp_dir").expect("failed to create temporary directory");
tdir.create("watch_dir");
sleep_macos(10);
let (tx, rx) = mpsc::channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(DELAY_MS))
.expect("failed to create debounced watcher");
watcher.watch(tdir.mkpath("watch_dir"), RecursiveMode::Recursive)
.expect("failed to watch directory");
tdir.create("watch_dir/file1");
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::Create(tdir.mkpath("watch_dir/file1")) ]);
for i in 1..300 {
let from = format!("watch_dir/file{}", i);
let to = format!("watch_dir/file{}", i + 1);
tdir.rename(&from, &to);
if i % 10 == 0 {
let from = format!("watch_dir/file{}", i - 9);
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::NoticeRemove(tdir.mkpath(&from)),
DebouncedEvent::Rename(tdir.mkpath(&from), tdir.mkpath(&to)) ]);
}
}
}
#[test]
fn write_rename_file() {
let tdir = TempDir::new("temp_dir").expect("failed to create temporary directory");
tdir.create_all(vec![ "file1" ]);
sleep_macos(35_000);
let (tx, rx) = mpsc::channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(DELAY_MS))
.expect("failed to create debounced watcher");
watcher.watch(tdir.mkpath("."), RecursiveMode::Recursive).expect("failed to watch directory");
tdir.write("file1");
tdir.rename("file1", "file2");
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::NoticeWrite(tdir.mkpath("file1")),
DebouncedEvent::NoticeRemove(tdir.mkpath("file1")),
DebouncedEvent::Rename(tdir.mkpath("file1"), tdir.mkpath("file2")),
DebouncedEvent::Write(tdir.mkpath("file2")) ]);
}
#[test]
fn rename_write_file() {
let tdir = TempDir::new("temp_dir").expect("failed to create temporary directory");
tdir.create_all(vec![ "file1" ]);
sleep_macos(35_000);
let (tx, rx) = mpsc::channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(DELAY_MS))
.expect("failed to create debounced watcher");
watcher.watch(tdir.mkpath("."), RecursiveMode::Recursive).expect("failed to watch directory");
tdir.rename("file1", "file2");
sleep_macos(10);
tdir.write("file2");
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::NoticeRemove(tdir.mkpath("file1")),
DebouncedEvent::NoticeWrite(tdir.mkpath("file2")), // TODO not necessary
DebouncedEvent::Rename(tdir.mkpath("file1"), tdir.mkpath("file2")),
DebouncedEvent::Write(tdir.mkpath("file2")) ]);
}
#[test]
fn modify_rename_file() {
let tdir = TempDir::new("temp_dir").expect("failed to create temporary directory");
tdir.create_all(vec![ "file1" ]);
sleep_macos(35_000);
let (tx, rx) = mpsc::channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(DELAY_MS))
.expect("failed to create debounced watcher");
watcher.watch(tdir.mkpath("."), RecursiveMode::Recursive).expect("failed to watch directory");
tdir.chmod("file1");
tdir.rename("file1", "file2");
if cfg!(target_os = "windows") {
// windows cannot distinguish between chmod and write
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::NoticeWrite(tdir.mkpath("file1")),
DebouncedEvent::NoticeRemove(tdir.mkpath("file1")),
DebouncedEvent::Rename(tdir.mkpath("file1"), tdir.mkpath("file2")),
DebouncedEvent::Write(tdir.mkpath("file2")) ]);
} else {
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::NoticeRemove(tdir.mkpath("file1")),
DebouncedEvent::Rename(tdir.mkpath("file1"), tdir.mkpath("file2")),
DebouncedEvent::Chmod(tdir.mkpath("file2")) ]);
}
}
#[test]
fn rename_modify_file() {
let tdir = TempDir::new("temp_dir").expect("failed to create temporary directory");
tdir.create_all(vec![ "file1" ]);
sleep_macos(35_000);
let (tx, rx) = mpsc::channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(DELAY_MS))
.expect("failed to create debounced watcher");
watcher.watch(tdir.mkpath("."), RecursiveMode::Recursive).expect("failed to watch directory");
tdir.rename("file1", "file2");
sleep_macos(10);
tdir.chmod("file2");
if cfg!(target_os = "windows") {
// windows cannot distinguish between chmod and write
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::NoticeRemove(tdir.mkpath("file1")),
DebouncedEvent::NoticeWrite(tdir.mkpath("file2")), // TODO unnecessary
DebouncedEvent::Rename(tdir.mkpath("file1"), tdir.mkpath("file2")),
DebouncedEvent::Write(tdir.mkpath("file2")) ]);
} else {
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::NoticeRemove(tdir.mkpath("file1")),
DebouncedEvent::Rename(tdir.mkpath("file1"), tdir.mkpath("file2")),
DebouncedEvent::Chmod(tdir.mkpath("file2")) ]);
}
}
#[test]
fn rename_rename_file() {
let tdir = TempDir::new("temp_dir").expect("failed to create temporary directory");
tdir.create_all(vec![ "file1" ]);
sleep_macos(35_000);
let (tx, rx) = mpsc::channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(DELAY_MS))
.expect("failed to create debounced watcher");
watcher.watch(tdir.mkpath("."), RecursiveMode::Recursive).expect("failed to watch directory");
tdir.rename("file1", "file2");
sleep_macos(10);
tdir.rename("file2", "file3");
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::NoticeRemove(tdir.mkpath("file1")),
DebouncedEvent::Rename(tdir.mkpath("file1"), tdir.mkpath("file3")) ]);
}
#[test]
fn write_delete_file() {
let tdir = TempDir::new("temp_dir").expect("failed to create temporary directory");
tdir.create_all(vec![ "file1" ]);
sleep_macos(35_000);
let (tx, rx) = mpsc::channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(DELAY_MS))
.expect("failed to create debounced watcher");
watcher.watch(tdir.mkpath("."), RecursiveMode::Recursive).expect("failed to watch directory");
tdir.write("file1");
tdir.remove("file1");
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::NoticeWrite(tdir.mkpath("file1")),
DebouncedEvent::NoticeRemove(tdir.mkpath("file1")),
DebouncedEvent::Remove(tdir.mkpath("file1")) ]);
}
#[test]
fn create_directory() {
let tdir = TempDir::new("temp_dir").expect("failed to create temporary directory");
sleep_macos(10);
let (tx, rx) = mpsc::channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(DELAY_MS))
.expect("failed to create debounced watcher");
watcher.watch(tdir.mkpath("."), RecursiveMode::Recursive).expect("failed to watch directory");
tdir.create("dir1");
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::Create(tdir.mkpath("dir1")) ]);
}
#[test]
fn modify_directory() {
let tdir = TempDir::new("temp_dir").expect("failed to create temporary directory");
tdir.create_all(vec![ "dir1" ]);
sleep_macos(35_000);
let (tx, rx) = mpsc::channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(DELAY_MS))
.expect("failed to create debounced watcher");
watcher.watch(tdir.mkpath("."), RecursiveMode::Recursive).expect("failed to watch directory");
tdir.chmod("dir1");
if cfg!(target_os = "windows") {
// windows cannot distinguish between chmod and write
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::NoticeWrite(tdir.mkpath("dir1")),
DebouncedEvent::Write(tdir.mkpath("dir1")) ]);
} else {
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::Chmod(tdir.mkpath("dir1")) ]);
}
}
#[test]
fn delete_directory() {
let tdir = TempDir::new("temp_dir").expect("failed to create temporary directory");
tdir.create_all(vec![ "dir1" ]);
sleep_macos(35_000);
let (tx, rx) = mpsc::channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(DELAY_MS))
.expect("failed to create debounced watcher");
watcher.watch(tdir.mkpath("."), RecursiveMode::Recursive).expect("failed to watch directory");
tdir.remove("dir1");
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::NoticeRemove(tdir.mkpath("dir1")),
DebouncedEvent::Remove(tdir.mkpath("dir1")) ]);
}
#[test]
fn rename_directory() {
let tdir = TempDir::new("temp_dir").expect("failed to create temporary directory");
tdir.create_all(vec![ "dir1" ]);
sleep_macos(35_000);
let (tx, rx) = mpsc::channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(DELAY_MS))
.expect("failed to create debounced watcher");
watcher.watch(tdir.mkpath("."), RecursiveMode::Recursive).expect("failed to watch directory");
tdir.rename("dir1", "dir2");
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::NoticeRemove(tdir.mkpath("dir1")),
DebouncedEvent::Rename(tdir.mkpath("dir1"), tdir.mkpath("dir2")) ]);
}
#[test]
fn create_modify_directory() {
let tdir = TempDir::new("temp_dir").expect("failed to create temporary directory");
sleep_macos(10);
let (tx, rx) = mpsc::channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(DELAY_MS))
.expect("failed to create debounced watcher");
watcher.watch(tdir.mkpath("."), RecursiveMode::Recursive).expect("failed to watch directory");
tdir.create("dir1");
tdir.chmod("dir1");
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::Create(tdir.mkpath("dir1")) ]);
}
#[test]
fn create_delete_directory() {
let tdir = TempDir::new("temp_dir").expect("failed to create temporary directory");
sleep_macos(10);
let (tx, rx) = mpsc::channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(DELAY_MS))
.expect("failed to create debounced watcher");
watcher.watch(tdir.mkpath("."), RecursiveMode::Recursive).expect("failed to watch directory");
tdir.create("dir1");
sleep_macos(10);
tdir.remove("dir1");
assert_eq!(recv_events_debounced(&rx), vec![]);
}
#[test]
fn delete_create_directory() {
let tdir = TempDir::new("temp_dir").expect("failed to create temporary directory");
tdir.create_all(vec![ "dir1" ]);
sleep_macos(35_000);
let (tx, rx) = mpsc::channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(DELAY_MS))
.expect("failed to create debounced watcher");
watcher.watch(tdir.mkpath("."), RecursiveMode::Recursive).expect("failed to watch directory");
tdir.remove("dir1");
sleep_macos(10);
tdir.create("dir1");
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::NoticeRemove(tdir.mkpath("dir1")),
DebouncedEvent::Write(tdir.mkpath("dir1")) ]);
}
#[test]
fn create_rename_directory() {
let tdir = TempDir::new("temp_dir").expect("failed to create temporary directory");
sleep_macos(10);
let (tx, rx) = mpsc::channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(DELAY_MS))
.expect("failed to create debounced watcher");
watcher.watch(tdir.mkpath("."), RecursiveMode::Recursive).expect("failed to watch directory");
tdir.create("dir1");
tdir.rename("dir1", "dir2");
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::Create(tdir.mkpath("dir2")) ]);
}
#[test]
fn create_rename_delete_directory() {
let tdir = TempDir::new("temp_dir").expect("failed to create temporary directory");
sleep_macos(10);
let (tx, rx) = mpsc::channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(DELAY_MS))
.expect("failed to create debounced watcher");
watcher.watch(tdir.mkpath("."), RecursiveMode::Recursive).expect("failed to watch directory");
tdir.create("dir1");
sleep_macos(10);
tdir.rename("dir1", "dir2");
sleep_macos(10);
tdir.remove("dir2");
assert_eq!(recv_events_debounced(&rx), vec![]);
}
#[test]
#[cfg(not(target_os="windows"))]
fn create_rename_overwrite_directory() {
// overwriting directories doesn't work on windows
if cfg!(target_os = "windows") {
panic!("cannot overwrite directory on windows");
}
let tdir = TempDir::new("temp_dir").expect("failed to create temporary directory");
tdir.create_all(vec![ "dir2" ]);
sleep_macos(35_000);
let (tx, rx) = mpsc::channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(DELAY_MS))
.expect("failed to create debounced watcher");
watcher.watch(tdir.mkpath("."), RecursiveMode::Recursive).expect("failed to watch directory");
tdir.create("dir1");
tdir.rename("dir1", "dir2");
if cfg!(target_os = "macos") {
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::NoticeRemove(tdir.mkpath("dir2")), /* even though the
* directory is
* being overwritten,
* that can't be
* detected */
DebouncedEvent::Create(tdir.mkpath("dir2")) /* even though the
* directory is being
* overwritten, that can't
* be detected */ ]);
} else {
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::Create(tdir.mkpath("dir2")) /* even though the
* directory is being
* overwritten, that can't
* be detected */ ]);
}
}
#[test]
fn modify_rename_directory() {
let tdir = TempDir::new("temp_dir").expect("failed to create temporary directory");<|fim▁hole|>
let (tx, rx) = mpsc::channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(DELAY_MS))
.expect("failed to create debounced watcher");
watcher.watch(tdir.mkpath("."), RecursiveMode::Recursive).expect("failed to watch directory");
tdir.chmod("dir1");
tdir.chmod("dir1"); // needed by os x
tdir.rename("dir1", "dir2");
if cfg!(target_os = "windows") {
// windows cannot distinguish between chmod and write
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::NoticeWrite(tdir.mkpath("dir1")),
DebouncedEvent::NoticeRemove(tdir.mkpath("dir1")),
DebouncedEvent::Rename(tdir.mkpath("dir1"), tdir.mkpath("dir2")),
DebouncedEvent::Write(tdir.mkpath("dir2")) ]);
} else {
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::NoticeRemove(tdir.mkpath("dir1")),
DebouncedEvent::Rename(tdir.mkpath("dir1"), tdir.mkpath("dir2")),
DebouncedEvent::Chmod(tdir.mkpath("dir2")) ]);
}
}
#[test]
fn rename_modify_directory() {
let tdir = TempDir::new("temp_dir").expect("failed to create temporary directory");
tdir.create_all(vec![ "dir1" ]);
sleep_macos(35_000);
let (tx, rx) = mpsc::channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(DELAY_MS))
.expect("failed to create debounced watcher");
watcher.watch(tdir.mkpath("."), RecursiveMode::Recursive).expect("failed to watch directory");
tdir.rename("dir1", "dir2");
sleep_macos(10);
tdir.chmod("dir2");
let actual = recv_events_debounced(&rx);
if cfg!(target_os = "windows") {
// windows cannot distinguish between chmod and write
assert_eq!(actual,
vec![ DebouncedEvent::NoticeRemove(tdir.mkpath("dir1")),
DebouncedEvent::NoticeWrite(tdir.mkpath("dir2")), // TODO unnecessary
DebouncedEvent::Rename(tdir.mkpath("dir1"), tdir.mkpath("dir2")),
DebouncedEvent::Write(tdir.mkpath("dir2")) ]);
} else if cfg!(target_os = "linux") {
assert_eq_any!(actual,
vec![ DebouncedEvent::NoticeRemove(tdir.mkpath("dir1")),
DebouncedEvent::Rename(tdir.mkpath("dir1"), tdir.mkpath("dir2")),
DebouncedEvent::Chmod(tdir.mkpath("dir2")) ],
vec![ DebouncedEvent::NoticeRemove(tdir.mkpath("dir1")),
DebouncedEvent::Rename(tdir.mkpath("dir1"), tdir.mkpath("dir2")),
DebouncedEvent::Chmod(tdir.mkpath("dir2")),
DebouncedEvent::Chmod(tdir.mkpath("dir1")) /* excessive chmod event */ ]);
} else {
assert_eq!(actual,
vec![ DebouncedEvent::NoticeRemove(tdir.mkpath("dir1")),
DebouncedEvent::Rename(tdir.mkpath("dir1"), tdir.mkpath("dir2")),
DebouncedEvent::Chmod(tdir.mkpath("dir2")) ]);
}
}
#[test]
fn rename_rename_directory() {
let tdir = TempDir::new("temp_dir").expect("failed to create temporary directory");
tdir.create_all(vec![ "dir1" ]);
sleep_macos(35_000);
let (tx, rx) = mpsc::channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(DELAY_MS))
.expect("failed to create debounced watcher");
watcher.watch(tdir.mkpath("."), RecursiveMode::Recursive).expect("failed to watch directory");
tdir.rename("dir1", "dir2");
sleep_macos(10);
tdir.rename("dir2", "dir3");
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::NoticeRemove(tdir.mkpath("dir1")),
DebouncedEvent::Rename(tdir.mkpath("dir1"), tdir.mkpath("dir3")) ]);
}
#[test]
fn modify_delete_directory() {
let tdir = TempDir::new("temp_dir").expect("failed to create temporary directory");
tdir.create_all(vec![ "dir1" ]);
sleep_macos(35_000);
let (tx, rx) = mpsc::channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(DELAY_MS))
.expect("failed to create debounced watcher");
watcher.watch(tdir.mkpath("."), RecursiveMode::Recursive).expect("failed to watch directory");
tdir.chmod("dir1");
tdir.chmod("dir1"); // needed by windows
tdir.remove("dir1");
if cfg!(target_os = "windows") {
// windows cannot distinguish between chmod and write
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::NoticeWrite(tdir.mkpath("dir1")),
DebouncedEvent::NoticeRemove(tdir.mkpath("dir1")),
DebouncedEvent::Remove(tdir.mkpath("dir1")) ]);
} else {
assert_eq!(recv_events_debounced(&rx),
vec![ DebouncedEvent::NoticeRemove(tdir.mkpath("dir1")),
DebouncedEvent::Remove(tdir.mkpath("dir1")) ]);
}
}<|fim▁end|> |
tdir.create_all(vec![ "dir1" ]);
sleep_macos(35_000); |
<|file_name|>actions.spec.js<|end_file_name|><|fim▁begin|>import * as actions from './actions'
describe('App actions', () => {
it('selectTerm should create SELECT_TERM action', () => {
expect(actions.selectTerm('term')).toEqual({
type: 'SELECT_TERM',
term: 'term'
})
})
it('startFetch should create START_FETCH action', () => {
expect(actions.startFetch()).toEqual({
type: 'START_FETCH',
isBusy: true
})
})
it('fetchTerm calls RECEIVE_ERROR on complete lookup failure', () => {
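    // stub ConceptNet client whose lookup throws synchronously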
let failingLookup = (url, settings, onDone ) => { throw "Error" };
let conceptNet = { lookup: failingLookup };
let dispatch = (arg) => { expect(arg['type']).toEqual("RECEIVE_ERROR") };
actions.getIsA(dispatch, conceptNet, "next term");
})
it('fetchTerm calls RECEIVE_ERROR on lookup returning error to onDone', () => {
let errorLookup = (url, settings, onDone ) => { onDone("Error", null) };
let conceptNet = { lookup: errorLookup };
let dispatch = (arg) => { expect(arg['type']).toEqual("RECEIVE_ERROR") };
actions.getIsA(dispatch, conceptNet, "next term");
})
it('fetchTerm calls RECEIVE_RESPONSE on lookup returning results to onDone', () => {
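    // stub ConceptNet client whose lookup reports an empty edge list via onDone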
let successLookup = (url, settings, onDone ) => { onDone(null, {'edges': []}) };<|fim▁hole|>
})<|fim▁end|> | let conceptNet = { lookup: successLookup };
let dispatch = (arg) => { expect(arg['type']).toEqual("RECEIVE_RESPONSE") };
actions.getIsA(dispatch, conceptNet, "next term");
}) |
<|file_name|>DefaultColumnQueryMapperBuilderTest.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2017 Otávio Santana and others
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Apache License v2.0 which accompanies this distribution.
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
* and the Apache License v2.0 is available at http://www.opensource.org/licenses/apache2.0.php.<|fim▁hole|> * You may elect to redistribute this code under either of these licenses.
*
* Contributors:
*
* Otavio Santana
*/
package org.jnosql.artemis.column.query;
import org.jnosql.artemis.CDIExtension;
import org.jnosql.artemis.model.Person;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import javax.inject.Inject;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
@ExtendWith(CDIExtension.class)
public class DefaultColumnQueryMapperBuilderTest {
@Inject
private ColumnQueryMapperBuilder mapperBuilder;
@Test
public void shouldReturnErrorWhenEntityClassIsNull() {
assertThrows(NullPointerException.class, () -> mapperBuilder.selectFrom(null));
}
@Test
public void shouldReturnSelectFrom() {
ColumnMapperFrom columnFrom = mapperBuilder.selectFrom(Person.class);
assertNotNull(columnFrom);
}
@Test
public void shouldReturnErrorWhenDeleteEntityClassIsNull() {
assertThrows(NullPointerException.class, () -> mapperBuilder.deleteFrom(null));
}
@Test
public void shouldReturnDeleteFrom() {
ColumnMapperDeleteFrom columnDeleteFrom = mapperBuilder.deleteFrom(Person.class);
assertNotNull(columnDeleteFrom);
}
}<|fim▁end|> | * |
<|file_name|>DislikeDialogFragment.java<|end_file_name|><|fim▁begin|>package doit.study.droid.fragments;
import android.app.Activity;
import android.app.Dialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.Bundle;
import android.support.v4.app.DialogFragment;
import android.support.v4.app.Fragment;
import android.support.v7.app.AlertDialog;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.CheckBox;
import android.widget.EditText;
import doit.study.droid.R;
public class DislikeDialogFragment extends DialogFragment {
public static final String EXTRA_CAUSE = "doit.study.droid.extra_cause";
private static final String QUESTION_TEXT_KEY = "doit.study.droid.question_text_key";
private Activity mHostActivity;
private View mView;
private int[] mCauseIds = {R.id.question_incorrect, R.id.answer_incorrect, R.id.documentation_irrelevant};
public static DislikeDialogFragment newInstance(String questionText) {
DislikeDialogFragment dislikeDialog = new DislikeDialogFragment();
Bundle arg = new Bundle();
arg.putString(QUESTION_TEXT_KEY, questionText);
dislikeDialog.setArguments(arg);
return dislikeDialog;
}
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {<|fim▁hole|> AlertDialog.Builder builder = new AlertDialog.Builder(mHostActivity);
builder.setMessage(getString(R.string.report_because))
.setView(mView)
.setPositiveButton(mHostActivity.getString(R.string.report), new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
Fragment fr = getTargetFragment();
if (fr != null) {
Intent intent = new Intent();
intent.putExtra(EXTRA_CAUSE, formReport());
fr.onActivityResult(getTargetRequestCode(), Activity.RESULT_OK, intent);
}
}
})
.setNegativeButton(android.R.string.cancel, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
// User cancelled the dialog
}
});
// Create the AlertDialog object and return it
return builder.create();
}
private String formReport() {
EditText editText = (EditText) mView.findViewById(R.id.comment);
StringBuilder result = new StringBuilder(" Cause:");
for (int id : mCauseIds) {
CheckBox checkBox = (CheckBox) mView.findViewById(id);
if (checkBox.isChecked())
result.append(checkBox.getText())
.append(",");
}
result.append(" Comment:");
result.append(editText.getText());
return result.toString();
}
}<|fim▁end|> | mHostActivity = getActivity();
LayoutInflater inflater = mHostActivity.getLayoutInflater();
mView = inflater.inflate(R.layout.fragment_dialog_dislike, null);
|
<|file_name|>test_glazeddoorinterzone.py<|end_file_name|><|fim▁begin|>import os
import tempfile
import unittest
import logging
from pyidf import ValidationLevel
import pyidf
from pyidf.idf import IDF
from pyidf.thermal_zones_and_surfaces import GlazedDoorInterzone
log = logging.getLogger(__name__)
class TestGlazedDoorInterzone(unittest.TestCase):
def setUp(self):
self.fd, self.path = tempfile.mkstemp()
def tearDown(self):
os.remove(self.path)
def test_create_glazeddoorinterzone(self):
pyidf.validation_level = ValidationLevel.error
obj = GlazedDoorInterzone()
# alpha
var_name = "Name"
obj.name = var_name
# object-list
var_construction_name = "object-list|Construction Name"<|fim▁hole|> obj.building_surface_name = var_building_surface_name
# object-list
var_outside_boundary_condition_object = "object-list|Outside Boundary Condition Object"
obj.outside_boundary_condition_object = var_outside_boundary_condition_object
# real
var_multiplier = 1.0
obj.multiplier = var_multiplier
# real
var_starting_x_coordinate = 6.6
obj.starting_x_coordinate = var_starting_x_coordinate
# real
var_starting_z_coordinate = 7.7
obj.starting_z_coordinate = var_starting_z_coordinate
# real
var_length = 8.8
obj.length = var_length
# real
var_height = 9.9
obj.height = var_height
idf = IDF()
idf.add(obj)
idf.save(self.path, check=False)
with open(self.path, mode='r') as f:
for line in f:
log.debug(line.strip())
idf2 = IDF(self.path)
self.assertEqual(idf2.glazeddoorinterzones[0].name, var_name)
self.assertEqual(idf2.glazeddoorinterzones[0].construction_name, var_construction_name)
self.assertEqual(idf2.glazeddoorinterzones[0].building_surface_name, var_building_surface_name)
self.assertEqual(idf2.glazeddoorinterzones[0].outside_boundary_condition_object, var_outside_boundary_condition_object)
self.assertAlmostEqual(idf2.glazeddoorinterzones[0].multiplier, var_multiplier)
self.assertAlmostEqual(idf2.glazeddoorinterzones[0].starting_x_coordinate, var_starting_x_coordinate)
self.assertAlmostEqual(idf2.glazeddoorinterzones[0].starting_z_coordinate, var_starting_z_coordinate)
self.assertAlmostEqual(idf2.glazeddoorinterzones[0].length, var_length)
self.assertAlmostEqual(idf2.glazeddoorinterzones[0].height, var_height)<|fim▁end|> | obj.construction_name = var_construction_name
# object-list
var_building_surface_name = "object-list|Building Surface Name" |
<|file_name|>unwind-rec2.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// error-pattern:fail
fn build1() -> ~[int] {
~[0,0,0,0,0,0,0]
}
fn build2() -> ~[int] {
fail!();
}
<|fim▁hole|> node: build1(),
span: build2()
};
}<|fim▁end|> | struct Blk { node: ~[int], span: ~[int] }
fn main() {
let _blk = Blk { |
<|file_name|>AJDebug.java<|end_file_name|><|fim▁begin|>/*
* AJDebug.java
*
* This file is part of Tritonus: http://www.tritonus.org/
*/
/*
* Copyright (c) 1999 - 2002 by Matthias Pfisterer
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Library General Public License as published
* by the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
/*
|<--- this code is formatted to fit into 80 columns --->|
*/
package org.tritonus.debug;
import org.aspectj.lang.JoinPoint;
import javax.sound.midi.MidiSystem;
import javax.sound.midi.Synthesizer;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.SourceDataLine;
import org.tritonus.core.TMidiConfig;
import org.tritonus.core.TInit;
import org.tritonus.share.TDebug;
import org.tritonus.share.midi.TSequencer;
import org.tritonus.midi.device.alsa.AlsaSequencer;
import org.tritonus.midi.device.alsa.AlsaSequencer.PlaybackAlsaMidiInListener;
import org.tritonus.midi.device.alsa.AlsaSequencer.RecordingAlsaMidiInListener;
import org.tritonus.midi.device.alsa.AlsaSequencer.AlsaSequencerReceiver;
import org.tritonus.midi.device.alsa.AlsaSequencer.AlsaSequencerTransmitter;
import org.tritonus.midi.device.alsa.AlsaSequencer.LoaderThread;
import org.tritonus.midi.device.alsa.AlsaSequencer.MasterSynchronizer;
import org.tritonus.share.sampled.convert.TAsynchronousFilteredAudioInputStream;
/** Debugging output aspect.<|fim▁hole|>{
pointcut allExceptions(): handler(Throwable+);
// TAudioConfig, TMidiConfig, TInit
pointcut TMidiConfigCalls(): execution(* TMidiConfig.*(..));
pointcut TInitCalls(): execution(* TInit.*(..));
// share
// midi
pointcut MidiSystemCalls(): execution(* MidiSystem.*(..));
pointcut Sequencer(): execution(TSequencer+.new(..)) ||
execution(* TSequencer+.*(..)) ||
execution(* PlaybackAlsaMidiInListener.*(..)) ||
execution(* RecordingAlsaMidiInListener.*(..)) ||
execution(* AlsaSequencerReceiver.*(..)) ||
execution(* AlsaSequencerTransmitter.*(..)) ||
execution(LoaderThread.new(..)) ||
execution(* LoaderThread.*(..)) ||
execution(MasterSynchronizer.new(..)) ||
execution(* MasterSynchronizer.*(..));
// audio
pointcut AudioSystemCalls(): execution(* AudioSystem.*(..));
pointcut sourceDataLine():
call(* SourceDataLine+.*(..));
// OLD
// pointcut playerStates():
// execution(private void TPlayer.setState(int));
// currently not used
pointcut printVelocity(): execution(* JavaSoundToneGenerator.playTone(..)) && call(JavaSoundToneGenerator.ToneThread.new(..));
// pointcut tracedCall(): execution(protected void JavaSoundAudioPlayer.doRealize() throws Exception);
///////////////////////////////////////////////////////
//
// ACTIONS
//
///////////////////////////////////////////////////////
before(): MidiSystemCalls()
{
if (TDebug.TraceMidiSystem) outEnteringJoinPoint(thisJoinPoint);
}
after(): MidiSystemCalls()
{
if (TDebug.TraceMidiSystem) outLeavingJoinPoint(thisJoinPoint);
}
before(): Sequencer()
{
if (TDebug.TraceSequencer) outEnteringJoinPoint(thisJoinPoint);
}
after(): Sequencer()
{
if (TDebug.TraceSequencer) outLeavingJoinPoint(thisJoinPoint);
}
before(): TInitCalls()
{
if (TDebug.TraceInit) outEnteringJoinPoint(thisJoinPoint);
}
after(): TInitCalls()
{
if (TDebug.TraceInit) outLeavingJoinPoint(thisJoinPoint);
}
before(): TMidiConfigCalls()
{
if (TDebug.TraceMidiConfig) outEnteringJoinPoint(thisJoinPoint);
}
after(): TMidiConfigCalls()
{
if (TDebug.TraceMidiConfig) outLeavingJoinPoint(thisJoinPoint);
}
// execution(* TAsynchronousFilteredAudioInputStream.read(..))
before(): execution(* TAsynchronousFilteredAudioInputStream.read())
{
if (TDebug.TraceAudioConverter) outEnteringJoinPoint(thisJoinPoint);
}
after(): execution(* TAsynchronousFilteredAudioInputStream.read())
{
if (TDebug.TraceAudioConverter) outLeavingJoinPoint(thisJoinPoint);
}
before(): execution(* TAsynchronousFilteredAudioInputStream.read(byte[]))
{
if (TDebug.TraceAudioConverter) outEnteringJoinPoint(thisJoinPoint);
}
after(): execution(* TAsynchronousFilteredAudioInputStream.read(byte[]))
{
if (TDebug.TraceAudioConverter) outLeavingJoinPoint(thisJoinPoint);
}
before(): execution(* TAsynchronousFilteredAudioInputStream.read(byte[], int, int))
{
if (TDebug.TraceAudioConverter) outEnteringJoinPoint(thisJoinPoint);
}
after(): execution(* TAsynchronousFilteredAudioInputStream.read(byte[], int, int))
{
if (TDebug.TraceAudioConverter) outLeavingJoinPoint(thisJoinPoint);
}
after() returning(int nBytes): call(* TAsynchronousFilteredAudioInputStream.read(byte[], int, int))
{
if (TDebug.TraceAudioConverter) TDebug.out("returning bytes: " + nBytes);
}
// before(int nState): playerStates() && args(nState)
// {
// // if (TDebug.TracePlayerStates)
// // {
// // TDebug.out("TPlayer.setState(): " + nState);
// // }
// }
// before(): playerStateTransitions()
// {
// // if (TDebug.TracePlayerStateTransitions)
// // {
// // TDebug.out("Entering: " + thisJoinPoint);
// // }
// }
// Synthesizer around(): call(* MidiSystem.getSynthesizer())
// {
// // Synthesizer s = proceed();
// // if (TDebug.TraceToneGenerator)
// // {
// // TDebug.out("MidiSystem.getSynthesizer() gives: " + s);
// // }
// // return s;
// // only to get no compilation errors
// return null;
// }
// TODO: v gives an error; find out what to do
// before(int v): printVelocity() && args(nVelocity)
// {
// if (TDebug.TraceToneGenerator)
// {
// TDebug.out("velocity: " + v);
// }
// }
before(Throwable t): allExceptions() && args(t)
{
if (TDebug.TraceAllExceptions) TDebug.out(t);
}
}
/*** AJDebug.java ***/<|fim▁end|> | */
public aspect AJDebug
extends Utils |
<|file_name|>cordova_plugins.js<|end_file_name|><|fim▁begin|>cordova.define('cordova/plugin_list', function(require, exports, module) {
module.exports = [
{
"file": "plugins/cordova-plugin-whitelist/whitelist.js",
"id": "cordova-plugin-whitelist.whitelist",
"pluginId": "cordova-plugin-whitelist",
"runs": true
},
{
"file": "plugins/cordova-plugin-file/www/DirectoryEntry.js",
"id": "cordova-plugin-file.DirectoryEntry",
"pluginId": "cordova-plugin-file",
"clobbers": [
"window.DirectoryEntry"
]
},
{
"file": "plugins/cordova-plugin-file/www/DirectoryReader.js",
"id": "cordova-plugin-file.DirectoryReader",
"pluginId": "cordova-plugin-file",
"clobbers": [
"window.DirectoryReader"
]
},
{
"file": "plugins/cordova-plugin-file/www/Entry.js",
"id": "cordova-plugin-file.Entry",
"pluginId": "cordova-plugin-file",
"clobbers": [
"window.Entry"
]
},
{
"file": "plugins/cordova-plugin-file/www/File.js",
"id": "cordova-plugin-file.File",
"pluginId": "cordova-plugin-file",
"clobbers": [
"window.File"
]
},
{
"file": "plugins/cordova-plugin-file/www/FileEntry.js",
"id": "cordova-plugin-file.FileEntry",
"pluginId": "cordova-plugin-file",
"clobbers": [
"window.FileEntry"
]
},
{
"file": "plugins/cordova-plugin-file/www/FileError.js",
"id": "cordova-plugin-file.FileError",
"pluginId": "cordova-plugin-file",
"clobbers": [
"window.FileError"
]
},
{
"file": "plugins/cordova-plugin-file/www/FileReader.js",
"id": "cordova-plugin-file.FileReader",
"pluginId": "cordova-plugin-file",
"clobbers": [
"window.FileReader"
]
},
{
"file": "plugins/cordova-plugin-file/www/FileSystem.js",
"id": "cordova-plugin-file.FileSystem",
"pluginId": "cordova-plugin-file",
"clobbers": [
"window.FileSystem"
]
},
{
"file": "plugins/cordova-plugin-file/www/FileUploadOptions.js",
"id": "cordova-plugin-file.FileUploadOptions",
"pluginId": "cordova-plugin-file",
"clobbers": [
"window.FileUploadOptions"
]
},
{
"file": "plugins/cordova-plugin-file/www/FileUploadResult.js",
"id": "cordova-plugin-file.FileUploadResult",
"pluginId": "cordova-plugin-file",
"clobbers": [
"window.FileUploadResult"
]
},
{
"file": "plugins/cordova-plugin-file/www/FileWriter.js",
"id": "cordova-plugin-file.FileWriter",
"pluginId": "cordova-plugin-file",
"clobbers": [
"window.FileWriter"
]
},
{
"file": "plugins/cordova-plugin-file/www/Flags.js",
"id": "cordova-plugin-file.Flags",
"pluginId": "cordova-plugin-file",
"clobbers": [
"window.Flags"
]
},
{
"file": "plugins/cordova-plugin-file/www/LocalFileSystem.js",
"id": "cordova-plugin-file.LocalFileSystem",
"pluginId": "cordova-plugin-file",
"clobbers": [
"window.LocalFileSystem"
],
"merges": [
"window"
]
},
{
"file": "plugins/cordova-plugin-file/www/Metadata.js",
"id": "cordova-plugin-file.Metadata",
"pluginId": "cordova-plugin-file",
"clobbers": [
"window.Metadata"
]
},
{
"file": "plugins/cordova-plugin-file/www/ProgressEvent.js",
"id": "cordova-plugin-file.ProgressEvent",
"pluginId": "cordova-plugin-file",
"clobbers": [
"window.ProgressEvent"
]
},
{
"file": "plugins/cordova-plugin-file/www/fileSystems.js",
"id": "cordova-plugin-file.fileSystems",
"pluginId": "cordova-plugin-file"
},
{
"file": "plugins/cordova-plugin-file/www/requestFileSystem.js",
"id": "cordova-plugin-file.requestFileSystem",
"pluginId": "cordova-plugin-file",
"clobbers": [
"window.requestFileSystem"
]
},
{
"file": "plugins/cordova-plugin-file/www/resolveLocalFileSystemURI.js",
"id": "cordova-plugin-file.resolveLocalFileSystemURI",
"pluginId": "cordova-plugin-file",
"merges": [
"window"
]
},
{
"file": "plugins/cordova-plugin-file/www/android/FileSystem.js",
"id": "cordova-plugin-file.androidFileSystem",
"pluginId": "cordova-plugin-file",
"merges": [
"FileSystem"
]
},
{
"file": "plugins/cordova-plugin-file/www/fileSystems-roots.js",
"id": "cordova-plugin-file.fileSystems-roots",
"pluginId": "cordova-plugin-file",
"runs": true
},
{
"file": "plugins/cordova-plugin-file/www/fileSystemPaths.js",
"id": "cordova-plugin-file.fileSystemPaths",
"pluginId": "cordova-plugin-file",
"merges": [
"cordova"
],
"runs": true
},
{
"file": "plugins/cordova-plugin-file-transfer/www/FileTransferError.js",
"id": "cordova-plugin-file-transfer.FileTransferError",
"pluginId": "cordova-plugin-file-transfer",
"clobbers": [
"window.FileTransferError"
]
},
{
"file": "plugins/cordova-plugin-file-transfer/www/FileTransfer.js",
"id": "cordova-plugin-file-transfer.FileTransfer",
"pluginId": "cordova-plugin-file-transfer",
"clobbers": [
"window.FileTransfer"
]
},
{
"file": "plugins/cordova-plugin-device/www/device.js",
"id": "cordova-plugin-device.device",
"pluginId": "cordova-plugin-device",
"clobbers": [
"device"
]
},
{
"file": "plugins/de.appplant.cordova.plugin.email-composer/www/email_composer.js",
"id": "de.appplant.cordova.plugin.email-composer.EmailComposer",
"pluginId": "de.appplant.cordova.plugin.email-composer",
"clobbers": [
"cordova.plugins.email",
"plugin.email"
]
}
];
module.exports.metadata =
// TOP OF METADATA
{}<|fim▁hole|><|fim▁end|> | // BOTTOM OF METADATA
}); |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>//! This crate reads DVD subtitles in VobSub format. These are typically
//! stored as two files: an `*.idx` file summarizing the subtitles, and an
//! MPEG-2 Program Stream containing the actual subtitle packets.
//!
//! ## Example code
//!
//! ```
//! extern crate image;
//! extern crate vobsub;
//!
//! let idx = vobsub::Index::open("../fixtures/example.idx").unwrap();
//! for sub in idx.subtitles() {
//! let sub = sub.unwrap();
//! println!("Time: {:0.3}-{:0.3}", sub.start_time(), sub.end_time());
//! println!("Always show: {:?}", sub.force());
//! let coords = sub.coordinates();
//! println!("At: {}, {}", coords.left(), coords.top());
//! println!("Size: {}x{}", coords.width(), coords.height());
//! let img: image::RgbaImage = sub.to_image(idx.palette());
//!
//! // You can save or manipulate `img` using the APIs provided by the Rust
//! // `image` crate.
//! }
//! ```
//!
//! ## Performance
//!
//! Performance in debug mode is poor; compile with `--release` before
//! benchmarking.
//!
//! ## Limitations
//!
//! The initial version of this library is focused on extracting just the
//! information shown above, and it does not have full support for all the
//! options found in `*.idx` files. It also lacks support for rapidly
//! finding the subtitle associated with a particular time during playback.
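//!
//! As a stopgap until proper time-based lookup exists, a linear scan over
//! `subtitles()` works for small files. This is only a sketch built from the
//! accessors shown in the example above (`start_time()` / `end_time()`); the
//! target time and file path are made-up values:
//!
//! ```no_run
//! extern crate vobsub;
//!
//! let idx = vobsub::Index::open("../fixtures/example.idx").unwrap();
//! let target = 42.0; // seconds into playback (hypothetical value)
//! let hit = idx.subtitles()
//!     .filter_map(|sub| sub.ok())
//!     .find(|sub| sub.start_time() <= target && target <= sub.end_time());
//! println!("subtitle covering {:0.3}s found: {}", target, hit.is_some());
//! ```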
//!
//! ## Background & References
//!
//! VobSub subtitles consist of a simple textual `*.idx` file, and a binary
//! `*.sub` file. The binary `*.sub` file is essentially an MPEG-2 Program
//! Stream containing Packetized Elementary Stream data, but only for a
//! single subtitle track.
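//!
//! When handling arbitrary input it can help to check that a pair of files
//! actually looks like VobSub data before parsing it. The crate exports
//! `is_idx_file` and `is_sub_file` for this (re-exported at the bottom of
//! this module); the exact shape assumed here (path in, `Result<bool>` out)
//! is an illustration, not a guarantee:
//!
//! ```no_run
//! extern crate vobsub;
//!
//! let looks_like_idx = vobsub::is_idx_file("movie.idx").unwrap();
//! let looks_like_sub = vobsub::is_sub_file("movie.sub").unwrap();
//! println!("idx: {}, sub: {}", looks_like_idx, looks_like_sub);
//! ```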
//!
//! Useful references include:
//!
//! - [Program Stream](https://en.wikipedia.org/wiki/MPEG_program_stream) (PS)
//! - [Packetized Elementary Stream][PES] (PES)
//! - [DVD subtitles](http://sam.zoy.org/writings/dvd/subtitles/)
//! - [System Time Clock](http://www.bretl.com/mpeghtml/STC.HTM)
//!
//! [PES]: http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
//!
//! There are also any number of open source implementations of subtitle
//! decoders which might be useful once you get past the Program Stream and
//! PES wrappers.
//!
//! There are two closely-related formats that this library could be
//! extended to parse without too much work:
//!
//! - Subtitles embedded in DVD-format video. These should contain the<|fim▁hole|>//! the same basic subtitle format, but the `*.idx` file is replaced by
//! an internal, stripped-down version of the same data in text format.
//!
//! ## Contributing
//!
//! Your feedback and contributions are welcome! Please see
//! [GitHub](https://github.com/emk/subtitles-rs) for details.
#![warn(missing_docs)]
// For error-chain.
#![recursion_limit = "1024"]
extern crate cast;
extern crate common_failures;
#[macro_use]
extern crate failure;
#[cfg(test)]
extern crate env_logger;
extern crate image;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
#[macro_use]
extern crate nom;
extern crate regex;
extern crate safemem;
mod errors;
mod idx;
mod img;
mod mpeg2;
mod probe;
mod sub;
mod util;
pub use common_failures::{Error, Result};
pub use self::idx::{Index, Palette};
pub use self::probe::{is_idx_file, is_sub_file};
pub use self::sub::{Coordinates, Subtitle, Subtitles, subtitles};<|fim▁end|> | //! same subtitle packet format, but the `*.idx` file is replaced by data
//! stored in an IFO file.
//! - Subtitles stored in the Matroska container format. Again, these use |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.shortcuts import render
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from django.contrib.auth import authenticate, login, logout, update_session_auth_hash
from django.core.exceptions import ObjectDoesNotExist
from django.core.cache import cache
from django.http import HttpResponse, HttpResponseRedirect
from django.db import transaction
from django_redis import get_redis_connection
from users.models import PastebinUser
from users.forms import RegisterForm, LoginForm, ChangePreferencesForm, ChangePasswordForm, VerifyPasswordForm
from users.models import Favorite, SiteSettings
from pastes.models import Paste
from pastebin.util import Paginator
import math
def register_view(request):
"""
Register a new user
"""
# Check if the user is authenticated
if request.user.is_authenticated():
# User is already authenticated
return render(request, 'users/register/already_logged_in.html')
else:
register_form = RegisterForm(request.POST or None)
if request.method == 'POST': # Form data was submitted
if register_form.is_valid(): # Form data is valid
# Create the user
with transaction.atomic():
user = User.objects.create_user(register_form.cleaned_data['username'],
"N/A", # we don't deal with email addresses
register_form.cleaned_data['password'])
PastebinUser.create_user(user)
# TODO: Show a different message if the registration fails
return render(request, 'users/register/register_success.html')
# Show the registration page
return render(request, "users/register/register.html", { "form": register_form })
def login_view(request):
"""<|fim▁hole|> if request.user.is_authenticated():
# User is authenticated
return render(request, "users/login/logged_in.html")
else:
login_form = LoginForm(request.POST or None)
# User is NOT authenticated
if request.method == 'POST': # Form data was submitted
if login_form.is_valid(): # Form data is valid
user = authenticate(username = login_form.cleaned_data['username'],
password = login_form.cleaned_data['password'])
if user is not None and user.is_active:
login(request, user)
return render(request, "users/login/logged_in.html")
else:
# Couldn't authenticate, either the username or password is wrong
error = "User doesn't exist or the password is incorrect."
login_form._errors['password'] = login_form.error_class([error])
# Show the login form
return render(request, "users/login/login.html", { "form": login_form })
def logout_view(request):
"""
Logout the user and show the logout page
"""
if request.user.is_authenticated():
logout(request)
return render(request, 'users/logout/logged_out.html')
def profile(request, username, tab="home", page=1):
"""
Show a publicly visible profile page
"""
page = int(page)
try:
profile_user = cache.get("user:%s" % username)
if profile_user == None:
profile_user = User.objects.get(username=username)
cache.set("user:%s" % username, profile_user)
elif profile_user == False:
return render(request, "users/profile/profile_error.html", {"reason": "not_found"}, status=404)
except ObjectDoesNotExist:
cache.set("user:%s" % username, False)
return render(request, "users/profile/profile_error.html", {"reason": "not_found"}, status=404)
# Get user's settings
profile_settings = cache.get("site_settings:%s" % username)
if profile_settings == None:
try:
profile_settings = SiteSettings.objects.get(user=profile_user)
except ObjectDoesNotExist:
profile_settings = SiteSettings(user=profile_user)
profile_settings.save()
cache.set("site_settings:%s" % username, profile_settings)
if not profile_user.is_active:
return render(request, "users/profile/profile_error.html", {"reason": "not_found"}, status=404)
if request.user != profile_user:
total_paste_count = cache.get("user_public_paste_count:%s" % profile_user.username)
else:
total_paste_count = cache.get("user_paste_count:%s" % profile_user.username)
# If user is viewing his own profile, also include hidden pastes
if total_paste_count == None and request.user != profile_user:
total_paste_count = Paste.objects.filter(user=profile_user, removed=Paste.NO_REMOVAL).filter(hidden=False).count()
cache.set("user_public_paste_count:%s" % profile_user.username, total_paste_count)
elif total_paste_count == None and request.user == profile_user:
total_paste_count = Paste.objects.filter(user=profile_user, removed=Paste.NO_REMOVAL).count()
cache.set("user_paste_count:%s" % profile_user.username, total_paste_count)
total_favorite_count = cache.get("user_favorite_count:%s" % profile_user.username)
if total_favorite_count == None:
total_favorite_count = Favorite.objects.filter(user=profile_user).count()
cache.set("user_favorite_count:%s" % profile_user.username, total_favorite_count)
args = {"profile_user": profile_user,
"profile_settings": profile_settings,
"current_page": page,
"tab": tab,
"total_favorite_count": total_favorite_count,
"total_paste_count": total_paste_count}
if tab == "home":
return home(request, args)
elif tab == "pastes":
return pastes(request, profile_user, args, page)
elif tab == "favorites":
return favorites(request, profile_user, args, page)
# The remaining pages require authentication, so redirect through settings()
else:
return settings(request, profile_user, args, tab)
def settings(request, username, args={}, tab="change_password"):
"""
Show a page which allows the user to change his settings
"""
if not request.user.is_authenticated():
return render(request, "users/settings/settings_error.html", {"reason": "not_logged_in"})
profile_user = User.objects.get(username=username)
if request.user.id != profile_user.id:
return render(request, "users/settings/settings_error.html", {"reason": "incorrect_user"})
if tab == "change_preferences":
return change_preferences(request, args)
if tab == "change_password":
return change_password(request, args)
elif tab == "delete_account":
return delete_account(request, args)
def home(request, args):
"""
Display user profile's home with the most recent pastes and favorites
"""
# Get favorites only if user has made them public
if args["profile_settings"].public_favorites or request.user == args["profile_user"]:
args["favorites"] = cache.get("profile_favorites:%s" % args["profile_user"].username)
if args["favorites"] == None:
args["favorites"] = Favorite.objects.filter(user=args["profile_user"]).order_by('-added').select_related('paste')[:10]
cache.set("profile_favorites:%s" % args["profile_user"].username, args["favorites"])
if request.user == args["profile_user"]:
args["pastes"] = cache.get("profile_pastes:%s" % args["profile_user"].username)
if args["pastes"] == None:
args["pastes"] = Paste.objects.get_pastes(args["profile_user"], include_hidden=True, count=10)
cache.set("profile_pastes:%s" % args["profile_user"].username, args["pastes"])
else:
args["pastes"] = cache.get("profile_public_pastes:%s" % args["profile_user"].username)
if args["pastes"] == None:
args["pastes"] = Paste.objects.get_pastes(args["profile_user"], include_hidden=False, count=10)
cache.set("profile_public_pastes:%s" % args["profile_user"].username, args["pastes"])
return render(request, "users/profile/home/home.html", args)
def pastes(request, user, args, page=1):
"""
Show all of user's pastes
"""
PASTES_PER_PAGE = 15
args["total_pages"] = int(math.ceil(float(args["total_paste_count"]) / float(PASTES_PER_PAGE)))
if page > args["total_pages"]:
page = max(int(args["total_pages"]), 1)
offset = (page-1) * PASTES_PER_PAGE
if request.user == user:
args["pastes"] = cache.get("user_pastes:%s:%s" % (user.username, page))
if args["pastes"] == None:
args["pastes"] = Paste.objects.get_pastes(user, count=PASTES_PER_PAGE, include_hidden=True, offset=offset)
cache.set("user_pastes:%s:%s" % (user.username, page), args["pastes"])
else:
args["pastes"] = cache.get("user_public_pastes:%s:%s" % (user.username, page))
if args["pastes"] == None:
args["pastes"] = Paste.objects.get_pastes(user, count=PASTES_PER_PAGE, include_hidden=False, offset=offset)
cache.set("user_public_pastes:%s:%s" % (user.username, page), args["pastes"])
args["pages"] = Paginator.get_pages(page, PASTES_PER_PAGE, args["total_paste_count"])
args["current_page"] = page
return render(request, "users/profile/pastes/pastes.html", args)
def favorites(request, user, args, page=1):
"""
Show all of user's favorites
"""
FAVORITES_PER_PAGE = 15
if not args["profile_settings"].public_favorites and request.user != args["profile_user"]:
# Don't show pastes to other users if the user doesn't want to
return render(request, "users/profile/favorites/favorites_hidden.html", args)
args["total_pages"] = int(math.ceil(float(args["total_favorite_count"]) / float(FAVORITES_PER_PAGE)))
if page > args["total_pages"]:
page = max(int(args["total_pages"]), 1)
start = (page-1) * FAVORITES_PER_PAGE
end = start + FAVORITES_PER_PAGE
args["favorites"] = cache.get("user_favorites:%s:%s" % (user.username, page))
if args["favorites"] == None:
args["favorites"] = Favorite.objects.filter(user=user).select_related("paste")[start:end]
cache.set("user_favorites:%s:%s" % (user.username, page), args["favorites"])
args["pages"] = Paginator.get_pages(page, FAVORITES_PER_PAGE, args["total_favorite_count"])
args["current_page"] = page
return render(request, "users/profile/favorites/favorites.html", args)
def remove_favorite(request):
"""
Remove a favorite and redirect the user back to the favorite listing
"""
if "favorite_id" not in request.POST or not int(request.POST["favorite_id"]):
return HttpResponse("Favorite ID was not valid.", status=422)
if "page" not in request.POST or not int(request.POST["page"]):
return HttpResponse("Page was not valid.", status=422)
favorite_id = int(request.POST["favorite_id"])
page = int(request.POST["page"])
favorite = Favorite.objects.get(id=favorite_id)
if not request.user.is_authenticated():
return HttpResponse("You are not authenticated", status=422)
if favorite.user != request.user:
return HttpResponse("You can't delete someone else's favorites.", status=422)
favorite.delete()
cache.delete("profile_favorites:%s" % request.user.username)
cache.delete("user_favorite_count:%s" % request.user.username)
return HttpResponseRedirect(reverse("users:favorites", kwargs={"username": request.user.username,
"page": page}))
def change_preferences(request, args):
"""
Change various profile-related preferences
"""
site_settings = SiteSettings.objects.get(user=request.user)
form = ChangePreferencesForm(request.POST or None, initial={"public_favorites": site_settings.public_favorites})
preferences_changed = False
if form.is_valid():
cleaned_data = form.cleaned_data
site_settings.public_favorites = cleaned_data["public_favorites"]
site_settings.save()
cache.set("site_settings:%s" % request.user.username, site_settings)
preferences_changed = True
args["form"] = form
args["preferences_changed"] = preferences_changed
return render(request, "users/settings/change_preferences/change_preferences.html", args)
def change_password(request, args):
"""
Change the user's password
"""
form = ChangePasswordForm(request.POST or None, user=request.user)
password_changed = False
if form.is_valid():
cleaned_data = form.cleaned_data
request.user.set_password(cleaned_data["new_password"])
request.user.save()
# Session auth hash needs to be updated after changing the password
# or the user will be logged out
update_session_auth_hash(request, request.user)
password_changed = True
args["form"] = form
args["password_changed"] = password_changed
return render(request, "users/settings/change_password/change_password.html", args)
def delete_account(request, args):
"""
Delete the user's account
"""
form = VerifyPasswordForm(request.POST or None, user=request.user)
if form.is_valid():
PastebinUser.delete_user(request.user)
logout(request)
return render(request, "users/settings/delete_account/account_deleted.html")
args["form"] = form
return render(request, "users/settings/delete_account/delete_account.html", args)<|fim▁end|> | Log the user in
"""
# Check if the user is authenticated |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | from .voxel_dir import task_dir, storage_dir, image_dir |
<|file_name|>0014_auto_20160710_1200.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-07-10 12:00
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('django_project', '0013_auto_20160710_1124'),
]
operations = [
migrations.AlterField(<|fim▁hole|> field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='django_project.Comment'),
),
]<|fim▁end|> | model_name='annotation',
name='comment', |