prompt
large_stringlengths
70
991k
completion
large_stringlengths
0
1.02k
<|file_name|>render.rs<|end_file_name|><|fim▁begin|>use std::any::Any; use viewport::Viewport; use { GenericEvent, RENDER }; /// Render arguments #[derive(Copy, Clone, PartialEq, Debug)] pub struct RenderArgs { /// Extrapolated time in seconds, used to do smooth animation. pub ext_dt: f64, /// The width of rendered area in points. pub width: u32, /// The height of rendered area in points. pub height: u32, /// The width of rendered area in pixels.<|fim▁hole|> /// The height of rendered area in pixels. pub draw_height: u32, } impl RenderArgs { /// Returns viewport information filling entire render area. pub fn viewport(&self) -> Viewport { Viewport { rect: [0, 0, self.draw_width as i32, self.draw_height as i32], window_size: [self.width, self.height], draw_size: [self.draw_width, self.draw_height], } } } /// When the next frame should be rendered pub trait RenderEvent { /// Creates a render event. fn from_render_args(args: &RenderArgs, old_event: &Self) -> Option<Self>; /// Calls closure if this is a render event. fn render<U, F>(&self, f: F) -> Option<U> where F: FnMut(&RenderArgs) -> U; /// Returns render arguments. 
fn render_args(&self) -> Option<RenderArgs> { self.render(|args| args.clone()) } } impl<T: GenericEvent> RenderEvent for T { fn from_render_args(args: &RenderArgs, old_event: &Self) -> Option<Self> { GenericEvent::from_args(RENDER, args as &Any, old_event) } fn render<U, F>(&self, mut f: F) -> Option<U> where F: FnMut(&RenderArgs) -> U { if self.event_id() != RENDER { return None; } self.with_args(|any| { if let Some(args) = any.downcast_ref::<RenderArgs>() { Some(f(args)) } else { panic!("Expected RenderArgs") } }) } } #[cfg(test)] mod tests { use super::*; #[test] fn test_event_render() { use Event; use RenderArgs; let e = Event::Render(RenderArgs { ext_dt: 0.0, width: 0, height: 0, draw_width: 0, draw_height: 0 }); let x: Option<Event> = RenderEvent::from_render_args( &RenderArgs { ext_dt: 1.0, width: 10, height: 10, draw_width: 10, draw_height: 10, }, &e ); let y: Option<Event> = x.clone().unwrap().render(|args| RenderEvent::from_render_args(args, x.as_ref().unwrap())).unwrap(); assert_eq!(x, y); } }<|fim▁end|>
pub draw_width: u32,
<|file_name|>assign4.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 import pwd for p in pwd.getpwall():<|fim▁hole|> print(p[0])<|fim▁end|>
if p.pw_shell.endswith('/bin/bash'):
<|file_name|>pagerun.js<|end_file_name|><|fim▁begin|>var pagerun = require('pagerun'); // set for debug // pagerun.modulesRoot = '../'; pagerun.mode = 'test'; // pagerun.loadNpmPlugin('httpresponse'); pagerun.loadNpmPlugin('httpsummary'); pagerun.loadNpmPlugin('httperror'); pagerun.loadNpmPlugin('htmlhint'); pagerun.loadNpmPlugin('jserror'); pagerun.loadNpmPlugin('pagesummary'); pagerun.loadNpmPlugin('jsunit'); // pagerun.loadNpmPlugin('jscoverage'); process.on('message', function(config) { pagerun.setConfig(config); pagerun.run(function(result){<|fim▁hole|> process.exit(0); }); });<|fim▁end|>
process.send(result);
<|file_name|>yaml.py<|end_file_name|><|fim▁begin|># (c) 2017, Brian Coca # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or<|fim▁hole|># (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. ''' DOCUMENTATION: cache: yaml short_description: File backed, YAML formated. description: - File backed cache that uses YAML as a format, the files are per host. version_added: "2.3" author: Brian Coca (@bcoca) ''' # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type import codecs import yaml from ansible.parsing.yaml.loader import AnsibleLoader from ansible.parsing.yaml.dumper import AnsibleDumper from ansible.plugins.cache import BaseFileCacheModule class CacheModule(BaseFileCacheModule): """ A caching module backed by yaml files. """ def _load(self, filepath): with codecs.open(filepath, 'r', encoding='utf-8') as f: return AnsibleLoader(f).get_single_data() def _dump(self, value, filepath): with codecs.open(filepath, 'w', encoding='utf-8') as f: yaml.dump(value, f, Dumper=AnsibleDumper, default_flow_style=False)<|fim▁end|>
<|file_name|>using-interfaces-and-generic-classes-for-services.feature.ts<|end_file_name|><|fim▁begin|>// std import { deepStrictEqual } from 'assert'; // 3p import * as request from 'supertest'; // FoalTS import { controller, createApp, Dependency, Get, HttpResponseOK, IAppController, ServiceManager } from '@foal/core'; import { Entity, getConnection, PrimaryGeneratedColumn, Repository } from '@foal/typeorm/node_modules/typeorm'; import { createTestConnection } from '../../../common'; describe('Feature: Using interfaces and generic classes for services', () => { afterEach(() => getConnection().close()); it('Example: A logger interface and a TypeORM repository', async () => { @Entity() class Product { @PrimaryGeneratedColumn() id: number; } let msg: any = 'not called'; /* ======================= DOCUMENTATION BEGIN ======================= */ class ApiController { @Dependency('product') productRepository: Repository<Product>; @Dependency('logger') logger: ILogger; @Get('/products') async readProducts() { const products = await this.productRepository.find(); this.logger.log(products); return new HttpResponseOK(products); } } /* ======================= DOCUMENTATION END ========================= */ class AppController implements IAppController { subControllers = [ controller('/api', ApiController), ]; } /* ======================= DOCUMENTATION BEGIN ======================= */ interface ILogger { log(message: any): void; } class ConsoleLogger implements ILogger { log(message: any): void { msg = message; } } async function main() { const connection = await createTestConnection([ Product ]); const productRepository = connection.getRepository(Product); const serviceManager = new ServiceManager() .set('product', productRepository) .set('logger', new ConsoleLogger()); return await createApp(AppController, { serviceManager }); } /* ======================= DOCUMENTATION END ========================= */ const app = await main(); await request(app) .get('/api/products') 
.expect(200) .expect([]); deepStrictEqual(msg, []); });<|fim▁hole|><|fim▁end|>
});
<|file_name|>account_db.rs<|end_file_name|><|fim▁begin|>// Copyright 2015-2017 Parity Technologies (UK) Ltd. // This file is part of Parity. // Parity is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // Parity is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. <|fim▁hole|>use util::*; use rlp::NULL_RLP; static NULL_RLP_STATIC: [u8; 1] = [0x80; 1]; // combines a key with an address hash to ensure uniqueness. // leaves the first 96 bits untouched in order to support partial key lookup. #[inline] fn combine_key<'a>(address_hash: &'a H256, key: &'a H256) -> H256 { let mut dst = key.clone(); { let last_src: &[u8] = &*address_hash; let last_dst: &mut [u8] = &mut *dst; for (k, a) in last_dst[12..].iter_mut().zip(&last_src[12..]) { *k ^= *a } } dst } /// A factory for different kinds of account dbs. #[derive(Debug, Clone)] pub enum Factory { /// Mangle hashes based on address. Mangled, /// Don't mangle hashes. Plain, } impl Default for Factory { fn default() -> Self { Factory::Mangled } } impl Factory { /// Create a read-only accountdb. /// This will panic when write operations are called. pub fn readonly<'db>(&self, db: &'db HashDB, address_hash: H256) -> Box<HashDB + 'db> { match *self { Factory::Mangled => Box::new(AccountDB::from_hash(db, address_hash)), Factory::Plain => Box::new(Wrapping(db)), } } /// Create a new mutable hashdb. pub fn create<'db>(&self, db: &'db mut HashDB, address_hash: H256) -> Box<HashDB + 'db> { match *self { Factory::Mangled => Box::new(AccountDBMut::from_hash(db, address_hash)), Factory::Plain => Box::new(WrappingMut(db)), } } } // TODO: introduce HashDBMut? 
/// DB backend wrapper for Account trie /// Transforms trie node keys for the database pub struct AccountDB<'db> { db: &'db HashDB, address_hash: H256, } impl<'db> AccountDB<'db> { /// Create a new AccountDB from an address. pub fn new(db: &'db HashDB, address: &Address) -> Self { Self::from_hash(db, address.sha3()) } /// Create a new AcountDB from an address' hash. pub fn from_hash(db: &'db HashDB, address_hash: H256) -> Self { AccountDB { db: db, address_hash: address_hash, } } } impl<'db> HashDB for AccountDB<'db>{ fn keys(&self) -> HashMap<H256, i32> { unimplemented!() } fn get(&self, key: &H256) -> Option<DBValue> { if key == &SHA3_NULL_RLP { return Some(DBValue::from_slice(&NULL_RLP_STATIC)); } self.db.get(&combine_key(&self.address_hash, key)) } fn contains(&self, key: &H256) -> bool { if key == &SHA3_NULL_RLP { return true; } self.db.contains(&combine_key(&self.address_hash, key)) } fn insert(&mut self, _value: &[u8]) -> H256 { unimplemented!() } fn emplace(&mut self, _key: H256, _value: DBValue) { unimplemented!() } fn remove(&mut self, _key: &H256) { unimplemented!() } } /// DB backend wrapper for Account trie pub struct AccountDBMut<'db> { db: &'db mut HashDB, address_hash: H256, } impl<'db> AccountDBMut<'db> { /// Create a new AccountDB from an address. pub fn new(db: &'db mut HashDB, address: &Address) -> Self { Self::from_hash(db, address.sha3()) } /// Create a new AcountDB from an address' hash. 
pub fn from_hash(db: &'db mut HashDB, address_hash: H256) -> Self { AccountDBMut { db: db, address_hash: address_hash, } } #[allow(dead_code)] pub fn immutable(&'db self) -> AccountDB<'db> { AccountDB { db: self.db, address_hash: self.address_hash.clone() } } } impl<'db> HashDB for AccountDBMut<'db>{ fn keys(&self) -> HashMap<H256, i32> { unimplemented!() } fn get(&self, key: &H256) -> Option<DBValue> { if key == &SHA3_NULL_RLP { return Some(DBValue::from_slice(&NULL_RLP_STATIC)); } self.db.get(&combine_key(&self.address_hash, key)) } fn contains(&self, key: &H256) -> bool { if key == &SHA3_NULL_RLP { return true; } self.db.contains(&combine_key(&self.address_hash, key)) } fn insert(&mut self, value: &[u8]) -> H256 { if value == &NULL_RLP { return SHA3_NULL_RLP.clone(); } let k = value.sha3(); let ak = combine_key(&self.address_hash, &k); self.db.emplace(ak, DBValue::from_slice(value)); k } fn emplace(&mut self, key: H256, value: DBValue) { if key == SHA3_NULL_RLP { return; } let key = combine_key(&self.address_hash, &key); self.db.emplace(key, value) } fn remove(&mut self, key: &H256) { if key == &SHA3_NULL_RLP { return; } let key = combine_key(&self.address_hash, key); self.db.remove(&key) } } struct Wrapping<'db>(&'db HashDB); impl<'db> HashDB for Wrapping<'db> { fn keys(&self) -> HashMap<H256, i32> { unimplemented!() } fn get(&self, key: &H256) -> Option<DBValue> { if key == &SHA3_NULL_RLP { return Some(DBValue::from_slice(&NULL_RLP_STATIC)); } self.0.get(key) } fn contains(&self, key: &H256) -> bool { if key == &SHA3_NULL_RLP { return true; } self.0.contains(key) } fn insert(&mut self, _value: &[u8]) -> H256 { unimplemented!() } fn emplace(&mut self, _key: H256, _value: DBValue) { unimplemented!() } fn remove(&mut self, _key: &H256) { unimplemented!() } } struct WrappingMut<'db>(&'db mut HashDB); impl<'db> HashDB for WrappingMut<'db>{ fn keys(&self) -> HashMap<H256, i32> { unimplemented!() } fn get(&self, key: &H256) -> Option<DBValue> { if key == 
&SHA3_NULL_RLP { return Some(DBValue::from_slice(&NULL_RLP_STATIC)); } self.0.get(key) } fn contains(&self, key: &H256) -> bool { if key == &SHA3_NULL_RLP { return true; } self.0.contains(key) } fn insert(&mut self, value: &[u8]) -> H256 { if value == &NULL_RLP { return SHA3_NULL_RLP.clone(); } self.0.insert(value) } fn emplace(&mut self, key: H256, value: DBValue) { if key == SHA3_NULL_RLP { return; } self.0.emplace(key, value) } fn remove(&mut self, key: &H256) { if key == &SHA3_NULL_RLP { return; } self.0.remove(key) } }<|fim▁end|>
// You should have received a copy of the GNU General Public License // along with Parity. If not, see <http://www.gnu.org/licenses/>. //! DB backend wrapper for Account trie
<|file_name|>extract_and_upload_iris_classifier.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # Load common imports and system envs to build the core object import sys, os # Load the Environment: os.environ["ENV_DEPLOYMENT_TYPE"] = "JustRedis" from src.common.inits_for_python import * ##################################################################### # # Start Arg Processing: # action = "Extract and Upload IRIS Models to S3" parser = argparse.ArgumentParser(description="Parser for Action: " + str(action)) parser.add_argument('-u', '--url', help='URL to Download', dest='url') parser.add_argument('-b', '--s3bucket', help='S3 Bucket (Optional)', dest='s_bucket') parser.add_argument('-k', '--s3key', help='S3 Key (Optional)', dest='s_key') parser.add_argument("-d", "--debug", help="Debug Flag", dest='debug', action='store_true') args = parser.parse_args() if args.debug: debug = True core.enable_debug() data_dir = str(os.getenv("ENV_DATA_DST_DIR", "/opt/work/data/dst")) if not os.path.exists(data_dir): os.mkdir(data_dir, 0777) ds_name = "iris_classifier" cur_date_str = datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S") s3_bucket = "unique-bucket-name-for-datasets" s3_key = "dataset_" + core.to_upper(ds_name) + ".cache.pickle.zlib" s3_loc = "" if args.s_bucket: s3_bucket = str(args.s_bucket) if args.s_key: s3_key = str(args.s_key) # # End Arg Processing # ##################################################################### s3_loc = str(s3_bucket) + ":" + str(s3_key) lg("-------------------------------------------------", 6) lg("Extracting and Uploading Models from CACHE to S3Loc(" + str(s3_loc) + ")", 6) lg("", 6) cache_req = { "RAName" : "CACHE", # Redis instance name holding the models "DSName" : str(ds_name), # Dataset name for pulling out of the cache "S3Loc" : str(s3_loc), # S3 location to store the model file "DeleteAfter" : False, # Optional delete after upload "SaveDir" : data_dir, # Optional dir to save the model file - default is ENV_DATA_DST_DIR 
"TrackingID" : "" # Future support for using the tracking id } upload_results = core.ml_upload_cached_dataset_to_s3(cache_req, core.get_rds(), core.get_dbs(), debug) if upload_results["Status"] == "SUCCESS": lg("Done Uploading Model and Analysis DSName(" + str(ds_name) + ") S3Loc(" + str(cache_req["S3Loc"]) + ")", 6) else: lg("", 6) lg("ERROR: Failed Upload Model and Analysis Caches as file for DSName(" + str(ds_name) + ")", 6)<|fim▁hole|> sys.exit(1) # end of if extract + upload worked lg("", 6) lg("Extract and Upload Completed", 5) lg("", 6) sys.exit(0)<|fim▁end|>
lg(upload_results["Error"], 6) lg("", 6)
<|file_name|>0066_auto_20150821_1131.py<|end_file_name|><|fim▁begin|><|fim▁hole|> from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('lizard_efcis', '0065_auto_20150818_1521'), ] operations = [ migrations.AlterField( model_name='mappingfield', name='db_datatype', field=models.CharField(blank=True, max_length=255, null=True, help_text='DataType of Foreign-Tabelnaam b.v. float, Locatie', choices=[('CharField', 'CharField'), ('float', 'float'), ('date', 'date'), ('time', 'time'), ('Activiteit', 'Activiteit'), ('BioStatus', 'BioStatus'), ('Detectiegrens', 'Detectiegrens'), ('FCStatus', 'FCStatus'), ('Locatie', 'Locatie'), ('Meetnet', 'Meetnet'), ('ParameterGroep', 'ParameterGroep'), ('StatusKRW', 'StatusKRW'), ('Waterlichaam', 'Waterlichaam'), ('Watertype', 'Watertype'), ('WNS', 'WNS')]), preserve_default=True, ), migrations.AlterField( model_name='meetnet', name='parent', field=models.ForeignKey(blank=True, to='lizard_efcis.Meetnet', null=True), preserve_default=True, ), migrations.AlterField( model_name='parameter', name='parametergroep', field=models.ForeignKey(blank=True, to='lizard_efcis.ParameterGroep', null=True), preserve_default=True, ), migrations.AlterField( model_name='parametergroep', name='parent', field=models.ForeignKey(blank=True, to='lizard_efcis.ParameterGroep', null=True), preserve_default=True, ), ]<|fim▁end|>
# -*- coding: utf-8 -*- from __future__ import unicode_literals
<|file_name|>UIEvent.cpp<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2001 Peter Kelly ([email protected]) * Copyright (C) 2001 Tobias Anton ([email protected]) * Copyright (C) 2006 Samuel Weinig ([email protected]) * Copyright (C) 2003, 2005, 2006, 2008 Apple Inc. All rights reserved. * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Library General Public * License as published by the Free Software Foundation; either * version 2 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Library General Public License for more details. * * You should have received a copy of the GNU Library General Public License * along with this library; see the file COPYING.LIB. If not, write to * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, * Boston, MA 02110-1301, USA. 
*/ #include "sky/engine/config.h" #include "sky/engine/core/events/UIEvent.h" namespace blink { UIEventInit::UIEventInit() : view(nullptr) , detail(0) { } UIEvent::UIEvent() : m_detail(0) { } UIEvent::UIEvent(const AtomicString& eventType, bool canBubbleArg, bool cancelableArg, PassRefPtr<AbstractView> viewArg, int detailArg) : Event(eventType, canBubbleArg, cancelableArg) , m_view(viewArg) , m_detail(detailArg) { } UIEvent::UIEvent(const AtomicString& eventType, const UIEventInit& initializer) : Event(eventType, initializer) , m_view(initializer.view) , m_detail(initializer.detail) { } UIEvent::~UIEvent() { } void UIEvent::initUIEvent(const AtomicString& typeArg, bool canBubbleArg, bool cancelableArg, PassRefPtr<AbstractView> viewArg, int detailArg) { if (dispatched()) return; initEvent(typeArg, canBubbleArg, cancelableArg); m_view = viewArg; m_detail = detailArg; } bool UIEvent::isUIEvent() const { return true; } <|fim▁hole|>} int UIEvent::keyCode() const { return 0; } int UIEvent::charCode() const { return 0; } int UIEvent::layerX() { return 0; } int UIEvent::layerY() { return 0; } int UIEvent::pageX() const { return 0; } int UIEvent::pageY() const { return 0; } int UIEvent::which() const { return 0; } } // namespace blink<|fim▁end|>
const AtomicString& UIEvent::interfaceName() const { return EventNames::UIEvent;
<|file_name|>metadata_provider_anidb.py<|end_file_name|><|fim▁begin|>""" Copyright (C) 2015 Quinn D Granfor <[email protected]> This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License version 2, as published by the Free Software Foundation. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License version 2 for more details. You should have received a copy of the GNU General Public License version 2 along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. """ import gzip import inspect import json import time from common import common_file from common import common_logging_elasticsearch_httpx from common import common_network_async class CommonMetadataANIdb: """ Class for interfacing with anidb """ def __init__(self, db_connection): self.adba_connection = None self.db_connection = db_connection async def com_net_anidb_fetch_titles_file(self): """ Fetch the tarball of anime titles """ await common_logging_elasticsearch_httpx.com_es_httpx_post_async(message_type='info', message_text={ 'function': inspect.stack()[0][ 3], 'locals': locals(), 'caller': inspect.stack()[1][ 3]}) # check to see if local titles file is older than 24 hours if common_file.com_file_modification_timestamp('./cache/anidb_titles.gz') \ < (time.time() - 86400): await common_network_async.mk_network_fetch_from_url_async( 'http://anidb.net/api/anime-titles.xml.gz', './cache/anidb_titles.gz') return True # new file return False<|fim▁hole|> Save anidb title data to database """ await common_logging_elasticsearch_httpx.com_es_httpx_post_async(message_type='info', message_text={ 'function': inspect.stack()[0][ 3], 'locals': locals(), 'caller': inspect.stack()[1][ 3]}) file_handle = gzip.open(title_file, 
'rb') # file_handle = gzip.open(title_file, 'rt', encoding='utf-8') # python 3.3+ anime_aid = None anime_title = None anime_title_ja = None for file_line in file_handle.readlines(): # common_logging_elasticsearch_httpx.com_es_httpx_post(message_type='info', message_text= # {'stuff':'line: %s', file_line.decode('utf-8')) if file_line.decode('utf-8').find('<anime aid="') != -1: anime_aid = file_line.decode( 'utf-8').split('"', 1)[1].rsplit('"', 1)[0] # common_logging_elasticsearch_httpx.com_es_httpx_post(message_type='info', message_text= # {'stuff':'aid: %s', anime_aid) elif file_line.decode('utf-8').find('title xml:lang="ja"') != -1: anime_title_ja = file_line.decode( 'utf-8').split('>', 1)[1].rsplit('<', 1)[0] # common_logging_elasticsearch_httpx.com_es_httpx_post(message_type='info', message_text= # {'stuff':'title: %s', anime_title_ja) elif file_line.decode('utf-8').find('title xml:lang="en"') != -1: anime_title = file_line.decode( 'utf-8').split('>', 1)[1].rsplit('<', 1)[0] # common_logging_elasticsearch_httpx.com_es_httpx_post(message_type='info', message_text= # {'stuff':'title: %s', anime_title) elif file_line.decode('utf-8').find('</anime>') != -1: if self.db_connection.db_meta_anime_meta_by_id(anime_aid) is None: if anime_title is None: anime_title = anime_title_ja self.db_connection.db_meta_anime_title_insert( {'anidb': anime_aid}, anime_title, None, None, None, None, None) # reset each time to handle ja when this doesn't exist anime_title = None file_handle.close() common_logging_elasticsearch_httpx.com_es_httpx_post(message_type='info', message_text={'stuff': 'end'}) async def com_net_anidb_aid_by_title(self, title_to_search): """ Find AID by title """ await common_logging_elasticsearch_httpx.com_es_httpx_post_async(message_type='info', message_text={ 'function': inspect.stack()[0][ 3], 'locals': locals(), 'caller': inspect.stack()[1][ 3]}) # check the local DB local_db_result = self.db_connection.db_meta_anime_title_search( title_to_search) if 
local_db_result is None: # check to see if local titles file is older than 24 hours if self.com_net_anidb_fetch_titles_file(): # since new titles file....recheck by title self.com_net_anidb_aid_by_title(title_to_search) else: return None else: return local_db_result async def com_net_anidb_connect(self, user_name, user_password): """ Remote api calls """ await common_logging_elasticsearch_httpx.com_es_httpx_post_async(message_type='info', message_text={ 'function': inspect.stack()[0][ 3], 'locals': locals(), 'caller': inspect.stack()[1][ 3]}) self.adba_connection = adba.Connection(log=True) try: self.adba_connection.auth(user_name, user_password) except Exception as err_code: common_logging_elasticsearch_httpx.com_es_httpx_post(message_type='error', message_text={"exception msg": err_code}) return self.adba_connection async def com_net_anidb_logout(self): """ Logout of anidb """ await common_logging_elasticsearch_httpx.com_es_httpx_post_async(message_type='info', message_text={ 'function': inspect.stack()[0][ 3], 'locals': locals(), 'caller': inspect.stack()[1][ 3]}) self.adba_connection.logout() async def com_net_anidb_stop(self): """ Close the anidb connect and stop the thread """ await common_logging_elasticsearch_httpx.com_es_httpx_post_async(message_type='info', message_text={ 'function': inspect.stack()[0][ 3], 'locals': locals(), 'caller': inspect.stack()[1][ 3]}) self.adba_connection.stop()<|fim▁end|>
async def com_net_anidb_save_title_data_to_db(self, title_file='./cache/anidb_titles.gz'): """
<|file_name|>trafficgraphwidget.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2011-2015 The Bitcoin Core developers // Distributed under the MIT software license, see the accompanying // file COPYING or http://www.opensource.org/licenses/mit-license.php. #include <qt/trafficgraphwidget.h> #include <interfaces/node.h> #include <qt/clientmodel.h> #include <QColor> #include <QPainter> #include <QPainterPath> #include <QTimer> #include <cmath> #define DESIRED_SAMPLES 800 #define XMARGIN 10 #define YMARGIN 10 TrafficGraphWidget::TrafficGraphWidget(QWidget *parent) : QWidget(parent), timer(nullptr), fMax(0.0f), nMins(0), vSamplesIn(), vSamplesOut(), nLastBytesIn(0), nLastBytesOut(0), clientModel(nullptr) { timer = new QTimer(this); connect(timer, &QTimer::timeout, this, &TrafficGraphWidget::updateRates); } void TrafficGraphWidget::setClientModel(ClientModel *model) { clientModel = model; if (model) { nLastBytesIn = model->node().getTotalBytesRecv(); nLastBytesOut = model->node().getTotalBytesSent(); } } int TrafficGraphWidget::getGraphRangeMins() const { return nMins; } void TrafficGraphWidget::paintPath(QPainterPath &path, QQueue<float> &samples) { int sampleCount = samples.size(); if (sampleCount > 0) { int h = height() - YMARGIN * 2, w = width() - XMARGIN * 2; int x = XMARGIN + w; path.moveTo(x, YMARGIN + h); for (int i = 0; i < sampleCount; ++i) { x = XMARGIN + w - w * i / DESIRED_SAMPLES; int y = YMARGIN + h - (int)(h * samples.at(i) / fMax); path.lineTo(x, y); } path.lineTo(x, YMARGIN + h); } } void TrafficGraphWidget::paintEvent(QPaintEvent *) { QPainter painter(this); painter.fillRect(rect(), Qt::black); if (fMax <= 0.0f) { return; } QColor axisCol(Qt::gray); int h = height() - YMARGIN * 2; painter.setPen(axisCol); painter.drawLine(XMARGIN, YMARGIN + h, width() - XMARGIN, YMARGIN + h); // decide what order of magnitude we are int base = floor(log10(fMax)); float val = pow(10.0f, base); const QString units = tr("KB/s"); const float yMarginText = 2.0; // draw 
lines painter.setPen(axisCol); painter.drawText(XMARGIN, YMARGIN + h - h * val / fMax - yMarginText, QString("%1 %2").arg(val).arg(units)); for (float y = val; y < fMax; y += val) { int yy = YMARGIN + h - h * y / fMax; painter.drawLine(XMARGIN, yy, width() - XMARGIN, yy); } // if we drew 3 or fewer lines, break them up at the next lower order of // magnitude if (fMax / val <= 3.0f) { axisCol = axisCol.darker(); val = pow(10.0f, base - 1); painter.setPen(axisCol); painter.drawText(XMARGIN, YMARGIN + h - h * val / fMax - yMarginText, QString("%1 %2").arg(val).arg(units)); int count = 1; for (float y = val; y < fMax; y += val, count++) { // don't overwrite lines drawn above if (count % 10 == 0) { continue; } int yy = YMARGIN + h - h * y / fMax; painter.drawLine(XMARGIN, yy, width() - XMARGIN, yy); } } if (!vSamplesIn.empty()) { QPainterPath p; paintPath(p, vSamplesIn); painter.fillPath(p, QColor(0, 255, 0, 128)); painter.setPen(Qt::green); painter.drawPath(p); } if (!vSamplesOut.empty()) { QPainterPath p; paintPath(p, vSamplesOut); painter.fillPath(p, QColor(255, 0, 0, 128)); painter.setPen(Qt::red); painter.drawPath(p); } } void TrafficGraphWidget::updateRates() { if (!clientModel) { return; } quint64 bytesIn = clientModel->node().getTotalBytesRecv(), bytesOut = clientModel->node().getTotalBytesSent(); float inRate = (bytesIn - nLastBytesIn) / 1024.0f * 1000 / timer->interval(); float outRate = (bytesOut - nLastBytesOut) / 1024.0f * 1000 / timer->interval(); vSamplesIn.push_front(inRate); vSamplesOut.push_front(outRate); nLastBytesIn = bytesIn; nLastBytesOut = bytesOut; while (vSamplesIn.size() > DESIRED_SAMPLES) { vSamplesIn.pop_back(); } while (vSamplesOut.size() > DESIRED_SAMPLES) { vSamplesOut.pop_back(); } float tmax = 0.0f; for (const float f : vSamplesIn) { if (f > tmax) { tmax = f; } } for (const float f : vSamplesOut) { if (f > tmax) { tmax = f; } } fMax = tmax; update(); } void TrafficGraphWidget::setGraphRangeMins(int mins) { nMins = mins; int 
msecsPerSample = nMins * 60 * 1000 / DESIRED_SAMPLES; timer->stop(); timer->setInterval(msecsPerSample); clear(); } void TrafficGraphWidget::clear() { timer->stop(); vSamplesOut.clear(); vSamplesIn.clear(); fMax = 0.0f; if (clientModel) { nLastBytesIn = clientModel->node().getTotalBytesRecv();<|fim▁hole|> timer->start(); }<|fim▁end|>
nLastBytesOut = clientModel->node().getTotalBytesSent(); }
<|file_name|>assetmanager.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015, Anthony Schmieder # Use of this source code is governed by the 2-clause BSD license that # can be found in the LICENSE.txt file. """Loads and manages art assets""" import pyglet import os _ASSET_PATHS = ["res"] _ASSET_FILE_NAMES = [ "black_key_down.png", "black_key_up.png", "white_key_down.png", "white_key_up.png", "staff_line.png", ] class Assets(object): _loadedAssets = None @staticmethod def loadAssets(): Assets._loadedAssets = dict() Assets._updateResourcePath() for f in _ASSET_FILE_NAMES: Assets.loadAsset(f) @staticmethod<|fim▁hole|> def loadAsset(filename): Assets._loadedAssets[filename] = pyglet.resource.image(filename) @staticmethod def _updateResourcePath(): for p in _ASSET_PATHS: pyglet.resource.path.append(os.path.join(os.getcwd(), p)) pyglet.resource.reindex() @staticmethod def get(filename): if Assets._loadedAssets is None: raise RuntimeError("You must initialize the asset manager before " "retrieving assets") return Assets._loadedAssets[filename]<|fim▁end|>
<|file_name|>auth.d.ts<|end_file_name|><|fim▁begin|>/** * Credential is a generic exported credential. */ export declare type Credential = ServiceAccountKey | ExternalAccount; /** * ServiceAccountKeyCredential is an exported credential for a service account key. */ export declare type ServiceAccountKey = { type: string; project_id: string; private_key_id: string; private_key: string; client_email: string; client_id: string; auth_uri: string; token_uri: string; auth_provider_x509_cert_url: string; client_x509_cert_url: string; }; /** * ExternalAccount is an exported credential for an external account * like a workload identity pool. */ export declare type ExternalAccount = { type: string; audience: string; subject_token_type: string; service_account_impersonation_url?: string; token_url: string; token_info_url?: string; client_id?: string; client_secret?: string; quota_project_id?: string; workforce_pool_user_project?: string; credential_source: { file?: string; url?: string; headers?: { [key: string]: string; }; format?: { type: 'json' | 'text'; subject_token_field_name?: string; }; environment_id?: string; region_url?: string; regional_cred_verification_url: string; }; }; /** * parseCredential attempts to parse the given string as a service account key * JSON or external account credentials. It handles if the input is<|fim▁hole|> * @param input String that is an exported JSON service account key or external * account credentials file (or base64-encoded). * * @return The parsed credential. It could be a service account key or an * external credentials file. */ export declare function parseCredential(input: string): Credential; /** * isServiceAccountKey returns true if the given interface is a * ServiceAccountKey, false otherwise. * * @param credential Credential to check if is a service account key. 
*/ export declare function isServiceAccountKey(credential: Credential): credential is ServiceAccountKey; /** * isExternalAccount returns true if the given interface is a ExternalAccount, * false otherwise. * * @param credential Credential to check if is an external account */ export declare function isExternalAccount(credential: Credential): credential is ExternalAccount; declare const _default: { parseCredential: typeof parseCredential; isServiceAccountKey: typeof isServiceAccountKey; isExternalAccount: typeof isExternalAccount; }; export default _default;<|fim▁end|>
* base64-encoded. *
<|file_name|>conftest.py<|end_file_name|><|fim▁begin|>"""Shared pytest fixtures and test data.""" import copy import uuid import pytest from django.contrib.auth import get_user_model from onfido.models import Applicant, Check, Event, Report APPLICANT_ID = str(uuid.uuid4()) CHECK_ID = str(uuid.uuid4()) IDENTITY_REPORT_ID = str(uuid.uuid4()) DOCUMENT_REPORT_ID = str(uuid.uuid4()) DOCUMENT_ID = str(uuid.uuid4()) User = get_user_model() @pytest.fixture def user(): return User.objects.create_user( "fred", first_name="Fred", last_name="Flinstone", email="[email protected]" ) @pytest.fixture def applicant(user): data = copy.deepcopy(TEST_APPLICANT) return Applicant.objects.create_applicant(user=user, raw=data) @pytest.fixture def check(applicant): data = copy.deepcopy(TEST_CHECK) return Check.objects.create_check(applicant, raw=data) @pytest.fixture def identity_report(check): data = copy.deepcopy(TEST_REPORT_IDENTITY_ENHANCED) return Report.objects.create_report(check, raw=data) @pytest.fixture def document_report(check): data = copy.deepcopy(TEST_REPORT_DOCUMENT) return Report.objects.create_report(check, raw=data) @pytest.fixture def report(identity_report): return identity_report @pytest.fixture def event(check): data = copy.deepcopy(TEST_EVENT) return Event().parse(data) # Test data taken from Onfido v3 API docs. 
# https://documentation.onfido.com/#applicant-object TEST_APPLICANT = { "id": APPLICANT_ID, "created_at": "2019-10-09T16:52:42Z", "sandbox": True, "first_name": "Jane", "last_name": "Doe", "email": None, "dob": "1990-01-01", "delete_at": None, "href": f"/v3/applicants/{APPLICANT_ID}", "id_numbers": [], "address": { "flat_number": None, "building_number": None, "building_name": None, "street": "Second Street", "sub_street": None, "town": "London", "state": None, "postcode": "S2 2DF", "country": "GBR", "line1": None, "line2": None, "line3": None, }, } # https://documentation.onfido.com/#check-object TEST_CHECK = { "id": CHECK_ID, "created_at": "2019-10-09T17:01:59Z", "status": "in_progress", "redirect_uri": None, "result": None, "sandbox": True, "tags": [], "results_uri": f"https://onfido.com/checks/{CHECK_ID}/reports", "form_uri": None, "paused": False, "version": "3.0", "report_ids": [IDENTITY_REPORT_ID], "href": f"/v3/checks/{CHECK_ID}", "applicant_id": APPLICANT_ID, "applicant_provides_data": False, } # https://documentation.onfido.com/#identity-enhanced-report TEST_REPORT_IDENTITY_ENHANCED = { "created_at": "2019-10-03T15:54:20Z", "href": f"/v3/reports/{IDENTITY_REPORT_ID}", "id": IDENTITY_REPORT_ID, "name": "identity_enhanced", "properties": { "matched_address": 19099121, "matched_addresses": [ {"id": 19099121, "match_types": ["credit_agencies", "voting_register"]} ], }, "result": "clear", "status": "complete", "sub_result": None, "breakdown": { "sources": { "result": "clear", "breakdown": { "total_sources": { "result": "clear", "properties": {"total_number_of_sources": "3"}, } }, }, "address": { "result": "clear", "breakdown": { "credit_agencies": { "result": "clear", "properties": {"number_of_matches": "1"}, },<|fim▁hole|> }, }, "date_of_birth": { "result": "clear", "breakdown": { "credit_agencies": {"result": "clear", "properties": {}}, "voting_register": {"result": "clear", "properties": {}}, }, }, "mortality": {"result": "clear"}, }, "check_id": CHECK_ID, 
"documents": [], } TEST_REPORT_DOCUMENT = { "created_at": "2019-10-03T14:05:48Z", "documents": [{"id": DOCUMENT_ID}], "href": f"/v3/reports/{DOCUMENT_REPORT_ID}", "id": DOCUMENT_REPORT_ID, "name": "document", "properties": { "nationality": "", "last_name": "Names", "issuing_country": "GBR", "gender": "", "first_name": "Report", "document_type": "passport", "document_numbers": [{"value": "123456789", "type": "document_number"}], "date_of_expiry": "2030-01-01", "date_of_birth": "1990-01-01", }, "result": "clear", "status": "complete", "sub_result": "clear", "breakdown": { "data_comparison": { "result": "clear", "breakdown": { "issuing_country": {"result": "clear", "properties": {}}, "gender": {"result": "clear", "properties": {}}, "date_of_expiry": {"result": "clear", "properties": {}}, "last_name": {"result": "clear", "properties": {}}, "document_type": {"result": "clear", "properties": {}}, "document_numbers": {"result": "clear", "properties": {}}, "first_name": {"result": "clear", "properties": {}}, "date_of_birth": {"result": "clear", "properties": {}}, }, }, "data_validation": { "result": "clear", "breakdown": { "gender": {"result": "clear", "properties": {}}, "date_of_birth": {"result": "clear", "properties": {}}, "document_numbers": {"result": "clear", "properties": {}}, "document_expiration": {"result": "clear", "properties": {}}, "expiry_date": {"result": "clear", "properties": {}}, "mrz": {"result": "clear", "properties": {}}, }, }, "age_validation": { "result": "clear", "breakdown": { "minimum_accepted_age": {"result": "clear", "properties": {}} }, }, "image_integrity": { "result": "clear", "breakdown": { "image_quality": {"result": "clear", "properties": {}}, "conclusive_document_quality": {"result": "clear", "properties": {}}, "supported_document": {"result": "clear", "properties": {}}, "colour_picture": {"result": "clear", "properties": {}}, }, }, "visual_authenticity": { "result": "clear", "breakdown": { "fonts": {"result": "clear", "properties": {}}, 
"picture_face_integrity": {"result": "clear", "properties": {}}, "template": {"result": "clear", "properties": {}}, "security_features": {"result": "clear", "properties": {}}, "original_document_present": {"result": "clear", "properties": {}}, "digital_tampering": {"result": "clear", "properties": {}}, "other": {"result": "clear", "properties": {}}, "face_detection": {"result": "clear", "properties": {}}, }, }, "data_consistency": { "result": "clear", "breakdown": { "date_of_expiry": {"result": "clear", "properties": {}}, "document_numbers": {"result": "clear", "properties": {}}, "issuing_country": {"result": "clear", "properties": {}}, "document_type": {"result": "clear", "properties": {}}, "date_of_birth": {"result": "clear", "properties": {}}, "gender": {"result": "clear", "properties": {}}, "first_name": {"result": "clear", "properties": {}}, "last_name": {"result": "clear", "properties": {}}, "nationality": {"result": "clear", "properties": {}}, }, }, "police_record": {"result": "clear"}, "compromised_document": {"result": "clear"}, }, "check_id": CHECK_ID, } TEST_EVENT = { "payload": { "resource_type": "check", "action": "check.form_opened", "object": { "id": CHECK_ID, "status": "complete", "completed_at_iso8601": "2019-10-28T15:00:39Z", "href": f"https://api.onfido.com/v3/checks/{CHECK_ID}", }, } }<|fim▁end|>
"telephone_database": {"result": "clear", "properties": {}}, "voting_register": {"result": "clear", "properties": {}},
<|file_name|>segy.py<|end_file_name|><|fim▁begin|>"""segy.py - read and write SEG-Y files From command line: python segy.py <path-to-segy-file> """ from collections import OrderedDict from pprint import pprint import numpy as np from sacker import Sacker # SEG-Y spec: http://www.tritonimaginginc.com/site/content/public/downloads/FileFormatInfo/seg_y_rev1.pdf SAMPLE_FORMATS = { 'f': 5, # 4-byte, IEEE floating-point 'i': 2, # 4-byte, two's complement integer 'h': 3, # 2-byte, two's complement integer 'b': 8, # 1-byte, two's complement integer } SEGY_HEADER = Sacker('>', ''' I job_id # Job identification number i line_num # Line number i reel_num # Reel number h n_traces_per_ensemble # Number of data traces per ensemble h n_auxtraces_per_ensemble # Number of auxilary traces per ensemble h sample_interval # Sample interval (us) h orig_sample_interval # Sample interval of original field recording h n_trace_samples # Number of samples per data trace h orig_n_trace_samples # Number of samples per data trace for original # field recording h sample_format # Data sample format code h ensemble_fold # Expected number of data traces per # trace ensemble (e.g. the CMP fold) h trace_sorting_code h vertical_sum_code h sweep_freq_at_start # (Hz) h sweep_freq_at_end # (Hz) h sweep_length # (ms) h sweep_type_code h sweep_channel_trace_number h start_taper_length # (ms) h end_taper_length # (ms) h taper_type h correlated_traces<|fim▁hole|> h measurement_system # (1: meters, 2: feet) h impulse_signal_polarity h vibratory_polarity_code 240x h segy_rev h fixed_length_trace_flag h n_extended_headers 94x''', length = 400) TRACE_HEADER = Sacker('>', ''' i trace_seq_in_line # Trace sequence number within line - Numbers # continue to increase if the same line # continues across multiple SEG Y files i trace_seq_in_file # Trace sequence number within SEG Y file. # Each file starts with trace sequence one. 
i orig_field_record_num i trace_num_in_orig_record i energy_source_point_number i ensemble_num # i.e. CDP, CMP, CRP, etc i trace_num_in_ensemble # Each ensemble starts with trace 1 h trace_id_code h n_of_vertically_summed_traces # yielding this trace h n_of_horizontally_summed_traces # yielding this trace h data_use # (1 - production, 2 - test) i source_reciever_dist i reciever_elevation i surface_elevation_at_source i source_depth_below_surface # (a positive number) i datum_elevation_at_reciever i datum_elevation_at_source i water_depth_at_source i water_depth_at_reciever h elevations_scaler # (1, 10, 100, 1000, 10000) h coordinates_scaler # (1, 10, 100, 1000, 10000) i source_coord_x i source_coord_y i reciever_coord_x i reciever_coord_y h coordinate_units # (1: length, 2: secs of arc, 3: decimal degrees, # 4: degrees, minutes, seconds) h weathering_velocity # (m/s or ft/s) h subweathering_velocity # (m/s or ft/s) h uphole_time_at_source # (ms) h uphole_time_at_reciever # (ms) h static_correction_at_source # (ms) h static_correction_at_reciever # (ms) h total_static # (ms) h lag_time_A # (ms) h lag_time_B # (ms) h delay_recording_time # (ms) h mute_time_start # (ms) h mute_time_end # (ms) h n_samples # Number of samples in this trace h sample_interval # (us) h field_instruments_gain_type # (1: fixed, 2: binary, 3: float) h instrument_gain_const # (dB) h instrument_early_gain # (dB) h correlated # (1: no, 2: yes) h sweep_freq_at_start # (Hz) h sweep_freq_at_end # (Hz) h sweep_length # (ms) h sweep_type_code h start_taper_length # (ms) h end_taper_length # (ms) h taper_type h alias_filter_freq # (Hz) h alias_filter_slope # (dB/octave) h notch_filter_freq # (Hz) h notch_filter_slope # (dB/octave) h low_cut_filter_freq # (Hz) h high_cut_filter_freq # (Hz) h low_cut_filter_slope # (dB/octave) h high_cut_filter_slope # (dB/octave) h year h day_of_year h hour h minute h second h time_basis_code # (1: local, 2: GMT, 3: Other, 4: UTC) h trace_weighting_factor h 
geophone_group_num_of_roll_switch h geophone_group_num_of_first_trace h geophone_group_num_of_last_trace h gap_size # (total number of groups dropped) h over_travel # associated with taper (1: down, 2: up) 60x''', length = 240) TEXT_LEN = 3200 def decode_text(s): text = s.decode('ibm037') return '\n'.join(text[i:i+80] for i in range(0, len(text), 80)) def encode_text(s): t = ''.join(line.ljust(80,' ') for line in s.split('\n')).ljust(TEXT_LEN,' ') return t.encode('ibm037') def write_SEGY(outfile, file_header, text, traces): with open(outfile, 'wb') as out: out.write(encode_text(text)) out.write(SEGY_HEADER.wrap(file_header)) for header, data in traces: out.write(TRACE_HEADER.wrap(header)) out.write(np.getbuffer(data.byteswap())) def read_SEGY(infile): file_data = memoryview(open(infile, 'rb').read()) print decode_text(file_data[:TEXT_LEN].tobytes()) data = file_data[TEXT_LEN:] header_len, header = SEGY_HEADER.unwrap(data, data_factory = OrderedDict) pprint([(k, v) for k, v in header.items() if v != 0]) i = 0 data = data[header_len:] while data: trace_len, trace = TRACE_HEADER.unwrap(data, data_factory = OrderedDict) print 'TRACE', i, '[%d]' % trace['trace_num_in_orig_record'], pprint([(k, v) for k, v in trace.items() if v != 0]) print np.frombuffer(data[trace_len:trace_len + trace['n_samples']*2].tobytes(), np.int16).byteswap() data = data[trace_len + trace['n_samples'] * 2:] i += 1 if i > 10: break def main(infile): read_SEGY(infile) if __name__ == '__main__': import sys if len(sys.argv) != 2: sys.exit('Error: wrong arguments\n' + __doc__.rstrip()) main(*sys.argv[1:])<|fim▁end|>
h binary_gain_recovered h amplitude_recovery_method
<|file_name|>EventController.js<|end_file_name|><|fim▁begin|>angular .module('platoon.event', []) .controller('EventController', ['$scope', '$location', '$filter', function($scope, $location, $filter) { $scope.eventTypes = [{ label: 'Private', type: 'private' },{ label: 'Company',<|fim▁hole|> label: 'Wedding', type: 'wedding' },{ label: 'Student', type: 'student' } ]; $scope.add = {}; $scope.add.djs = { djs: [], add: function(dj) { $scope.$evalAsync(function() { $scope.add.djs.djs.push(dj); $scope.add.djs.current = ''; }.bind(this)); }, remove: function(event) { var target = $(event.target).find(':selected'); if (target.length > 0 && event.which === 8 || event.which === 46) { event.preventDefault(); event.stopPropagation(); var dj = target.html(); for (var i = 0; i < this.djs.length; i++) { if (this.djs[i] == dj) { this.djs.splice(i, 1); break; } } } }, engine: { name: 'djs', source: new Bloodhound({ queryTokenizer: Bloodhound.tokenizers.whitespace, datumTokenizer: Bloodhound.tokenizers.whitespace, local: ['Max', 'Arvid', 'Roshan', 'Amanda', 'Robin'] }) } }; $scope.add.type = $scope.eventTypes[0].type; $scope.add.phone = ''; $scope.add.layout = 'standard'; $scope.$watch(function() { return $scope.add.phone; }, function(val) { if (val != null) $scope.add.phone = $filter('phoneNumber')(val); }); }]);<|fim▁end|>
type: 'company' },{
<|file_name|>poster.py<|end_file_name|><|fim▁begin|>import simplejson as json import urllib import urllib2 import time server = "" def GET(uri, params): params = urllib.urlencode(params) req = urllib2.Request(server + uri + "?" + params , headers={'Accept': 'application/json'}) return json.loads(urllib2.urlopen(req).read()) def POST(uri, params): params = json.dumps(params) req = urllib2.Request(server + uri, params, headers={'Content-Type': 'application/json', 'Accept': 'application/json'}) response = json.loads(urllib2.urlopen(req).read()) return response["id"] def set_server_url(url): global server server = url class Detector: def __init__(self, name, url): self.name = name self.url = url def get_id(self): try: return self.id except AttributeError: try: detectors = GET("/detectors/", {'name': self.name}) self.id = detectors[0]['id'] except urllib2.HTTPError as e: self.id = POST("/detectors/", {'name': self.name, 'url': self.url})<|fim▁hole|> return self.id def realize(self): self.get_id() class Metric: def __init__(self, name, descr, detector): self.name = name self.descr = descr self.detector = detector def get_id(self): try: return self.id except AttributeError: uri = "/detectors/" + str(self.detector.get_id()) + "/metrics/" try: metrics = GET(uri, {'name': self.name}) return metrics[0]['id'] except urllib2.HTTPError as e: return POST(uri, {'name': self.name, 'description': self.descr}) def realize(self): self.get_id() def post_alert(detector, metric, payload, emails="", date=time.strftime("%Y-%m-%d")): try: payload = json.dumps(payload) uri = "/detectors/" + str(detector.get_id()) + "/metrics/" + str(metric.get_id()) + "/alerts/" return POST(uri, {'description': payload, 'date': date, 'emails': emails}) except urllib2.HTTPError as e: if e.code == 422: print "Alert for detector: " + detector.name + ", metric: " + metric.name + ", has already been submitted!" 
else: raise e if __name__ == "__main__": set_server_url("http://localhost:8080") detector = Detector("Histogram Regression Detector", "foobar") metric = Metric("metric100", "foobar", detector) post_alert(detector, metric, "foobar")<|fim▁end|>
<|file_name|>Stepper.test.js<|end_file_name|><|fim▁begin|>import React from 'react' import { mount, shallow, render } from 'enzyme' import { Stepper } from './Stepper' import Step from './Stepper.Step' import { StepUI, StepperUI } from './Stepper.css' const mockSteps = [ { id: 'Id1', title: 'Test Title 1', }, { id: 'Id2', title: 'Test Title 2', }, { id: 'Id3', title: 'Test Title 3', }, { id: 'Id4', title: 'Test Title 4', }, ] describe('className', () => { test('Has default className', () => { const wrapper = render(<Stepper />) expect(wrapper.hasClass('c-Stepper')).toBeTruthy() }) test('Can render custom className', () => { const customClassName = 'blue' const wrapper = render(<Stepper className={customClassName} />) expect(wrapper.hasClass(customClassName)).toBeTruthy() }) }) describe('HTML props', () => { test('Can render default HTML props', () => { const wrapper = render(<Stepper data-cy="blue" />) expect(wrapper.attr('data-cy')).toBe('blue') }) }) describe('children', () => { test('should have a child component for each step', () => { const wrapper = mount(<Stepper steps={mockSteps} currentIndex={0} />) const steps = wrapper.find(Step) expect(steps.length).toEqual(4) }) test('should assign proper isActive state to each step', () => { const wrapper = mount(<Stepper steps={mockSteps} currentIndex={2} />) wrapper.update() const steps = wrapper.find(StepUI) let results = [] steps.forEach(step => { results.push(step.hasClass('is-active')) }) expect(results[0]).toEqual(true) expect(results[1]).toEqual(true) expect(results[2]).toEqual(true) expect(results[3]).toEqual(false) }) }) describe('callbacks', () => { test('should call callbacks', () => { const onChangeSpy = jest.fn() const onCompleteSpy = jest.fn() const wrapper = mount( <Stepper onChange={onChangeSpy} onComplete={onCompleteSpy} currentIndex={0} steps={mockSteps} /> ) expect(onChangeSpy).toHaveBeenCalledTimes(0) expect(onCompleteSpy).toHaveBeenCalledTimes(0) wrapper.setProps({ currentIndex: 1 }) 
expect(onChangeSpy).toHaveBeenCalledTimes(1) expect(onCompleteSpy).toHaveBeenCalledTimes(0) wrapper.setProps({ currentIndex: 2 }) expect(onChangeSpy).toHaveBeenCalledTimes(2) expect(onCompleteSpy).toHaveBeenCalledTimes(0) wrapper.setProps({ currentIndex: 3 }) expect(onChangeSpy).toHaveBeenCalledTimes(3) expect(onCompleteSpy).toHaveBeenCalledTimes(1) }) test('should call onStepClick callback', () => { const onStepClickSpy = jest.fn() const wrapper = shallow( <Stepper steps={mockSteps} onStepClick={onStepClickSpy} /> ) wrapper .find(Step) .at(0) .simulate('click') expect(onStepClickSpy).toHaveBeenCalledTimes(1) }) }) describe('StepperUI', () => { test('should return the correct value for getProgress', () => { const wrapper = mount(<Stepper steps={mockSteps} currentIndex={2} />) expect(wrapper.find(StepperUI).prop('aria-valuenow')).toEqual(3) }) }) describe('getProgress', () => { test('should equal 2', () => { const wrapper = mount(<Stepper steps={mockSteps} currentIndex={1} />) expect(wrapper.instance().getProgress()).toEqual(2) }) test('when no currentIndex is null, kkshould return 1', () => { const wrapper = mount(<Stepper currentIndex={null} />) expect(wrapper.instance().getProgress()).toEqual(1) }) }) describe('getMatchIndex', () => { test('should return 1', () => { const wrapper = mount(<Stepper currentIndex={1} />) expect(wrapper.instance().getMatchIndex()).toEqual(1) }) test('when no currentIndex defined should return 0', () => { const wrapper = mount(<Stepper currentIndex={null} />) expect(wrapper.instance().getMatchIndex()).toEqual(-1) }) }) describe('componentDidUpdate', () => { test('should call onChange callback', () => { const onChangeSpy = jest.fn() const wrapper = mount( <Stepper onChange={onChangeSpy} steps={mockSteps} currentIndex={1} /> ) wrapper.instance().componentDidUpdate({ currentIndex: 0 }) expect(onChangeSpy).toHaveBeenCalled() }) test('should not call onChange callback', () => { const onChangeSpy = jest.fn() const wrapper = 
mount(<|fim▁hole|> ) wrapper.instance().componentDidUpdate({ currentIndex: 1 }) expect(onChangeSpy).not.toHaveBeenCalled() }) }) describe('handleChangeCallback', () => { test('should not call onChange', () => { const onChangeSpy = jest.fn() const wrapper = mount( <Stepper onChange={onChangeSpy} steps={[]} currentIndex={1} /> ) wrapper.instance().handleChangeCallback() expect(onChangeSpy).not.toHaveBeenCalled() }) }) describe('Step className', () => { test('should call click handler if isClickable is true', () => { const onClickSpy = jest.fn() const wrapper = mount(<Step isClickable={true} onClick={onClickSpy} />) wrapper .find('.c-StepperStep') .at(0) .simulate('click') expect(onClickSpy).toHaveBeenCalledTimes(1) }) test('should NOT call click handler if isClickable is false', () => { const onClickSpy = jest.fn() const wrapper = mount(<Step isClickable={false} onClick={onClickSpy} />) wrapper .find('.c-StepperStep') .at(0) .simulate('click') expect(onClickSpy).toHaveBeenCalledTimes(0) }) })<|fim▁end|>
<Stepper onChange={onChangeSpy} steps={mockSteps} currentIndex={1} />
<|file_name|>fns.rs<|end_file_name|><|fim▁begin|>#[repr(C)] pub struct Fns { noArgs: fn(), anonymousArg: fn(i32), returnsNumber: fn() -> i32, namedArgs: fn(first: i32, snd: i16) -> i8, namedArgsWildcards: fn(_: i32, named: i16, _: i64) -> i8, } #[no_mangle]<|fim▁hole|>pub extern "C" fn no_return() -> ! { loop {} }<|fim▁end|>
pub extern "C" fn root(_fns: Fns) {} #[no_mangle]
<|file_name|>main.go<|end_file_name|><|fim▁begin|>package main import ( "flag" "io/ioutil" "log" "os" "os/signal" "strconv" "syscall" "code.cloudfoundry.org/cflager" "code.cloudfoundry.org/lager" "code.cloudfoundry.org/route-registrar/config" "code.cloudfoundry.org/route-registrar/healthchecker" "code.cloudfoundry.org/route-registrar/messagebus" "code.cloudfoundry.org/route-registrar/registrar" "github.com/tedsuo/ifrit" ) func main() { var configPath string flags := flag.NewFlagSet(os.Args[0], flag.ExitOnError) pidfile := flags.String("pidfile", "", "Path to pid file") cflager.AddFlags(flags) flags.StringVar(&configPath, "configPath", "", "path to configuration file with json encoded content") flags.Set("configPath", "registrar_settings.yml") flags.Parse(os.Args[1:]) logger, _ := cflager.New("Route Registrar") logger.Info("Initializing") configSchema, err := config.NewConfigSchemaFromFile(configPath) if err != nil { logger.Fatal("error parsing file: %s\n", err) } c, err := configSchema.ToConfig() if err != nil { log.Fatalln(err) } hc := healthchecker.NewHealthChecker(logger) logger.Info("creating nats connection") messageBus := messagebus.NewMessageBus(logger) r := registrar.NewRegistrar(*c, hc, logger, messageBus) if *pidfile != "" { pid := strconv.Itoa(os.Getpid()) err := ioutil.WriteFile(*pidfile, []byte(pid), 0644) logger.Info("Writing pid", lager.Data{"pid": pid, "file": *pidfile}) if err != nil {<|fim▁hole|> "error writing pid to pidfile", err, lager.Data{ "pid": pid, "pidfile": *pidfile, }, ) } } sigChan := make(chan os.Signal, 1) signal.Notify(sigChan, syscall.SIGINT, syscall.SIGTERM, syscall.SIGKILL) logger.Info("Running") process := ifrit.Invoke(r) for { select { case s := <-sigChan: logger.Info("Caught signal", lager.Data{"signal": s}) process.Signal(s) case err := <-process.Wait(): if err != nil { logger.Fatal("Exiting with error", err) } logger.Info("Exiting without error") os.Exit(0) } } }<|fim▁end|>
logger.Fatal(
<|file_name|>build.js<|end_file_name|><|fim▁begin|>(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i<t.length;i++)o(t[i]);return o}return r})()({1:[function(require,module,exports){ "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports["default"] = void 0; var _index = _interopRequireDefault(require("../../index")); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; } function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? 
"symbol" : typeof obj; }; } return _typeof(obj); } function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } function _possibleConstructorReturn(self, call) { if (call && (_typeof(call) === "object" || typeof call === "function")) { return call; } return _assertThisInitialized(self); } function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; } function _get(target, property, receiver) { if (typeof Reflect !== "undefined" && Reflect.get) { _get = Reflect.get; } else { _get = function _get(target, property, receiver) { var base = _superPropBase(target, property); if (!base) return; var desc = Object.getOwnPropertyDescriptor(base, property); if (desc.get) { return desc.get.call(receiver); } return desc.value; }; } return _get(target, property, receiver || target); } function _superPropBase(object, property) { while (!Object.prototype.hasOwnProperty.call(object, property)) { object = _getPrototypeOf(object); if (object === null) break; } return object; } function _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? 
Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); } function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); } function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); } var Custom = /*#__PURE__*/ function (_Smooth) { _inherits(Custom, _Smooth); function Custom(opt) { var _this; _classCallCheck(this, Custom); _this = _possibleConstructorReturn(this, _getPrototypeOf(Custom).call(this, opt)); _this.perfs = { now: null, last: null }; _this.dom.section = opt.section; return _this; } _createClass(Custom, [{ key: "init", value: function init() { _get(_getPrototypeOf(Custom.prototype), "init", this).call(this); } }, { key: "run", value: function run() { this.perfs.now = window.performance.now(); _get(_getPrototypeOf(Custom.prototype), "run", this).call(this); this.dom.section.style[this.prefix] = this.getTransform(-this.vars.current.toFixed(2)); console.log(this.perfs.now - this.perfs.last); this.perfs.last = this.perfs.now; } }, { key: "resize", value: function resize() { this.vars.bounding = this.dom.section.getBoundingClientRect().height - this.vars.height; _get(_getPrototypeOf(Custom.prototype), "resize", this).call(this); } }]); return Custom; }(_index["default"]); var _default = Custom; exports["default"] = _default; },{"../../index":3}],2:[function(require,module,exports){ "use strict"; var _custom = _interopRequireDefault(require("./custom")); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { "default": obj }; } var scroll = new _custom["default"]({ "extends": true, section: document.querySelector('.vs-section') }); scroll.init(); },{"./custom":1}],3:[function(require,module,exports){ "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports["default"] = void 0; var _domClasses = _interopRequireDefault(require("dom-classes")); var _domCreateElement = _interopRequireDefault(require("dom-create-element")); var _prefix = _interopRequireDefault(require("prefix")); var _virtualScroll = _interopRequireDefault(require("virtual-scroll")); var _domEvents = _interopRequireDefault(require("dom-events")); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; } function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } var Smooth = /*#__PURE__*/ function () { function Smooth() { var opt = arguments.length > 0 && arguments[0] !== undefined ? 
arguments[0] : {}; _classCallCheck(this, Smooth); this.createBound(); this.options = opt; this.prefix = (0, _prefix["default"])('transform'); this.rAF = undefined; // It seems that under heavy load, Firefox will still call the RAF callback even though the RAF has been canceled // To prevent that we set a flag to prevent any callback to be executed when RAF is removed this.isRAFCanceled = false; var constructorName = this.constructor.name ? this.constructor.name : 'Smooth'; this["extends"] = typeof opt["extends"] === 'undefined' ? this.constructor !== Smooth : opt["extends"]; this.callback = this.options.callback || null; this.vars = { direction: this.options.direction || 'vertical', "native": this.options["native"] || false, ease: this.options.ease || 0.075, preload: this.options.preload || false, current: 0, last: 0, target: 0, height: window.innerHeight, width: window.innerWidth, bounding: 0, timer: null, ticking: false }; this.vs = this.vars["native"] ? null : new _virtualScroll["default"]({ limitInertia: this.options.vs && this.options.vs.limitInertia || false, mouseMultiplier: this.options.vs && this.options.vs.mouseMultiplier || 1, touchMultiplier: this.options.vs && this.options.vs.touchMultiplier || 1.5, firefoxMultiplier: this.options.vs && this.options.vs.firefoxMultiplier || 30, preventTouch: this.options.vs && this.options.vs.preventTouch || true }); this.dom = { listener: this.options.listener || document.body, section: this.options.section || document.querySelector('.vs-section') || null, scrollbar: this.vars["native"] || this.options.noscrollbar ? 
null : { state: { clicked: false, x: 0 }, el: (0, _domCreateElement["default"])({ selector: 'div', styles: "vs-scrollbar vs-".concat(this.vars.direction, " vs-scrollbar-").concat(constructorName.toLowerCase()) }), drag: { el: (0, _domCreateElement["default"])({ selector: 'div', styles: 'vs-scrolldrag' }), delta: 0, height: 50 } } }; } _createClass(Smooth, [{ key: "createBound", value: function createBound() { var _this = this; ['run', 'calc', 'debounce', 'resize', 'mouseUp', 'mouseDown', 'mouseMove', 'calcScroll', 'scrollTo'].forEach(function (fn) { return _this[fn] = _this[fn].bind(_this); }); } }, { key: "init", value: function init() { this.addClasses(); this.vars.preload && this.preloadImages(); this.vars["native"] ? this.addFakeScrollHeight() : !this.options.noscrollbar && this.addFakeScrollBar(); this.addEvents(); this.resize(); } }, { key: "addClasses", value: function addClasses() { var type = this.vars["native"] ? 'native' : 'virtual'; var direction = this.vars.direction === 'vertical' ? 'y' : 'x'; _domClasses["default"].add(this.dom.listener, "is-".concat(type, "-scroll")); _domClasses["default"].add(this.dom.listener, "".concat(direction, "-scroll")); } }, { key: "preloadImages", value: function preloadImages() { var _this2 = this; var images = Array.prototype.slice.call(this.dom.listener.querySelectorAll('img'), 0); images.forEach(function (image) { var img = document.createElement('img'); _domEvents["default"].once(img, 'load', function () { images.splice(images.indexOf(image), 1); images.length === 0 && _this2.resize(); }); img.src = image.getAttribute('src'); }); } }, { key: "calc", value: function calc(e) { var delta = this.vars.direction == 'horizontal' ? e.deltaX : e.deltaY; this.vars.target += delta * -1; this.clampTarget(); } }, { key: "debounce", value: function debounce() { var _this3 = this; var win = this.dom.listener === document.body; this.vars.target = this.vars.direction === 'vertical' ? win ? 
window.scrollY || window.pageYOffset : this.dom.listener.scrollTop : win ? window.scrollX || window.pageXOffset : this.dom.listener.scrollLeft; clearTimeout(this.vars.timer); if (!this.vars.ticking) { this.vars.ticking = true; _domClasses["default"].add(this.dom.listener, 'is-scrolling'); } this.vars.timer = setTimeout(function () { _this3.vars.ticking = false; _domClasses["default"].remove(_this3.dom.listener, 'is-scrolling'); }, 200); } }, { key: "run", value: function run() { if (this.isRAFCanceled) return; this.vars.current += (this.vars.target - this.vars.current) * this.vars.ease; this.vars.current < .1 && (this.vars.current = 0); this.requestAnimationFrame(); if (!this["extends"]) { this.dom.section.style[this.prefix] = this.getTransform(-this.vars.current.toFixed(2)); } if (!this.vars["native"] && !this.options.noscrollbar) { var size = this.dom.scrollbar.drag.height; var bounds = this.vars.direction === 'vertical' ? this.vars.height : this.vars.width; var value = Math.abs(this.vars.current) / (this.vars.bounding / (bounds - size)) + size / .5 - size; var clamp = Math.max(0, Math.min(value - size, value + size)); this.dom.scrollbar.drag.el.style[this.prefix] = this.getTransform(clamp.toFixed(2)); } if (this.callback && this.vars.current !== this.vars.last) { this.callback(this.vars.current); } this.vars.last = this.vars.current; } }, { key: "getTransform", value: function getTransform(value) { return this.vars.direction === 'vertical' ? "translate3d(0,".concat(value, "px,0)") : "translate3d(".concat(value, "px,0,0)"); } }, { key: "on", value: function on() { var requestAnimationFrame = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : true; if (this.isRAFCanceled) { this.isRAFCanceled = false; } var node = this.dom.listener === document.body ? window : this.dom.listener; this.vars["native"] ? 
_domEvents["default"].on(node, 'scroll', this.debounce) : this.vs && this.vs.on(this.calc); requestAnimationFrame && this.requestAnimationFrame(); } }, { key: "off", value: function off() { var cancelAnimationFrame = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : true; var node = this.dom.listener === document.body ? window : this.dom.listener; this.vars["native"] ? _domEvents["default"].off(node, 'scroll', this.debounce) : this.vs && this.vs.off(this.calc); cancelAnimationFrame && this.cancelAnimationFrame(); } }, { key: "requestAnimationFrame", value: function (_requestAnimationFrame) { function requestAnimationFrame() { return _requestAnimationFrame.apply(this, arguments); } requestAnimationFrame.toString = function () { return _requestAnimationFrame.toString(); }; return requestAnimationFrame; }(function () { this.rAF = requestAnimationFrame(this.run); }) }, { key: "cancelAnimationFrame", value: function (_cancelAnimationFrame) { function cancelAnimationFrame() { return _cancelAnimationFrame.apply(this, arguments); } cancelAnimationFrame.toString = function () { return _cancelAnimationFrame.toString(); }; return cancelAnimationFrame; }(function () { this.isRAFCanceled = true; cancelAnimationFrame(this.rAF); }) }, { key: "addEvents", value: function addEvents() { this.on(); _domEvents["default"].on(window, 'resize', this.resize); } }, { key: "removeEvents", value: function removeEvents() { this.off(); _domEvents["default"].off(window, 'resize', this.resize); } }, { key: "addFakeScrollBar", value: function addFakeScrollBar() { this.dom.listener.appendChild(this.dom.scrollbar.el); this.dom.scrollbar.el.appendChild(this.dom.scrollbar.drag.el); _domEvents["default"].on(this.dom.scrollbar.el, 'click', this.calcScroll); _domEvents["default"].on(this.dom.scrollbar.el, 'mousedown', this.mouseDown); _domEvents["default"].on(document, 'mousemove', this.mouseMove); _domEvents["default"].on(document, 'mouseup', this.mouseUp); } }, { key: 
"removeFakeScrollBar", value: function removeFakeScrollBar() { _domEvents["default"].off(this.dom.scrollbar.el, 'click', this.calcScroll); _domEvents["default"].off(this.dom.scrollbar.el, 'mousedown', this.mouseDown); _domEvents["default"].off(document, 'mousemove', this.mouseMove); _domEvents["default"].off(document, 'mouseup', this.mouseUp); this.dom.listener.removeChild(this.dom.scrollbar.el); } }, { key: "mouseDown", value: function mouseDown(e) { e.preventDefault(); e.which == 1 && (this.dom.scrollbar.state.clicked = true); } }, { key: "mouseUp", value: function mouseUp(e) { this.dom.scrollbar.state.clicked = false; _domClasses["default"].remove(this.dom.listener, 'is-dragging'); } }, { key: "mouseMove", value: function mouseMove(e) { this.dom.scrollbar.state.clicked && this.calcScroll(e); } }, { key: "addFakeScrollHeight", value: function addFakeScrollHeight() { this.dom.scroll = (0, _domCreateElement["default"])({ selector: 'div', styles: 'vs-scroll-view' }); this.dom.listener.appendChild(this.dom.scroll); } }, { key: "removeFakeScrollHeight", value: function removeFakeScrollHeight() { this.dom.listener.removeChild(this.dom.scroll); } }, { key: "calcScroll", value: function calcScroll(e) { var client = this.vars.direction == 'vertical' ? e.clientY : e.clientX; var bounds = this.vars.direction == 'vertical' ? this.vars.height : this.vars.width; var delta = client * (this.vars.bounding / bounds); _domClasses["default"].add(this.dom.listener, 'is-dragging'); this.vars.target = delta; this.clampTarget(); this.dom.scrollbar && (this.dom.scrollbar.drag.delta = this.vars.target); } }, { key: "scrollTo", value: function scrollTo(offset) { if (this.vars["native"]) { this.vars.direction == 'vertical' ? window.scrollTo(0, offset) : window.scrollTo(offset, 0); } else { this.vars.target = offset; this.clampTarget(); } } }, { key: "resize", value: function resize() { var prop = this.vars.direction === 'vertical' ? 
'height' : 'width'; this.vars.height = window.innerHeight; this.vars.width = window.innerWidth; if (!this["extends"]) { var bounding = this.dom.section.getBoundingClientRect(); this.vars.bounding = this.vars.direction === 'vertical' ? bounding.height - (this.vars["native"] ? 0 : this.vars.height) : bounding.right - (this.vars["native"] ? 0 : this.vars.width); } if (!this.vars["native"] && !this.options.noscrollbar) { this.dom.scrollbar.drag.height = this.vars.height * (this.vars.height / (this.vars.bounding + this.vars.height)); this.dom.scrollbar.drag.el.style[prop] = "".concat(this.dom.scrollbar.drag.height, "px"); } else if (this.vars["native"]) { this.dom.scroll.style[prop] = "".concat(this.vars.bounding, "px"); } !this.vars["native"] && this.clampTarget(); } }, { key: "clampTarget", value: function clampTarget() { this.vars.target = Math.round(Math.max(0, Math.min(this.vars.target, this.vars.bounding))); } }, { key: "destroy", value: function destroy() { if (this.vars["native"]) { _domClasses["default"].remove(this.dom.listener, 'is-native-scroll'); this.removeFakeScrollHeight(); } else { _domClasses["default"].remove(this.dom.listener, 'is-virtual-scroll'); !this.options.noscrollbar && this.removeFakeScrollBar(); } this.vars.direction === 'vertical' ? 
_domClasses["default"].remove(this.dom.listener, 'y-scroll') : _domClasses["default"].remove(this.dom.listener, 'x-scroll'); this.vars.current = 0; this.vs && (this.vs.destroy(), this.vs = null); this.removeEvents(); } }]); return Smooth; }(); exports["default"] = Smooth; window.Smooth = Smooth; },{"dom-classes":5,"dom-create-element":6,"dom-events":7,"prefix":11,"virtual-scroll":17}],4:[function(require,module,exports){ 'use strict'; var toString = Object.prototype.toString, hasOwnProperty = Object.prototype.hasOwnProperty; module.exports = function(object) { if(!object) return console.warn('bindAll requires at least one argument.'); var functions = Array.prototype.slice.call(arguments, 1); if (functions.length === 0) { for (var method in object) { if(hasOwnProperty.call(object, method)) { if(typeof object[method] == 'function' && toString.call(object[method]) == "[object Function]") { functions.push(method); } } } } for(var i = 0; i < functions.length; i++) { var f = functions[i]; object[f] = bind(object[f], object); } }; /* Faster bind without specific-case checking. (see https://coderwall.com/p/oi3j3w). bindAll is only needed for events binding so no need to make slow fixes for constructor or partial application. */ function bind(func, context) { return function() { return func.apply(context, arguments); }; } },{}],5:[function(require,module,exports){ /** * Module dependencies. */ var index = require('indexof'); /** * Whitespace regexp. */ var whitespaceRe = /\s+/; /** * toString reference. 
*/ var toString = Object.prototype.toString; module.exports = classes; module.exports.add = add; module.exports.contains = has; module.exports.has = has; module.exports.toggle = toggle; module.exports.remove = remove; module.exports.removeMatching = removeMatching; function classes (el) { if (el.classList) { return el.classList; } var str = el.className.replace(/^\s+|\s+$/g, ''); var arr = str.split(whitespaceRe); if ('' === arr[0]) arr.shift(); return arr; } function add (el, name) { // classList if (el.classList) { el.classList.add(name); return; } // fallback var arr = classes(el); var i = index(arr, name); if (!~i) arr.push(name); el.className = arr.join(' '); } function has (el, name) { return el.classList ? el.classList.contains(name) : !! ~index(classes(el), name); } function remove (el, name) { if ('[object RegExp]' == toString.call(name)) { return removeMatching(el, name); } // classList if (el.classList) { el.classList.remove(name); return; } // fallback var arr = classes(el); var i = index(arr, name); if (~i) arr.splice(i, 1); el.className = arr.join(' '); } function removeMatching (el, re, ref) { var arr = Array.prototype.slice.call(classes(el)); for (var i = 0; i < arr.length; i++) { if (re.test(arr[i])) { remove(el, arr[i]); } } } function toggle (el, name) { // classList if (el.classList) { return el.classList.toggle(name); } // fallback if (has(el, name)) { remove(el, name); } else { add(el, name); } } },{"indexof":8}],6:[function(require,module,exports){ /* `dom-create-element` var create = require('dom-create-element'); var el = create({ selector: 'div', styles: 'preloader', html: '<span>Text</span>' }); */ module.exports = create; function create(opt) { opt = opt || {}; var el = document.createElement(opt.selector); if(opt.attr) for(var index in opt.attr) opt.attr.hasOwnProperty(index) && el.setAttribute(index, opt.attr[index]); "a" == opt.selector && opt.link && ( el.href = opt.link, opt.target && el.setAttribute("target", opt.target) ); "img" 
== opt.selector && opt.src && ( el.src = opt.src, opt.lazyload && ( el.style.opacity = 0, el.onload = function(){ el.style.opacity = 1; } ) ); opt.id && (el.id = opt.id); opt.styles && (el.className = opt.styles); opt.html && (el.innerHTML = opt.html); opt.children && (el.appendChild(opt.children)); return el; }; },{}],7:[function(require,module,exports){ var synth = require('synthetic-dom-events'); var on = function(element, name, fn, capture) { return element.addEventListener(name, fn, capture || false); }; var off = function(element, name, fn, capture) { return element.removeEventListener(name, fn, capture || false); }; var once = function (element, name, fn, capture) { function tmp (ev) { off(element, name, tmp, capture); fn(ev); } on(element, name, tmp, capture); }; var emit = function(element, name, opt) { var ev = synth(name, opt); element.dispatchEvent(ev); }; if (!document.addEventListener) { on = function(element, name, fn) { return element.attachEvent('on' + name, fn); }; } if (!document.removeEventListener) { off = function(element, name, fn) { return element.detachEvent('on' + name, fn); }; } if (!document.dispatchEvent) { emit = function(element, name, opt) { var ev = synth(name, opt); return element.fireEvent('on' + ev.type, ev); }; } module.exports = { on: on, off: off, once: once, emit: emit }; },{"synthetic-dom-events":12}],8:[function(require,module,exports){ var indexOf = [].indexOf; module.exports = function(arr, obj){ if (indexOf) return arr.indexOf(obj); for (var i = 0; i < arr.length; ++i) { if (arr[i] === obj) return i; } return -1; }; },{}],9:[function(require,module,exports){ // Generated by CoffeeScript 1.9.2 (function() { var root; root = typeof exports !== "undefined" && exports !== null ? exports : this; root.Lethargy = (function() { function Lethargy(stability, sensitivity, tolerance, delay) { this.stability = stability != null ? Math.abs(stability) : 8; this.sensitivity = sensitivity != null ? 
1 + Math.abs(sensitivity) : 100; this.tolerance = tolerance != null ? 1 + Math.abs(tolerance) : 1.1; this.delay = delay != null ? delay : 150; this.lastUpDeltas = (function() { var i, ref, results; results = []; for (i = 1, ref = this.stability * 2; 1 <= ref ? i <= ref : i >= ref; 1 <= ref ? i++ : i--) { results.push(null); } return results; }).call(this); this.lastDownDeltas = (function() { var i, ref, results; results = []; for (i = 1, ref = this.stability * 2; 1 <= ref ? i <= ref : i >= ref; 1 <= ref ? i++ : i--) { results.push(null); } return results; }).call(this); this.deltasTimestamp = (function() { var i, ref, results; results = []; for (i = 1, ref = this.stability * 2; 1 <= ref ? i <= ref : i >= ref; 1 <= ref ? i++ : i--) { results.push(null); } return results; }).call(this); } Lethargy.prototype.check = function(e) { var lastDelta; e = e.originalEvent || e; if (e.wheelDelta != null) { lastDelta = e.wheelDelta; } else if (e.deltaY != null) { lastDelta = e.deltaY * -40; } else if ((e.detail != null) || e.detail === 0) { lastDelta = e.detail * -40; } this.deltasTimestamp.push(Date.now()); this.deltasTimestamp.shift(); if (lastDelta > 0) { this.lastUpDeltas.push(lastDelta); this.lastUpDeltas.shift(); return this.isInertia(1); } else { this.lastDownDeltas.push(lastDelta); this.lastDownDeltas.shift(); return this.isInertia(-1); } return false; }; Lethargy.prototype.isInertia = function(direction) { var lastDeltas, lastDeltasNew, lastDeltasOld, newAverage, newSum, oldAverage, oldSum; lastDeltas = direction === -1 ? 
this.lastDownDeltas : this.lastUpDeltas; if (lastDeltas[0] === null) { return direction; } if (this.deltasTimestamp[(this.stability * 2) - 2] + this.delay > Date.now() && lastDeltas[0] === lastDeltas[(this.stability * 2) - 1]) { return false; } lastDeltasOld = lastDeltas.slice(0, this.stability); lastDeltasNew = lastDeltas.slice(this.stability, this.stability * 2); oldSum = lastDeltasOld.reduce(function(t, s) { return t + s; }); newSum = lastDeltasNew.reduce(function(t, s) { return t + s; }); oldAverage = oldSum / lastDeltasOld.length; newAverage = newSum / lastDeltasNew.length; if (Math.abs(oldAverage) < Math.abs(newAverage * this.tolerance) && (this.sensitivity < Math.abs(newAverage))) { return direction; } else { return false; } }; Lethargy.prototype.showLastUpDeltas = function() { return this.lastUpDeltas; }; Lethargy.prototype.showLastDownDeltas = function() { return this.lastDownDeltas; }; return Lethargy; })(); }).call(this); },{}],10:[function(require,module,exports){ /* object-assign (c) Sindre Sorhus @license MIT */ 'use strict'; /* eslint-disable no-unused-vars */ var getOwnPropertySymbols = Object.getOwnPropertySymbols; var hasOwnProperty = Object.prototype.hasOwnProperty; var propIsEnumerable = Object.prototype.propertyIsEnumerable; function toObject(val) { if (val === null || val === undefined) { throw new TypeError('Object.assign cannot be called with null or undefined'); } return Object(val); } function shouldUseNative() { try { if (!Object.assign) { return false; } // Detect buggy property enumeration order in older V8 versions. 
// https://bugs.chromium.org/p/v8/issues/detail?id=4118 var test1 = new String('abc'); // eslint-disable-line no-new-wrappers test1[5] = 'de'; if (Object.getOwnPropertyNames(test1)[0] === '5') { return false; } // https://bugs.chromium.org/p/v8/issues/detail?id=3056 var test2 = {}; for (var i = 0; i < 10; i++) { test2['_' + String.fromCharCode(i)] = i; } var order2 = Object.getOwnPropertyNames(test2).map(function (n) { return test2[n]; }); if (order2.join('') !== '0123456789') { return false; } // https://bugs.chromium.org/p/v8/issues/detail?id=3056 var test3 = {}; 'abcdefghijklmnopqrst'.split('').forEach(function (letter) { test3[letter] = letter; }); if (Object.keys(Object.assign({}, test3)).join('') !== 'abcdefghijklmnopqrst') { return false; } return true; } catch (err) { // We don't expect any of the above to throw, but better to be safe. return false; } } module.exports = shouldUseNative() ? Object.assign : function (target, source) { var from; var to = toObject(target); var symbols; for (var s = 1; s < arguments.length; s++) { from = Object(arguments[s]); for (var key in from) { if (hasOwnProperty.call(from, key)) { to[key] = from[key]; } } if (getOwnPropertySymbols) { symbols = getOwnPropertySymbols(from); for (var i = 0; i < symbols.length; i++) { if (propIsEnumerable.call(from, symbols[i])) { to[symbols[i]] = from[symbols[i]]; } } } } return to; }; },{}],11:[function(require,module,exports){ // check document first so it doesn't error in node.js var style = typeof document != 'undefined' ? 
document.createElement('p').style : {} var prefixes = ['O', 'ms', 'Moz', 'Webkit'] var upper = /([A-Z])/g var memo = {} /** * prefix `key` * * prefix('transform') // => WebkitTransform * * @param {String} key * @return {String} * @api public */ function prefix(key){ // Camel case key = key.replace(/-([a-z])/g, function(_, char){ return char.toUpperCase() }) // Without prefix if (style[key] !== undefined) return key // With prefix var Key = key.charAt(0).toUpperCase() + key.slice(1) var i = prefixes.length while (i--) { var name = prefixes[i] + Key if (style[name] !== undefined) return name } return key } /** * Memoized version of `prefix` * * @param {String} key * @return {String} * @api public */ function prefixMemozied(key){ return key in memo ? memo[key] : memo[key] = prefix(key)<|fim▁hole|> * * @param {String} key * @return {String} * @api public */ function prefixDashed(key){ key = prefix(key) if (upper.test(key)) { key = '-' + key.replace(upper, '-$1') upper.lastIndex = 0 } return key.toLowerCase() } module.exports = prefixMemozied module.exports.dash = prefixDashed },{}],12:[function(require,module,exports){ // for compression var win = window; var doc = document || {}; var root = doc.documentElement || {}; // detect if we need to use firefox KeyEvents vs KeyboardEvents var use_key_event = true; try { doc.createEvent('KeyEvents'); } catch (err) { use_key_event = false; } // Workaround for https://bugs.webkit.org/show_bug.cgi?id=16735 function check_kb(ev, opts) { if (ev.ctrlKey != (opts.ctrlKey || false) || ev.altKey != (opts.altKey || false) || ev.shiftKey != (opts.shiftKey || false) || ev.metaKey != (opts.metaKey || false) || ev.keyCode != (opts.keyCode || 0) || ev.charCode != (opts.charCode || 0)) { ev = document.createEvent('Event'); ev.initEvent(opts.type, opts.bubbles, opts.cancelable); ev.ctrlKey = opts.ctrlKey || false; ev.altKey = opts.altKey || false; ev.shiftKey = opts.shiftKey || false; ev.metaKey = opts.metaKey || false; ev.keyCode = 
opts.keyCode || 0; ev.charCode = opts.charCode || 0; } return ev; } // modern browsers, do a proper dispatchEvent() var modern = function(type, opts) { opts = opts || {}; // which init fn do we use var family = typeOf(type); var init_fam = family; if (family === 'KeyboardEvent' && use_key_event) { family = 'KeyEvents'; init_fam = 'KeyEvent'; } var ev = doc.createEvent(family); var init_fn = 'init' + init_fam; var init = typeof ev[init_fn] === 'function' ? init_fn : 'initEvent'; var sig = initSignatures[init]; var args = []; var used = {}; opts.type = type; for (var i = 0; i < sig.length; ++i) { var key = sig[i]; var val = opts[key]; // if no user specified value, then use event default if (val === undefined) { val = ev[key]; } used[key] = true; args.push(val); } ev[init].apply(ev, args); // webkit key event issue workaround if (family === 'KeyboardEvent') { ev = check_kb(ev, opts); } // attach remaining unused options to the object for (var key in opts) { if (!used[key]) { ev[key] = opts[key]; } } return ev; }; var legacy = function (type, opts) { opts = opts || {}; var ev = doc.createEventObject(); ev.type = type; for (var key in opts) { if (opts[key] !== undefined) { ev[key] = opts[key]; } } return ev; }; // expose either the modern version of event generation or legacy // depending on what we support // avoids if statements in the code later module.exports = doc.createEvent ? 
modern : legacy; var initSignatures = require('./init.json'); var types = require('./types.json'); var typeOf = (function () { var typs = {}; for (var key in types) { var ts = types[key]; for (var i = 0; i < ts.length; i++) { typs[ts[i]] = key; } } return function (name) { return typs[name] || 'Event'; }; })(); },{"./init.json":13,"./types.json":14}],13:[function(require,module,exports){ module.exports={ "initEvent" : [ "type", "bubbles", "cancelable" ], "initUIEvent" : [ "type", "bubbles", "cancelable", "view", "detail" ], "initMouseEvent" : [ "type", "bubbles", "cancelable", "view", "detail", "screenX", "screenY", "clientX", "clientY", "ctrlKey", "altKey", "shiftKey", "metaKey", "button", "relatedTarget" ], "initMutationEvent" : [ "type", "bubbles", "cancelable", "relatedNode", "prevValue", "newValue", "attrName", "attrChange" ], "initKeyboardEvent" : [ "type", "bubbles", "cancelable", "view", "ctrlKey", "altKey", "shiftKey", "metaKey", "keyCode", "charCode" ], "initKeyEvent" : [ "type", "bubbles", "cancelable", "view", "ctrlKey", "altKey", "shiftKey", "metaKey", "keyCode", "charCode" ] } },{}],14:[function(require,module,exports){ module.exports={ "MouseEvent" : [ "click", "mousedown", "mouseup", "mouseover", "mousemove", "mouseout" ], "KeyboardEvent" : [ "keydown", "keyup", "keypress" ], "MutationEvent" : [ "DOMSubtreeModified", "DOMNodeInserted", "DOMNodeRemoved", "DOMNodeRemovedFromDocument", "DOMNodeInsertedIntoDocument", "DOMAttrModified", "DOMCharacterDataModified" ], "HTMLEvents" : [ "load", "unload", "abort", "error", "select", "change", "submit", "reset", "focus", "blur", "resize", "scroll" ], "UIEvent" : [ "DOMFocusIn", "DOMFocusOut", "DOMActivate" ] } },{}],15:[function(require,module,exports){ function E () { // Keep this empty so it's easier to inherit from // (via https://github.com/lipsmack from https://github.com/scottcorgan/tiny-emitter/issues/3) } E.prototype = { on: function (name, callback, ctx) { var e = this.e || (this.e = {}); (e[name] || 
(e[name] = [])).push({ fn: callback, ctx: ctx }); return this; }, once: function (name, callback, ctx) { var self = this; function listener () { self.off(name, listener); callback.apply(ctx, arguments); }; listener._ = callback return this.on(name, listener, ctx); }, emit: function (name) { var data = [].slice.call(arguments, 1); var evtArr = ((this.e || (this.e = {}))[name] || []).slice(); var i = 0; var len = evtArr.length; for (i; i < len; i++) { evtArr[i].fn.apply(evtArr[i].ctx, data); } return this; }, off: function (name, callback) { var e = this.e || (this.e = {}); var evts = e[name]; var liveEvents = []; if (evts && callback) { for (var i = 0, len = evts.length; i < len; i++) { if (evts[i].fn !== callback && evts[i].fn._ !== callback) liveEvents.push(evts[i]); } } // Remove event from queue to prevent memory leak // Suggested by https://github.com/lazd // Ref: https://github.com/scottcorgan/tiny-emitter/commit/c6ebfaa9bc973b33d110a84a307742b7cf94c953#commitcomment-5024910 (liveEvents.length) ? 
e[name] = liveEvents : delete e[name]; return this; } }; module.exports = E; },{}],16:[function(require,module,exports){ 'use strict'; module.exports = function(source) { return JSON.parse(JSON.stringify(source)); }; },{}],17:[function(require,module,exports){ 'use strict'; var objectAssign = require('object-assign'); var Emitter = require('tiny-emitter'); var Lethargy = require('lethargy').Lethargy; var support = require('./support'); var clone = require('./clone'); var bindAll = require('bindall-standalone'); var EVT_ID = 'virtualscroll'; module.exports = VirtualScroll; var keyCodes = { LEFT: 37, UP: 38, RIGHT: 39, DOWN: 40, SPACE: 32 }; function VirtualScroll(options) { bindAll(this, '_onWheel', '_onMouseWheel', '_onTouchStart', '_onTouchMove', '_onKeyDown'); this.el = window; if (options && options.el) { this.el = options.el; delete options.el; } this.options = objectAssign({ mouseMultiplier: 1, touchMultiplier: 2, firefoxMultiplier: 15, keyStep: 120, preventTouch: false, unpreventTouchClass: 'vs-touchmove-allowed', limitInertia: false }, options); if (this.options.limitInertia) this._lethargy = new Lethargy(); this._emitter = new Emitter(); this._event = { y: 0, x: 0, deltaX: 0, deltaY: 0 }; this.touchStartX = null; this.touchStartY = null; this.bodyTouchAction = null; if (this.options.passive !== undefined) { this.listenerOptions = {passive: this.options.passive}; } } VirtualScroll.prototype._notify = function(e) { var evt = this._event; evt.x += evt.deltaX; evt.y += evt.deltaY; this._emitter.emit(EVT_ID, { x: evt.x, y: evt.y, deltaX: evt.deltaX, deltaY: evt.deltaY, originalEvent: e }); }; VirtualScroll.prototype._onWheel = function(e) { var options = this.options; if (this._lethargy && this._lethargy.check(e) === false) return; var evt = this._event; // In Chrome and in Firefox (at least the new one) evt.deltaX = e.wheelDeltaX || e.deltaX * -1; evt.deltaY = e.wheelDeltaY || e.deltaY * -1; // for our purpose deltamode = 1 means user is on a wheel mouse, not 
touch pad // real meaning: https://developer.mozilla.org/en-US/docs/Web/API/WheelEvent#Delta_modes if(support.isFirefox && e.deltaMode == 1) { evt.deltaX *= options.firefoxMultiplier; evt.deltaY *= options.firefoxMultiplier; } evt.deltaX *= options.mouseMultiplier; evt.deltaY *= options.mouseMultiplier; this._notify(e); }; VirtualScroll.prototype._onMouseWheel = function(e) { if (this.options.limitInertia && this._lethargy.check(e) === false) return; var evt = this._event; // In Safari, IE and in Chrome if 'wheel' isn't defined evt.deltaX = (e.wheelDeltaX) ? e.wheelDeltaX : 0; evt.deltaY = (e.wheelDeltaY) ? e.wheelDeltaY : e.wheelDelta; this._notify(e); }; VirtualScroll.prototype._onTouchStart = function(e) { var t = (e.targetTouches) ? e.targetTouches[0] : e; this.touchStartX = t.pageX; this.touchStartY = t.pageY; }; VirtualScroll.prototype._onTouchMove = function(e) { var options = this.options; if(options.preventTouch && !e.target.classList.contains(options.unpreventTouchClass)) { e.preventDefault(); } var evt = this._event; var t = (e.targetTouches) ? 
e.targetTouches[0] : e; evt.deltaX = (t.pageX - this.touchStartX) * options.touchMultiplier; evt.deltaY = (t.pageY - this.touchStartY) * options.touchMultiplier; this.touchStartX = t.pageX; this.touchStartY = t.pageY; this._notify(e); }; VirtualScroll.prototype._onKeyDown = function(e) { var evt = this._event; evt.deltaX = evt.deltaY = 0; var windowHeight = window.innerHeight - 40 switch(e.keyCode) { case keyCodes.LEFT: case keyCodes.UP: evt.deltaY = this.options.keyStep; break; case keyCodes.RIGHT: case keyCodes.DOWN: evt.deltaY = - this.options.keyStep; break; case keyCodes.SPACE && e.shiftKey: evt.deltaY = windowHeight; break; case keyCodes.SPACE: evt.deltaY = - windowHeight; break; default: return; } this._notify(e); }; VirtualScroll.prototype._bind = function() { if(support.hasWheelEvent) this.el.addEventListener('wheel', this._onWheel, this.listenerOptions); if(support.hasMouseWheelEvent) this.el.addEventListener('mousewheel', this._onMouseWheel, this.listenerOptions); if(support.hasTouch) { this.el.addEventListener('touchstart', this._onTouchStart, this.listenerOptions); this.el.addEventListener('touchmove', this._onTouchMove, this.listenerOptions); } if(support.hasPointer && support.hasTouchWin) { this.bodyTouchAction = document.body.style.msTouchAction; document.body.style.msTouchAction = 'none'; this.el.addEventListener('MSPointerDown', this._onTouchStart, true); this.el.addEventListener('MSPointerMove', this._onTouchMove, true); } if(support.hasKeyDown) document.addEventListener('keydown', this._onKeyDown); }; VirtualScroll.prototype._unbind = function() { if(support.hasWheelEvent) this.el.removeEventListener('wheel', this._onWheel); if(support.hasMouseWheelEvent) this.el.removeEventListener('mousewheel', this._onMouseWheel); if(support.hasTouch) { this.el.removeEventListener('touchstart', this._onTouchStart); this.el.removeEventListener('touchmove', this._onTouchMove); } if(support.hasPointer && support.hasTouchWin) { document.body.style.msTouchAction = 
this.bodyTouchAction; this.el.removeEventListener('MSPointerDown', this._onTouchStart, true); this.el.removeEventListener('MSPointerMove', this._onTouchMove, true); } if(support.hasKeyDown) document.removeEventListener('keydown', this._onKeyDown); }; VirtualScroll.prototype.on = function(cb, ctx) { this._emitter.on(EVT_ID, cb, ctx); var events = this._emitter.e; if (events && events[EVT_ID] && events[EVT_ID].length === 1) this._bind(); }; VirtualScroll.prototype.off = function(cb, ctx) { this._emitter.off(EVT_ID, cb, ctx); var events = this._emitter.e; if (!events[EVT_ID] || events[EVT_ID].length <= 0) this._unbind(); }; VirtualScroll.prototype.reset = function() { var evt = this._event; evt.x = 0; evt.y = 0; }; VirtualScroll.prototype.destroy = function() { this._emitter.off(); this._unbind(); }; },{"./clone":16,"./support":18,"bindall-standalone":4,"lethargy":9,"object-assign":10,"tiny-emitter":15}],18:[function(require,module,exports){ 'use strict'; module.exports = (function getSupport() { return { hasWheelEvent: 'onwheel' in document, hasMouseWheelEvent: 'onmousewheel' in document, hasTouch: 'ontouchstart' in document, hasTouchWin: navigator.msMaxTouchPoints && navigator.msMaxTouchPoints > 1, hasPointer: !!window.navigator.msPointerEnabled, hasKeyDown: 'onkeydown' in document, isFirefox: navigator.userAgent.indexOf('Firefox') > -1 }; })(); },{}]},{},[2]);<|fim▁end|>
} /** * Create a dashed prefix
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>extern crate rustc_serialize; pub mod rep; pub use rep::{Context, Payload}; use std::io::{self, Read, Write}; use rustc_serialize::{Decodable, Encodable, json}; fn exec<E: Encodable, D: Decodable>( handler: fn(Context, Payload<D>) -> E ) { // } <|fim▁hole|>}<|fim▁end|>
#[test] fn it_works() {
<|file_name|>GeofenceProxy.js<|end_file_name|><|fim▁begin|>module.exports = { initialize: function (success, fail, args, env) { GeofenceComponent.GeofenceTrigger.register(); success && success(); }, addOrUpdate: function (success, fail, args, env) { args.forEach(function (geo) { var geoN = new GeofenceComponent.GeoNotification(); geoN.id = geo.id; geoN.latitude = geo.latitude; geoN.longitude = geo.longitude; geoN.transitionType = geo.transitionType;<|fim▁hole|> geoN.notificationTitle = geo.notification.title; geoN.openAppOnClick = geo.notification.openAppOnClick; geoN.data = JSON.stringify(geo.notification.data); GeofenceComponent.GeoNotificationManager.addOrUpdate(geoN); }); success && success(); }, remove: function (success, fail, args, env) { args.forEach(function (geoId) { GeofenceComponent.GeoNotificationManager.remove(geoId); }); success && success(); }, removeAll: function (success, fail, args, env) { GeofenceComponent.GeoNotificationManager.removeAll(); success && success(); } }; require("cordova/exec/proxy").add("GeofencePlugin", module.exports);<|fim▁end|>
geoN.radius = geo.radius; geoN.notificationText = geo.notification.text;
<|file_name|>etcd.rs<|end_file_name|><|fim▁begin|>extern crate serde; extern crate serde_json; extern crate hyper; extern crate openssl; use std::time::Duration; use std::thread; use hyper::Error; use hyper::status::StatusCode; use serde_json::Value; pub use common::etcd::SSLOptions; use common::etcd::etcd_https_client; /// Polls the etcd health endpoint until it reports we're healthy. pub fn wait_till_healthy(server_url: String, options: SSLOptions) -> Result<(), Error> { let client = try!(etcd_https_client(options)); let health_url = server_url + "/health"; loop { thread::sleep(Duration::from_millis(1000)); println!("Checking etcd status..."); <|fim▁hole|> println!("Got HTTP {}", response.status); continue; } let value: Value = serde_json::from_reader(response).unwrap(); let health = value .as_object().unwrap() .get("health").unwrap() .as_str().unwrap(); if health == "true" { return Ok(()); } else { println!("Health is {}", health); } } else { println!("Etcd not responding. Will try again"); } } }<|fim▁end|>
if let Ok(response) = client.get(&health_url).send() { if response.status != StatusCode::Ok {
<|file_name|>routing.go<|end_file_name|><|fim▁begin|>package dht import ( "sync" context "github.com/ipfs/go-ipfs/Godeps/_workspace/src/golang.org/x/net/context" key "github.com/ipfs/go-ipfs/blocks/key" notif "github.com/ipfs/go-ipfs/notifications" inet "github.com/ipfs/go-ipfs/p2p/net" peer "github.com/ipfs/go-ipfs/p2p/peer" "github.com/ipfs/go-ipfs/routing" pb "github.com/ipfs/go-ipfs/routing/dht/pb" kb "github.com/ipfs/go-ipfs/routing/kbucket" record "github.com/ipfs/go-ipfs/routing/record" pset "github.com/ipfs/go-ipfs/util/peerset" ) // asyncQueryBuffer is the size of buffered channels in async queries. This // buffer allows multiple queries to execute simultaneously, return their // results and continue querying closer peers. Note that different query // results will wait for the channel to drain. var asyncQueryBuffer = 10 // This file implements the Routing interface for the IpfsDHT struct. // Basic Put/Get // PutValue adds value corresponding to given Key. // This is the top level "Store" operation of the DHT func (dht *IpfsDHT) PutValue(ctx context.Context, key key.Key, value []byte) error { log.Debugf("PutValue %s", key) sk, err := dht.getOwnPrivateKey() if err != nil { return err } sign, err := dht.Validator.IsSigned(key) if err != nil { return err } rec, err := record.MakePutRecord(sk, key, value, sign) if err != nil { log.Debug("Creation of record failed!") return err } err = dht.putLocal(key, rec) if err != nil { return err } pchan, err := dht.GetClosestPeers(ctx, key) if err != nil { return err } wg := sync.WaitGroup{} for p := range pchan { wg.Add(1) go func(p peer.ID) { defer wg.Done() notif.PublishQueryEvent(ctx, &notif.QueryEvent{ Type: notif.Value, ID: p, }) err := dht.putValueToPeer(ctx, p, key, rec) if err != nil { log.Debugf("failed putting value to peer: %s", err) } }(p) } wg.Wait() return nil } // GetValue searches for the value corresponding to given Key. 
// If the search does not succeed, a multiaddr string of a closer peer is // returned along with util.ErrSearchIncomplete func (dht *IpfsDHT) GetValue(ctx context.Context, key key.Key) ([]byte, error) { // If we have it local, dont bother doing an RPC! val, err := dht.getLocal(key) if err == nil { log.Debug("have it locally") return val, nil } else { log.Debug("failed to get value locally: %s", err) } // get closest peers in the routing table rtp := dht.routingTable.NearestPeers(kb.ConvertKey(key), AlphaValue) log.Debugf("peers in rt: %s", len(rtp), rtp) if len(rtp) == 0 { log.Warning("No peers from routing table!") return nil, kb.ErrLookupFailure } // setup the Query query := dht.newQuery(key, func(ctx context.Context, p peer.ID) (*dhtQueryResult, error) { notif.PublishQueryEvent(ctx, &notif.QueryEvent{ Type: notif.SendingQuery, ID: p, }) val, peers, err := dht.getValueOrPeers(ctx, p, key) if err != nil { return nil, err } res := &dhtQueryResult{value: val, closerPeers: peers} if val != nil { res.success = true } notif.PublishQueryEvent(ctx, &notif.QueryEvent{ Type: notif.PeerResponse, ID: p, Responses: pointerizePeerInfos(peers), }) return res, nil }) // run it! result, err := query.Run(ctx, rtp) if err != nil { return nil, err } log.Debugf("GetValue %v %v", key, result.value) if result.value == nil { return nil, routing.ErrNotFound } return result.value, nil } // Value provider layer of indirection. // This is what DSHTs (Coral and MainlineDHT) do to store large values in a DHT. 
// Provide makes this node announce that it can provide a value for the given key func (dht *IpfsDHT) Provide(ctx context.Context, key key.Key) error { defer log.EventBegin(ctx, "provide", &key).Done() // add self locally dht.providers.AddProvider(ctx, key, dht.self) peers, err := dht.GetClosestPeers(ctx, key) if err != nil { return err } wg := sync.WaitGroup{} for p := range peers { wg.Add(1) go func(p peer.ID) { defer wg.Done() log.Debugf("putProvider(%s, %s)", key, p) err := dht.putProvider(ctx, p, string(key)) if err != nil { log.Debug(err) } }(p) } wg.Wait() return nil } // FindProviders searches until the context expires. func (dht *IpfsDHT) FindProviders(ctx context.Context, key key.Key) ([]peer.PeerInfo, error) { var providers []peer.PeerInfo for p := range dht.FindProvidersAsync(ctx, key, KValue) { providers = append(providers, p) } return providers, nil } // FindProvidersAsync is the same thing as FindProviders, but returns a channel. // Peers will be returned on the channel as soon as they are found, even before // the search query completes. 
func (dht *IpfsDHT) FindProvidersAsync(ctx context.Context, key key.Key, count int) <-chan peer.PeerInfo { log.Event(ctx, "findProviders", &key) peerOut := make(chan peer.PeerInfo, count) go dht.findProvidersAsyncRoutine(ctx, key, count, peerOut) return peerOut } func (dht *IpfsDHT) findProvidersAsyncRoutine(ctx context.Context, key key.Key, count int, peerOut chan peer.PeerInfo) { defer log.EventBegin(ctx, "findProvidersAsync", &key).Done() defer close(peerOut) ps := pset.NewLimited(count) provs := dht.providers.GetProviders(ctx, key) for _, p := range provs { // NOTE: assuming that this list of peers is unique if ps.TryAdd(p) { select { case peerOut <- dht.peerstore.PeerInfo(p): case <-ctx.Done(): return } } // If we have enough peers locally, dont bother with remote RPC if ps.Size() >= count { return } } // setup the Query query := dht.newQuery(key, func(ctx context.Context, p peer.ID) (*dhtQueryResult, error) { notif.PublishQueryEvent(ctx, &notif.QueryEvent{ Type: notif.SendingQuery, ID: p, }) pmes, err := dht.findProvidersSingle(ctx, p, key) if err != nil { return nil, err } log.Debugf("%d provider entries", len(pmes.GetProviderPeers())) provs := pb.PBPeersToPeerInfos(pmes.GetProviderPeers()) log.Debugf("%d provider entries decoded", len(provs)) // Add unique providers from request, up to 'count' for _, prov := range provs { log.Debugf("got provider: %s", prov) if ps.TryAdd(prov.ID) { log.Debugf("using provider: %s", prov) select { case peerOut <- prov: case <-ctx.Done(): log.Debug("Context timed out sending more providers") return nil, ctx.Err() } } if ps.Size() >= count { log.Debugf("got enough providers (%d/%d)", ps.Size(), count) return &dhtQueryResult{success: true}, nil } } // Give closer peers back to the query to be queried closer := pmes.GetCloserPeers() clpeers := pb.PBPeersToPeerInfos(closer) log.Debugf("got closer peers: %d %s", len(clpeers), clpeers) notif.PublishQueryEvent(ctx, &notif.QueryEvent{ Type: notif.PeerResponse, ID: p, Responses: 
pointerizePeerInfos(clpeers), }) return &dhtQueryResult{closerPeers: clpeers}, nil }) peers := dht.routingTable.NearestPeers(kb.ConvertKey(key), AlphaValue) _, err := query.Run(ctx, peers) if err != nil { log.Debugf("Query error: %s", err) notif.PublishQueryEvent(ctx, &notif.QueryEvent{ Type: notif.QueryError, Extra: err.Error(), }) } } // FindPeer searches for a peer with given ID. func (dht *IpfsDHT) FindPeer(ctx context.Context, id peer.ID) (peer.PeerInfo, error) { defer log.EventBegin(ctx, "FindPeer", id).Done() // Check if were already connected to them if pi := dht.FindLocal(id); pi.ID != "" {<|fim▁hole|> } peers := dht.routingTable.NearestPeers(kb.ConvertPeerID(id), AlphaValue) if len(peers) == 0 { return peer.PeerInfo{}, kb.ErrLookupFailure } // Sanity... for _, p := range peers { if p == id { log.Debug("Found target peer in list of closest peers...") return dht.peerstore.PeerInfo(p), nil } } // setup the Query query := dht.newQuery(key.Key(id), func(ctx context.Context, p peer.ID) (*dhtQueryResult, error) { notif.PublishQueryEvent(ctx, &notif.QueryEvent{ Type: notif.SendingQuery, ID: p, }) pmes, err := dht.findPeerSingle(ctx, p, id) if err != nil { return nil, err } closer := pmes.GetCloserPeers() clpeerInfos := pb.PBPeersToPeerInfos(closer) // see it we got the peer here for _, npi := range clpeerInfos { if npi.ID == id { return &dhtQueryResult{ peer: npi, success: true, }, nil } } notif.PublishQueryEvent(ctx, &notif.QueryEvent{ Type: notif.PeerResponse, Responses: pointerizePeerInfos(clpeerInfos), }) return &dhtQueryResult{closerPeers: clpeerInfos}, nil }) // run it! result, err := query.Run(ctx, peers) if err != nil { return peer.PeerInfo{}, err } log.Debugf("FindPeer %v %v", id, result.success) if result.peer.ID == "" { return peer.PeerInfo{}, routing.ErrNotFound } return result.peer, nil } // FindPeersConnectedToPeer searches for peers directly connected to a given peer. 
func (dht *IpfsDHT) FindPeersConnectedToPeer(ctx context.Context, id peer.ID) (<-chan peer.PeerInfo, error) { peerchan := make(chan peer.PeerInfo, asyncQueryBuffer) peersSeen := peer.Set{} peers := dht.routingTable.NearestPeers(kb.ConvertPeerID(id), AlphaValue) if len(peers) == 0 { return nil, kb.ErrLookupFailure } // setup the Query query := dht.newQuery(key.Key(id), func(ctx context.Context, p peer.ID) (*dhtQueryResult, error) { pmes, err := dht.findPeerSingle(ctx, p, id) if err != nil { return nil, err } var clpeers []peer.PeerInfo closer := pmes.GetCloserPeers() for _, pbp := range closer { pi := pb.PBPeerToPeerInfo(pbp) // skip peers already seen if _, found := peersSeen[pi.ID]; found { continue } peersSeen[pi.ID] = struct{}{} // if peer is connected, send it to our client. if pb.Connectedness(*pbp.Connection) == inet.Connected { select { case <-ctx.Done(): return nil, ctx.Err() case peerchan <- pi: } } // if peer is the peer we're looking for, don't bother querying it. // TODO maybe query it? if pb.Connectedness(*pbp.Connection) != inet.Connected { clpeers = append(clpeers, pi) } } return &dhtQueryResult{closerPeers: clpeers}, nil }) // run it! run it asynchronously to gen peers as results are found. // this does no error checking go func() { if _, err := query.Run(ctx, peers); err != nil { log.Debug(err) } // close the peerchan channel when done. close(peerchan) }() return peerchan, nil }<|fim▁end|>
return pi, nil
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # zambiaureport documentation build configuration file, created by # sphinx-quickstart. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys, os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. #sys.path.insert(0, os.path.abspath('.')) # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = [] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document.<|fim▁hole|> # General information about the project. project = u'zambiaureport' copyright = u'2014, Andre Lesa' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '0.1' # The full version, including alpha/beta/rc tags. release = '0.1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
#language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. 
This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'zambiaureportdoc' # -- Options for LaTeX output -------------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). 
#'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'zambiaureport.tex', u'zambiaureport Documentation', u'Andre Lesa', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output -------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'zambiaureport', u'zambiaureport Documentation', [u'Andre Lesa'], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------------ # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'zambiaureport', u'zambiaureport Documentation', u'Andre Lesa', 'zambiaureport', 'Zambia U-Report reference implementation.','Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote'<|fim▁end|>
master_doc = 'index'
<|file_name|>en.js<|end_file_name|><|fim▁begin|>export default {<|fim▁hole|><|fim▁end|>
hello : "hello" };
<|file_name|>csrf_token_error_handler_urls.py<|end_file_name|><|fim▁begin|>urlpatterns = [] <|fim▁hole|><|fim▁end|>
handler404 = 'csrf_tests.views.csrf_token_error_handler'
<|file_name|>test_loading.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- import unittest from test.basetestcases import PluginLoadingMixin class StatisticsLoadingTest (PluginLoadingMixin, unittest.TestCase): def getPluginDir(self): """ Должен возвращать путь до папки с тестируемым плагином """ return "../plugins/statistics"<|fim▁hole|> найти в PluginsLoader """ return "Statistics"<|fim▁end|>
def getPluginName(self): """ Должен возвращать имя плагина, по которому его можно
<|file_name|>chunker.go<|end_file_name|><|fim▁begin|>package golf import ( "errors" "io" "math" "github.com/google/uuid" ) <|fim▁hole|>} func newChunker(w io.Writer, chunkSize int) (*chunker, error) { if chunkSize < 13 { return nil, ErrChunkTooSmall } c := &chunker{ chunkSize: chunkSize, buff: make([]byte, 0), w: w, } return c, nil } func (c *chunker) reset() { c.buff = make([]byte, 0) } func (c *chunker) Write(p []byte) (int, error) { c.buff = append(c.buff, p...) return len(p), nil } func (c *chunker) Flush() error { idFull, err := uuid.NewRandom() if err != nil { return err } idBytes, err := idFull.MarshalBinary() if err != nil { return err } err = c.flushWithId(idBytes[0:8]) return err } func (c *chunker) flushWithId(id []byte) error { if len(id) < 8 || len(id) > 8 { return errors.New("id length must be equal to 8") } offset := 0 buffLen := len(c.buff) chunkSize := c.chunkSize - 12 // Reusing this buffer may cause problems with duplicate data being sent // if the data isn't written to something else by the io.Writer before // the chunk's data is updated. chunkBuff := make([]byte, c.chunkSize) copy(chunkBuff[0:2], []byte{0x1e, 0x0f}) copy(chunkBuff[2:10], id) totalChunks := int(math.Ceil(float64(buffLen) / float64(chunkSize))) chunkBuff[11] = byte(totalChunks) for { left := buffLen - offset if left > chunkSize { copy(chunkBuff[12:], c.buff[offset:offset+chunkSize]) c.w.Write(chunkBuff) } else { copy(chunkBuff[12:], c.buff[offset:offset+left]) c.w.Write(chunkBuff[0 : left+12]) break } offset += chunkSize chunkBuff[10] += 1 } c.reset() return nil }<|fim▁end|>
type chunker struct { chunkSize int buff []byte w io.Writer
<|file_name|>tubular-highcharts-bundle.js<|end_file_name|><|fim▁begin|>/** * highcharts-ng * @version v0.0.13 - 2016-10-04 * @link https://github.com/pablojim/highcharts-ng * @author Barry Fitzgerald <> * @license MIT License, http://www.opensource.org/licenses/MIT */ if (typeof module !== 'undefined' && typeof exports !== 'undefined' && module.exports === exports){ module.exports = 'highcharts-ng'; } (function () { 'use strict'; /*global angular: false, Highcharts: false */ angular.module('highcharts-ng', []) .factory('highchartsNG', ['$q', '$window', highchartsNG]) .directive('highchart', ['highchartsNG', '$timeout', highchart]); //IE8 support function indexOf(arr, find, i /*opt*/) { if (i === undefined) i = 0; if (i < 0) i += arr.length; if (i < 0) i = 0; for (var n = arr.length; i < n; i++) if (i in arr && arr[i] === find) return i; return -1; } function prependMethod(obj, method, func) { var original = obj[method]; obj[method] = function () { var args = Array.prototype.slice.call(arguments); func.apply(this, args); if (original) { return original.apply(this, args); } else { return; } }; } function deepExtend(destination, source) { //Slightly strange behaviour in edge cases (e.g. passing in non objects) //But does the job for current use cases. if (angular.isArray(source)) { destination = angular.isArray(destination) ? destination : []; for (var i = 0; i < source.length; i++) { destination[i] = deepExtend(destination[i] || {}, source[i]); } } else if (angular.isObject(source)) { destination = angular.isObject(destination) ? 
destination : {}; for (var property in source) { destination[property] = deepExtend(destination[property] || {}, source[property]); } } else { destination = source; } return destination; } function highchartsNG($q, $window) { var highchartsProm = $q.when($window.Highcharts); function getHighchartsOnce() { return highchartsProm; } return { getHighcharts: getHighchartsOnce, ready: function ready(callback, thisArg) { getHighchartsOnce().then(function() { callback.call(thisArg); }); } }; } function highchart(highchartsNGUtils, $timeout) { // acceptable shared state var seriesId = 0; var ensureIds = function (series) { var changed = false; angular.forEach(series, function(s) { if (!angular.isDefined(s.id)) { s.id = 'series-' + seriesId++; changed = true; } }); return changed; }; // immutable var axisNames = [ 'xAxis', 'yAxis' ]; var chartTypeMap = { 'stock': 'StockChart', 'map': 'Map', 'chart': 'Chart' }; var getMergedOptions = function (scope, element, config) { var mergedOptions = {}; var defaultOptions = { chart: { events: {} }, title: {}, subtitle: {}, series: [], credits: {}, plotOptions: {}, navigator: {enabled: false}, xAxis: { events: {} }, yAxis: { events: {} } }; if (config.options) { mergedOptions = deepExtend(defaultOptions, config.options); } else { mergedOptions = defaultOptions; } mergedOptions.chart.renderTo = element[0]; angular.forEach(axisNames, function(axisName) { if(angular.isDefined(config[axisName])) { mergedOptions[axisName] = deepExtend(mergedOptions[axisName] || {}, config[axisName]); if(angular.isDefined(config[axisName].currentMin) || angular.isDefined(config[axisName].currentMax)) { prependMethod(mergedOptions.chart.events, 'selection', function(e){ var thisChart = this; if (e[axisName]) { scope.$apply(function () { scope.config[axisName].currentMin = e[axisName][0].min; scope.config[axisName].currentMax = e[axisName][0].max; }); } else { //handle reset button - zoom out to all scope.$apply(function () { scope.config[axisName].currentMin = 
thisChart[axisName][0].dataMin; scope.config[axisName].currentMax = thisChart[axisName][0].dataMax; }); } }); prependMethod(mergedOptions.chart.events, 'addSeries', function(e){ scope.config[axisName].currentMin = this[axisName][0].min || scope.config[axisName].currentMin; scope.config[axisName].currentMax = this[axisName][0].max || scope.config[axisName].currentMax; }); prependMethod(mergedOptions[axisName].events, 'setExtremes', function (e) { if (e.trigger && e.trigger !== 'zoom') { // zoom trigger is handled by selection event $timeout(function () { scope.config[axisName].currentMin = e.min; scope.config[axisName].currentMax = e.max; scope.config[axisName].min = e.min; // set min and max to adjust scrollbar/navigator scope.config[axisName].max = e.max; }, 0); } }); } } }); if(config.title) { mergedOptions.title = config.title; } if (config.subtitle) { mergedOptions.subtitle = config.subtitle; } if (config.credits) { mergedOptions.credits = config.credits; } if(config.size) { if (config.size.width) { mergedOptions.chart.width = config.size.width; } if (config.size.height) { mergedOptions.chart.height = config.size.height; } } return mergedOptions; }; var updateZoom = function (axis, modelAxis) { var extremes = axis.getExtremes(); if(modelAxis.currentMin !== extremes.dataMin || modelAxis.currentMax !== extremes.dataMax) { if (axis.setExtremes) { axis.setExtremes(modelAxis.currentMin, modelAxis.currentMax, false); } else { axis.detachedsetExtremes(modelAxis.currentMin, modelAxis.currentMax, false); } } }; var processExtremes = function(chart, axis, axisName) { if(axis.currentMin || axis.currentMax) { chart[axisName][0].setExtremes(axis.currentMin, axis.currentMax, true); } }; var chartOptionsWithoutEasyOptions = function (options) { return angular.extend( deepExtend({}, options), { data: null, visible: null } ); }; var getChartType = function(scope) { if (scope.config === undefined) return 'Chart'; return chartTypeMap[('' + scope.config.chartType).toLowerCase()] 
|| (scope.config.useHighStocks ? 'StockChart' : 'Chart'); }; function linkWithHighcharts(Highcharts, scope, element, attrs) { // We keep some chart-specific variables here as a closure // instead of storing them on 'scope'. // prevSeriesOptions is maintained by processSeries var prevSeriesOptions = {}; // chart is maintained by initChart var chart = false; var processSeries = function(series, seriesOld) { var i; var ids = []; if(series) { var setIds = ensureIds(series); if(setIds && !scope.disableDataWatch) { //If we have set some ids this will trigger another digest cycle. //In this scenario just return early and let the next cycle take care of changes return false; } //Find series to add or update angular.forEach(series, function(s, idx) { ids.push(s.id); var chartSeries = chart.get(s.id); if (chartSeries) { if (!angular.equals(prevSeriesOptions[s.id], chartOptionsWithoutEasyOptions(s))) { chartSeries.update(angular.copy(s), false); } else { if (s.visible !== undefined && chartSeries.visible !== s.visible) { chartSeries.setVisible(s.visible, false); } // Make sure the current series index can be accessed in seriesOld if (idx < seriesOld.length) { var sOld = seriesOld[idx]; var sCopy = angular.copy(sOld); // Get the latest data point from the new series var ptNew = s.data[s.data.length - 1]; // Check if the new and old series are identical with the latest data point added // If so, call addPoint without shifting sCopy.data.push(ptNew); if (angular.equals(sCopy, s)) { chartSeries.addPoint(ptNew, false); } // Check if the data change was a push and shift operation // If so, call addPoint WITH shifting else { sCopy.data.shift(); if (angular.equals(sCopy, s)) { chartSeries.addPoint(ptNew, false, true); } else { chartSeries.setData(angular.copy(s.data), false); } }<|fim▁hole|> } } else { chart.addSeries(angular.copy(s), false); } prevSeriesOptions[s.id] = chartOptionsWithoutEasyOptions(s); }); // Shows no data text if all series are empty if(scope.config.noData) { var 
chartContainsData = false; for(i = 0; i < series.length; i++) { if (series[i].data && series[i].data.length > 0) { chartContainsData = true; break; } } if (!chartContainsData) { chart.showLoading(scope.config.noData); } else { chart.hideLoading(); } } } //Now remove any missing series for(i = chart.series.length - 1; i >= 0; i--) { var s = chart.series[i]; if (s.options.id !== 'highcharts-navigator-series' && indexOf(ids, s.options.id) < 0) { s.remove(false); } } return true; }; var initChart = function() { if (chart) chart.destroy(); prevSeriesOptions = {}; var config = scope.config || {}; var mergedOptions = getMergedOptions(scope, element, config); var func = config.func || undefined; var chartType = getChartType(scope); chart = new Highcharts[chartType](mergedOptions, func); for (var i = 0; i < axisNames.length; i++) { if (config[axisNames[i]]) { processExtremes(chart, config[axisNames[i]], axisNames[i]); } } if(config.loading) { chart.showLoading(); } config.getHighcharts = function() { return chart; }; }; initChart(); if(scope.disableDataWatch){ scope.$watchCollection('config.series', function (newSeries, oldSeries) { processSeries(newSeries); chart.redraw(); }); } else { scope.$watch('config.series', function (newSeries, oldSeries) { var needsRedraw = processSeries(newSeries, oldSeries); if(needsRedraw) { chart.redraw(); } }, true); } scope.$watch('config.title', function (newTitle) { chart.setTitle(newTitle, true); }, true); scope.$watch('config.subtitle', function (newSubtitle) { chart.setTitle(true, newSubtitle); }, true); scope.$watch('config.loading', function (loading) { if(loading) { chart.showLoading(loading === true ? 
null : loading); } else { chart.hideLoading(); } }); scope.$watch('config.noData', function (noData) { if(scope.config && scope.config.loading) { chart.showLoading(noData); } }, true); scope.$watch('config.credits.enabled', function (enabled) { if (enabled) { chart.credits.show(); } else if (chart.credits) { chart.credits.hide(); } }); scope.$watch(getChartType, function (chartType, oldChartType) { if (chartType === oldChartType) return; initChart(); }); angular.forEach(axisNames, function(axisName) { scope.$watch('config.' + axisName, function(newAxes) { if (!newAxes) { return; } if (angular.isArray(newAxes)) { for (var axisIndex = 0; axisIndex < newAxes.length; axisIndex++) { var axis = newAxes[axisIndex]; if (axisIndex < chart[axisName].length) { chart[axisName][axisIndex].update(axis, false); updateZoom(chart[axisName][axisIndex], angular.copy(axis)); } } } else { // update single axis chart[axisName][0].update(newAxes, false); updateZoom(chart[axisName][0], angular.copy(newAxes)); } chart.redraw(); }, true); }); scope.$watch('config.options', function (newOptions, oldOptions, scope) { //do nothing when called on registration if (newOptions === oldOptions) return; initChart(); processSeries(scope.config.series); chart.redraw(); }, true); scope.$watch('config.size', function (newSize, oldSize) { if(newSize === oldSize) return; if(newSize) { chart.setSize(newSize.width || chart.chartWidth, newSize.height || chart.chartHeight); } }, true); scope.$on('highchartsng.reflow', function () { chart.reflow(); }); scope.$on('$destroy', function() { if (chart) { try{ chart.destroy(); }catch(ex){ // fail silently as highcharts will throw exception if element doesn't exist } $timeout(function(){ element.remove(); }, 0); } }); } function link(scope, element, attrs) { function highchartsCb(Highcharts) { linkWithHighcharts(Highcharts, scope, element, attrs); } highchartsNGUtils .getHighcharts() .then(highchartsCb); } return { restrict: 'EAC', replace: true, template: 
'<div></div>', scope: { config: '=', disableDataWatch: '=' }, link: link }; } }()); (function (angular) { 'use strict'; angular.module('tubular-hchart.directives', ['tubular.services', 'highcharts-ng']) /** * @ngdoc component * @name tbHighcharts * * @description * The `tbHighcharts` component is the base to create any Highcharts component. * * @param {string} serverUrl Set the HTTP URL where the data comes. * @param {string} chartName Defines the chart name. * @param {string} chartType Defines the chart type. * @param {string} title Defines the title. * @param {bool} requireAuthentication Set if authentication check must be executed, default true. * @param {function} onLoad Defines a method to run in chart data load * @param {string} emptyMessage The empty message. * @param {string} errorMessage The error message. * @param {object} options The Highcharts options method. */ .component('tbHighcharts', { template: '<div class="tubular-chart">' + '<highchart config="$ctrl.options" ng-hide="$ctrl.isEmpty || $ctrl.hasError">' + '</highchart>' + '<div class="alert alert-info" ng-show="$ctrl.isEmpty">{{$ctrl.emptyMessage}}</div>' + '<div class="alert alert-warning" ng-show="$ctrl.hasError">{{$ctrl.errorMessage}}</div>' + '</div>', bindings: { serverUrl: '@', title: '@?', requireAuthentication: '=?', name: '@?chartName', chartType: '@?', emptyMessage: '@?', errorMessage: '@?', onLoad: '=?', options: '=?', onClick: '=?' }, controller: [ '$scope', 'tubularHttp', '$timeout', 'tubularConfig', function ($scope, tubularHttp, $timeout, tubularConfig) { var $ctrl = this; $ctrl.dataService = tubularHttp.getDataService($ctrl.dataServiceName); $ctrl.showLegend = angular.isUndefined($ctrl.showLegend) ? 
true : $ctrl.showLegend; $ctrl.chartType = $ctrl.chartType || 'line'; $ctrl.options = angular.extend({}, $ctrl.options, { options: { chart: { type: $ctrl.chartType }, plotOptions: { pie: { point: { events: { click: ($ctrl.onClick || angular.noop) } } }, series: { point: { events: { click: ($ctrl.onClick || angular.noop) } } } } }, title: { text: $ctrl.title || '' }, xAxis: { categories: [] }, yAxis: {}, series: [] }); // Setup require authentication $ctrl.requireAuthentication = angular.isUndefined($ctrl.requireAuthentication) ? true : $ctrl.requireAuthentication; $ctrl.loadData = function () { tubularConfig.webApi.requireAuthentication($ctrl.requireAuthentication); $ctrl.hasError = false; tubularHttp.get($ctrl.serverUrl).then($ctrl.handleData, function (error) { $scope.$emit('tbChart_OnConnectionError', error); $ctrl.hasError = true; }); }; $ctrl.handleData = function (data) { if (!data || !data.Data || data.Data.length === 0) { $ctrl.isEmpty = true; $ctrl.options.series = [{ data: [] }]; if ($ctrl.onLoad) { $ctrl.onLoad($ctrl.options, {}); } return; } $ctrl.isEmpty = false; if (data.Series) { $ctrl.options.xAxis.categories = data.Labels; $ctrl.options.series = data.Series.map(function (el, ix) { return { name: el, data: data.Data[ix] }; }); } else { var uniqueSerie = data.Labels.map(function (el, ix) { return { name: el, y: data.Data[ix] }; }); $ctrl.options.series = [{ name: data.SerieName || '', data: uniqueSerie, showInLegend: (data.SerieName || '') !== '' }]; } if ($ctrl.onLoad) { $timeout(function () { $ctrl.onLoad($ctrl.options, {}, $ctrl.options.getHighcharts().series); }, 100); $ctrl.onLoad($ctrl.options, {}, null); } }; $scope.$watch('$ctrl.serverUrl', function (val) { if (angular.isDefined(val) && val != null && val !== '') { $ctrl.loadData(); } }); $scope.$watch('$ctrl.chartType', function (val) { if (angular.isDefined(val) && val != null) { $ctrl.options.options.chart.type = val; } }); } ] }); })(angular);<|fim▁end|>
} else { chartSeries.setData(angular.copy(s.data), false); }
<|file_name|>Issue.java<|end_file_name|><|fim▁begin|>package edu.xored.tracker; <|fim▁hole|> import java.time.LocalDateTime; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; public class Issue { private String hash; private String summary; private String description; private User author; private Status status; private LocalDateTime createdDateTime; @JsonIgnore private List<Comment> comments = new ArrayList<>(); public Issue() { } public Issue(String hash, String summary, String description, Status status) { this.hash = hash; this.summary = summary; this.description = description; this.status = status; this.createdDateTime = LocalDateTime.now(); } public String getHash() { return hash; } public void setHash(String hash) { this.hash = hash; } public String getSummary() { return summary; } public void setSummary(String summary) { this.summary = summary; } public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } public User getAuthor() { return author; } public void setAuthor(User author) { this.author = author; } public Status getStatus() { return status; } public void setStatus(Status status) { this.status = status; } public LocalDateTime getCreatedDateTime() { return createdDateTime; } public void setCreatedDateTime(LocalDateTime createdDateTime) { this.createdDateTime = createdDateTime; } public List<Comment> getComments() { return Collections.unmodifiableList(comments); } public void addComment(Comment comment) { if (comment != null) { comments.add(comment); } } public void addComments(Collection<Comment> comments) { if (comments != null) { this.comments.addAll(comments); } } public Issue updateIssue(Issue other) { if (other.getSummary() != null) { setSummary(other.getSummary()); } if (other.getDescription() != null) { setDescription(other.getDescription()); } if (other.getAuthor() != null) { setAuthor(other.getAuthor()); } 
if (other.getStatus() != null) { setStatus(other.getStatus()); } if (other.getCreatedDateTime() != null) { setCreatedDateTime(other.getCreatedDateTime()); } if (other.getComments() != null) { addComments(other.getComments()); } return this; } public enum Status { OPEN, RESOLVED; } }<|fim▁end|>
import com.fasterxml.jackson.annotation.JsonIgnore;
<|file_name|>fibonacci.rs<|end_file_name|><|fim▁begin|>use std::cmp; /// Returns a vector containing the fibonacci sequence, up to n numbers. fn fib(n: u32) -> Vec<u32> { let mut n2 = n; let mut f = vec![]; if n == 0 { return f; } while n2 > cmp::max(n-2, 0) { f.push(1); n2 -= 1; } while n2 > 0 { let length = f.len(); let a = f[length-1]; let b = f[length-2];<|fim▁hole|> n2 -= 1; } f }<|fim▁end|>
f.push(a+b);
<|file_name|>decorator.py<|end_file_name|><|fim▁begin|>#!-*- coding:utf-8 -*- import time def retries(times=3, timeout=1): """对未捕获异常进行重试""" def decorator(func): def _wrapper(*args, **kw): att, retry = 0, 0 while retry < times: retry += 1 try: return func(*args, **kw) except: att += timeout<|fim▁hole|> return _wrapper return decorator def empty_content_retries(times=3, timeout=2): """响应为空的进行重试""" def decorator(func): def _wrapper(*args, **kw): att, retry = 0, 0 while retry < times: retry += 1 ret = func(*args, **kw) if ret: return ret att += timeout time.sleep(att) return _wrapper return decorator def use_logging(level): """带参数的装饰器""" def decorator(func): print func.__name__ def wrapper(*args, **kwargs): if level == "warn": print ("level:%s, %s is running" % (level, func.__name__)) elif level == "info": print ("level:%s, %s is running" % (level, func.__name__)) return func(*args, **kwargs) return wrapper return decorator if __name__ == "__main__": @use_logging(level="warn") def foo(name='foo'): print("i am %s" % name) foo()<|fim▁end|>
if retry < times: time.sleep(att)
<|file_name|>source.hpp<|end_file_name|><|fim▁begin|>/****************************************************************************** * ____ _ _____ * * / ___| / \ | ___| C++ * * | | / _ \ | |_ Actor * * | |___ / ___ \| _| Framework * * \____/_/ \_|_| * * * * Copyright (C) 2011 - 2015 * * Dominik Charousset <dominik.charousset (at) haw-hamburg.de> * * * * Distributed under the terms and conditions of the BSD 3-Clause License or * * (at your option) under the terms and conditions of the Boost Software * * License 1.0. See accompanying files LICENSE and LICENSE_ALTERNATIVE. * * * * If you did not receive a copy of the license files, see * * http://opensource.org/licenses/BSD-3-Clause and * * http://www.boost.org/LICENSE_1_0.txt. * ******************************************************************************/ #ifndef SOURCE_HPP #define SOURCE_HPP #include <QSpinBox> #include <QProgressBar> #include "entity.hpp" #include "mainwindow.hpp" class source : virtual public entity { public: source(environment* env, QWidget* parent, QString name); ~source() override; void start() override;<|fim▁hole|>protected: // Pointer to the next stage in the pipeline. std::vector<caf::actor> consumers_; // Pointer to the CAF stream handler to advance the stream manually. caf::stream_manager_ptr stream_manager_; }; #endif // SOURCE_HPP<|fim▁end|>
void add_consumer(caf::actor consumer);
<|file_name|>metadata_tests.cpp<|end_file_name|><|fim▁begin|>/* * ****************************************************************************** * Copyright 2014-2016 Spectra Logic Corporation. All Rights Reserved. * Licensed under the Apache License, Version 2.0 (the "License"). You may not use * this file except in compliance with the License. A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. * This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * **************************************************************************** */ #include <stdio.h> #include <glib.h> #include "ds3.h" #include "test.h" #include <boost/test/unit_test.hpp> BOOST_AUTO_TEST_CASE( put_metadata ) { printf("-----Testing put_metadata-------\n"); ds3_bulk_object_list_response* obj_list; uint64_t metadata_count; ds3_master_object_list_response* bulk_response; ds3_metadata* metadata_result; ds3_metadata_entry* metadata_entry; const char* file_name[1] = {"resources/beowulf.txt"}; ds3_client* client = get_client(); const char* bucket_name = "metadata_test"; FILE* file; ds3_error* error = create_bucket_with_data_policy(client, bucket_name, ids.data_policy_id->value); handle_error(error); ds3_request* request = ds3_init_put_object_request(bucket_name, "empty-folder/", 0); error = ds3_put_object_request(client, request, NULL, NULL); ds3_request_free(request); handle_error(error); obj_list = ds3_convert_file_list(file_name, 1); request = ds3_init_put_bulk_job_spectra_s3_request(bucket_name, obj_list); error = ds3_put_bulk_job_spectra_s3_request(client, request, &bulk_response); ds3_request_free(request); handle_error(error); request = ds3_init_put_object_request(bucket_name, "resources/beowulf.txt", obj_list->objects[0]->length); 
ds3_request_set_job(request, bulk_response->job_id->value); file = fopen(obj_list->objects[0]->name->value, "r"); ds3_bulk_object_list_response_free(obj_list); ds3_request_set_metadata(request, "name", "value"); error = ds3_put_object_request(client, request, file, ds3_read_from_file); ds3_request_free(request); fclose(file); handle_error(error); ds3_master_object_list_response_free(bulk_response); request = ds3_init_head_object_request(bucket_name, "resources/beowulf.txt"); error = ds3_head_object_request(client, request, &metadata_result); ds3_request_free(request); handle_error(error); BOOST_CHECK(metadata_result != NULL); metadata_count = ds3_metadata_size(metadata_result); BOOST_CHECK(metadata_count == 1); metadata_entry = ds3_metadata_get_entry(metadata_result, "name"); BOOST_CHECK(metadata_entry != NULL); BOOST_CHECK(g_strcmp0(metadata_entry->name->value, "name") == 0); ds3_metadata_entry_free(metadata_entry); ds3_metadata_free(metadata_result); clear_bucket(client, bucket_name); free_client(client); } BOOST_AUTO_TEST_CASE( put_emtpy_metadata ) { printf("-----Testing put_emtpy_metadata-------\n"); ds3_bulk_object_list_response* obj_list; uint64_t metadata_count; ds3_master_object_list_response* bulk_response; ds3_metadata* metadata_result; ds3_metadata_entry* metadata_entry; const char* file_name[1] = {"resources/beowulf.txt"}; ds3_client* client = get_client(); const char* bucket_name = "metadata_test"; FILE* file; ds3_error* error = create_bucket_with_data_policy(client, bucket_name, ids.data_policy_id->value); handle_error(error); ds3_request* request = ds3_init_put_object_request(bucket_name, "empty-folder/", 0); error = ds3_put_object_request(client, request, NULL, NULL); ds3_request_free(request); handle_error(error); obj_list = ds3_convert_file_list(file_name, 1); request = ds3_init_put_bulk_job_spectra_s3_request(bucket_name, obj_list); error = ds3_put_bulk_job_spectra_s3_request(client, request, &bulk_response); ds3_request_free(request); 
handle_error(error); request = ds3_init_put_object_request(bucket_name, "resources/beowulf.txt", obj_list->objects[0]->length); ds3_request_set_job(request, bulk_response->job_id->value); file = fopen(obj_list->objects[0]->name->value, "r"); ds3_bulk_object_list_response_free(obj_list); ds3_request_set_metadata(request, "name", ""); error = ds3_put_object_request(client, request, file, ds3_read_from_file); ds3_request_free(request); fclose(file); handle_error(error); ds3_master_object_list_response_free(bulk_response); request = ds3_init_head_object_request(bucket_name, "resources/beowulf.txt"); error = ds3_head_object_request(client, request, &metadata_result); ds3_request_free(request); handle_error(error); metadata_count = ds3_metadata_size(metadata_result); BOOST_CHECK(metadata_count == 0); metadata_entry = ds3_metadata_get_entry(metadata_result, "name"); BOOST_CHECK(metadata_entry == NULL); ds3_metadata_entry_free(metadata_entry); ds3_metadata_free(metadata_result); clear_bucket(client, bucket_name); free_client(client); } BOOST_AUTO_TEST_CASE( put_null_metadata ) { printf("-----Testing put_null_metadata-------\n"); ds3_bulk_object_list_response* obj_list; uint64_t metadata_count; ds3_master_object_list_response* bulk_response; ds3_metadata* metadata_result; ds3_metadata_entry* metadata_entry; const char* file_name[1] = {"resources/beowulf.txt"}; ds3_client* client = get_client(); const char* bucket_name = "metadata_test"; FILE* file; ds3_error* error = create_bucket_with_data_policy(client, bucket_name, ids.data_policy_id->value); handle_error(error); ds3_request* request = ds3_init_put_object_request(bucket_name, "empty-folder/", 0); error = ds3_put_object_request(client, request, NULL, NULL); ds3_request_free(request); handle_error(error); obj_list = ds3_convert_file_list(file_name, 1); request = ds3_init_put_bulk_job_spectra_s3_request(bucket_name, obj_list); error = ds3_put_bulk_job_spectra_s3_request(client, request, &bulk_response); 
ds3_request_free(request); handle_error(error); request = ds3_init_put_object_request(bucket_name, "resources/beowulf.txt", obj_list->objects[0]->length); ds3_request_set_job(request, bulk_response->job_id->value); file = fopen(obj_list->objects[0]->name->value, "r"); ds3_bulk_object_list_response_free(obj_list); ds3_request_set_metadata(request, "name", NULL); error = ds3_put_object_request(client, request, file, ds3_read_from_file); ds3_request_free(request); fclose(file); handle_error(error); ds3_master_object_list_response_free(bulk_response); request = ds3_init_head_object_request(bucket_name, "resources/beowulf.txt"); error = ds3_head_object_request(client, request, &metadata_result); ds3_request_free(request); handle_error(error); metadata_count = ds3_metadata_size(metadata_result); BOOST_CHECK(metadata_count == 0); metadata_entry = ds3_metadata_get_entry(metadata_result, "name"); BOOST_CHECK(metadata_entry == NULL); ds3_metadata_entry_free(metadata_entry); ds3_metadata_free(metadata_result); clear_bucket(client, bucket_name); free_client(client); } BOOST_AUTO_TEST_CASE( head_bucket ) { printf("-----Testing head_bucket-------\n"); ds3_client* client = get_client(); const char* bucket_name = "metadata_test"; ds3_error* error = create_bucket_with_data_policy(client, bucket_name, ids.data_policy_id->value); handle_error(error); ds3_request* request = ds3_init_head_bucket_request(bucket_name); error = ds3_head_bucket_request(client, request); ds3_request_free(request); handle_error(error); clear_bucket(client, bucket_name); free_client(client); } BOOST_AUTO_TEST_CASE( head_folder ) { printf("-----Testing head_folder-------\n"); ds3_metadata* metadata_result; ds3_client* client = get_client(); const char* bucket_name = "head_folder_test"; ds3_error* error = create_bucket_with_data_policy(client, bucket_name, ids.data_policy_id->value); handle_error(error); const char* test_folder = "test_folder/"; ds3_request* request = ds3_init_put_object_request(bucket_name, 
test_folder, 0); error = ds3_put_object_request(client, request, NULL, NULL); ds3_request_free(request); handle_error(error); request = ds3_init_head_object_request(bucket_name, test_folder); error = ds3_head_object_request(client, request, &metadata_result); ds3_request_free(request); handle_error(error); BOOST_CHECK(metadata_result != NULL); ds3_metadata_free(metadata_result); clear_bucket(client, bucket_name); free_client(client); } BOOST_AUTO_TEST_CASE( put_multiple_metadata_items ) { printf("-----Testing put_multiple_metadata_items-------\n"); ds3_bulk_object_list_response* obj_list; uint64_t metadata_count; ds3_master_object_list_response* bulk_response; ds3_metadata* metadata_result; ds3_metadata_entry* metadata_entry; const char* file_name[1] = {"resources/beowulf.txt"}; //ds3_client* client = get_client_at_loglvl(DS3_DEBUG); ds3_client* client = get_client(); const char* bucket_name = "multi_metadata_test"; FILE* file; ds3_error* error = create_bucket_with_data_policy(client, bucket_name, ids.data_policy_id->value); handle_error(error); obj_list = ds3_convert_file_list(file_name, 1); ds3_request* request = ds3_init_put_bulk_job_spectra_s3_request(bucket_name, obj_list); error = ds3_put_bulk_job_spectra_s3_request(client, request, &bulk_response); ds3_request_free(request); handle_error(error); request = ds3_init_put_object_request(bucket_name, "resources/beowulf.txt", obj_list->objects[0]->length); ds3_request_set_job(request, bulk_response->job_id->value); file = fopen(obj_list->objects[0]->name->value, "r"); ds3_bulk_object_list_response_free(obj_list); ds3_request_set_metadata(request, "key", "value2"); ds3_request_set_metadata(request, "name", "value"); error = ds3_put_object_request(client, request, file, ds3_read_from_file); ds3_request_free(request); fclose(file); handle_error(error); ds3_master_object_list_response_free(bulk_response); request = ds3_init_head_object_request(bucket_name, "resources/beowulf.txt"); error = 
ds3_head_object_request(client, request, &metadata_result); ds3_request_free(request); handle_error(error); BOOST_CHECK(metadata_result != NULL); metadata_count = ds3_metadata_size(metadata_result); BOOST_CHECK(metadata_count == 2); metadata_entry = ds3_metadata_get_entry(metadata_result, "name"); BOOST_CHECK(metadata_entry != NULL); BOOST_CHECK(g_strcmp0(metadata_entry->name->value, "name") == 0); BOOST_CHECK(g_strcmp0(metadata_entry->values[0]->value, "value") == 0); ds3_metadata_entry_free(metadata_entry); metadata_entry = ds3_metadata_get_entry(metadata_result, "key"); BOOST_CHECK(metadata_entry != NULL); BOOST_CHECK(g_strcmp0(metadata_entry->name->value, "key") == 0); BOOST_CHECK(g_strcmp0(metadata_entry->values[0]->value, "value2") == 0); ds3_metadata_entry_free(metadata_entry); ds3_metadata_free(metadata_result); clear_bucket(client, bucket_name); free_client(client); } static bool contains_key(const ds3_metadata_keys_result* metadata_keys, const char* key) { uint64_t i; for (i = 0; i < metadata_keys->num_keys; i++) { if (g_strcmp0(key, metadata_keys->keys[i]->value) == 0) { return TRUE; } } return FALSE; } BOOST_AUTO_TEST_CASE( metadata_keys ) { printf("-----Testing metadata_keys-------\n"); ds3_bulk_object_list_response* obj_list; uint64_t metadata_count; ds3_master_object_list_response* bulk_response; ds3_metadata* metadata_result = NULL; ds3_metadata_keys_result* metadata_keys = NULL; const char* file_name[1] = {"resources/beowulf.txt"}; //ds3_client* client = get_client_at_loglvl(DS3_DEBUG); ds3_client* client = get_client(); const char* bucket_name = "key_metadata_test"; FILE* file; ds3_error* error = create_bucket_with_data_policy(client, bucket_name, ids.data_policy_id->value); handle_error(error); obj_list = ds3_convert_file_list(file_name, 1); ds3_request* request = ds3_init_put_bulk_job_spectra_s3_request(bucket_name, obj_list); error = ds3_put_bulk_job_spectra_s3_request(client, request, &bulk_response); ds3_request_free(request); 
handle_error(error); request = ds3_init_put_object_request(bucket_name, "resources/beowulf.txt", obj_list->objects[0]->length); ds3_request_set_job(request, bulk_response->job_id->value); file = fopen(obj_list->objects[0]->name->value, "r"); ds3_bulk_object_list_response_free(obj_list); ds3_request_set_metadata(request, "key", "value2"); ds3_request_set_metadata(request, "name", "value"); <|fim▁hole|> fclose(file); handle_error(error); ds3_master_object_list_response_free(bulk_response); request = ds3_init_head_object_request(bucket_name, "resources/beowulf.txt"); error = ds3_head_object_request(client, request, &metadata_result); ds3_request_free(request); handle_error(error); BOOST_CHECK(metadata_result != NULL); metadata_count = ds3_metadata_size(metadata_result); BOOST_CHECK(metadata_count == 2); metadata_keys = ds3_metadata_keys(metadata_result); BOOST_CHECK(metadata_keys != NULL); BOOST_CHECK(metadata_keys->num_keys == 2); BOOST_CHECK(contains_key(metadata_keys, "key")); BOOST_CHECK(contains_key(metadata_keys, "name")); ds3_metadata_keys_free(metadata_keys); ds3_metadata_free(metadata_result); clear_bucket(client, bucket_name); free_client(client); } BOOST_AUTO_TEST_CASE( put_metadata_using_get_object_retrieval ) { printf("-----Testing put_metadata_using_get_object_retrieval-------\n"); ds3_bulk_object_list_response* obj_list; uint64_t metadata_count; ds3_master_object_list_response* bulk_response; ds3_metadata* metadata_result; ds3_metadata_entry* metadata_entry; const char* file_name[1] = {"resources/beowulf.txt"}; ds3_client* client = get_client(); const char* bucket_name = "put_metadata_using_get_object_retrieval"; FILE* file; ds3_error* error = create_bucket_with_data_policy(client, bucket_name, ids.data_policy_id->value); handle_error(error); obj_list = ds3_convert_file_list(file_name, 1); ds3_request* request = ds3_init_put_bulk_job_spectra_s3_request(bucket_name, obj_list); error = ds3_put_bulk_job_spectra_s3_request(client, request, &bulk_response); 
ds3_request_free(request); handle_error(error); request = ds3_init_put_object_request(bucket_name, "resources/beowulf.txt", obj_list->objects[0]->length); ds3_request_set_job(request, bulk_response->job_id->value); file = fopen(obj_list->objects[0]->name->value, "r"); ds3_request_set_metadata(request, "name", "value"); error = ds3_put_object_request(client, request, file, ds3_read_from_file); ds3_request_free(request); fclose(file); handle_error(error); ds3_master_object_list_response_free(bulk_response); request = ds3_init_get_bulk_job_spectra_s3_request(bucket_name, obj_list); ds3_request_set_chunk_client_processing_order_guarantee_ds3_job_chunk_client_processing_order_guarantee(request, DS3_JOB_CHUNK_CLIENT_PROCESSING_ORDER_GUARANTEE_NONE); error = ds3_get_bulk_job_spectra_s3_request(client, request, &bulk_response); ds3_request_free(request); ds3_bulk_object_list_response_free(obj_list); handle_error(error); request = ds3_init_get_object_request(bucket_name, "resources/beowulf.txt", 0); ds3_request_set_job(request, bulk_response->job_id->value); file = fopen("/dev/null", "w"); error = ds3_get_object_with_metadata(client, request, file, ds3_write_to_file, &metadata_result); ds3_request_free(request); ds3_master_object_list_response_free(bulk_response); fclose(file); handle_error(error); BOOST_CHECK(metadata_result != NULL); metadata_count = ds3_metadata_size(metadata_result); BOOST_CHECK(metadata_count == 1); metadata_entry = ds3_metadata_get_entry(metadata_result, "name"); BOOST_CHECK(metadata_entry != NULL); BOOST_CHECK(g_strcmp0(metadata_entry->name->value, "name") == 0); ds3_metadata_entry_free(metadata_entry); ds3_metadata_free(metadata_result); clear_bucket(client, bucket_name); free_client(client); }<|fim▁end|>
error = ds3_put_object_request(client, request, file, ds3_read_from_file); ds3_request_free(request);
<|file_name|>templates.js<|end_file_name|><|fim▁begin|>'use strict'; /** * @ngdoc function * @name freshcardUiApp.controller:TemplatesCtrl * @description * # TemplatesCtrl * Controller of the freshcardUiApp */ angular.module('freshcardUiApp') .controller('TemplatesCtrl', function ( $scope, $rootScope, $localStorage, $filter, $timeout, FileUploader, OrganizationService, configuration ) { $scope.templateLayoutSaved = false; $scope.templateLayoutError = false; $scope.templateLayoutPublished = false; $scope.templateLayoutPublishError = false; $scope.templateImagePath = $rootScope.currentOrganizationTemplate; $scope.fields = [ false, false, false, false, false, false, false ]; $scope.fieldNames = [ 'NAME', 'EMAIL_ADDRESS', 'STREET_ADDRESS', 'POSTAL_CODE', 'CITY', 'PHONE_NUMBER', 'WEBSITE' ]; $scope.fieldMappings = [ $filter('translate')('NAME'), $filter('translate')('EMAIL_ADDRESS'), $filter('translate')('STREET_ADDRESS'), $filter('translate')('POSTAL_CODE'), $filter('translate')('CITY'), $filter('translate')('PHONE_NUMBER'), $filter('translate')('WEBSITE') ]; $scope.showGrid = false; $scope.snapToGrid = false; $scope.fonts = [ 'Helvetica Neue', 'Open Sans', 'Helvetica', 'Arial', 'Times New Roman' ]; $scope.fontSizes = [ 14, 16, 18, 20, 24, 28 ]; $scope.selectedFont = $scope.fonts[0]; $scope.selectedFontSize = $scope.fontSizes[3]; $scope.templateLayout = { fields: { }, font: $scope.selectedFont, fontSize: $scope.selectedFontSize, showGrid: $scope.showGrid, snapToGrid: $scope.snapToGrid }; OrganizationService.get( { organizationId: $rootScope.user.currentOrganizationId }, function(organization) { if (organization.templateLayout) { $scope.templateLayout = JSON.parse(organization.templateLayout); $scope.selectedFont = $scope.templateLayout.font; $scope.selectedFontSize = $scope.templateLayout.fontSize; $scope.fields = [ false, false, false, false, false, false, false ]; for (var field in $scope.templateLayout.fields) { for (var i = 0; i < $scope.fieldMappings.length; i++) 
{ if ($scope.fieldMappings[i] === $filter('translate')(field)) { $scope.fields[i] = true; } } } } } ); var imageUploader = $scope.imageUploader = new FileUploader( { url: configuration.apiRootURL + 'api/v1/organizations/uploadTemplateImage/' + $rootScope.user.currentOrganizationId, headers: { 'X-Auth-Token': $rootScope.authToken } } ); imageUploader.onAfterAddingFile = function() { $scope.imageUploadCompleted = false; }; imageUploader.onCompleteItem = function(fileItem, response) { if (response.imagePath !== null && response.imagePath !== undefined) { $scope.templateImagePath = $localStorage.currentOrganizationTemplate = $rootScope.currentOrganizationTemplate = response.imagePath; $scope.imageUploadCompleted = true; } }; $scope.saveTemplate = function() { $scope.templateLayout.font = $scope.selectedFont; $scope.templateLayout.fontSize = $scope.selectedFontSize; var svgResult = $scope.canvas.toSVG(); for (var i = 0; i < $scope.fieldMappings.length; i++) { svgResult = svgResult.replace($scope.fieldMappings[i], $scope.fieldNames[i]); } OrganizationService.update( { id: $rootScope.user.currentOrganizationId, templateLayout: JSON.stringify($scope.templateLayout), templateAsSVG: svgResult }, function() { $scope.templateLayoutPublished = false; $scope.templateLayoutPublishError = false; $scope.templateLayoutSaved = true; $scope.templateLayoutError = false; $timeout( function() { $scope.templateLayoutSaved = false;<|fim▁hole|> }, function() { $scope.templateLayoutPublished = false; $scope.templateLayoutPublishError = false; $scope.templateLayoutSaved = false; $scope.templateLayoutError = true; } ); }; $scope.publishTemplate = function() { OrganizationService.publishTemplate( { id: $rootScope.user.currentOrganizationId }, function() { $scope.templateLayoutPublished = true; $scope.templateLayoutPublishError = false; $scope.templateLayoutSaved = false; $scope.templateLayoutError = false; $timeout( function() { $scope.templateLayoutPublished = false; }, 5000 ); }, function() { 
$scope.templateLayoutPublished = false; $scope.templateLayoutPublishError = true; $scope.templateLayoutSaved = false; $scope.templateLayoutError = false; } ); }; });<|fim▁end|>
}, 5000 );
<|file_name|>DataUtils.java<|end_file_name|><|fim▁begin|>package org.buildmlearn.toolkit.flashcardtemplate.data; import org.w3c.dom.Document; import org.xml.sax.SAXException;<|fim▁hole|> import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; /** * Created by Anupam (opticod) on 10/8/16. */ /** * @brief Contains xml data utils for flash card template's simulator. */ public class DataUtils { public static String[] readTitleAuthor() { String result[] = new String[2]; DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); dbf.setValidating(false); DocumentBuilder db; Document doc; try { File fXmlFile = new File(org.buildmlearn.toolkit.flashcardtemplate.Constants.XMLFileName); db = dbf.newDocumentBuilder(); doc = db.parse(fXmlFile); doc.normalize(); result[0] = doc.getElementsByTagName("title").item(0).getChildNodes() .item(0).getNodeValue(); result[1] = doc.getElementsByTagName("name").item(0).getChildNodes() .item(0).getNodeValue(); } catch (ParserConfigurationException | SAXException | IOException e) { e.printStackTrace(); } return result; } }<|fim▁end|>
import java.io.File; import java.io.IOException;
<|file_name|>test_issue_064.py<|end_file_name|><|fim▁begin|><|fim▁hole|> from nose.util import is_generator, isgenerator<|fim▁end|>
def test_is_generator_alias():
<|file_name|>RemoteJMXAttributes.java<|end_file_name|><|fim▁begin|>/* * Copyright 2015 Red Hat, Inc. and/or its affiliates * and other contributors as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.hawkular.agent.monitor.extension; import org.jboss.as.controller.AttributeDefinition; import org.jboss.as.controller.SimpleAttributeDefinition; import org.jboss.as.controller.SimpleAttributeDefinitionBuilder; import org.jboss.as.controller.registry.AttributeAccess; import org.jboss.dmr.ModelNode; import org.jboss.dmr.ModelType; public interface RemoteJMXAttributes { SimpleAttributeDefinition ENABLED = new SimpleAttributeDefinitionBuilder("enabled", ModelType.BOOLEAN) .setAllowNull(true) .setDefaultValue(new ModelNode(true)) .setAllowExpression(true) .addFlag(AttributeAccess.Flag.RESTART_RESOURCE_SERVICES) .build(); SimpleAttributeDefinition URL = new SimpleAttributeDefinitionBuilder("url", ModelType.STRING) .setAllowNull(false) .setAllowExpression(true)<|fim▁hole|> SimpleAttributeDefinition USERNAME = new SimpleAttributeDefinitionBuilder("username", ModelType.STRING) .setAllowNull(true) .setAllowExpression(true) .addFlag(AttributeAccess.Flag.RESTART_RESOURCE_SERVICES) .build(); SimpleAttributeDefinition PASSWORD = new SimpleAttributeDefinitionBuilder("password", ModelType.STRING) .setAllowNull(true) .setAllowExpression(true) .addFlag(AttributeAccess.Flag.RESTART_RESOURCE_SERVICES) .build(); SimpleAttributeDefinition SECURITY_REALM = new 
SimpleAttributeDefinitionBuilder("securityRealm", ModelType.STRING) .setAllowNull(true) .setAllowExpression(true) .addFlag(AttributeAccess.Flag.RESTART_RESOURCE_SERVICES) .build(); SimpleAttributeDefinition RESOURCE_TYPE_SETS = new SimpleAttributeDefinitionBuilder( "resourceTypeSets", ModelType.STRING) .setAllowNull(true) .setAllowExpression(true) .addFlag(AttributeAccess.Flag.RESTART_RESOURCE_SERVICES) .build(); AttributeDefinition[] ATTRIBUTES = { ENABLED, URL, USERNAME, PASSWORD, SECURITY_REALM, RESOURCE_TYPE_SETS }; }<|fim▁end|>
.addFlag(AttributeAccess.Flag.RESTART_RESOURCE_SERVICES) .build();
<|file_name|>ip.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![unstable(feature = "ip", reason = "extra functionality has not been \ scrutinized to the level that it should \ be stable")] use prelude::v1::*; use cmp::Ordering; use hash; use fmt; use libc; use sys_common::{AsInner, FromInner}; use net::{hton, ntoh}; /// An IP address, either a IPv4 or IPv6 address. #[unstable(feature = "ip_addr", reason = "recent addition")] #[derive(Copy, Clone, Eq, PartialEq, Debug, Hash, PartialOrd, Ord)] pub enum IpAddr { /// Representation of an IPv4 address. V4(Ipv4Addr), /// Representation of an IPv6 address. V6(Ipv6Addr), } /// Representation of an IPv4 address. #[derive(Copy)] #[stable(feature = "rust1", since = "1.0.0")] pub struct Ipv4Addr { inner: libc::in_addr, } /// Representation of an IPv6 address. #[derive(Copy)] #[stable(feature = "rust1", since = "1.0.0")] pub struct Ipv6Addr { inner: libc::in6_addr, } #[allow(missing_docs)] #[derive(Copy, PartialEq, Eq, Clone, Hash, Debug)] pub enum Ipv6MulticastScope { InterfaceLocal, LinkLocal, RealmLocal, AdminLocal, SiteLocal, OrganizationLocal, Global } impl Ipv4Addr { /// Creates a new IPv4 address from four eight-bit octets. /// /// The result will represent the IP address `a`.`b`.`c`.`d`. #[stable(feature = "rust1", since = "1.0.0")] pub fn new(a: u8, b: u8, c: u8, d: u8) -> Ipv4Addr { Ipv4Addr { inner: libc::in_addr { s_addr: hton(((a as u32) << 24) | ((b as u32) << 16) | ((c as u32) << 8) | (d as u32)), } } } /// Returns the four eight-bit integers that make up this address. 
#[stable(feature = "rust1", since = "1.0.0")] pub fn octets(&self) -> [u8; 4] { let bits = ntoh(self.inner.s_addr); [(bits >> 24) as u8, (bits >> 16) as u8, (bits >> 8) as u8, bits as u8] } /// Returns true for the special 'unspecified' address 0.0.0.0. pub fn is_unspecified(&self) -> bool { self.inner.s_addr == 0 } /// Returns true if this is a loopback address (127.0.0.0/8). pub fn is_loopback(&self) -> bool { self.octets()[0] == 127 } /// Returns true if this is a private address. /// /// The private address ranges are defined in RFC1918 and include: /// /// - 10.0.0.0/8 /// - 172.16.0.0/12 /// - 192.168.0.0/16 pub fn is_private(&self) -> bool { match (self.octets()[0], self.octets()[1]) { (10, _) => true, (172, b) if b >= 16 && b <= 31 => true, (192, 168) => true, _ => false } } /// Returns true if the address is link-local (169.254.0.0/16). pub fn is_link_local(&self) -> bool { self.octets()[0] == 169 && self.octets()[1] == 254 } /// Returns true if the address appears to be globally routable. /// /// The following return false: /// /// - private address (10.0.0.0/8, 172.16.0.0/12 and 192.168.0.0/16) /// - the loopback address (127.0.0.0/8) /// - the link-local address (169.254.0.0/16) /// - the broadcast address (255.255.255.255/32) /// - test addresses used for documentation (192.0.2.0/24, 198.51.100.0/24 and 203.0.113.0/24) pub fn is_global(&self) -> bool { !self.is_private() && !self.is_loopback() && !self.is_link_local() && !self.is_broadcast() && !self.is_documentation() } /// Returns true if this is a multicast address. /// /// Multicast addresses have a most significant octet between 224 and 239. pub fn is_multicast(&self) -> bool { self.octets()[0] >= 224 && self.octets()[0] <= 239 } /// Returns true if this is a broadcast address. /// /// A broadcast address has all octets set to 255 as defined in RFC 919. 
pub fn is_broadcast(&self) -> bool { self.octets()[0] == 255 && self.octets()[1] == 255 && self.octets()[2] == 255 && self.octets()[3] == 255 } /// Returns true if this address is in a range designated for documentation. /// /// This is defined in RFC 5737: /// /// - 192.0.2.0/24 (TEST-NET-1) /// - 198.51.100.0/24 (TEST-NET-2) /// - 203.0.113.0/24 (TEST-NET-3) pub fn is_documentation(&self) -> bool { match(self.octets()[0], self.octets()[1], self.octets()[2], self.octets()[3]) { (192, 0, 2, _) => true, (198, 51, 100, _) => true, (203, 0, 113, _) => true, _ => false } } /// Converts this address to an IPv4-compatible IPv6 address. /// /// a.b.c.d becomes ::a.b.c.d #[stable(feature = "rust1", since = "1.0.0")] pub fn to_ipv6_compatible(&self) -> Ipv6Addr { Ipv6Addr::new(0, 0, 0, 0, 0, 0, ((self.octets()[0] as u16) << 8) | self.octets()[1] as u16, ((self.octets()[2] as u16) << 8) | self.octets()[3] as u16) } /// Converts this address to an IPv4-mapped IPv6 address. /// /// a.b.c.d becomes ::ffff:a.b.c.d #[stable(feature = "rust1", since = "1.0.0")] pub fn to_ipv6_mapped(&self) -> Ipv6Addr { Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, ((self.octets()[0] as u16) << 8) | self.octets()[1] as u16, ((self.octets()[2] as u16) << 8) | self.octets()[3] as u16) } } #[stable(feature = "rust1", since = "1.0.0")] impl fmt::Display for IpAddr { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { match *self { IpAddr::V4(ref a) => a.fmt(fmt), IpAddr::V6(ref a) => a.fmt(fmt), } } } #[stable(feature = "rust1", since = "1.0.0")] impl fmt::Display for Ipv4Addr { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { let octets = self.octets(); write!(fmt, "{}.{}.{}.{}", octets[0], octets[1], octets[2], octets[3]) } } #[stable(feature = "rust1", since = "1.0.0")] impl fmt::Debug for Ipv4Addr { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(self, fmt) } } #[stable(feature = "rust1", since = "1.0.0")] impl Clone for Ipv4Addr { fn clone(&self) -> Ipv4Addr { *self 
} } #[stable(feature = "rust1", since = "1.0.0")] impl PartialEq for Ipv4Addr { fn eq(&self, other: &Ipv4Addr) -> bool { self.inner.s_addr == other.inner.s_addr } } #[stable(feature = "rust1", since = "1.0.0")] impl Eq for Ipv4Addr {} #[stable(feature = "rust1", since = "1.0.0")] impl hash::Hash for Ipv4Addr { fn hash<H: hash::Hasher>(&self, s: &mut H) { self.inner.s_addr.hash(s) } } #[stable(feature = "rust1", since = "1.0.0")] impl PartialOrd for Ipv4Addr { fn partial_cmp(&self, other: &Ipv4Addr) -> Option<Ordering> { Some(self.cmp(other)) } } #[stable(feature = "rust1", since = "1.0.0")] impl Ord for Ipv4Addr { fn cmp(&self, other: &Ipv4Addr) -> Ordering { self.inner.s_addr.cmp(&other.inner.s_addr) } } impl AsInner<libc::in_addr> for Ipv4Addr { fn as_inner(&self) -> &libc::in_addr { &self.inner } } impl FromInner<libc::in_addr> for Ipv4Addr { fn from_inner(addr: libc::in_addr) -> Ipv4Addr { Ipv4Addr { inner: addr } } } #[stable(feature = "ip_u32", since = "1.1.0")] impl From<Ipv4Addr> for u32 { fn from(ip: Ipv4Addr) -> u32 { let ip = ip.octets(); ((ip[0] as u32) << 24) + ((ip[1] as u32) << 16) + ((ip[2] as u32) << 8) + (ip[3] as u32) } } #[stable(feature = "ip_u32", since = "1.1.0")] impl From<u32> for Ipv4Addr { fn from(ip: u32) -> Ipv4Addr { Ipv4Addr::new((ip >> 24) as u8, (ip >> 16) as u8, (ip >> 8) as u8, ip as u8) } } impl Ipv6Addr { /// Creates a new IPv6 address from eight 16-bit segments. /// /// The result will represent the IP address a:b:c:d:e:f:g:h. #[stable(feature = "rust1", since = "1.0.0")] pub fn new(a: u16, b: u16, c: u16, d: u16, e: u16, f: u16, g: u16, h: u16) -> Ipv6Addr { Ipv6Addr { inner: libc::in6_addr { s6_addr: [hton(a), hton(b), hton(c), hton(d), hton(e), hton(f), hton(g), hton(h)] } } } /// Returns the eight 16-bit segments that make up this address. 
#[stable(feature = "rust1", since = "1.0.0")] pub fn segments(&self) -> [u16; 8] { [ntoh(self.inner.s6_addr[0]), ntoh(self.inner.s6_addr[1]), ntoh(self.inner.s6_addr[2]), ntoh(self.inner.s6_addr[3]), ntoh(self.inner.s6_addr[4]), ntoh(self.inner.s6_addr[5]), ntoh(self.inner.s6_addr[6]), ntoh(self.inner.s6_addr[7])] } /// Returns true for the special 'unspecified' address ::. pub fn is_unspecified(&self) -> bool { self.segments() == [0, 0, 0, 0, 0, 0, 0, 0] } /// Returns true if this is a loopback address (::1). pub fn is_loopback(&self) -> bool { self.segments() == [0, 0, 0, 0, 0, 0, 0, 1] } /// Returns true if the address appears to be globally routable. /// /// The following return false: /// /// - the loopback address /// - link-local, site-local, and unique local unicast addresses /// - interface-, link-, realm-, admin- and site-local multicast addresses pub fn is_global(&self) -> bool { match self.multicast_scope() { Some(Ipv6MulticastScope::Global) => true, None => self.is_unicast_global(), _ => false } } /// Returns true if this is a unique local address (IPv6). /// /// Unique local addresses are defined in RFC4193 and have the form fc00::/7. pub fn is_unique_local(&self) -> bool { (self.segments()[0] & 0xfe00) == 0xfc00 } /// Returns true if the address is unicast and link-local (fe80::/10). pub fn is_unicast_link_local(&self) -> bool { (self.segments()[0] & 0xffc0) == 0xfe80 } /// Returns true if this is a deprecated unicast site-local address (IPv6 /// fec0::/10). pub fn is_unicast_site_local(&self) -> bool { (self.segments()[0] & 0xffc0) == 0xfec0 } /// Returns true if the address is a globally routable unicast address. 
/// /// The following return false: /// /// - the loopback address /// - the link-local addresses /// - the (deprecated) site-local addresses /// - unique local addresses pub fn is_unicast_global(&self) -> bool { !self.is_multicast() && !self.is_loopback() && !self.is_unicast_link_local() && !self.is_unicast_site_local() && !self.is_unique_local() } /// Returns the address's multicast scope if the address is multicast. pub fn multicast_scope(&self) -> Option<Ipv6MulticastScope> { if self.is_multicast() { match self.segments()[0] & 0x000f {<|fim▁hole|> 3 => Some(Ipv6MulticastScope::RealmLocal), 4 => Some(Ipv6MulticastScope::AdminLocal), 5 => Some(Ipv6MulticastScope::SiteLocal), 8 => Some(Ipv6MulticastScope::OrganizationLocal), 14 => Some(Ipv6MulticastScope::Global), _ => None } } else { None } } /// Returns true if this is a multicast address. /// /// Multicast addresses have the form ff00::/8. pub fn is_multicast(&self) -> bool { (self.segments()[0] & 0xff00) == 0xff00 } /// Converts this address to an IPv4 address. Returns None if this address is /// neither IPv4-compatible or IPv4-mapped. 
/// /// ::a.b.c.d and ::ffff:a.b.c.d become a.b.c.d #[stable(feature = "rust1", since = "1.0.0")] pub fn to_ipv4(&self) -> Option<Ipv4Addr> { match self.segments() { [0, 0, 0, 0, 0, f, g, h] if f == 0 || f == 0xffff => { Some(Ipv4Addr::new((g >> 8) as u8, g as u8, (h >> 8) as u8, h as u8)) }, _ => None } } } #[stable(feature = "rust1", since = "1.0.0")] impl fmt::Display for Ipv6Addr { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { match self.segments() { // We need special cases for :: and ::1, otherwise they're formatted // as ::0.0.0.[01] [0, 0, 0, 0, 0, 0, 0, 0] => write!(fmt, "::"), [0, 0, 0, 0, 0, 0, 0, 1] => write!(fmt, "::1"), // Ipv4 Compatible address [0, 0, 0, 0, 0, 0, g, h] => { write!(fmt, "::{}.{}.{}.{}", (g >> 8) as u8, g as u8, (h >> 8) as u8, h as u8) } // Ipv4-Mapped address [0, 0, 0, 0, 0, 0xffff, g, h] => { write!(fmt, "::ffff:{}.{}.{}.{}", (g >> 8) as u8, g as u8, (h >> 8) as u8, h as u8) }, _ => { fn find_zero_slice(segments: &[u16; 8]) -> (usize, usize) { let mut longest_span_len = 0; let mut longest_span_at = 0; let mut cur_span_len = 0; let mut cur_span_at = 0; for i in 0..8 { if segments[i] == 0 { if cur_span_len == 0 { cur_span_at = i; } cur_span_len += 1; if cur_span_len > longest_span_len { longest_span_len = cur_span_len; longest_span_at = cur_span_at; } } else { cur_span_len = 0; cur_span_at = 0; } } (longest_span_at, longest_span_len) } let (zeros_at, zeros_len) = find_zero_slice(&self.segments()); if zeros_len > 1 { fn fmt_subslice(segments: &[u16]) -> String { segments .iter() .map(|&seg| format!("{:x}", seg)) .collect::<Vec<String>>() .join(":") } write!(fmt, "{}::{}", fmt_subslice(&self.segments()[..zeros_at]), fmt_subslice(&self.segments()[zeros_at + zeros_len..])) } else { let &[a, b, c, d, e, f, g, h] = &self.segments(); write!(fmt, "{:x}:{:x}:{:x}:{:x}:{:x}:{:x}:{:x}:{:x}", a, b, c, d, e, f, g, h) } } } } } #[stable(feature = "rust1", since = "1.0.0")] impl fmt::Debug for Ipv6Addr { fn fmt(&self, fmt: &mut 
fmt::Formatter) -> fmt::Result { fmt::Display::fmt(self, fmt) } } #[stable(feature = "rust1", since = "1.0.0")] impl Clone for Ipv6Addr { fn clone(&self) -> Ipv6Addr { *self } } #[stable(feature = "rust1", since = "1.0.0")] impl PartialEq for Ipv6Addr { fn eq(&self, other: &Ipv6Addr) -> bool { self.inner.s6_addr == other.inner.s6_addr } } #[stable(feature = "rust1", since = "1.0.0")] impl Eq for Ipv6Addr {} #[stable(feature = "rust1", since = "1.0.0")] impl hash::Hash for Ipv6Addr { fn hash<H: hash::Hasher>(&self, s: &mut H) { self.inner.s6_addr.hash(s) } } #[stable(feature = "rust1", since = "1.0.0")] impl PartialOrd for Ipv6Addr { fn partial_cmp(&self, other: &Ipv6Addr) -> Option<Ordering> { Some(self.cmp(other)) } } #[stable(feature = "rust1", since = "1.0.0")] impl Ord for Ipv6Addr { fn cmp(&self, other: &Ipv6Addr) -> Ordering { self.inner.s6_addr.cmp(&other.inner.s6_addr) } } impl AsInner<libc::in6_addr> for Ipv6Addr { fn as_inner(&self) -> &libc::in6_addr { &self.inner } } impl FromInner<libc::in6_addr> for Ipv6Addr { fn from_inner(addr: libc::in6_addr) -> Ipv6Addr { Ipv6Addr { inner: addr } } } // Tests for this module #[cfg(test)] mod tests { use prelude::v1::*; use io; use net::*; use net::Ipv6MulticastScope::*; use net::test::{tsa, sa6, sa4}; #[test] fn test_from_str_ipv4() { assert_eq!(Ok(Ipv4Addr::new(127, 0, 0, 1)), "127.0.0.1".parse()); assert_eq!(Ok(Ipv4Addr::new(255, 255, 255, 255)), "255.255.255.255".parse()); assert_eq!(Ok(Ipv4Addr::new(0, 0, 0, 0)), "0.0.0.0".parse()); // out of range let none: Option<Ipv4Addr> = "256.0.0.1".parse().ok(); assert_eq!(None, none); // too short let none: Option<Ipv4Addr> = "255.0.0".parse().ok(); assert_eq!(None, none); // too long let none: Option<Ipv4Addr> = "255.0.0.1.2".parse().ok(); assert_eq!(None, none); // no number between dots let none: Option<Ipv4Addr> = "255.0..1".parse().ok(); assert_eq!(None, none); } #[test] fn test_from_str_ipv6() { assert_eq!(Ok(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 0)), 
"0:0:0:0:0:0:0:0".parse()); assert_eq!(Ok(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1)), "0:0:0:0:0:0:0:1".parse()); assert_eq!(Ok(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1)), "::1".parse()); assert_eq!(Ok(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 0)), "::".parse()); assert_eq!(Ok(Ipv6Addr::new(0x2a02, 0x6b8, 0, 0, 0, 0, 0x11, 0x11)), "2a02:6b8::11:11".parse()); // too long group let none: Option<Ipv6Addr> = "::00000".parse().ok(); assert_eq!(None, none); // too short let none: Option<Ipv6Addr> = "1:2:3:4:5:6:7".parse().ok(); assert_eq!(None, none); // too long let none: Option<Ipv6Addr> = "1:2:3:4:5:6:7:8:9".parse().ok(); assert_eq!(None, none); // triple colon let none: Option<Ipv6Addr> = "1:2:::6:7:8".parse().ok(); assert_eq!(None, none); // two double colons let none: Option<Ipv6Addr> = "1:2::6::8".parse().ok(); assert_eq!(None, none); } #[test] fn test_from_str_ipv4_in_ipv6() { assert_eq!(Ok(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 49152, 545)), "::192.0.2.33".parse()); assert_eq!(Ok(Ipv6Addr::new(0, 0, 0, 0, 0, 0xFFFF, 49152, 545)), "::FFFF:192.0.2.33".parse()); assert_eq!(Ok(Ipv6Addr::new(0x64, 0xff9b, 0, 0, 0, 0, 49152, 545)), "64:ff9b::192.0.2.33".parse()); assert_eq!(Ok(Ipv6Addr::new(0x2001, 0xdb8, 0x122, 0xc000, 0x2, 0x2100, 49152, 545)), "2001:db8:122:c000:2:2100:192.0.2.33".parse()); // colon after v4 let none: Option<Ipv4Addr> = "::127.0.0.1:".parse().ok(); assert_eq!(None, none); // not enough groups let none: Option<Ipv6Addr> = "1.2.3.4.5:127.0.0.1".parse().ok(); assert_eq!(None, none); // too many groups let none: Option<Ipv6Addr> = "1.2.3.4.5:6:7:127.0.0.1".parse().ok(); assert_eq!(None, none); } #[test] fn test_from_str_socket_addr() { assert_eq!(Ok(sa4(Ipv4Addr::new(77, 88, 21, 11), 80)), "77.88.21.11:80".parse()); assert_eq!(Ok(sa6(Ipv6Addr::new(0x2a02, 0x6b8, 0, 1, 0, 0, 0, 1), 53)), "[2a02:6b8:0:1::1]:53".parse()); assert_eq!(Ok(sa6(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0x7F00, 1), 22)), "[::127.0.0.1]:22".parse()); // without port let none: Option<SocketAddr> = 
"127.0.0.1".parse().ok(); assert_eq!(None, none); // without port let none: Option<SocketAddr> = "127.0.0.1:".parse().ok(); assert_eq!(None, none); // wrong brackets around v4 let none: Option<SocketAddr> = "[127.0.0.1]:22".parse().ok(); assert_eq!(None, none); // port out of range let none: Option<SocketAddr> = "127.0.0.1:123456".parse().ok(); assert_eq!(None, none); } #[test] fn ipv6_addr_to_string() { // ipv4-mapped address let a1 = Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0xc000, 0x280); assert_eq!(a1.to_string(), "::ffff:192.0.2.128"); // ipv4-compatible address let a1 = Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0xc000, 0x280); assert_eq!(a1.to_string(), "::192.0.2.128"); // v6 address with no zero segments assert_eq!(Ipv6Addr::new(8, 9, 10, 11, 12, 13, 14, 15).to_string(), "8:9:a:b:c:d:e:f"); // reduce a single run of zeros assert_eq!("ae::ffff:102:304", Ipv6Addr::new(0xae, 0, 0, 0, 0, 0xffff, 0x0102, 0x0304).to_string()); // don't reduce just a single zero segment assert_eq!("1:2:3:4:5:6:0:8", Ipv6Addr::new(1, 2, 3, 4, 5, 6, 0, 8).to_string()); // 'any' address assert_eq!("::", Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 0).to_string()); // loopback address assert_eq!("::1", Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1).to_string()); // ends in zeros assert_eq!("1::", Ipv6Addr::new(1, 0, 0, 0, 0, 0, 0, 0).to_string()); // two runs of zeros, second one is longer assert_eq!("1:0:0:4::8", Ipv6Addr::new(1, 0, 0, 4, 0, 0, 0, 8).to_string()); // two runs of zeros, equal length assert_eq!("1::4:5:0:0:8", Ipv6Addr::new(1, 0, 0, 4, 5, 0, 0, 8).to_string()); } #[test] fn ipv4_to_ipv6() { assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0x1234, 0x5678), Ipv4Addr::new(0x12, 0x34, 0x56, 0x78).to_ipv6_mapped()); assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0x1234, 0x5678), Ipv4Addr::new(0x12, 0x34, 0x56, 0x78).to_ipv6_compatible()); } #[test] fn ipv6_to_ipv4() { assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0x1234, 0x5678).to_ipv4(), Some(Ipv4Addr::new(0x12, 0x34, 0x56, 0x78))); 
assert_eq!(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0x1234, 0x5678).to_ipv4(), Some(Ipv4Addr::new(0x12, 0x34, 0x56, 0x78))); assert_eq!(Ipv6Addr::new(0, 0, 1, 0, 0, 0, 0x1234, 0x5678).to_ipv4(), None); } #[test] fn ipv4_properties() { fn check(octets: &[u8; 4], unspec: bool, loopback: bool, private: bool, link_local: bool, global: bool, multicast: bool, broadcast: bool, documentation: bool) { let ip = Ipv4Addr::new(octets[0], octets[1], octets[2], octets[3]); assert_eq!(octets, &ip.octets()); assert_eq!(ip.is_unspecified(), unspec); assert_eq!(ip.is_loopback(), loopback); assert_eq!(ip.is_private(), private); assert_eq!(ip.is_link_local(), link_local); assert_eq!(ip.is_global(), global); assert_eq!(ip.is_multicast(), multicast); assert_eq!(ip.is_broadcast(), broadcast); assert_eq!(ip.is_documentation(), documentation); } // address unspec loopbk privt linloc global multicast brdcast doc check(&[0, 0, 0, 0], true, false, false, false, true, false, false, false); check(&[0, 0, 0, 1], false, false, false, false, true, false, false, false); check(&[1, 0, 0, 0], false, false, false, false, true, false, false, false); check(&[10, 9, 8, 7], false, false, true, false, false, false, false, false); check(&[127, 1, 2, 3], false, true, false, false, false, false, false, false); check(&[172, 31, 254, 253], false, false, true, false, false, false, false, false); check(&[169, 254, 253, 242], false, false, false, true, false, false, false, false); check(&[192, 0, 2, 183], false, false, false, false, false, false, false, true); check(&[192, 1, 2, 183], false, false, false, false, true, false, false, false); check(&[192, 168, 254, 253], false, false, true, false, false, false, false, false); check(&[198, 51, 100, 0], false, false, false, false, false, false, false, true); check(&[203, 0, 113, 0], false, false, false, false, false, false, false, true); check(&[203, 2, 113, 0], false, false, false, false, true, false, false, false); check(&[224, 0, 0, 0], false, false, false, false, true, 
true, false, false); check(&[239, 255, 255, 255], false, false, false, false, true, true, false, false); check(&[255, 255, 255, 255], false, false, false, false, false, false, true, false); } #[test] fn ipv6_properties() { fn check(str_addr: &str, unspec: bool, loopback: bool, unique_local: bool, global: bool, u_link_local: bool, u_site_local: bool, u_global: bool, m_scope: Option<Ipv6MulticastScope>) { let ip: Ipv6Addr = str_addr.parse().unwrap(); assert_eq!(str_addr, ip.to_string()); assert_eq!(ip.is_unspecified(), unspec); assert_eq!(ip.is_loopback(), loopback); assert_eq!(ip.is_unique_local(), unique_local); assert_eq!(ip.is_global(), global); assert_eq!(ip.is_unicast_link_local(), u_link_local); assert_eq!(ip.is_unicast_site_local(), u_site_local); assert_eq!(ip.is_unicast_global(), u_global); assert_eq!(ip.multicast_scope(), m_scope); assert_eq!(ip.is_multicast(), m_scope.is_some()); } // unspec loopbk uniqlo global unill unisl uniglo mscope check("::", true, false, false, true, false, false, true, None); check("::1", false, true, false, false, false, false, false, None); check("::0.0.0.2", false, false, false, true, false, false, true, None); check("1::", false, false, false, true, false, false, true, None); check("fc00::", false, false, true, false, false, false, false, None); check("fdff:ffff::", false, false, true, false, false, false, false, None); check("fe80:ffff::", false, false, false, false, true, false, false, None); check("febf:ffff::", false, false, false, false, true, false, false, None); check("fec0::", false, false, false, false, false, true, false, None); check("ff01::", false, false, false, false, false, false, false, Some(InterfaceLocal)); check("ff02::", false, false, false, false, false, false, false, Some(LinkLocal)); check("ff03::", false, false, false, false, false, false, false, Some(RealmLocal)); check("ff04::", false, false, false, false, false, false, false, Some(AdminLocal)); check("ff05::", false, false, false, false, false, 
false, false, Some(SiteLocal)); check("ff08::", false, false, false, false, false, false, false, Some(OrganizationLocal)); check("ff0e::", false, false, false, true, false, false, false, Some(Global)); } #[test] fn to_socket_addr_socketaddr() { let a = sa4(Ipv4Addr::new(77, 88, 21, 11), 12345); assert_eq!(Ok(vec![a]), tsa(a)); } #[test] fn test_ipv4_to_int() { let a = Ipv4Addr::new(127, 0, 0, 1); assert_eq!(u32::from(a), 2130706433); } #[test] fn test_int_to_ipv4() { let a = Ipv4Addr::new(127, 0, 0, 1); assert_eq!(Ipv4Addr::from(2130706433), a); } }<|fim▁end|>
1 => Some(Ipv6MulticastScope::InterfaceLocal), 2 => Some(Ipv6MulticastScope::LinkLocal),
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>extern crate rustc_serialize; use std::fs::File; use std::io::prelude::*; use rustc_serialize::hex::{FromHex, ToHex}; fn xor_hex_strings(str1: &str, str2: &str) -> Option<String> { if str1.len() != str2.len() { None } else { let bytes1 = str1.from_hex().expect("could not parse hex string"); let bytes2 = str2.from_hex().expect("could not parse hex string"); let mut vec = Vec::new(); for (a, b) in bytes1.iter().zip(bytes2) { vec.push(a ^ b); } Some(vec.to_hex()) } } fn score_phrase(phrase: &str, score: &mut f64) { for letter in phrase.to_lowercase().chars() { match letter { 'a' => *score += 8.167, 'b' => *score += 1.492, 'c' => *score += 2.782, 'd' => *score += 4.253, 'e' => *score += 12.702, 'f' => *score += 2.228, 'g' => *score += 2.015, 'h' => *score += 6.094, 'i' => *score += 6.966, 'j' => *score += 0.153, 'k' => *score += 0.772, 'l' => *score += 4.025, 'm' => *score += 2.406, 'n' => *score += 6.749, 'o' => *score += 7.507, 'p' => *score += 1.929, 'q' => *score += 0.095, 'r' => *score += 5.987, 's' => *score += 6.327, 't' => *score += 9.056, 'u' => *score += 2.758, 'v' => *score += 0.978, 'w' => *score += 2.360, 'x' => *score += 0.150, 'y' => *score += 1.974, 'z' => *score += 0.074, _ => *score -= 20.0, } } } fn find_best_phrase(phrases: &[String]) -> String { let mut score_vec = vec![0.0; phrases.len()]; for (phrase, mut score) in phrases.iter().zip(score_vec.iter_mut()) { score_phrase(phrase, &mut score); } let max = score_vec.iter().map(|x| *x as i64).max().unwrap(); let pos = score_vec.iter().position(|x| (*x as i64) == max).unwrap(); phrases[pos].to_string() } fn find_phrase(phrase: &str) -> Option<String> { let mut vec = Vec::new(); for i in 0..u8::max_value() { let v = vec![i; phrase.len() / 2]; // Divide by 2 since 2 characters is 1 u8 match xor_hex_strings(phrase, v.to_hex().as_str()) { Some(x) => match String::from_utf8(x.from_hex().unwrap()) { Ok(y) => vec.push(y), Err(_) => continue, }, None => 
continue, } } if vec.is_empty() { None } else { Some(find_best_phrase(&vec)) } } fn main() { let mut input = File::open("4.txt").expect("file not found"); let mut content = String::new(); input.read_to_string(&mut content).expect("couldn't read file"); <|fim▁hole|> let potential_phrases: Vec<String> = lines.iter().map(|x| find_phrase(x)).filter(|x| x.is_some()).map(|x| x.unwrap()).collect(); println!("{}", find_best_phrase(&potential_phrases)); }<|fim▁end|>
let lines: Vec<&str> = content.lines().collect();
<|file_name|>post.js<|end_file_name|><|fim▁begin|>import React, { PureComponent } from "react"; import { graphql } from 'gatsby' import Link from "gatsby-link"; import path from "ramda/src/path"; import ScrollReveal from "scrollreveal"; import Layout from "../components/Layout"; import "prismjs/themes/prism.css"; import styles from "./post.module.scss"; const getPost = path(["data", "markdownRemark"]); const getContext = path(["pageContext"]); const PostNav = ({ prev, next }) => ( <div className={styles.postNav}> {prev && ( <Link to={`/${prev.fields.slug}`}> 上一篇: {prev.frontmatter.title} </Link> )} {next && ( <Link to={`/${next.fields.slug}`}> 下一篇: {next.frontmatter.title} </Link> )} </div> ); export default class Post extends PureComponent { componentDidMount() { ScrollReveal().reveal(".article-header>h1", { delay: 500, useDelay: "onload", reset: true, origin: "top", distance: "120px" }); ScrollReveal().reveal(".article-content", { delay: 500, useDelay: "onload", reset: true, origin: "bottom", distance: "120px" }); } componentWillUnmount() { ScrollReveal().destroy(); } render() { const post = getPost(this.props); const { next, prev } = getContext(this.props); // Not to be confused with react context... return (<|fim▁hole|> <h1>{post.frontmatter.title}</h1> </header> <div className="article-content" dangerouslySetInnerHTML={{ __html: post.html }} /> <PostNav prev={prev} next={next} /> </Layout> ); } } export const query = graphql` query BlogPostQuery($slug: String!) { markdownRemark(fields: { slug: { eq: $slug } }) { html frontmatter { title } } } `;<|fim▁end|>
<Layout> <header className="article-header">
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014, Fundacion Dr. Manuel Sadosky # All rights reserved. # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # 1. Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer.<|fim▁hole|># and/or other materials provided with the distribution. # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. from __future__ import absolute_import from .arch import *<|fim▁end|>
# 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation
<|file_name|>IOrdersManager.java<|end_file_name|><|fim▁begin|>package com.senla.bolkunets.autoservice.api.managers; import java.util.Comparator; import java.util.Date; import java.util.List; import com.senla.bolkunets.autoservice.api.beans.IOrder; import com.senla.bolkunets.autoservice.api.enums.OrderStatus; public interface IOrdersManager { List<IOrder> getOrders(); void addOrder(IOrder order); boolean changeOrderStatus(long id, OrderStatus orderStatus); List<IOrder> getSortedListProgressOrders(Comparator<IOrder> orderComparator); <|fim▁hole|> IOrder getOrder(long idMaster); void setGarageToOrder(long idGarage, long idOrder); void removeGarageFromOrder(long idOrder); boolean addMasterToOrder(long idMaster, long idOrder); void removeMasterFromOrder(long idMaster, long idOrder); List<IOrder> getOrders(OrderStatus status, Date dateLeft, Date dateRight, Comparator<IOrder> comp); public int getCountFreePlace(Date date); Date getNextFreeDate(); void shiftOrders(long orderId, int countDay); void save(); }<|fim▁end|>
List<IOrder> getSortedListAllOrders(Comparator<IOrder> orderComparator);
<|file_name|>rate_limit_transport.go<|end_file_name|><|fim▁begin|>// Copyright 2016-2019 The Libsacloud Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package api import ( "go.uber.org/ratelimit" "net/http" "sync" ) // RateLimitRoundTripper 秒間アクセス数を制限するためのhttp.RoundTripper実装 type RateLimitRoundTripper struct { // Transport 親となるhttp.RoundTripper、nilの場合http.DefaultTransportが利用される Transport http.RoundTripper // RateLimitPerSec 秒あたりのリクエスト数 RateLimitPerSec int once sync.Once rateLimit ratelimit.Limiter } // RoundTrip http.RoundTripperの実装 func (r *RateLimitRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) {<|fim▁hole|> }) if r.Transport == nil { r.Transport = http.DefaultTransport } r.rateLimit.Take() return r.Transport.RoundTrip(req) }<|fim▁end|>
r.once.Do(func() { r.rateLimit = ratelimit.New(r.RateLimitPerSec)
<|file_name|>Connection.cpp<|end_file_name|><|fim▁begin|>/** * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ #include <aws/events/model/Connection.h> #include <aws/core/utils/json/JsonSerializer.h> #include <utility> using namespace Aws::Utils::Json; using namespace Aws::Utils; namespace Aws { namespace CloudWatchEvents { namespace Model { Connection::Connection() : m_connectionArnHasBeenSet(false), m_nameHasBeenSet(false), m_connectionState(ConnectionState::NOT_SET), m_connectionStateHasBeenSet(false), m_stateReasonHasBeenSet(false), m_authorizationType(ConnectionAuthorizationType::NOT_SET), m_authorizationTypeHasBeenSet(false), m_creationTimeHasBeenSet(false), m_lastModifiedTimeHasBeenSet(false), m_lastAuthorizedTimeHasBeenSet(false) { } Connection::Connection(JsonView jsonValue) : m_connectionArnHasBeenSet(false), m_nameHasBeenSet(false), m_connectionState(ConnectionState::NOT_SET), m_connectionStateHasBeenSet(false), m_stateReasonHasBeenSet(false), m_authorizationType(ConnectionAuthorizationType::NOT_SET), m_authorizationTypeHasBeenSet(false), m_creationTimeHasBeenSet(false), m_lastModifiedTimeHasBeenSet(false), m_lastAuthorizedTimeHasBeenSet(false) { *this = jsonValue; } Connection& Connection::operator =(JsonView jsonValue) { if(jsonValue.ValueExists("ConnectionArn")) {<|fim▁hole|> m_connectionArn = jsonValue.GetString("ConnectionArn"); m_connectionArnHasBeenSet = true; } if(jsonValue.ValueExists("Name")) { m_name = jsonValue.GetString("Name"); m_nameHasBeenSet = true; } if(jsonValue.ValueExists("ConnectionState")) { m_connectionState = ConnectionStateMapper::GetConnectionStateForName(jsonValue.GetString("ConnectionState")); m_connectionStateHasBeenSet = true; } if(jsonValue.ValueExists("StateReason")) { m_stateReason = jsonValue.GetString("StateReason"); m_stateReasonHasBeenSet = true; } if(jsonValue.ValueExists("AuthorizationType")) { m_authorizationType = 
ConnectionAuthorizationTypeMapper::GetConnectionAuthorizationTypeForName(jsonValue.GetString("AuthorizationType")); m_authorizationTypeHasBeenSet = true; } if(jsonValue.ValueExists("CreationTime")) { m_creationTime = jsonValue.GetDouble("CreationTime"); m_creationTimeHasBeenSet = true; } if(jsonValue.ValueExists("LastModifiedTime")) { m_lastModifiedTime = jsonValue.GetDouble("LastModifiedTime"); m_lastModifiedTimeHasBeenSet = true; } if(jsonValue.ValueExists("LastAuthorizedTime")) { m_lastAuthorizedTime = jsonValue.GetDouble("LastAuthorizedTime"); m_lastAuthorizedTimeHasBeenSet = true; } return *this; } JsonValue Connection::Jsonize() const { JsonValue payload; if(m_connectionArnHasBeenSet) { payload.WithString("ConnectionArn", m_connectionArn); } if(m_nameHasBeenSet) { payload.WithString("Name", m_name); } if(m_connectionStateHasBeenSet) { payload.WithString("ConnectionState", ConnectionStateMapper::GetNameForConnectionState(m_connectionState)); } if(m_stateReasonHasBeenSet) { payload.WithString("StateReason", m_stateReason); } if(m_authorizationTypeHasBeenSet) { payload.WithString("AuthorizationType", ConnectionAuthorizationTypeMapper::GetNameForConnectionAuthorizationType(m_authorizationType)); } if(m_creationTimeHasBeenSet) { payload.WithDouble("CreationTime", m_creationTime.SecondsWithMSPrecision()); } if(m_lastModifiedTimeHasBeenSet) { payload.WithDouble("LastModifiedTime", m_lastModifiedTime.SecondsWithMSPrecision()); } if(m_lastAuthorizedTimeHasBeenSet) { payload.WithDouble("LastAuthorizedTime", m_lastAuthorizedTime.SecondsWithMSPrecision()); } return payload; } } // namespace Model } // namespace CloudWatchEvents } // namespace Aws<|fim▁end|>
<|file_name|>bitcoin_gl.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="gl" version="2.1"> <context> <name>AboutDialog</name> <message> <location filename="../forms/aboutdialog.ui" line="+14"/> <source>About YEScoin</source> <translation type="unfinished"/> </message> <message> <location line="+39"/> <source>&lt;b&gt;YEScoin&lt;/b&gt; version</source> <translation type="unfinished"/> </message> <message> <location line="+41"/> <source>Copyright © 2009-2014 The Bitcoin developers Copyright © 2012-2014 The NovaCoin developers Copyright © 2014 The YEScoin developers</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source> This is experimental software. Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php. This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source> <translation> Isto é software experimental. Distribuído baixo a licencia de software MIT/X11, véxase o arquivo que acompaña COPYING ou http://www.opensource.org/licenses/mit-license.php. 
Este produto inclúe software desenvolvido polo OpenSSL Project para o uso no OpenSSL Toolkit (http://www.openssl.org/) e software criptográfico escrito por Eric Young ([email protected]) e software UPnP escrito por Thomas Bernard.</translation> </message> </context> <context> <name>AddressBookPage</name> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>Address Book</source> <translation type="unfinished"/> </message> <message> <location line="+22"/> <source>Double-click to edit address or label</source> <translation>Doble click para editar a dirección ou a etiqueta</translation> </message> <message> <location line="+27"/> <source>Create a new address</source> <translation>Crear unha nova dirección</translation> </message> <message> <location line="+14"/> <source>Copy the currently selected address to the system clipboard</source> <translation>Copiar a dirección seleccionada ao cartafol</translation> </message> <message> <location line="-11"/> <source>&amp;New Address</source> <translation type="unfinished"/> </message> <message> <location line="-46"/> <source>These are your YEScoin addresses for receiving payments. 
You may want to give a different one to each sender so you can keep track of who is paying you.</source> <translation type="unfinished"/> </message> <message> <location line="+60"/> <source>&amp;Copy Address</source> <translation>&amp;Copiar Dirección</translation> </message> <message> <location line="+11"/> <source>Show &amp;QR Code</source> <translation type="unfinished"/> </message> <message> <location line="+11"/> <source>Sign a message to prove you own a YEScoin address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation type="unfinished"/> </message> <message> <location line="+25"/> <source>Delete the currently selected address from the list</source> <translation>Borrar a dirección actualmente seleccionada da listaxe</translation> </message> <message> <location line="-14"/> <source>Verify a message to ensure it was signed with a specified YEScoin address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Verify Message</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>&amp;Delete</source> <translation>&amp;Borrar</translation> </message> <message> <location filename="../addressbookpage.cpp" line="+65"/> <source>Copy &amp;Label</source> <translation>Copiar &amp;Etiqueta</translation> </message> <message> <location line="+2"/> <source>&amp;Edit</source> <translation>&amp;Modificar</translation> </message> <message> <location line="+250"/> <source>Export Address Book Data</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Arquivo separado por comas (*.csv)</translation> </message> <message> <location line="+13"/> <source>Error exporting</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation type="unfinished"/> 
</message> </context> <context> <name>AddressTableModel</name> <message> <location filename="../addresstablemodel.cpp" line="+144"/> <source>Label</source> <translation>Etiqueta</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Dirección</translation> </message> <message> <location line="+36"/> <source>(no label)</source> <translation>(sen etiqueta)</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <location filename="../forms/askpassphrasedialog.ui" line="+26"/> <source>Passphrase Dialog</source> <translation>Diálogo de Contrasinal</translation> </message> <message> <location line="+21"/> <source>Enter passphrase</source> <translation>Introduce contrasinal</translation> </message> <message> <location line="+14"/> <source>New passphrase</source> <translation>Novo contrasinal</translation> </message> <message> <location line="+14"/> <source>Repeat new passphrase</source> <translation>Repite novo contrasinal</translation> </message> <message> <location line="+33"/> <source>Serves to disable the trivial sendmoney when OS account compromised. 
Provides no real security.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>For staking only</source> <translation type="unfinished"/> </message> <message> <location filename="../askpassphrasedialog.cpp" line="+35"/> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;10 or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation>Introduce o novo contrasinal ao moedeiro.&lt;br/&gt;Por favor empregue un contrasinal de &lt;b&gt;10 ou máis caracteres aleatorios&lt;/b&gt;, ou &lt;b&gt;oito ou máis palabras&lt;/b&gt;.</translation> </message> <message> <location line="+1"/> <source>Encrypt wallet</source> <translation>Encriptar moedeiro</translation> </message> <message> <location line="+7"/> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation>Esta operación precisa o contrasinal do teu moedeiro para desbloquear o moedeiro.</translation> </message> <message> <location line="+5"/> <source>Unlock wallet</source> <translation>Desbloquear moedeiro</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation>Esta operación precisa o contrasinal do teu moedeiro para desencriptar o moedeiro.</translation> </message> <message> <location line="+5"/> <source>Decrypt wallet</source> <translation>Desencriptar moedeiro</translation> </message> <message> <location line="+3"/> <source>Change passphrase</source> <translation>Cambiar contrasinal</translation> </message> <message> <location line="+1"/> <source>Enter the old and new passphrase to the wallet.</source> <translation>Introduce o vello e novo contrasinais no moedeiro.</translation> </message> <message> <location line="+46"/> <source>Confirm wallet encryption</source> <translation>Confirmar encriptación de moedeiro</translation> </message> <message> <location 
line="+1"/> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR COINS&lt;/b&gt;!</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Are you sure you wish to encrypt your wallet?</source> <translation>Estás seguro de que desexas encriptar o teu moedeiro?</translation> </message> <message> <location line="+15"/> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation>IMPORTANTE: Calquera copia de seguridade previa que fixeses do teu arquivo de moedeiro debería ser substituída polo recén xerado arquivo encriptado de moedeiro. Por razóns de seguridade, as copias de seguridade previas de un arquivo de moedeiro desencriptado tornaránse inútiles no momento no que comeces a emprega-lo novo, encriptado, moedeiro.</translation> </message> <message> <location line="+103"/> <location line="+24"/> <source>Warning: The Caps Lock key is on!</source> <translation>Precaución: A tecla de Bloqueo de Maiúsculas está activada!</translation> </message> <message> <location line="-133"/> <location line="+60"/> <source>Wallet encrypted</source> <translation>Moedeiro encriptado</translation> </message> <message> <location line="-58"/> <source>YEScoin will close now to finish the encryption process. 
Remember that encrypting your wallet cannot fully protect your coins from being stolen by malware infecting your computer.</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <location line="+7"/> <location line="+44"/> <location line="+6"/> <source>Wallet encryption failed</source> <translation>Encriptación de moedeiro fallida</translation> </message> <message> <location line="-56"/> <source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source> <translation>A encriptación do moedeiro fallou por mor dun erro interno. O teu moedeiro non foi encriptado.</translation> </message> <message> <location line="+7"/> <location line="+50"/> <source>The supplied passphrases do not match.</source> <translation>Os contrasinais suministrados non coinciden.</translation> </message> <message> <location line="-38"/> <source>Wallet unlock failed</source> <translation>Desbloqueo de moedeiro fallido</translation> </message> <message> <location line="+1"/> <location line="+12"/> <location line="+19"/> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation>O contrasinal introducido para a desencriptación do moedeiro foi incorrecto.</translation> </message> <message> <location line="-20"/> <source>Wallet decryption failed</source> <translation>Desencriptación de moedeiro fallida</translation> </message> <message> <location line="+14"/> <source>Wallet passphrase was successfully changed.</source> <translation>Cambiouse con éxito o contrasinal do moedeiro.</translation> </message> </context> <context> <name>BitcoinGUI</name> <message> <location filename="../bitcoingui.cpp" line="+282"/> <source>Sign &amp;message...</source> <translation>&amp;Asinar mensaxe...</translation> </message> <message> <location line="+251"/> <source>Synchronizing with network...</source> <translation>Sincronizando coa rede...</translation> </message> <message> <location line="-319"/> 
<source>&amp;Overview</source> <translation>&amp;Vista xeral</translation> </message> <message> <location line="+1"/> <source>Show general overview of wallet</source> <translation>Amosar vista xeral do moedeiro</translation> </message> <message> <location line="+17"/> <source>&amp;Transactions</source> <translation>&amp;Transacciones</translation> </message> <message> <location line="+1"/> <source>Browse transaction history</source> <translation>Navegar historial de transaccións</translation> </message> <message> <location line="+5"/> <source>&amp;Address Book</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Edit the list of stored addresses and labels</source> <translation type="unfinished"/> </message> <message> <location line="-13"/> <source>&amp;Receive coins</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Show the list of addresses for receiving payments</source> <translation type="unfinished"/> </message> <message> <location line="-7"/> <source>&amp;Send coins</source> <translation type="unfinished"/> </message> <message> <location line="+35"/> <source>E&amp;xit</source> <translation>&amp;Saír</translation> </message> <message> <location line="+1"/> <source>Quit application</source> <translation>Saír da aplicación</translation> </message> <message> <location line="+6"/> <source>Show information about YEScoin</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>About &amp;Qt</source> <translation>Acerca de &amp;Qt</translation> </message> <message> <location line="+1"/> <source>Show information about Qt</source> <translation>Amosar información acerca de Qt</translation> </message> <message> <location line="+2"/> <source>&amp;Options...</source> <translation>&amp;Opcións...</translation> </message> <message> <location line="+4"/> <source>&amp;Encrypt Wallet...</source> <translation>&amp;Encriptar Moedeiro...</translation> </message> 
<message> <location line="+3"/> <source>&amp;Backup Wallet...</source> <translation>Copia de &amp;Seguridade do Moedeiro...</translation> </message> <message> <location line="+2"/> <source>&amp;Change Passphrase...</source> <translation>&amp;Cambiar contrasinal...</translation> </message> <message numerus="yes"> <location line="+259"/> <source>~%n block(s) remaining</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+6"/> <source>Downloaded %1 of %2 blocks of transaction history (%3% done).</source> <translation type="unfinished"/> </message> <message> <location line="-256"/> <source>&amp;Export...</source> <translation type="unfinished"/> </message> <message> <location line="-64"/> <source>Send coins to a YEScoin address</source> <translation type="unfinished"/> </message> <message> <location line="+47"/> <source>Modify configuration options for YEScoin</source> <translation type="unfinished"/> </message> <message> <location line="+18"/> <source>Export the data in the current tab to a file</source> <translation type="unfinished"/> </message> <message> <location line="-14"/> <source>Encrypt or decrypt wallet</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Backup wallet to another location</source> <translation>Facer copia de seguridade do moedeiro noutra localización</translation> </message> <message> <location line="+2"/> <source>Change the passphrase used for wallet encryption</source> <translation>Cambiar o contrasinal empregado para a encriptación do moedeiro</translation> </message> <message> <location line="+10"/> <source>&amp;Debug window</source> <translation>Ventana de &amp;Depuración</translation> </message> <message> <location line="+1"/> <source>Open debugging and diagnostic console</source> <translation>Abrir consola de depuración e diagnóstico</translation> </message> <message> <location line="-5"/> 
<source>&amp;Verify message...</source> <translation>&amp;Verificar mensaxe...</translation> </message> <message> <location line="-202"/> <source>YEScoin</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Wallet</source> <translation>Moedeiro</translation> </message> <message> <location line="+180"/> <source>&amp;About YEScoin</source> <translation type="unfinished"/> </message> <message> <location line="+9"/> <source>&amp;Show / Hide</source> <translation>&amp;Amosar/Agachar</translation> </message> <message> <location line="+9"/> <source>Unlock wallet</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>&amp;Lock Wallet</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Lock wallet</source> <translation type="unfinished"/> </message> <message> <location line="+35"/> <source>&amp;File</source> <translation>&amp;Arquivo</translation> </message> <message> <location line="+8"/> <source>&amp;Settings</source> <translation>Axus&amp;tes</translation> </message> <message> <location line="+8"/> <source>&amp;Help</source> <translation>A&amp;xuda</translation> </message> <message> <location line="+12"/> <source>Tabs toolbar</source> <translation>Barra de ferramentas</translation> </message> <message> <location line="+8"/> <source>Actions toolbar</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <location line="+9"/> <source>[testnet]</source> <translation>[testnet]</translation> </message> <message> <location line="+0"/> <location line="+60"/> <source>YEScoin client</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="+75"/> <source>%n active connection(s) to YEScoin network</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+40"/> <source>Downloaded %1 blocks of transaction 
history.</source> <translation type="unfinished"/> </message> <message> <location line="+413"/> <source>Staking.&lt;br&gt;Your weight is %1&lt;br&gt;Network weight is %2&lt;br&gt;Expected time to earn reward is %3</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Not staking because wallet is locked</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Not staking because wallet is offline</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Not staking because wallet is syncing</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Not staking because you don&apos;t have mature coins</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="-403"/> <source>%n second(s) ago</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="-312"/> <source>About YEScoin card</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Show information about YEScoin card</source> <translation type="unfinished"/> </message> <message> <location line="+18"/> <source>&amp;Unlock Wallet...</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="+297"/> <source>%n minute(s) ago</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n hour(s) ago</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n day(s) ago</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+6"/> <source>Up to date</source> 
<translation>Actualizado</translation> </message> <message> <location line="+7"/> <source>Catching up...</source> <translation>Poñendo ao día...</translation> </message> <message> <location line="+10"/> <source>Last received block was generated %1.</source> <translation type="unfinished"/> </message> <message> <location line="+59"/> <source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Confirm transaction fee</source> <translation type="unfinished"/> </message> <message> <location line="+27"/> <source>Sent transaction</source> <translation>Transacción enviada</translation> </message> <message> <location line="+1"/> <source>Incoming transaction</source> <translation>Transacción entrante</translation> </message> <message> <location line="+1"/> <source>Date: %1 Amount: %2 Type: %3 Address: %4 </source> <translation>Data: %1 Cantidade: %2 Tipo: %3 Dirección: %4 </translation> </message> <message> <location line="+100"/> <location line="+15"/> <source>URI handling</source> <translation type="unfinished"/> </message> <message> <location line="-15"/> <location line="+15"/> <source>URI can not be parsed! 
This can be caused by an invalid YEScoin address or malformed URI parameters.</source> <translation type="unfinished"/> </message> <message> <location line="+18"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation>O moedeiro está &lt;b&gt;encriptado&lt;/b&gt; e actualmente &lt;b&gt;desbloqueado&lt;/b&gt;</translation> </message> <message> <location line="+10"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation>O moedeiro está &lt;b&gt;encriptado&lt;/b&gt; e actualmente &lt;b&gt;bloqueado&lt;/b&gt;</translation> </message> <message> <location line="+25"/> <source>Backup Wallet</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Wallet Data (*.dat)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Backup Failed</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>There was an error trying to save the wallet data to the new location.</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="+76"/> <source>%n second(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n minute(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n hour(s)</source> <translation><numerusform>%n hora</numerusform><numerusform>%n horas</numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n day(s)</source> <translation><numerusform>%n día</numerusform><numerusform>%n días</numerusform></translation> </message> <message> <location line="+18"/> <source>Not staking</source> <translation type="unfinished"/> </message> <message> 
<location filename="../bitcoin.cpp" line="+109"/> <source>A fatal error occurred. YEScoin can no longer continue safely and will quit.</source> <translation type="unfinished"/> </message> </context> <context> <name>ClientModel</name> <message> <location filename="../clientmodel.cpp" line="+90"/> <source>Network Alert</source> <translation>Alerta de Rede</translation> </message> </context> <context> <name>CoinControlDialog</name> <message> <location filename="../forms/coincontroldialog.ui" line="+14"/> <source>Coin Control</source> <translation type="unfinished"/> </message> <message> <location line="+31"/> <source>Quantity:</source> <translation>Cantidade:</translation> </message> <message> <location line="+32"/> <source>Bytes:</source> <translation>Bytes:</translation> </message> <message> <location line="+48"/> <source>Amount:</source> <translation>Importe:</translation> </message> <message> <location line="+32"/> <source>Priority:</source> <translation>Prioridade:</translation> </message> <message> <location line="+48"/> <source>Fee:</source> <translation>Pago:</translation> </message> <message> <location line="+35"/> <source>Low Output:</source> <translation type="unfinished"/> </message> <message> <location filename="../coincontroldialog.cpp" line="+551"/> <source>no</source> <translation>non</translation> </message> <message> <location filename="../forms/coincontroldialog.ui" line="+51"/> <source>After Fee:</source> <translation type="unfinished"/> </message> <message> <location line="+35"/> <source>Change:</source> <translation>Cambiar:</translation> </message> <message> <location line="+69"/> <source>(un)select all</source> <translation>(des)selecciona todo</translation> </message> <message> <location line="+13"/> <source>Tree mode</source> <translation>Modo árbore</translation> </message> <message> <location line="+16"/> <source>List mode</source> <translation>Modo lista</translation> </message> <message> <location line="+45"/> <source>Amount</source> 
<translation>Cantidade</translation> </message> <message> <location line="+5"/> <source>Label</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Address</source> <translation>Dirección</translation> </message> <message> <location line="+5"/> <source>Date</source> <translation>Data</translation> </message> <message> <location line="+5"/> <source>Confirmations</source> <translation>Confirmacións</translation> </message> <message> <location line="+3"/> <source>Confirmed</source> <translation>Confirmado</translation> </message> <message> <location line="+5"/> <source>Priority</source> <translation>Prioridade</translation> </message> <message> <location filename="../coincontroldialog.cpp" line="-515"/> <source>Copy address</source> <translation>Copiar dirección</translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation>Copiar etiqueta</translation> </message> <message> <location line="+1"/> <location line="+26"/> <source>Copy amount</source> <translation>Copiar cantidade</translation> </message> <message> <location line="-25"/> <source>Copy transaction ID</source> <translation>Copiar ID de transacción</translation> </message> <message> <location line="+24"/> <source>Copy quantity</source> <translation>Copiar cantidade</translation> </message> <message> <location line="+2"/> <source>Copy fee</source> <translation>Copiar pago</translation> </message> <message> <location line="+1"/> <source>Copy after fee</source> <translation>Copiar despóis do pago</translation> </message> <message> <location line="+1"/> <source>Copy bytes</source> <translation>Copiar bytes</translation> </message> <message> <location line="+1"/> <source>Copy priority</source> <translation>Copiar prioridade</translation> </message> <message> <location line="+1"/> <source>Copy low output</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Copy change</source> <translation>Copiar 
cambio</translation> </message> <message> <location line="+317"/> <source>highest</source> <translation>O máis alto</translation> </message> <message> <location line="+1"/> <source>high</source> <translation>alto</translation> </message> <message> <location line="+1"/> <source>medium-high</source> <translation>medio-alto</translation> </message> <message> <location line="+1"/> <source>medium</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>low-medium</source> <translation>medio-baixo</translation> </message> <message> <location line="+1"/> <source>low</source> <translation>baixo</translation> </message> <message> <location line="+1"/> <source>lowest</source> <translation>o máis baixo</translation> </message> <message> <location line="+155"/> <source>DUST</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>yes</source> <translation>Si</translation> </message> <message> <location line="+10"/> <source>This label turns red, if the transaction size is bigger than 10000 bytes. This means a fee of at least %1 per kb is required. Can vary +/- 1 Byte per input.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Transactions with higher priority get more likely into a block. This label turns red, if the priority is smaller than &quot;medium&quot;. This means a fee of at least %1 per kb is required.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>This label turns red, if any recipient receives an amount smaller than %1. This means a fee of at least %2 is required. Amounts below 0.546 times the minimum relay fee are shown as DUST.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>This label turns red, if the change is smaller than %1. 
This means a fee of at least %2 is required.</source> <translation type="unfinished"/> </message> <message> <location line="+37"/> <location line="+66"/> <source>(no label)</source> <translation>(sen etiqueta)</translation> </message> <message> <location line="-9"/> <source>change from %1 (%2)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>(change)</source> <translation>(cambio)</translation> </message> </context> <context> <name>EditAddressDialog</name> <message> <location filename="../forms/editaddressdialog.ui" line="+14"/> <source>Edit Address</source> <translation>Modificar Dirección</translation> </message> <message> <location line="+11"/> <source>&amp;Label</source> <translation>&amp;Etiqueta</translation> </message> <message> <location line="+10"/> <source>The label associated with this address book entry</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>&amp;Address</source> <translation>&amp;Dirección</translation> </message> <message> <location line="+10"/> <source>The address associated with this address book entry. 
This can only be modified for sending addresses.</source> <translation type="unfinished"/> </message> <message> <location filename="../editaddressdialog.cpp" line="+20"/> <source>New receiving address</source> <translation>Nova dirección para recibir</translation> </message> <message> <location line="+4"/> <source>New sending address</source> <translation>Nova dirección para enviar</translation> </message> <message> <location line="+3"/> <source>Edit receiving address</source> <translation>Modificar dirección para recibir</translation> </message> <message> <location line="+4"/> <source>Edit sending address</source> <translation>Modificar dirección para enviar</translation> </message> <message> <location line="+76"/> <source>The entered address &quot;%1&quot; is already in the address book.</source> <translation>A dirección introducida &quot;%1&quot; xa está no libro de direccións.</translation> </message> <message> <location line="-5"/> <source>The entered address &quot;%1&quot; is not a valid YEScoin address.</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>Could not unlock wallet.</source> <translation>Non se puido desbloquear o moedeiro.</translation> </message> <message> <location line="+5"/> <source>New key generation failed.</source> <translation>A xeración de nova clave fallou.</translation> </message> </context> <context> <name>GUIUtil::HelpMessageBox</name> <message> <location filename="../guiutil.cpp" line="+420"/> <location line="+12"/> <source>YEScoin-Qt</source> <translation type="unfinished"/> </message> <message> <location line="-12"/> <source>version</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Usage:</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>command-line options</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>UI options</source> <translation type="unfinished"/> 
</message> <message> <location line="+1"/> <source>Set language, for example &quot;de_DE&quot; (default: system locale)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Start minimized</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Show splash screen on startup (default: 1)</source> <translation type="unfinished"/> </message> </context> <context> <name>OptionsDialog</name> <message> <location filename="../forms/optionsdialog.ui" line="+14"/> <source>Options</source> <translation>Opcións</translation> </message> <message> <location line="+16"/> <source>&amp;Main</source> <translation>&amp;Principal</translation> </message> <message> <location line="+6"/> <source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB. Fee 0.01 recommended.</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>Pay transaction &amp;fee</source> <translation>Pagar &amp;tarifa da transacción</translation> </message> <message> <location line="+31"/> <source>Reserved amount does not participate in staking and is therefore spendable at any time.</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>Reserve</source> <translation type="unfinished"/> </message> <message> <location line="+31"/> <source>Automatically start YEScoin after logging in to the system.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Start YEScoin on system login</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Detach block and address databases at shutdown. This means they can be moved to another data directory, but it slows down shutdown. 
The wallet is always detached.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Detach databases at shutdown</source> <translation type="unfinished"/> </message> <message> <location line="+21"/> <source>&amp;Network</source> <translation>&amp;Rede</translation> </message> <message> <location line="+6"/> <source>Automatically open the YEScoin client port on the router. This only works when your router supports UPnP and it is enabled.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Map port using &amp;UPnP</source> <translation>Mapear porto empregando &amp;UPnP</translation> </message> <message> <location line="+7"/> <source>Connect to the YEScoin network through a SOCKS proxy (e.g. when connecting through Tor).</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Connect through SOCKS proxy:</source> <translation type="unfinished"/> </message> <message> <location line="+9"/> <source>Proxy &amp;IP:</source> <translation>&amp;IP do Proxy:</translation> </message> <message> <location line="+19"/> <source>IP address of the proxy (e.g. 127.0.0.1)</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>&amp;Port:</source> <translation>&amp;Porto:</translation> </message> <message> <location line="+19"/> <source>Port of the proxy (e.g. 9050)</source> <translation>Porto do proxy (exemplo: 9050)</translation> </message> <message> <location line="+7"/> <source>SOCKS &amp;Version:</source> <translation>&amp;Version de SOCKS:</translation> </message> <message> <location line="+13"/> <source>SOCKS version of the proxy (e.g. 
5)</source> <translation>Versión SOCKS del proxy (exemplo: 5)</translation> </message> <message> <location line="+36"/> <source>&amp;Window</source> <translation>&amp;Xanela</translation> </message> <message> <location line="+6"/> <source>Show only a tray icon after minimizing the window.</source> <translation>Amosar so un icono na bandexa tras minimiza-la xanela.</translation> </message> <message> <location line="+3"/> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation>&amp;Minimizar á bandexa en lugar de á barra de tarefas.</translation> </message> <message> <location line="+7"/> <source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source> <translation>Minimizar en lugar de saír da aplicación cando se pecha a xanela. Cando se habilita esta opción, a aplicación so se pechará tras seleccionar Saír no menú.</translation> </message> <message> <location line="+3"/> <source>M&amp;inimize on close</source> <translation>M&amp;inimizar ao pechar</translation> </message> <message> <location line="+21"/> <source>&amp;Display</source> <translation>&amp;Visualización</translation> </message> <message> <location line="+8"/> <source>User Interface &amp;language:</source> <translation>&amp;Linguaxe de interface de usuario:</translation> </message> <message> <location line="+13"/> <source>The user interface language can be set here. 
This setting will take effect after restarting YEScoin.</source> <translation type="unfinished"/> </message> <message> <location line="+11"/> <source>&amp;Unit to show amounts in:</source> <translation>&amp;Unidade na que amosar as cantidades:</translation> </message> <message> <location line="+13"/> <source>Choose the default subdivision unit to show in the interface and when sending coins.</source> <translation>Escolle a unidade de subdivisión por defecto para amosar na interface e ao enviar moedas.</translation> </message> <message> <location line="+9"/> <source>Whether to show YEScoin addresses in the transaction list or not.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Display addresses in transaction list</source> <translation>&amp;Visualizar direccións na listaxe de transaccións</translation> </message> <message> <location line="+7"/> <source>Whether to show coin control features or not.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Display coin &amp;control features (experts only!)</source> <translation type="unfinished"/> </message> <message> <location line="+71"/> <source>&amp;OK</source> <translation>&amp;OK</translation> </message> <message> <location line="+7"/> <source>&amp;Cancel</source> <translation>&amp;Cancelar</translation> </message> <message> <location line="+10"/> <source>&amp;Apply</source> <translation type="unfinished"/> </message> <message> <location filename="../optionsdialog.cpp" line="+55"/> <source>default</source> <translation>por defecto</translation> </message> <message> <location line="+149"/> <location line="+9"/> <source>Warning</source> <translation type="unfinished"/> </message> <message> <location line="-9"/> <location line="+9"/> <source>This setting will take effect after restarting YEScoin.</source> <translation type="unfinished"/> </message> <message> <location line="+29"/> <source>The supplied proxy address is 
invalid.</source> <translation>A dirección de proxy suministrada é inválida.</translation> </message> </context> <context> <name>OverviewPage</name> <message> <location filename="../forms/overviewpage.ui" line="+14"/> <source>Form</source> <translation>Formulario</translation> </message> <message> <location line="+33"/> <location line="+231"/> <source>The displayed information may be out of date. Your wallet automatically synchronizes with the YEScoin network after a connection is established, but this process has not completed yet.</source> <translation type="unfinished"/> </message> <message> <location line="-160"/> <source>Stake:</source> <translation type="unfinished"/> </message> <message> <location line="+29"/> <source>Unconfirmed:</source> <translation>Sen confirmar:</translation> </message> <message> <location line="-107"/> <source>Wallet</source> <translation>Moedeiro</translation> </message> <message> <location line="+49"/> <source>Spendable:</source> <translation type="unfinished"/> </message> <message> <location line="+16"/> <source>Your current spendable balance</source> <translation>O teu balance actualmente dispoñible</translation> </message> <message> <location line="+71"/> <source>Immature:</source> <translation>Inmaduro:</translation> </message> <message> <location line="+13"/> <source>Mined balance that has not yet matured</source> <translation>O balance minado todavía non madurou</translation> </message> <message> <location line="+20"/> <source>Total:</source> <translation>Total:</translation> </message> <message> <location line="+16"/> <source>Your current total balance</source> <translation>O teu balance actual total</translation> </message> <message> <location line="+46"/> <source>&lt;b&gt;Recent transactions&lt;/b&gt;</source> <translation>&lt;b&gt;Transaccións recentes&lt;/b&gt;</translation> </message> <message> <location line="-108"/> <source>Total of transactions that have yet to be confirmed, and do not yet count toward the current 
balance</source> <translation type="unfinished"/> </message> <message> <location line="-29"/> <source>Total of coins that was staked, and do not yet count toward the current balance</source> <translation type="unfinished"/> </message> <message> <location filename="../overviewpage.cpp" line="+113"/> <location line="+1"/> <source>out of sync</source> <translation>non sincronizado</translation> </message> </context> <context> <name>QRCodeDialog</name> <message> <location filename="../forms/qrcodedialog.ui" line="+14"/> <source>QR Code Dialog</source> <translation type="unfinished"/> </message> <message> <location line="+59"/> <source>Request Payment</source> <translation type="unfinished"/> </message> <message> <location line="+56"/> <source>Amount:</source> <translation type="unfinished"/> </message> <message> <location line="-44"/> <source>Label:</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>Message:</source> <translation type="unfinished"/> </message> <message> <location line="+71"/> <source>&amp;Save As...</source> <translation type="unfinished"/> </message> <message> <location filename="../qrcodedialog.cpp" line="+62"/> <source>Error encoding URI into QR Code.</source> <translation type="unfinished"/> </message> <message> <location line="+40"/> <source>The entered amount is invalid, please check.</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Resulting URI too long, try to reduce the text for label / message.</source> <translation type="unfinished"/> </message> <message> <location line="+25"/> <source>Save QR Code</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>PNG Images (*.png)</source> <translation type="unfinished"/> </message> </context> <context> <name>RPCConsole</name> <message> <location filename="../forms/rpcconsole.ui" line="+46"/> <source>Client name</source> <translation>Nome do cliente</translation> </message> 
<message> <location line="+10"/> <location line="+23"/> <location line="+26"/> <location line="+23"/> <location line="+23"/> <location line="+36"/> <location line="+53"/> <location line="+23"/> <location line="+23"/> <location filename="../rpcconsole.cpp" line="+348"/> <source>N/A</source> <translation>N/A</translation> </message> <message> <location line="-217"/> <source>Client version</source> <translation>Versión do cliente</translation> </message> <message> <location line="-45"/> <source>&amp;Information</source> <translation>&amp;Información</translation> </message> <message> <location line="+68"/> <source>Using OpenSSL version</source> <translation>Usar versión OpenSSL</translation> </message> <message> <location line="+49"/> <source>Startup time</source> <translation>Tempo de arranque</translation> </message> <message> <location line="+29"/> <source>Network</source> <translation>Rede</translation> </message> <message> <location line="+7"/> <source>Number of connections</source> <translation>Número de conexións</translation> </message> <message> <location line="+23"/> <source>On testnet</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Block chain</source> <translation>Cadea de bloques</translation> </message> <message> <location line="+7"/> <source>Current number of blocks</source> <translation>Número actual de bloques</translation> </message> <message> <location line="+23"/> <source>Estimated total blocks</source> <translation>Bloques totais estimados</translation> </message> <message> <location line="+23"/> <source>Last block time</source> <translation>Hora do último bloque</translation> </message> <message> <location line="+52"/> <source>&amp;Open</source> <translation>&amp;Abrir</translation> </message> <message> <location line="+16"/> <source>Command-line options</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Show the YEScoin-Qt help message to get a list with 
possible YEScoin command-line options.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Show</source> <translation type="unfinished"/> </message> <message> <location line="+24"/> <source>&amp;Console</source> <translation>&amp;Consola</translation> </message> <message> <location line="-260"/> <source>Build date</source> <translation>Data de construción</translation> </message> <message> <location line="-104"/> <source>YEScoin - Debug window</source> <translation type="unfinished"/> </message> <message> <location line="+25"/> <source>YEScoin Core</source> <translation type="unfinished"/> </message> <message> <location line="+279"/> <source>Debug log file</source> <translation>Arquivo de log de depuración</translation> </message> <message> <location line="+7"/> <source>Open the YEScoin debug log file from the current data directory. This can take a few seconds for large log files.</source> <translation type="unfinished"/> </message> <message> <location line="+102"/> <source>Clear console</source> <translation>Limpar consola</translation> </message> <message> <location filename="../rpcconsole.cpp" line="-33"/> <source>Welcome to the YEScoin RPC console.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Use up and down arrows to navigate history, and &lt;b&gt;Ctrl-L&lt;/b&gt; to clear screen.</source> <translation>Emprega as flechas arriba e abaixo para navegar polo historial, e &lt;b&gt;Ctrl-L&lt;/b&gt; para limpar a pantalla.</translation> </message> <message> <location line="+1"/> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation>Escribe &lt;b&gt;axuda&lt;/b&gt; para unha vista xeral dos comandos dispoñibles.</translation> </message> </context> <context> <name>SendCoinsDialog</name> <message> <location filename="../forms/sendcoinsdialog.ui" line="+14"/> <location filename="../sendcoinsdialog.cpp" line="+182"/> <location 
line="+5"/> <location line="+5"/> <location line="+5"/> <location line="+6"/> <location line="+5"/> <location line="+5"/> <source>Send Coins</source> <translation>Moedas Enviadas</translation> </message> <message> <location line="+76"/> <source>Coin Control Features</source> <translation type="unfinished"/> </message> <message> <location line="+20"/> <source>Inputs...</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>automatically selected</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>Insufficient funds!</source> <translation type="unfinished"/> </message> <message> <location line="+77"/> <source>Quantity:</source> <translation>Cantidade:</translation> </message> <message> <location line="+22"/> <location line="+35"/> <source>0</source> <translation type="unfinished"/> </message> <message> <location line="-19"/> <source>Bytes:</source> <translation>Bytes:</translation> </message> <message> <location line="+51"/> <source>Amount:</source> <translation>Importe:</translation> </message> <message> <location line="+22"/> <location line="+86"/> <location line="+86"/> <location line="+32"/> <source>0.00 BTC</source> <translation type="unfinished"/> </message> <message> <location line="-191"/> <source>Priority:</source> <translation>Prioridade:</translation> </message> <message> <location line="+19"/> <source>medium</source> <translation type="unfinished"/> </message> <message> <location line="+32"/> <source>Fee:</source> <translation>Pago:</translation> </message> <message> <location line="+35"/> <source>Low Output:</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>no</source> <translation type="unfinished"/> </message> <message> <location line="+32"/> <source>After Fee:</source> <translation type="unfinished"/> </message> <message> <location line="+35"/> <source>Change</source> <translation type="unfinished"/> </message> <message> 
<location line="+50"/> <source>custom change address</source> <translation type="unfinished"/> </message> <message> <location line="+106"/> <source>Send to multiple recipients at once</source> <translation>Enviar a múltiples receptores á vez</translation> </message> <message> <location line="+3"/> <source>Add &amp;Recipient</source> <translation>Engadir &amp;Receptor</translation> </message> <message> <location line="+20"/> <source>Remove all transaction fields</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Clear &amp;All</source> <translation>Limpar &amp;Todo</translation> </message> <message> <location line="+28"/> <source>Balance:</source> <translation>Balance:</translation> </message> <message> <location line="+16"/> <source>123.456 BTC</source> <translation type="unfinished"/> </message> <message> <location line="+31"/> <source>Confirm the send action</source> <translation>Confirmar a acción de envío</translation> </message> <message> <location line="+3"/> <source>S&amp;end</source> <translation>&amp;Enviar</translation> </message> <message> <location filename="../sendcoinsdialog.cpp" line="-173"/> <source>Enter a YEScoin address (e.g. 
YEScoinfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>Copy quantity</source> <translation>Copiar cantidade</translation> </message> <message> <location line="+1"/> <source>Copy amount</source> <translation>Copiar cantidade</translation> </message> <message> <location line="+1"/> <source>Copy fee</source> <translation>Copiar pago</translation> </message> <message> <location line="+1"/> <source>Copy after fee</source> <translation>Copiar despóis do pago</translation> </message> <message> <location line="+1"/> <source>Copy bytes</source> <translation>Copiar bytes</translation> </message> <message> <location line="+1"/> <source>Copy priority</source> <translation>Copiar prioridade</translation> </message> <message> <location line="+1"/> <source>Copy low output</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Copy change</source> <translation>Copiar cambio</translation> </message> <message> <location line="+86"/> <source>&lt;b&gt;%1&lt;/b&gt; to %2 (%3)</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Confirm send coins</source> <translation>Confirmar envío de moedas</translation> </message> <message> <location line="+1"/> <source>Are you sure you want to send %1?</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source> and </source> <translation type="unfinished"/> </message> <message> <location line="+29"/> <source>The recipient address is not valid, please recheck.</source> <translation>A dirección de recepción non é válida, por favor compróbea.</translation> </message> <message> <location line="+5"/> <source>The amount to pay must be larger than 0.</source> <translation>A cantidade a pagar debe ser maior que 0.</translation> </message> <message> <location line="+5"/> <source>The amount exceeds your balance.</source> <translation>A cantidade sobrepasa o teu 
balance.</translation> </message> <message> <location line="+5"/> <source>The total exceeds your balance when the %1 transaction fee is included.</source> <translation>O total sobrepasa o teu balance cando se inclúe a tarifa de transacción %1.</translation> </message> <message> <location line="+6"/> <source>Duplicate address found, can only send to each address once per send operation.</source> <translation>Atopouse dirección duplicada, so se pode enviar a cada dirección unha vez por operación.</translation> </message> <message> <location line="+5"/> <source>Error: Transaction creation failed.</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation type="unfinished"/> </message> <message> <location line="+251"/> <source>WARNING: Invalid YEScoin address</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>(no label)</source> <translation>(sen etiqueta)</translation> </message> <message> <location line="+4"/> <source>WARNING: unknown change address</source> <translation type="unfinished"/> </message> </context> <context> <name>SendCoinsEntry</name> <message> <location filename="../forms/sendcoinsentry.ui" line="+14"/> <source>Form</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>A&amp;mount:</source> <translation>&amp;Cantidade:</translation> </message> <message> <location line="+13"/> <source>Pay &amp;To:</source> <translation>Pagar &amp;A:</translation> </message> <message> <location line="+24"/> <location filename="../sendcoinsentry.cpp" line="+25"/> <source>Enter a label for this address to add it to your address book</source> <translation>Introduce unha etiqueta para esta dirección para engadila ao teu libro 
de direccións</translation> </message> <message> <location line="+9"/> <source>&amp;Label:</source> <translation>&amp;Etiqueta:</translation> </message> <message> <location line="+18"/> <source>The address to send the payment to (e.g. YEScoinfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>Choose address from address book</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="+7"/> <source>Paste address from clipboard</source> <translation>Pegar dirección dende portapapeis</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+7"/> <source>Remove this recipient</source> <translation type="unfinished"/> </message> <message> <location filename="../sendcoinsentry.cpp" line="+1"/> <source>Enter a YEScoin address (e.g. YEScoinfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation type="unfinished"/> </message> </context> <context> <name>SignVerifyMessageDialog</name> <message> <location filename="../forms/signverifymessagedialog.ui" line="+14"/> <source>Signatures - Sign / Verify a Message</source> <translation>Sinaturas - Asinar / Verificar unha Mensaxe</translation> </message> <message> <location line="+13"/> <location line="+124"/> <source>&amp;Sign Message</source> <translation>&amp;Asinar Mensaxe</translation> </message> <message> <location line="-118"/> <source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source> <translation>Podes asinar mensaxes coas túas direccións para probar que ti as posees. 
Ten conta de non asinar nada vago, xa que hai ataques de phishing que tentarán que asines coa túa identidade por riba deles. Asina únicamente declaracións totalmente detalladas coas que esteas de acordo.</translation> </message> <message> <location line="+18"/> <source>The address to sign the message with (e.g. YEScoinfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation type="unfinished"/> </message> <message> <location line="+10"/> <location line="+203"/> <source>Choose an address from the address book</source> <translation type="unfinished"/> </message> <message> <location line="-193"/> <location line="+203"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="-193"/> <source>Paste address from clipboard</source> <translation>Pegar dirección dende portapapeis</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+12"/> <source>Enter the message you want to sign here</source> <translation>Introduce a mensaxe que queres asinar aquí</translation> </message> <message> <location line="+24"/> <source>Copy the current signature to the system clipboard</source> <translation>Copiar a sinatura actual ao portapapeis do sistema</translation> </message> <message> <location line="+21"/> <source>Sign the message to prove you own this YEScoin address</source> <translation type="unfinished"/> </message> <message> <location line="+17"/> <source>Reset all sign message fields</source> <translation>Restaurar todos os campos de sinatura de mensaxe</translation> </message> <message> <location line="+3"/> <location line="+146"/> <source>Clear &amp;All</source> <translation>Limpar &amp;Todo</translation> </message> <message> <location line="-87"/> <location line="+70"/> <source>&amp;Verify Message</source> <translation>&amp;Verificar Mensaxe</translation> </message> <message> <location line="-64"/> <source>Enter the signing address, message (ensure you 
copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source> <translation>Introduce a dirección coa que asinar, a mensaxe (asegúrate de copiar exactamente os saltos de liña, espacios, tabulacións, etc.) e a sinatura debaixo para verificar a mensaxe. Ten coidado de non ler máis na sinatura do que hai no mensaxe asinado mesmo, a fin de evitar ser cazado nun ataque de home no medio.</translation> </message> <message> <location line="+21"/> <source>The address the message was signed with (e.g. YEScoinfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation type="unfinished"/> </message> <message> <location line="+40"/> <source>Verify the message to ensure it was signed with the specified YEScoin address</source> <translation type="unfinished"/> </message> <message> <location line="+17"/> <source>Reset all verify message fields</source> <translation>Restaurar todos os campos de verificación de mensaxe</translation> </message> <message> <location filename="../signverifymessagedialog.cpp" line="+27"/> <location line="+3"/> <source>Enter a YEScoin address (e.g. 
YEScoinfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation type="unfinished"/> </message> <message> <location line="-2"/> <source>Click &quot;Sign Message&quot; to generate signature</source> <translation>Click en &quot;Asinar Mensaxe&quot; para xerar sinatura</translation> </message> <message> <location line="+3"/> <source>Enter YEScoin signature</source> <translation type="unfinished"/> </message> <message> <location line="+82"/> <location line="+81"/> <source>The entered address is invalid.</source> <translation>A dirección introducida é inválida.</translation> </message> <message> <location line="-81"/> <location line="+8"/> <location line="+73"/> <location line="+8"/> <source>Please check the address and try again.</source> <translation>Por favor comproba a dirección e proba de novo.</translation> </message> <message> <location line="-81"/> <location line="+81"/> <source>The entered address does not refer to a key.</source> <translation>A dirección introducida non se refire a ninguna clave.</translation> </message> <message> <location line="-73"/> <source>Wallet unlock was cancelled.</source> <translation>Cancelouse o desbloqueo do moedeiro.</translation> </message> <message> <location line="+8"/> <source>Private key for the entered address is not available.</source> <translation>A clave privada da dirección introducida non está dispoñible.</translation> </message> <message> <location line="+12"/> <source>Message signing failed.</source> <translation>Fallou a sinatura da mensaxe.</translation> </message> <message> <location line="+5"/> <source>Message signed.</source> <translation>Mensaxe asinada.</translation> </message> <message> <location line="+59"/> <source>The signature could not be decoded.</source> <translation>A sinatura non puido ser decodificada.</translation> </message> <message> <location line="+0"/> <location line="+13"/> <source>Please check the signature and try again.</source> <translation>Por favor revise a sinatura e probe de 
novo.</translation> </message> <message> <location line="+0"/> <source>The signature did not match the message digest.</source> <translation>A sinatura non coincide co resumo da mensaxe.</translation> </message> <message> <location line="+7"/> <source>Message verification failed.</source> <translation>A verificación da mensaxe fallou.</translation> </message> <message> <location line="+5"/> <source>Message verified.</source> <translation>Mensaxe verificada.</translation> </message> </context> <context> <name>TransactionDesc</name> <message> <location filename="../transactiondesc.cpp" line="+19"/> <source>Open until %1</source> <translation>Aberto ata %1</translation> </message> <message numerus="yes"> <location line="-2"/> <source>Open for %n block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+8"/> <source>conflicted</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>%1/offline</source> <translation>%1/fóra de liña</translation> </message> <message> <location line="+2"/> <source>%1/unconfirmed</source> <translation>%1/sen confirmar</translation> </message> <message> <location line="+2"/> <source>%1 confirmations</source> <translation>%1 confirmacións</translation> </message> <message> <location line="+18"/> <source>Status</source> <translation>Estado</translation> </message> <message numerus="yes"> <location line="+7"/> <source>, broadcast through %n node(s)</source> <translation><numerusform>, propagado a %n nodo</numerusform><numerusform>, propagado a %n nodos</numerusform></translation> </message> <message> <location line="+4"/> <source>Date</source> <translation>Data</translation> </message> <message> <location line="+7"/> <source>Source</source> <translation>Orixe</translation> </message> <message> <location line="+0"/> <source>Generated</source> <translation>Xerado</translation> </message> <message> 
<location line="+5"/> <location line="+17"/> <source>From</source> <translation>Dende</translation> </message> <message> <location line="+1"/> <location line="+22"/> <location line="+58"/> <source>To</source> <translation>A</translation> </message> <message> <location line="-77"/> <location line="+2"/> <source>own address</source> <translation>dirección propia</translation> </message> <message> <location line="-2"/> <source>label</source> <translation>etiqueta</translation> </message> <message> <location line="+37"/> <location line="+12"/> <location line="+45"/> <location line="+17"/> <location line="+30"/> <source>Credit</source> <translation>Crédito</translation> </message> <message numerus="yes"> <location line="-102"/> <source>matures in %n more block(s)</source> <translation><numerusform>madura nun bloque máis</numerusform><numerusform>madura en %n bloques máis</numerusform></translation> </message> <message> <location line="+2"/> <source>not accepted</source> <translation>non aceptado</translation> </message> <message> <location line="+44"/> <location line="+8"/> <location line="+15"/> <location line="+30"/> <source>Debit</source> <translation>Débito</translation> </message> <message> <location line="-39"/> <source>Transaction fee</source> <translation>Tarifa de transacción</translation> </message> <message> <location line="+16"/> <source>Net amount</source> <translation>Cantidade neta</translation> </message> <message> <location line="+6"/> <source>Message</source> <translation>Mensaxe</translation> </message> <message> <location line="+2"/> <source>Comment</source> <translation>Comentario</translation> </message> <message> <location line="+2"/> <source>Transaction ID</source> <translation>ID de Transacción</translation> </message> <message> <location line="+3"/> <source>Generated coins must mature 510 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. 
If it fails to get into the chain, its state will change to &quot;not accepted&quot; and it won&apos;t be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Debug information</source> <translation>Información de depuración</translation> </message> <message> <location line="+8"/> <source>Transaction</source> <translation>Transacción</translation> </message> <message> <location line="+5"/> <source>Inputs</source> <translation>Entradas</translation> </message> <message> <location line="+23"/> <source>Amount</source> <translation>Cantidade</translation> </message> <message> <location line="+1"/> <source>true</source> <translation>verdadeiro</translation> </message> <message> <location line="+0"/> <source>false</source> <translation>falso</translation> </message> <message> <location line="-211"/> <source>, has not been successfully broadcast yet</source> <translation>, non foi propagado con éxito todavía</translation> </message> <message> <location line="+35"/> <source>unknown</source> <translation>descoñecido</translation> </message> </context> <context> <name>TransactionDescDialog</name> <message> <location filename="../forms/transactiondescdialog.ui" line="+14"/> <source>Transaction details</source> <translation>Detalles de transacción</translation> </message> <message> <location line="+6"/> <source>This pane shows a detailed description of the transaction</source> <translation>Este panel amosa unha descripción detallada da transacción</translation> </message> </context> <context> <name>TransactionTableModel</name> <message> <location filename="../transactiontablemodel.cpp" line="+226"/> <source>Date</source> <translation>Data</translation> </message> <message> <location line="+0"/> <source>Type</source> <translation>Tipo</translation> </message> <message> <location line="+0"/> <source>Address</source> 
<translation>Dirección</translation> </message> <message> <location line="+0"/> <source>Amount</source> <translation>Cantidade</translation> </message> <message> <location line="+60"/> <source>Open until %1</source> <translation>Aberto ata %1</translation> </message> <message> <location line="+12"/> <source>Confirmed (%1 confirmations)</source> <translation>Confirmado (%1 confirmacións)</translation> </message> <message numerus="yes"> <location line="-15"/> <source>Open for %n more block(s)</source> <translation><numerusform>Abrir para %n bloque máis</numerusform><numerusform>Abrir para %n bloques máis</numerusform></translation> </message> <message> <location line="+6"/> <source>Offline</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Unconfirmed</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Confirming (%1 of %2 recommended confirmations)</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Conflicted</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Immature (%1 confirmations, will be available after %2)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation>Este bloque non foi recibido por ningún outro nodo e probablemente non será aceptado!</translation> </message> <message> <location line="+3"/> <source>Generated but not accepted</source> <translation>Xerado pero non aceptado</translation> </message> <message> <location line="+42"/> <source>Received with</source> <translation>Recibido con</translation> </message> <message> <location line="+2"/> <source>Received from</source> <translation>Recibido de</translation> </message> <message> <location line="+3"/> <source>Sent to</source> <translation>Enviado a</translation> </message> <message> <location 
line="+2"/> <source>Payment to yourself</source> <translation>Pago a ti mesmo</translation> </message> <message> <location line="+2"/> <source>Mined</source> <translation>Minado</translation> </message> <message> <location line="+38"/> <source>(n/a)</source> <translation>(n/a)</translation> </message> <message> <location line="+190"/> <source>Transaction status. Hover over this field to show number of confirmations.</source> <translation>Estado da transacción. Pasa por riba deste campo para amosar o número de confirmacións.</translation> </message> <message> <location line="+2"/> <source>Date and time that the transaction was received.</source> <translation>Data e hora na que foi recibida a transacción.</translation> </message> <message> <location line="+2"/> <source>Type of transaction.</source> <translation>Tipo de transacción.</translation> </message> <message> <location line="+2"/> <source>Destination address of transaction.</source> <translation>Dirección de destino da transacción.</translation> </message> <message> <location line="+2"/> <source>Amount removed from or added to balance.</source> <translation>Cantidade borrada ou engadida no balance.</translation> </message> </context> <context> <name>TransactionView</name> <message> <location filename="../transactionview.cpp" line="+55"/> <location line="+16"/> <source>All</source> <translation>Todo</translation> </message> <message> <location line="-15"/> <source>Today</source> <translation>Hoxe</translation> </message> <message> <location line="+1"/> <source>This week</source> <translation>Esta semana</translation> </message> <message> <location line="+1"/> <source>This month</source> <translation>Este mes</translation> </message> <message> <location line="+1"/> <source>Last month</source> <translation>O último mes</translation> </message> <message> <location line="+1"/> <source>This year</source> <translation>Este ano</translation> </message> <message> <location line="+1"/> <source>Range...</source> 
<translation>Periodo...</translation> </message> <message> <location line="+11"/> <source>Received with</source> <translation>Recibido con</translation> </message> <message> <location line="+2"/> <source>Sent to</source> <translation>Enviado a</translation> </message> <message> <location line="+2"/> <source>To yourself</source> <translation>A ti mesmo</translation> </message> <message> <location line="+1"/> <source>Mined</source> <translation>Minado</translation> </message> <message> <location line="+1"/> <source>Other</source> <translation>Outro</translation> </message> <message> <location line="+7"/> <source>Enter address or label to search</source> <translation>Introduce dirección ou etiqueta para buscar</translation> </message> <message> <location line="+7"/> <source>Min amount</source> <translation>Cantidade mínima</translation> </message> <message> <location line="+34"/> <source>Copy address</source> <translation>Copiar dirección</translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation>Copiar etiqueta</translation> </message> <message> <location line="+1"/> <source>Copy amount</source> <translation>Copiar cantidade</translation> </message> <message> <location line="+1"/> <source>Copy transaction ID</source> <translation>Copiar ID de transacción</translation> </message> <message> <location line="+1"/> <source>Edit label</source> <translation>Modificar etiqueta</translation> </message> <message> <location line="+1"/> <source>Show transaction details</source> <translation>Amosar detalles da transacción</translation> </message> <message> <location line="+144"/> <source>Export Transaction Data</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Arquivo separado por comas (*.csv)</translation> </message> <message> <location line="+8"/> <source>Confirmed</source> <translation>Confirmado</translation> </message> <message> <location 
line="+1"/> <source>Date</source> <translation>Data</translation> </message> <message> <location line="+1"/> <source>Type</source> <translation>Tipo</translation> </message> <message> <location line="+1"/> <source>Label</source> <translation>Etiqueta</translation> </message> <message> <location line="+1"/> <source>Address</source> <translation>Dirección</translation> </message> <message> <location line="+1"/> <source>Amount</source> <translation>Cantidade</translation> </message> <message> <location line="+1"/> <source>ID</source> <translation>ID</translation> </message> <message> <location line="+4"/> <source>Error exporting</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation type="unfinished"/> </message> <message> <location line="+100"/> <source>Range:</source> <translation>Periodo:</translation> </message> <message> <location line="+8"/> <source>to</source> <translation>a</translation> </message> </context> <context> <name>WalletModel</name> <message> <location filename="../walletmodel.cpp" line="+206"/> <source>Sending...</source> <translation type="unfinished"/> </message> </context> <context> <name>bitcoin-core</name> <message> <location filename="../bitcoinstrings.cpp" line="+33"/> <source>YEScoin version</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Usage:</source> <translation>Emprego:</translation> </message> <message> <location line="+1"/> <source>Send command to -server or YEScoind</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>List commands</source> <translation>Listar comandos</translation> </message> <message> <location line="+1"/> <source>Get help for a command</source> <translation>Obter axuda para un comando</translation> </message> <message> <location line="+2"/> <source>Options:</source> <translation>Opcións:</translation> </message> <message> <location 
line="+2"/> <source>Specify configuration file (default: YEScoin.conf)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Specify pid file (default: YEScoind.pid)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Specify wallet file (within data directory)</source> <translation>Especificar arquivo do moedeiro (dentro do directorio de datos)</translation> </message> <message> <location line="-1"/> <source>Specify data directory</source> <translation>Especificar directorio de datos</translation> </message> <message> <location line="+2"/> <source>Set database cache size in megabytes (default: 25)</source> <translation>Fixar tamaño da caché da base de datos en megabytes (por defecto: 25)</translation> </message> <message> <location line="+1"/> <source>Set database disk log size in megabytes (default: 100)</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>Listen for connections on &lt;port&gt; (default: 15714 or testnet: 25714)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Maintain at most &lt;n&gt; connections to peers (default: 125)</source> <translation>Manter como moito &lt;n&gt; conexións con pares (por defecto: 125)</translation> </message> <message> <location line="+3"/> <source>Connect to a node to retrieve peer addresses, and disconnect</source> <translation>Conectar a nodo para recuperar direccións de pares, e desconectar</translation> </message> <message> <location line="+1"/> <source>Specify your own public address</source> <translation>Especificar a túa propia dirección pública</translation> </message> <message> <location line="+5"/> <source>Bind to given address. 
Use [host]:port notation for IPv6</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Stake your coins to support network and gain reward (default: 1)</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Threshold for disconnecting misbehaving peers (default: 100)</source> <translation>Umbral para desconectar pares con mal comportamento (por defecto: 100)</translation> </message> <message> <location line="+1"/> <source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source> <translation>Número de segundos para manter sen reconectar aos pares con mal comportamento (por defecto: 86400)</translation> </message> <message> <location line="-44"/> <source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source> <translation>Ocorreu un erro mentres se establecía o porto RPC %u para escoitar sobre IPv4: %s</translation> </message> <message> <location line="+51"/> <source>Detach block and address databases. Increases shutdown time (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+109"/> <source>Error: The transaction was rejected. 
This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation type="unfinished"/> </message> <message> <location line="-5"/> <source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds </source> <translation type="unfinished"/> </message> <message> <location line="-87"/> <source>Listen for JSON-RPC connections on &lt;port&gt; (default: 15715 or testnet: 25715)</source> <translation type="unfinished"/> </message> <message> <location line="-11"/> <source>Accept command line and JSON-RPC commands</source> <translation>Aceptar liña de comandos e comandos JSON-RPC</translation> </message> <message> <location line="+101"/> <source>Error: Transaction creation failed </source> <translation type="unfinished"/> </message> <message> <location line="-5"/> <source>Error: Wallet locked, unable to create transaction </source> <translation type="unfinished"/> </message> <message> <location line="-8"/> <source>Importing blockchain data file.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Importing bootstrap blockchain data file.</source> <translation type="unfinished"/> </message> <message> <location line="-88"/> <source>Run in the background as a daemon and accept commands</source> <translation>Executar no fondo como un demo e aceptar comandos</translation> </message> <message> <location line="+1"/> <source>Use the test network</source> <translation>Empregar a rede de proba</translation> </message> <message> <location line="-24"/> <source>Accept connections from outside (default: 1 if no -proxy or -connect)</source> <translation>Aceptar conexións de fóra (por defecto: 1 se non -proxy ou -connect)</translation> </message> <message> <location line="-38"/> <source>An error occurred while setting up the RPC port %u for 
listening on IPv6, falling back to IPv4: %s</source> <translation>Ocorreu un erro mentres se establecía o porto RPC %u para escoitar sobre IPv6, voltando a IPv4: %s</translation> </message> <message> <location line="+117"/> <source>Error initializing database environment %s! To recover, BACKUP THAT DIRECTORY, then remove everything from it except for wallet.dat.</source> <translation type="unfinished"/> </message> <message> <location line="-20"/> <source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source> <translation type="unfinished"/> </message> <message> <location line="+11"/> <source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source> <translation>Precaución: -paytxfee está posto moi algo! Esta é a tarifa de transacción que ti pagarás se envías unha transacción.</translation> </message> <message> <location line="+61"/> <source>Warning: Please check that your computer&apos;s date and time are correct! If your clock is wrong YEScoin will not work properly.</source> <translation type="unfinished"/> </message> <message> <location line="-31"/> <source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source> <translation>Precaución: erro lendo wallet.dat! Tódalas claves lidas correctamente, pero os datos de transacción ou as entradas do libro de direccións podrían estar ausentes ou incorrectos.</translation> </message> <message> <location line="-18"/> <source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source> <translation>Precaución: wallet.dat corrupto, datos salvagardados! 
O wallet.dat orixinal foi gardado como wallet.{timestamp}.bak en %s; se o teu balance ou transaccións son incorrectas deberías restauralas dende unha copia de seguridade.</translation> </message> <message> <location line="-30"/> <source>Attempt to recover private keys from a corrupt wallet.dat</source> <translation>Tentar recuperar claves privadas dende un wallet.dat corrupto</translation> </message> <message> <location line="+4"/> <source>Block creation options:</source> <translation>Opcións de creación de bloque:</translation> </message> <message> <location line="-62"/> <source>Connect only to the specified node(s)</source> <translation>Conectar so ao(s) nodo(s) especificado(s)</translation> </message> <message> <location line="+4"/> <source>Discover own IP address (default: 1 when listening and no -externalip)</source> <translation>Descobrir dirección IP propia (por defecto: 1 se á escoita e non -externalip)</translation> </message> <message> <location line="+94"/> <source>Failed to listen on any port. Use -listen=0 if you want this.</source> <translation>Fallou escoitar en calquera porto. 
Emprega -listen=0 se queres esto.</translation> </message> <message> <location line="-90"/> <source>Find peers using DNS lookup (default: 1)</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Sync checkpoints policy (default: strict)</source> <translation type="unfinished"/> </message> <message> <location line="+83"/> <source>Invalid -tor address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Invalid amount for -reservebalance=&lt;amount&gt;</source> <translation type="unfinished"/> </message> <message> <location line="-82"/> <source>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: 5000)</source> <translation>Máximo buffer por-conexión para recibir, &lt;n&gt;*1000 bytes (por defecto: 5000)</translation> </message> <message> <location line="+1"/> <source>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: 1000)</source> <translation>Máximo buffer por-conexión para enviar, &lt;n&gt;*1000 bytes (por defecto: 1000)</translation> </message> <message> <location line="-16"/> <source>Only connect to nodes in network &lt;net&gt; (IPv4, IPv6 or Tor)</source> <translation>Conectar so a nodos na rede &lt;net&gt; (IPv4, IPv6 ou Tor)</translation> </message> <message> <location line="+28"/> <source>Output extra debugging information. 
Implies all other -debug* options</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Output extra network debugging information</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Prepend debug output with timestamp</source> <translation type="unfinished"/> </message> <message> <location line="+35"/> <source>SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source> <translation>Opcións SSL: (ver ńa Wiki Bitcoin as instrucción de configuración de SSL)</translation> </message> <message> <location line="-74"/> <source>Select the version of socks proxy to use (4-5, default: 5)</source> <translation type="unfinished"/> </message> <message> <location line="+41"/> <source>Send trace/debug info to console instead of debug.log file</source> <translation>Enviar traza/información de depuración á consola en lugar de ao arquivo debug.log</translation> </message> <message> <location line="+1"/> <source>Send trace/debug info to debugger</source> <translation type="unfinished"/> </message> <message> <location line="+28"/> <source>Set maximum block size in bytes (default: 250000)</source> <translation type="unfinished"/> </message> <message> <location line="-1"/> <source>Set minimum block size in bytes (default: 0)</source> <translation>Fixar tamaño mínimo de bloque en bytes (por defecto: 0)</translation> </message> <message> <location line="-29"/> <source>Shrink debug.log file on client startup (default: 1 when no -debug)</source> <translation>Recortar o arquivo debug.log ao arrancar o cliente (por defecto: 1 cando no-debug)</translation> </message> <message> <location line="-42"/> <source>Specify connection timeout in milliseconds (default: 5000)</source> <translation>Especificar tempo límite da conexión en milisegundos (por defecto: 5000)</translation> </message> <message> <location line="+109"/> <source>Unable to sign checkpoint, wrong checkpointkey? 
</source> <translation type="unfinished"/> </message> <message> <location line="-80"/> <source>Use UPnP to map the listening port (default: 0)</source> <translation>Usar UPnP para mapear o porto de escoita (por defecto: 0)</translation> </message> <message> <location line="-1"/> <source>Use UPnP to map the listening port (default: 1 when listening)</source> <translation>Usar UPnP para mapear o porto de escoita (por defecto: 1 se á escoita)</translation> </message> <message> <location line="-25"/> <source>Use proxy to reach tor hidden services (default: same as -proxy)</source> <translation type="unfinished"/> </message> <message> <location line="+42"/> <source>Username for JSON-RPC connections</source> <translation>Nome de usuario para conexións JSON-RPC</translation> </message> <message> <location line="+47"/> <source>Verifying database integrity...</source> <translation type="unfinished"/> </message> <message> <location line="+57"/> <source>WARNING: syncronized checkpoint violation detected, but skipped!</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Warning: Disk space is low!</source> <translation type="unfinished"/> </message> <message> <location line="-2"/> <source>Warning: This version is obsolete, upgrade required!</source> <translation>Precaución: Esta versión é obsoleta, precísase unha actualización!</translation> </message> <message> <location line="-48"/> <source>wallet.dat corrupt, salvage failed</source> <translation>wallet.dat corrupto, fallou o gardado</translation> </message> <message> <location line="-54"/> <source>Password for JSON-RPC connections</source> <translation>Contrasinal para conexións JSON-RPC</translation> </message> <message> <location line="-84"/> <source>%s, you must set a rpcpassword in the configuration file: %s It is recommended you use the following random password: rpcuser=YEScoinrpc rpcpassword=%s (you do not need to remember this password) The username and password MUST NOT be 
the same. If the file does not exist, create it with owner-readable-only file permissions. It is also recommended to set alertnotify so you are notified of problems; for example: alertnotify=echo %%s | mail -s &quot;YEScoin Alert&quot; [email protected] </source> <translation type="unfinished"/> </message> <message> <location line="+51"/> <source>Find peers using internet relay chat (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Sync time with other nodes. Disable if time on your system is precise e.g. syncing with NTP (default: 1)</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>When creating transactions, ignore inputs with value less than this (default: 0.01)</source> <translation type="unfinished"/> </message> <message> <location line="+16"/> <source>Allow JSON-RPC connections from specified IP address</source> <translation>Permitir conexións JSON-RPC dende direccións IP especificadas</translation> </message> <message> <location line="+1"/> <source>Send commands to node running on &lt;ip&gt; (default: 127.0.0.1)</source> <translation>Enviar comandos a nodo executando na &lt;ip&gt; (por defecto: 127.0.0.1)</translation> </message> <message> <location line="+1"/> <source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source> <translation>Executar comando cando o mellor bloque cambie (%s no comando é sustituído polo hash do bloque)</translation> </message> <message> <location line="+3"/> <source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source> <translation>Executar comando cando unha transacción do moedeiro cambia (%s no comando é substituído por TxID)</translation> </message> <message> <location line="+3"/> <source>Require a confirmations for change (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Enforce transaction scripts to 
use canonical PUSH operators (default: 1)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Upgrade wallet to latest format</source> <translation>Actualizar moedeiro ao formato máis recente</translation> </message> <message> <location line="+1"/> <source>Set key pool size to &lt;n&gt; (default: 100)</source> <translation>Fixar tamaño do pool de claves a &lt;n&gt; (por defecto: 100)</translation> </message> <message> <location line="+1"/> <source>Rescan the block chain for missing wallet transactions</source> <translation>Rescanear transaccións ausentes na cadea de bloques</translation> </message> <message> <location line="+2"/> <source>How many blocks to check at startup (default: 2500, 0 = all)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>How thorough the block verification is (0-6, default: 1)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Imports blocks from external blk000?.dat file</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Use OpenSSL (https) for JSON-RPC connections</source> <translation>Empregar OpenSSL (https) para conexións JSON-RPC</translation> </message> <message> <location line="+1"/> <source>Server certificate file (default: server.cert)</source> <translation>Arquivo de certificado do servidor (por defecto: server.cert)</translation> </message> <message> <location line="+1"/> <source>Server private key (default: server.pem)</source> <translation>Clave privada do servidor (por defecto: server.perm)</translation> </message> <message> <location line="+1"/> <source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source> <translation type="unfinished"/> 
</message> <message> <location line="+53"/> <source>Error: Wallet unlocked for staking only, unable to create transaction.</source> <translation type="unfinished"/> </message> <message> <location line="+18"/> <source>WARNING: Invalid checkpoint found! Displayed transactions may not be correct! You may need to upgrade, or notify developers.</source> <translation type="unfinished"/> </message> <message> <location line="-158"/> <source>This help message</source> <translation>Esta mensaxe de axuda</translation> </message> <message> <location line="+95"/> <source>Wallet %s resides outside data directory %s.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Cannot obtain a lock on data directory %s. YEScoin is probably already running.</source> <translation type="unfinished"/> </message> <message> <location line="-98"/> <source>YEScoin</source> <translation type="unfinished"/> </message> <message> <location line="+140"/> <source>Unable to bind to %s on this computer (bind returned error %d, %s)</source> <translation>Imposible enlazar con %s neste ordenador (enlace devolveu erro %d, %s)</translation> </message> <message> <location line="-130"/> <source>Connect through socks proxy</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Allow DNS lookups for -addnode, -seednode and -connect</source> <translation>Permitir lookup de DNS para -addnote, -seednote e -connect</translation> </message> <message> <location line="+122"/> <source>Loading addresses...</source> <translation>Cargando direccións...</translation> </message> <message> <location line="-15"/> <source>Error loading blkindex.dat</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Error loading wallet.dat: Wallet corrupted</source> <translation>Erro cargando wallet.dat: Moedeiro corrupto</translation> </message> <message> <location line="+4"/> <source>Error loading wallet.dat: Wallet requires 
newer version of YEScoin</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Wallet needed to be rewritten: restart YEScoin to complete</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error loading wallet.dat</source> <translation>Erro cargando wallet.dat</translation> </message> <message> <location line="-16"/> <source>Invalid -proxy address: &apos;%s&apos;</source> <translation>Dirección -proxy inválida: &apos;%s&apos;</translation> </message> <message> <location line="-1"/> <source>Unknown network specified in -onlynet: &apos;%s&apos;</source> <translation>Rede descoñecida especificada en -onlynet: &apos;%s&apos;</translation> </message> <message> <location line="-1"/> <source>Unknown -socks proxy version requested: %i</source> <translation>Versión solicitada de proxy -socks descoñecida: %i</translation> </message> <message> <location line="+4"/> <source>Cannot resolve -bind address: &apos;%s&apos;</source> <translation>Non se pode resolver a dirección -bind: &apos;%s&apos;</translation> </message> <message> <location line="+2"/> <source>Cannot resolve -externalip address: &apos;%s&apos;</source> <translation>Non se pode resolver dirección -externalip: &apos;%s&apos;</translation> </message> <message> <location line="-24"/> <source>Invalid amount for -paytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Cantidade inválida para -paytxfee=&lt;cantidade&gt;: &apos;%s&apos;</translation> </message> <message> <location line="+44"/> <source>Error: could not start node</source> <translation type="unfinished"/> </message> <message> <location line="+11"/> <source>Sending...</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Invalid amount</source> <translation>Cantidade inválida</translation> </message> <message> <location line="+1"/> <source>Insufficient funds</source> <translation>Fondos insuficientes</translation> </message> 
<message> <location line="-34"/> <source>Loading block index...</source> <translation>Cargando índice de bloques...</translation> </message> <message> <location line="-103"/> <source>Add a node to connect to and attempt to keep the connection open</source> <translation>Engadir un nodo ao que conectarse e tentar manter a conexión aberta</translation> </message> <message> <location line="+122"/> <source>Unable to bind to %s on this computer. YEScoin is probably already running.</source> <translation type="unfinished"/> </message> <message> <location line="-97"/> <source>Fee per KB to add to transactions you send</source> <translation type="unfinished"/> </message> <message> <location line="+55"/> <source>Invalid amount for -mininput=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+25"/> <source>Loading wallet...</source> <translation>Cargando moedeiro...</translation> </message> <message> <location line="+8"/> <source>Cannot downgrade wallet</source> <translation>Non se pode desactualizar o moedeiro</translation> </message> <message> <location line="+1"/> <source>Cannot initialize keypool</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Cannot write default address</source> <translation>Non se pode escribir a dirección por defecto</translation> </message> <message> <location line="+1"/> <source>Rescanning...</source> <translation>Rescaneando...</translation> </message> <message> <location line="+5"/> <source>Done loading</source> <translation>Carga completa</translation> </message> <message> <location line="-167"/> <source>To use the %s option</source> <translation>Empregar a opción %s</translation> </message> <message> <location line="+14"/> <source>Error</source> <translation>Erro</translation> </message> <message> <location line="+6"/> <source>You must set rpcpassword=&lt;password&gt; in the configuration file: %s If the file does not exist, create it with 
owner-readable-only file permissions.</source> <translation>Debes fixar rpcpassword=&lt;contrasinal&gt; no arquivo de configuración: %s Se o arquivo non existe, debes crealo con permisos de so lectura para o propietario.</translation> </message> </context> </TS><|fim▁end|>
from builtins import str
from builtins import object
import httplib2
import MySQLdb
import json
import os
import sys
import time
import config

from apiclient.discovery import build
from oauth2client.file import Storage
from oauth2client.client import OAuth2WebServerFlow
from oauth2client.tools import run


class Error(Exception):
    """Custom Exception subclass raised for caption-upload failures."""
    pass


class YoutubeCaption(object):
    """Uploads SRT caption tracks for YouTube videos via the (legacy) GData captions API.

    Typical use: construct with API credentials, call
    set_caption_language_title() to pick the track, then
    upload_translated_captions() with a local .srt path and a video id.
    """

    OAUTH_SCOPE = "https://gdata.youtube.com"
    CAPTIONS_URL_FORMAT = ("http://gdata.youtube.com/feeds/api/videos/%s/"
                           "captions?alt=json")
    CAPTIONS_CONTENT_TYPE = "application/vnd.youtube.timedtext; charset=UTF-8"
    # Defaults for the caption track; overridden per-upload via
    # set_caption_language_title().
    CAPTIONS_LANGUAGE_CODE = "en"
    CAPTIONS_TITLE = ""

    def __init__(self, developer_key, client_id, client_secret):
        """Store the OAuth client credentials and the GData developer key."""
        self.CLIENT_ID = client_id
        self.CLIENT_SECRET = client_secret
        self.DEVELOPER_KEY = developer_key

    def authenticate(self):
        """Obtain (or refresh) OAuth2 credentials, caching them on disk.

        Credentials are persisted in 'youtube-oauth.storage'; a browser-based
        OAuth flow is only run when no valid cached credentials exist.
        """
        storage = Storage('youtube-oauth.storage')
        self.credentials = storage.get()
        if self.credentials is None or self.credentials.invalid:
            flow = OAuth2WebServerFlow(
                client_id=self.CLIENT_ID,
                client_secret=self.CLIENT_SECRET,
                scope=self.OAUTH_SCOPE,
                user_agent='Mozilla/5.0 (X11; Linux x86_64) '
                           'Gecko/20100101 Firefox/31.0'
            )
            self.credentials = run(flow, storage)

    def setup_http_request_object(self):
        """Build the authorized httplib2 client and the base GData headers."""
        self.headers = {
            "GData-Version": "2",
            "X-GData-Key": "key=%s" % self.DEVELOPER_KEY
        }
        self.http = self.credentials.authorize(httplib2.Http())

    def upload_translated_captions(self, srt_file_path, video_id):
        """POST the SRT file at srt_file_path as a caption track of video_id.

        Returns a (message, success_flag) tuple; success means the API
        answered HTTP 201 (caption created).
        Raises Error if authentication fails.
        """
        try:
            self.authenticate()
            self.setup_http_request_object()
        except Exception as e:
            raise Error("Error while authenticating: %s" % str(e))

        self.headers["Content-Type"] = self.CAPTIONS_CONTENT_TYPE
        self.headers["Content-Language"] = self.CAPTIONS_LANGUAGE_CODE
        self.headers["Slug"] = self.CAPTIONS_TITLE

        # Fix: use a context manager so the SRT file handle is always closed
        # (the previous code leaked the descriptor on every upload).
        with open(srt_file_path) as srt_file:
            self.translated_captions_body = srt_file.read()

        url = self.CAPTIONS_URL_FORMAT % video_id
        response_headers, body = self.http.request(
            url, "POST",
            body=self.translated_captions_body,
            headers=self.headers
        )
        if response_headers["status"] != "201":
            return ("Received HTTP response %s when uploading captions to %s."
                    % (response_headers["status"], url)), False
        return '%s - %s %s - caption updated' % (
            video_id, self.CAPTIONS_LANGUAGE_CODE, self.CAPTIONS_TITLE), True

    def set_caption_language_title(self, language='', title=''):
        """Select the language code and track title for subsequent uploads."""
        self.CAPTIONS_LANGUAGE_CODE = language
        self.CAPTIONS_TITLE = title


if __name__ == "__main__":
    caption = YoutubeCaption(config.DEVELOPER_KEY, config.CLIENT_ID,
                             config.CLIENT_SECRET)

    # Main DB holds tutorial/resource metadata; cron_logs tracks what has
    # already been uploaded so reruns are idempotent.
    db = MySQLdb.connect(host=config.DB_HOST, user=config.DB_USER,
                         passwd=config.DB_PASS, db=config.DB_NAME)
    # Fix: connect to cron_logs once (the original reconnected a second time,
    # leaking the first connection).
    ldb = MySQLdb.connect(host=config.DB_HOST, user=config.DB_USER,
                          passwd=config.DB_PASS, db='cron_logs')

    db_cursor = db.cursor()
    # Tutorials with a published video whose captions have not yet been
    # fully uploaded (status 1 or 2, not recorded in srt_uploads).
    db_cursor.execute(
        "select ctr.id, ctr.language_id, ctr.video, ctr.tutorial_detail_id, "
        "ctr.video_id, ctd.foss_id, ctd.tutorial from "
        "creation_tutorialresource ctr INNER JOIN creation_tutorialdetail ctd "
        "ON ( ctr.tutorial_detail_id = ctd.id ) WHERE ((ctr.status = 1 OR "
        "ctr.status = 2 ) AND ctr.video_id IS NOT NULL AND ctr.id NOT IN "
        "(select distinct trid from cron_logs.srt_uploads)) ORDER BY "
        "ctd.foss_id, ctd.level_id, ctd.order ASC")
    rows = db_cursor.fetchall()
    ldb_cursor = ldb.cursor()

    for row in rows:
        overall_status = 0
        db_cursor.execute("select id, name, code from creation_language "
                          "where id = %s", [str(row[1])])
        language = db_cursor.fetchone()

        video_title = str(row[6].replace(' ', '-'))
        video_path = (config.MEDIA_ROOT + 'videos/' + str(row[5]) + '/' +
                      str(row[3]) + '/')
        english_srt = video_path + video_title + '-English.srt'
        status_flag = False
        file_missing = False

        print('')
        print(('FOSS Id:', row[5]))
        print(('Tutorial:', row[6]))
        print(('Language:', language[1]))

        # --- English track (uploaded for every tutorial) ---
        if os.path.isfile(english_srt):
            file_missing = False
            # Fix: parameterized query instead of string concatenation.
            ldb_cursor.execute(
                "select * from srt_pending_uploads where trid=%s "
                "and native_or_english=0", [str(row[0])])
            esrt_row = ldb_cursor.fetchone()
            if esrt_row is None:
                caption.set_caption_language_title('en')
                message, status_flag = caption.upload_translated_captions(
                    english_srt, row[4])
                if status_flag:
                    ldb_cursor.execute(
                        "insert into srt_pending_uploads "
                        "(trid,native_or_english) values(%s, 0)",
                        [str(row[0])])
                    ldb.commit()
                    overall_status = 1
                print(message)
            else:
                print((row[4], '- English - Already Exist'))
                overall_status = 1
        else:
            file_missing = True
            print((row[4], '- English -', 'SRT File Missing'))

        # --- Native-language track (skipped for English tutorials) ---
        if language[1] != 'English':
            native_srt = video_path + video_title + '-' + language[1] + '.srt'
            if os.path.isfile(native_srt):
                ldb_cursor.execute(
                    "select * from srt_pending_uploads where trid=%s "
                    "and native_or_english=1", [str(row[0])])
                nsrt_row = ldb_cursor.fetchone()
                if nsrt_row is None:
                    file_missing = False
                    # YouTube needs a distinct track title when the language
                    # code collides with the English track.
                    language_title = ''
                    if language[2] == 'en':
                        language_title = language[1]
                    caption.set_caption_language_title(language[2],
                                                       language_title)
                    message, status_flag = caption.upload_translated_captions(
                        native_srt, row[4])
                    if status_flag:
                        ldb_cursor.execute(
                            "insert into srt_pending_uploads "
                            "(trid,native_or_english) values(%s, 1)",
                            [str(row[0])])
                        ldb.commit()
                    print(message)
                else:
                    print((row[4], '-', language[1], '- Already Exist'))
                    status_flag = True
            else:
                file_missing = True
                print((row[4], '-', language[1], '-', 'SRT File Missing'))
                status_flag = False

        # Record full completion only when every required track succeeded.
        if status_flag and overall_status:
            ldb_cursor.execute("insert into srt_uploads (trid) values(%s)",
                               [str(row[0])])
            ldb.commit()
        elif file_missing:
            continue
        else:
            # Throttle between API calls to stay under rate limits.
            time.sleep(1)
        time.sleep(1)
<|file_name|>MyViewportViewModel.js<|end_file_name|><|fim▁begin|>/* * File: app/view/MyViewportViewModel.js * * This file was generated by Sencha Architect version 3.1.0. * http://www.sencha.com/products/architect/ *<|fim▁hole|> * * This file will be auto-generated each and everytime you save your project. * * Do NOT hand edit this file. */ Ext.define('W3D3_Homework.view.MyViewportViewModel', { extend: 'Ext.app.ViewModel', alias: 'viewmodel.myviewport' });<|fim▁end|>
* This file requires use of the Ext JS 5.0.x library, under independent license. * License of Sencha Architect does not include license for Ext JS 5.0.x. For more * details see http://www.sencha.com/license or contact [email protected].
<|file_name|>pygenlib2.py<|end_file_name|><|fim▁begin|>import os, sys ########### def verbose_print(msg): print msg ########### def linetrim(s): return s.replace('\r', '').replace('\n','') ### def sublast(s, s2): return s[s.find(s2)+len(s2):] def parse_package(fpath): #p = fpath[:fpath.rfind('/')] p = fpath package = 'konoha' if p.find('/konoha/') != -1: return 'konoha' elif p.find('/class/') != -1: package = sublast(p, '/class/') elif p.find('/package/') != -1: package = '+' + sublast(p, '/package/') elif p.find('/api/') != -1: package = sublast(p, '/api/') elif p.find('/driver/') != -1: package = '#' + sublast(p, '/driver/') if package.find('_.') > 0: return 'konoha' if package.find('/') > 0: return package.split('/')[0] return package # p = fpath.split('/') # if p[-1].find('.') == -1: return p[-1] # return p[-2] ### def fpath_shortname(fpath): p = fpath.split('/') return p[-1].replace('.c', '') ### def safedict(d, key, defv): if d.has_key(key): return d[key] d[key] = defv return defv ### ### def list_topair(list): t1 = list[0] t2 = list[1] return t1, t2, list[2:] def parse_funcparams(functype): if not functype.endswith(')'): debug_print('Invalid functype: %s' % functype) t = functype.replace('(', ' ').replace(',', ' ').replace(')', '').split() params = [] while len(t) > 1: tt, tn, t = list_topair(t) params.append(nz_cparam(tt, tn)) return params ########### # --------------------------------------------------------------------------- LINE = ''' /* ------------------------------------------------------------------------ */ ''' DLINE = ''' /* ------------------------------------------------------------------------ */ ''' # --------------------------------------------------------------------------- def write_println(f, msg = ''): f.write(msg+'\n') def write_line(f): f.write(LINE) def write_dline(f): f.write(DLINE) def write_comment(f, msg): f.write('/* %s */\n' % msg) def write_chapter(f, msg): f.write(DLINE) write_comment(f, msg) def write_section(f, msg): 
f.write(LINE) write_comment(f, msg) def write_define(f, name, value='', n=40): s = '#define %s ' % name while(len(s) < n) : s+=' ' f.write(s) f.write(value) f.write('\n') ### def write_ifndefine(f, name, value='', n=40): f.write('#ifndef %s\n' % name) write_define(f, name, value, n) f.write('#endif\n') ### def write_ifndef(f, name, value='', n=40): f.write('#ifndef %s\n' % name) write_define(f, name, value, n) f.write('#endif\n') def write_ifdef(f, n): f.write(''' #ifdef %s''' % n.upper()) def write_else(f, n): f.write(''' #else /*%s*/ ''' % n.upper()) def write_endif(f, n): f.write(''' #endif/*%s*/ ''' % n.upper()) # --------------------------------------------------------------------------- def write_BOM(f): f.write("%c%c%c" % (0xef, 0xbb, 0xbf)) def write_license(f): f.write('''/**************************************************************************** * KONOHA2 COPYRIGHT, LICENSE NOTICE, AND DISCRIMER * * Copyright (c) 2006-2012, Kimio Kuramitsu <kimio at ynu.ac.jp> * (c) 2008- Konoha Team [email protected] * All rights reserved. * * You may choose one of the following two licenses when you use konoha. * If you want to use the latter license, please contact us. * * (1) GNU General Public License 3.0 (with K_UNDER_GPL) * (2) Konoha Non-Disclosure License 1.0 * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * ****************************************************************************/ ''') def write_begin_c(f): f.write(''' #ifdef __cplusplus extern "C" { #endif ''') def write_end_c(f): f.write(''' #ifdef __cplusplus } #endif ''') # --------------------------------------------------------------------------- def getdict(d, n, defv): if d.has_key(n): return d[n] return defv def read_settings(fn): KNH_DATA = {} try: f = open(fn) exec(f) f.close() return KNH_DATA except OSError, e: print e return KNH_DATA # --------------------------------------------------------------------------- def nz_fname(fname): if fname.rfind('/') > 0: return fname[fname.rfind('/')+1:] return fname def open_h(fname, lists):<|fim▁hole|> write_license(f) d = nz_fname(fname).replace('.', '_'). upper() f.write(''' #ifndef %s #define %s ''' % (d, d)) for i in lists: f.write(''' #include%s''' % i) if len(lists) > 0: f.write('\n\n') write_begin_c(f) write_dline(f) return f def open_h2(fname, lists): f = open(fname, 'w') write_license(f) d = nz_fname(fname).replace('.', '_'). upper() f.write(''' #ifndef %s #define %s ''' % (d, d)) for i in lists: f.write(''' #include%s''' % i) if len(lists) > 0: f.write('\n\n') return f # --------------------------------------------------------------------------- def close_h(f, fname): d = nz_fname(fname).replace('.', '_'). 
upper() write_end_c(f) write_dline(f) f.write(''' #endif/*%s*/ ''' % d) f.close() # --------------------------------------------------------------------------- def open_c(fname, lists, bom = None): f = open(fname, 'w') if bom == 'BOM': write_BOM(f) write_license(f) for i in lists: f.write(''' #include%s''' % i) if len(lists) > 0: f.write('\n\n') write_begin_c(f) write_dline(f) return f def close_c(f, fname): write_end_c(f) f.close() def get_serial_number(): f = open('SERIAL_NUMBER') n = int(f.readline()) f.close() n += 1 f = open('SERIAL_NUMBER', 'w') f.write('%d\n' % n) f.close() return n # --------------------------------------------------------------------------- # --------------------------------------------------------------------------- def parse_options(option): d = {} if option is None: return d for t in option.split(): if t.find('(') > 0: t = t.replace('(', ' ').replace(')', '') t = t.split() d[t[0]] = t[1] else: d[t] = 1 return d # --------------------------------------------------------------------------- def check_ifdef(d): ifdef = '' endif = '' if d.has_key('@ifdef'): ifdef = '#ifdef KNH_IMPORT_%s_\n' % d['@ifdef'] endif = '#endif/*KNH_IMPORT_%s_*/\n' %d['@ifdef'] return ifdef, endif # --------------------------------------------------------------------------- def alias_lname(cname): if cname.find('_') > 0: return cname.split('_')[1] return cname def STRUCT_cname(cname): return 'STRUCT_%s' % cname def STRUCT_sname(cname): return 'STRUCT_%s' % cname def SAFE_cname(t) : t = t.replace('..', '') t = t.replace('!', '') t = t.replace('[]', '') t = t.replace('::', '__') t = t.replace(':', '__') return t def CLASS_cname(cname) : prefix = '' if cname.endswith('[]'): prefix = 'A' if cname.endswith('..'): prefix = 'I' return '%sCLASS_%s' % (prefix, SAFE_cname(cname)) def T_cname(t) : prefix = '' if t.endswith("[]!"): prefix = 'NNA' elif t.endswith("!") : prefix = 'NN' if t.endswith('[]'): prefix = 'A' if t.endswith('..'): prefix = 'NNI' return '%sT_%s' % (prefix, 
SAFE_cname(t)) def DEBUG_cname(cname): return 'DEBUG_%s' % cname def FN_name(fn): return 'FN_%s' % fn def SAFE_mname(mname): return mname.replace('::', '__').replace(':', '__').replace('%', '_') def MN_mname(mname): return 'MN_%s' % SAFE_mname(mname) # --------------------------------------------------------------------------- DEBUG = None def debug_print(msg): if not DEBUG: print msg def nz_dir(dir): if dir.endswith('/'): return dir[:len(dir)-1] return dir #------------------------------------------------------------------------------------ FUNCMAP = {} def FUNCMAP_found(funcname): FUNCMAP[funcname] = funcname def FUNCMAP_exists(funcname): return FUNCMAP.has_key(funcname)<|fim▁end|>
f = open(fname, 'w')
<|file_name|>OrthographicCamera.js<|end_file_name|><|fim▁begin|>'use strict'; import { gl } from './Context'; import { mat4 } from 'gl-matrix'; import Camera from './Camera'; class OrthographicCamera extends Camera { constructor( { path, uniforms, background, translucence, right, top, name = 'orthographic.camera', left = -1, bottom = -1, near = 0.1, far = 1 } = {}) { super({ name, path, uniforms, background, translucence }); this.left = left; this.right = right;<|fim▁hole|> this.bottom = bottom; this.top = top; this.near = near; this.far = far; this.inheritance = ['Entity', 'Structure', 'Camera', 'OrthographicCamera']; this.configure(); } get left() { return this._left; } set left(left) { this._left = left; } get right() { return this._right; } set right(right) { this._right = right; } get bottom() { return this._bottom; } set bottom(bottom) { this._bottom = bottom; } get top() { return this._top; } set top(top) { this._top = top; } get near() { return this._near; } set near(near) { this._near = near; } get far() { return this._far; } set far(far) { this._far = far; } configure() { super.configure(); mat4.ortho(this.projectionMatrix, this.left, this.right, this.bottom, this.top, this.near, this.far); mat4.identity(this.modelViewMatrix); } bind(program) { super.bind(program); gl.disable(gl.DEPTH_TEST); gl.viewport(0, 0, this.right, this.top); } } export default OrthographicCamera;<|fim▁end|>
<|file_name|>main.js<|end_file_name|><|fim▁begin|><|fim▁hole|> * SPDX-License-Identifier: Apache-2.0 */ goog.provide('Main'); // Core // Either require 'Blockly.requires', or just the components you use: goog.require('Blockly'); goog.require('Blockly.geras.Renderer'); goog.require('Blockly.VerticalFlyout'); // Blocks goog.require('Blockly.libraryBlocks'); goog.require('Blockly.libraryBlocks.testBlocks'); Main.init = function() { Blockly.inject('blocklyDiv', { 'toolbox': document.getElementById('toolbox') }); }; window.addEventListener('load', Main.init);<|fim▁end|>
/** * @license * Copyright 2019 Google LLC
<|file_name|>time.rs<|end_file_name|><|fim▁begin|>use std::marker::PhantomData;<|fim▁hole|> use chrono; use super::Module; use crate::{Color, Powerline, Style}; pub struct Time<S: TimeScheme> { time_format: &'static str, scheme: PhantomData<S>, } pub trait TimeScheme { const TIME_BG: Color; const TIME_FG: Color; } impl<S: TimeScheme> Time<S> { pub fn new() -> Time<S> { Time { time_format: "%H:%M:%S", scheme: PhantomData } } pub fn with_time_format(time_format: &'static str) -> Time<S> { Time { time_format, scheme: PhantomData } } } impl<S: TimeScheme> Module for Time<S> { fn append_segments(&mut self, powerline: &mut Powerline) { let now = chrono::offset::Local::now().format(self.time_format); powerline.add_segment(now, Style::simple(S::TIME_FG, S::TIME_BG)); } }<|fim▁end|>
<|file_name|>schema-helper.js<|end_file_name|><|fim▁begin|>import Mirage from 'ember-cli-mirage'; import Schema from 'ember-cli-mirage/orm/schema'; import Model from 'ember-cli-mirage/orm/model'; import Db from 'ember-cli-mirage/db'; <|fim▁hole|> setup() { let db = new Db(); this.schema = new Schema(db); this.schema.registerModels({ wordSmith: Model.extend({ blogPosts: Mirage.hasMany() }), blogPost: Model.extend({ wordSmith: Mirage.belongsTo(), fineComments: Mirage.hasMany() }), fineComment: Model.extend({ blogPost: Mirage.belongsTo() }), greatPhoto: Model, foo: Model.extend({ bar: Mirage.belongsTo() }), bar: Model.extend({ baz: Mirage.belongsTo() }), baz: Model.extend({ quuxes: Mirage.hasMany() }), quux: Model.extend({ zomgs: Mirage.hasMany() }), zomg: Model.extend({ lol: Mirage.belongsTo() }), lol: Model, roflCopter: Model }); return this.schema; } };<|fim▁end|>
export default {
<|file_name|>dft_pitch.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- # This file is part of AudioLazy, the signal processing Python package. # Copyright (C) 2012-2014 Danilo de Jesus da Silva Bellini # # AudioLazy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, version 3 of the License. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # # Created on Wed May 01 2013 # danilo [dot] bellini [at] gmail [dot] com """ Pitch follower via DFT peak with Tkinter GUI """ # ------------------------ # AudioLazy pitch follower # ------------------------ import sys from audiolazy import (tostream, AudioIO, freq2str, sHz, chunks, lowpass, envelope, pi, thub, Stream, maverage) from numpy.fft import rfft def limiter(sig, threshold=.1, size=256, env=envelope.rms, cutoff=pi/2048): sig = thub(sig, 2) return sig * Stream( 1. 
if el <= threshold else threshold / el for el in maverage(size)(env(sig, cutoff=cutoff)) ) @tostream def dft_pitch(sig, size=2048, hop=None): for blk in Stream(sig).blocks(size=size, hop=hop): dft_data = rfft(blk) idx, vmax = max(enumerate(dft_data), key=lambda el: abs(el[1]) / (2 * el[0] / size + 1) ) yield 2 * pi * idx / size def pitch_from_mic(upd_time_in_ms): rate = 44100 s, Hz = sHz(rate) api = sys.argv[1] if sys.argv[1:] else None # Choose API via command-line chunks.size = 1 if api == "jack" else 16 with AudioIO(api=api) as recorder: snd = recorder.record(rate=rate) sndlow = lowpass(400 * Hz)(limiter(snd, cutoff=20 * Hz)) hop = int(upd_time_in_ms * 1e-3 * s) for pitch in freq2str(dft_pitch(sndlow, size=2*hop, hop=hop) / Hz): yield pitch # ---------------- # GUI with tkinter # ---------------- if __name__ == "__main__": try: import tkinter except ImportError: import Tkinter as tkinter import threading import re <|fim▁hole|> lbldata = tkinter.StringVar(tk) lbltext = tkinter.Label(tk, textvariable=lbldata, font=("Purisa", 72), width=10) lbltext.pack(expand=True, fill=tkinter.BOTH) btnclose = tkinter.Button(tk, text="Close", command=tk.destroy, default="active") btnclose.pack(fill=tkinter.X) # Needed data regex_note = re.compile(r"^([A-Gb#]*-?[0-9]*)([?+-]?)(.*?%?)$") upd_time_in_ms = 200 # Update functions for each thread def upd_value(): # Recording thread pitches = iter(pitch_from_mic(upd_time_in_ms)) while not tk.should_finish: tk.value = next(pitches) def upd_timer(): # GUI mainloop thread lbldata.set("\n".join(regex_note.findall(tk.value)[0])) tk.after(upd_time_in_ms, upd_timer) # Multi-thread management initialization tk.should_finish = False tk.value = freq2str(0) # Starting value lbldata.set(tk.value) tk.upd_thread = threading.Thread(target=upd_value) # Go tk.upd_thread.start() tk.after_idle(upd_timer) tk.mainloop() tk.should_finish = True tk.upd_thread.join()<|fim▁end|>
# Window (Tk init), text label and button tk = tkinter.Tk() tk.title(__doc__.strip().splitlines()[0])
<|file_name|>ip.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Internet Protocol (IP) addresses. //! //! This module contains functions useful for parsing, formatting, and //! manipulating IP addresses. #![allow(missing_doc)] use collections::Collection; use fmt; use from_str::FromStr; use iter::Iterator; use option::{Option, None, Some}; use str::StrSlice; use slice::{MutableCloneableVector, ImmutableVector, MutableVector}; pub type Port = u16; #[deriving(PartialEq, Eq, Clone, Hash)] pub enum IpAddr { Ipv4Addr(u8, u8, u8, u8), Ipv6Addr(u16, u16, u16, u16, u16, u16, u16, u16) }<|fim▁hole|> fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { match *self { Ipv4Addr(a, b, c, d) => write!(fmt, "{}.{}.{}.{}", a, b, c, d), // Ipv4 Compatible address Ipv6Addr(0, 0, 0, 0, 0, 0, g, h) => { write!(fmt, "::{}.{}.{}.{}", (g >> 8) as u8, g as u8, (h >> 8) as u8, h as u8) } // Ipv4-Mapped address Ipv6Addr(0, 0, 0, 0, 0, 0xFFFF, g, h) => { write!(fmt, "::FFFF:{}.{}.{}.{}", (g >> 8) as u8, g as u8, (h >> 8) as u8, h as u8) } Ipv6Addr(a, b, c, d, e, f, g, h) => write!(fmt, "{:x}:{:x}:{:x}:{:x}:{:x}:{:x}:{:x}:{:x}", a, b, c, d, e, f, g, h) } } } #[deriving(PartialEq, Eq, Clone, Hash)] pub struct SocketAddr { pub ip: IpAddr, pub port: Port, } impl fmt::Show for SocketAddr { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self.ip { Ipv4Addr(..) => write!(f, "{}:{}", self.ip, self.port), Ipv6Addr(..) 
=> write!(f, "[{}]:{}", self.ip, self.port), } } } struct Parser<'a> { // parsing as ASCII, so can use byte array s: &'a [u8], pos: uint, } impl<'a> Parser<'a> { fn new(s: &'a str) -> Parser<'a> { Parser { s: s.as_bytes(), pos: 0, } } fn is_eof(&self) -> bool { self.pos == self.s.len() } // Commit only if parser returns Some fn read_atomically<T>(&mut self, cb: |&mut Parser| -> Option<T>) -> Option<T> { let pos = self.pos; let r = cb(self); if r.is_none() { self.pos = pos; } r } // Commit only if parser read till EOF fn read_till_eof<T>(&mut self, cb: |&mut Parser| -> Option<T>) -> Option<T> { self.read_atomically(|p| cb(p).filtered(|_| p.is_eof())) } // Return result of first successful parser fn read_or<T>(&mut self, parsers: &mut [|&mut Parser| -> Option<T>]) -> Option<T> { for pf in parsers.mut_iter() { match self.read_atomically(|p: &mut Parser| (*pf)(p)) { Some(r) => return Some(r), None => {} } } None } // Apply 3 parsers sequentially fn read_seq_3<A, B, C>( &mut self, pa: |&mut Parser| -> Option<A>, pb: |&mut Parser| -> Option<B>, pc: |&mut Parser| -> Option<C>) -> Option<(A, B, C)> { self.read_atomically(|p| { let a = pa(p); let b = if a.is_some() { pb(p) } else { None }; let c = if b.is_some() { pc(p) } else { None }; match (a, b, c) { (Some(a), Some(b), Some(c)) => Some((a, b, c)), _ => None } }) } // Read next char fn read_char(&mut self) -> Option<char> { if self.is_eof() { None } else { let r = self.s[self.pos] as char; self.pos += 1; Some(r) } } // Return char and advance iff next char is equal to requested fn read_given_char(&mut self, c: char) -> Option<char> { self.read_atomically(|p| { p.read_char().filtered(|&next| next == c) }) } // Read digit fn read_digit(&mut self, radix: u8) -> Option<u8> { fn parse_digit(c: char, radix: u8) -> Option<u8> { let c = c as u8; // assuming radix is either 10 or 16 if c >= '0' as u8 && c <= '9' as u8 { Some(c - '0' as u8) } else if radix > 10 && c >= 'a' as u8 && c < 'a' as u8 + (radix - 10) { Some(c - 'a' as u8 
+ 10) } else if radix > 10 && c >= 'A' as u8 && c < 'A' as u8 + (radix - 10) { Some(c - 'A' as u8 + 10) } else { None } } self.read_atomically(|p| { p.read_char().and_then(|c| parse_digit(c, radix)) }) } fn read_number_impl(&mut self, radix: u8, max_digits: u32, upto: u32) -> Option<u32> { let mut r = 0u32; let mut digit_count = 0; loop { match self.read_digit(radix) { Some(d) => { r = r * (radix as u32) + (d as u32); digit_count += 1; if digit_count > max_digits || r >= upto { return None } } None => { if digit_count == 0 { return None } else { return Some(r) } } }; } } // Read number, failing if max_digits of number value exceeded fn read_number(&mut self, radix: u8, max_digits: u32, upto: u32) -> Option<u32> { self.read_atomically(|p| p.read_number_impl(radix, max_digits, upto)) } fn read_ipv4_addr_impl(&mut self) -> Option<IpAddr> { let mut bs = [0u8, ..4]; let mut i = 0; while i < 4 { if i != 0 && self.read_given_char('.').is_none() { return None; } let octet = self.read_number(10, 3, 0x100).map(|n| n as u8); match octet { Some(d) => bs[i] = d, None => return None, }; i += 1; } Some(Ipv4Addr(bs[0], bs[1], bs[2], bs[3])) } // Read IPv4 address fn read_ipv4_addr(&mut self) -> Option<IpAddr> { self.read_atomically(|p| p.read_ipv4_addr_impl()) } fn read_ipv6_addr_impl(&mut self) -> Option<IpAddr> { fn ipv6_addr_from_head_tail(head: &[u16], tail: &[u16]) -> IpAddr { assert!(head.len() + tail.len() <= 8); let mut gs = [0u16, ..8]; gs.copy_from(head); gs.mut_slice(8 - tail.len(), 8).copy_from(tail); Ipv6Addr(gs[0], gs[1], gs[2], gs[3], gs[4], gs[5], gs[6], gs[7]) } fn read_groups(p: &mut Parser, groups: &mut [u16, ..8], limit: uint) -> (uint, bool) { let mut i = 0; while i < limit { if i < limit - 1 { let ipv4 = p.read_atomically(|p| { if i == 0 || p.read_given_char(':').is_some() { p.read_ipv4_addr() } else { None } }); match ipv4 { Some(Ipv4Addr(a, b, c, d)) => { groups[i + 0] = (a as u16 << 8) | (b as u16); groups[i + 1] = (c as u16 << 8) | (d as u16); return (i + 
2, true); } _ => {} } } let group = p.read_atomically(|p| { if i == 0 || p.read_given_char(':').is_some() { p.read_number(16, 4, 0x10000).map(|n| n as u16) } else { None } }); match group { Some(g) => groups[i] = g, None => return (i, false) } i += 1; } (i, false) } let mut head = [0u16, ..8]; let (head_size, head_ipv4) = read_groups(self, &mut head, 8); if head_size == 8 { return Some(Ipv6Addr( head[0], head[1], head[2], head[3], head[4], head[5], head[6], head[7])) } // IPv4 part is not allowed before `::` if head_ipv4 { return None } // read `::` if previous code parsed less than 8 groups if !self.read_given_char(':').is_some() || !self.read_given_char(':').is_some() { return None; } let mut tail = [0u16, ..8]; let (tail_size, _) = read_groups(self, &mut tail, 8 - head_size); Some(ipv6_addr_from_head_tail(head.slice(0, head_size), tail.slice(0, tail_size))) } fn read_ipv6_addr(&mut self) -> Option<IpAddr> { self.read_atomically(|p| p.read_ipv6_addr_impl()) } fn read_ip_addr(&mut self) -> Option<IpAddr> { let ipv4_addr = |p: &mut Parser| p.read_ipv4_addr(); let ipv6_addr = |p: &mut Parser| p.read_ipv6_addr(); self.read_or(&mut [ipv4_addr, ipv6_addr]) } fn read_socket_addr(&mut self) -> Option<SocketAddr> { let ip_addr = |p: &mut Parser| { let ipv4_p = |p: &mut Parser| p.read_ip_addr(); let ipv6_p = |p: &mut Parser| { let open_br = |p: &mut Parser| p.read_given_char('['); let ip_addr = |p: &mut Parser| p.read_ipv6_addr(); let clos_br = |p: &mut Parser| p.read_given_char(']'); p.read_seq_3::<char, IpAddr, char>(open_br, ip_addr, clos_br) .map(|t| match t { (_, ip, _) => ip }) }; p.read_or(&mut [ipv4_p, ipv6_p]) }; let colon = |p: &mut Parser| p.read_given_char(':'); let port = |p: &mut Parser| p.read_number(10, 5, 0x10000).map(|n| n as u16); // host, colon, port self.read_seq_3::<IpAddr, char, u16>(ip_addr, colon, port) .map(|t| match t { (ip, _, port) => SocketAddr { ip: ip, port: port } }) } } impl FromStr for IpAddr { fn from_str(s: &str) -> Option<IpAddr> { 
Parser::new(s).read_till_eof(|p| p.read_ip_addr()) } } impl FromStr for SocketAddr { fn from_str(s: &str) -> Option<SocketAddr> { Parser::new(s).read_till_eof(|p| p.read_socket_addr()) } } #[cfg(test)] mod test { use prelude::*; use super::*; use from_str::FromStr; #[test] fn test_from_str_ipv4() { assert_eq!(Some(Ipv4Addr(127, 0, 0, 1)), FromStr::from_str("127.0.0.1")); assert_eq!(Some(Ipv4Addr(255, 255, 255, 255)), FromStr::from_str("255.255.255.255")); assert_eq!(Some(Ipv4Addr(0, 0, 0, 0)), FromStr::from_str("0.0.0.0")); // out of range let none: Option<IpAddr> = FromStr::from_str("256.0.0.1"); assert_eq!(None, none); // too short let none: Option<IpAddr> = FromStr::from_str("255.0.0"); assert_eq!(None, none); // too long let none: Option<IpAddr> = FromStr::from_str("255.0.0.1.2"); assert_eq!(None, none); // no number between dots let none: Option<IpAddr> = FromStr::from_str("255.0..1"); assert_eq!(None, none); } #[test] fn test_from_str_ipv6() { assert_eq!(Some(Ipv6Addr(0, 0, 0, 0, 0, 0, 0, 0)), FromStr::from_str("0:0:0:0:0:0:0:0")); assert_eq!(Some(Ipv6Addr(0, 0, 0, 0, 0, 0, 0, 1)), FromStr::from_str("0:0:0:0:0:0:0:1")); assert_eq!(Some(Ipv6Addr(0, 0, 0, 0, 0, 0, 0, 1)), FromStr::from_str("::1")); assert_eq!(Some(Ipv6Addr(0, 0, 0, 0, 0, 0, 0, 0)), FromStr::from_str("::")); assert_eq!(Some(Ipv6Addr(0x2a02, 0x6b8, 0, 0, 0, 0, 0x11, 0x11)), FromStr::from_str("2a02:6b8::11:11")); // too long group let none: Option<IpAddr> = FromStr::from_str("::00000"); assert_eq!(None, none); // too short let none: Option<IpAddr> = FromStr::from_str("1:2:3:4:5:6:7"); assert_eq!(None, none); // too long let none: Option<IpAddr> = FromStr::from_str("1:2:3:4:5:6:7:8:9"); assert_eq!(None, none); // triple colon let none: Option<IpAddr> = FromStr::from_str("1:2:::6:7:8"); assert_eq!(None, none); // two double colons let none: Option<IpAddr> = FromStr::from_str("1:2::6::8"); assert_eq!(None, none); } #[test] fn test_from_str_ipv4_in_ipv6() { assert_eq!(Some(Ipv6Addr(0, 0, 0, 0, 0, 0, 
49152, 545)), FromStr::from_str("::192.0.2.33")); assert_eq!(Some(Ipv6Addr(0, 0, 0, 0, 0, 0xFFFF, 49152, 545)), FromStr::from_str("::FFFF:192.0.2.33")); assert_eq!(Some(Ipv6Addr(0x64, 0xff9b, 0, 0, 0, 0, 49152, 545)), FromStr::from_str("64:ff9b::192.0.2.33")); assert_eq!(Some(Ipv6Addr(0x2001, 0xdb8, 0x122, 0xc000, 0x2, 0x2100, 49152, 545)), FromStr::from_str("2001:db8:122:c000:2:2100:192.0.2.33")); // colon after v4 let none: Option<IpAddr> = FromStr::from_str("::127.0.0.1:"); assert_eq!(None, none); // not enought groups let none: Option<IpAddr> = FromStr::from_str("1.2.3.4.5:127.0.0.1"); assert_eq!(None, none); // too many groups let none: Option<IpAddr> = FromStr::from_str("1.2.3.4.5:6:7:127.0.0.1"); assert_eq!(None, none); } #[test] fn test_from_str_socket_addr() { assert_eq!(Some(SocketAddr { ip: Ipv4Addr(77, 88, 21, 11), port: 80 }), FromStr::from_str("77.88.21.11:80")); assert_eq!(Some(SocketAddr { ip: Ipv6Addr(0x2a02, 0x6b8, 0, 1, 0, 0, 0, 1), port: 53 }), FromStr::from_str("[2a02:6b8:0:1::1]:53")); assert_eq!(Some(SocketAddr { ip: Ipv6Addr(0, 0, 0, 0, 0, 0, 0x7F00, 1), port: 22 }), FromStr::from_str("[::127.0.0.1]:22")); // without port let none: Option<SocketAddr> = FromStr::from_str("127.0.0.1"); assert_eq!(None, none); // without port let none: Option<SocketAddr> = FromStr::from_str("127.0.0.1:"); assert_eq!(None, none); // wrong brackets around v4 let none: Option<SocketAddr> = FromStr::from_str("[127.0.0.1]:22"); assert_eq!(None, none); // port out of range let none: Option<SocketAddr> = FromStr::from_str("127.0.0.1:123456"); assert_eq!(None, none); } #[test] fn ipv6_addr_to_str() { let a1 = Ipv6Addr(0, 0, 0, 0, 0, 0xffff, 0xc000, 0x280); assert!(a1.to_str() == "::ffff:192.0.2.128".to_string() || a1.to_str() == "::FFFF:192.0.2.128".to_string()); assert_eq!(Ipv6Addr(8, 9, 10, 11, 12, 13, 14, 15).to_str(), "8:9:a:b:c:d:e:f".to_string()); } }<|fim▁end|>
impl fmt::Show for IpAddr {
<|file_name|>codegen.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2013-2015 Sandstorm Development Group, Inc. and contributors // Licensed under the MIT License: // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
use capnp; use std::collections; use schema_capnp; use self::FormattedText::{Indent, Line, Branch, BlankLine}; fn tuple_result<T,U,V>(t : Result<T, V>, u : Result<U, V>) -> Result<(T,U), V> { match (t, u) { (Ok(t1), Ok(u1)) => Ok((t1, u1)), (Err(e), _) => Err(e), (_, Err(e)) => Err(e), } } fn prim_type_str (typ : schema_capnp::type_::WhichReader) -> &'static str { use schema_capnp::type_::*; match typ { Void(()) => "()", Bool(()) => "bool", Int8(()) => "i8", Int16(()) => "i16", Int32(()) => "i32", Int64(()) => "i64", Uint8(()) => "u8", Uint16(()) => "u16", Uint32(()) => "u32", Uint64(()) => "u64", Float32(()) => "f32", Float64(()) => "f64", Enum(_) => "u16", _ => panic!("not primitive") } } #[allow(dead_code)] fn camel_to_upper_case(s : &str) -> String { use std::ascii::*; let mut result_chars : Vec<char> = Vec::new(); for c in s.chars() { assert!(c.is_alphanumeric(), format!("not alphanumeric '{}'", c)); if c.is_uppercase() { result_chars.push('_'); } result_chars.push((c as u8).to_ascii_uppercase() as char); } return result_chars.into_iter().collect(); } fn snake_to_upper_case(s : &str) -> String { use std::ascii::*; let mut result_chars : Vec<char> = Vec::new(); for c in s.chars() { if c == '_' { result_chars.push('_'); } else { assert!(c.is_alphanumeric(), format!("not alphanumeric '{}'", c)); result_chars.push((c as u8).to_ascii_uppercase() as char); } } return result_chars.into_iter().collect(); } fn camel_to_snake_case(s : &str) -> String { use std::ascii::*; let mut result_chars : Vec<char> = Vec::new(); let mut first_char = true; for c in s.chars() { assert!(c.is_alphanumeric(), format!("not alphanumeric '{}', i.e. 
{}", c, c as usize)); if c.is_uppercase() && !first_char { result_chars.push('_'); } result_chars.push((c as u8).to_ascii_lowercase() as char); first_char = false; } return result_chars.into_iter().collect(); } fn capitalize_first_letter(s : &str) -> String { use std::ascii::*; let mut result_chars : Vec<char> = Vec::new(); for c in s.chars() { result_chars.push(c) } result_chars[0] = (result_chars[0] as u8).to_ascii_uppercase() as char; return result_chars.into_iter().collect(); } #[test] fn test_camel_to_upper_case() { assert_eq!(camel_to_upper_case("fooBar"), "FOO_BAR".to_string()); assert_eq!(camel_to_upper_case("fooBarBaz"), "FOO_BAR_BAZ".to_string()); assert_eq!(camel_to_upper_case("helloWorld"), "HELLO_WORLD".to_string()); } #[test] fn test_camel_to_snake_case() { assert_eq!(camel_to_snake_case("fooBar"), "foo_bar".to_string()); assert_eq!(camel_to_snake_case("FooBar"), "foo_bar".to_string()); assert_eq!(camel_to_snake_case("fooBarBaz"), "foo_bar_baz".to_string()); assert_eq!(camel_to_snake_case("FooBarBaz"), "foo_bar_baz".to_string()); assert_eq!(camel_to_snake_case("helloWorld"), "hello_world".to_string()); assert_eq!(camel_to_snake_case("HelloWorld"), "hello_world".to_string()); assert_eq!(camel_to_snake_case("uint32Id"), "uint32_id".to_string()); } #[derive(PartialEq)] enum FormattedText { Indent(Box<FormattedText>), Branch(Vec<FormattedText>), Line(String), BlankLine } fn to_lines(ft : &FormattedText, indent : usize) -> Vec<String> { match *ft { Indent (ref ft) => { return to_lines(&**ft, indent + 1); } Branch (ref fts) => { let mut result = Vec::new(); for ft in fts.iter() { for line in to_lines(ft, indent).iter() { result.push(line.clone()); // TODO there's probably a better way to do this. 
} } return result; } Line(ref s) => { let mut s1 : String = ::std::iter::repeat(' ').take(indent * 2).collect(); s1.push_str(&s); return vec!(s1.to_string()); } BlankLine => return vec!("".to_string()) } } fn stringify(ft : & FormattedText) -> String { let mut result = to_lines(ft, 0).connect("\n"); result.push_str("\n"); return result.to_string(); } const RUST_KEYWORDS : [&'static str; 51] = ["abstract", "alignof", "as", "be", "box", "break", "const", "continue", "crate", "do", "else", "enum", "extern", "false", "final", "fn", "for", "if", "impl", "in", "let", "loop", "match", "mod", "move", "mut", "offsetof", "once", "override", "priv", "proc", "pub", "pure", "ref", "return", "sizeof", "static", "self", "struct", "super", "true", "trait", "type", "typeof", "unsafe", "unsized", "use", "virtual", "where", "while", "yield"]; fn module_name(camel_case : &str) -> String { let mut name = camel_to_snake_case(camel_case); if RUST_KEYWORDS.contains(&&*name) { name.push('_'); } return name; } fn populate_scope_map(node_map : &collections::hash_map::HashMap<u64, schema_capnp::node::Reader>, scope_map : &mut collections::hash_map::HashMap<u64, Vec<String>>, scope_names : Vec<String>, node_id : u64) { scope_map.insert(node_id, scope_names.clone()); // unused nodes in imported files might be omitted from the node map let node_reader = match node_map.get(&node_id) { Some(node) => node, None => return (), }; let nested_nodes = node_reader.get_nested_nodes().unwrap(); for nested_node in nested_nodes.iter(){ let mut scope_names = scope_names.clone(); let nested_node_id = nested_node.get_id(); match node_map.get(&nested_node_id) { None => {} Some(node_reader) => { match node_reader.which() { Ok(schema_capnp::node::Enum(_enum_reader)) => { scope_names.push(nested_node.get_name().unwrap().to_string()); populate_scope_map(node_map, scope_map, scope_names, nested_node_id); } _ => { scope_names.push(module_name(nested_node.get_name().unwrap())); populate_scope_map(node_map, scope_map, 
scope_names, nested_node_id); } } } } } match node_reader.which() { Ok(schema_capnp::node::Struct(struct_reader)) => { let fields = struct_reader.get_fields().unwrap(); for field in fields.iter() { match field.which() { Ok(schema_capnp::field::Group(group)) => { let name = module_name(field.get_name().unwrap()); let mut scope_names = scope_names.clone(); scope_names.push(name); populate_scope_map(node_map, scope_map, scope_names, group.get_type_id()); } _ => {} } } } _ => { } } } fn generate_import_statements() -> FormattedText { Branch(vec!( Line("#![allow(unused_imports)]".to_string()), Line("use capnp::capability::{FromClientHook, FromTypelessPipeline};".to_string()), Line("use capnp::{text, data, Result};".to_string()), Line("use capnp::private::layout;".to_string()), Line("use capnp::traits::{FromStructBuilder, FromStructReader};".to_string()), Line("use capnp::{primitive_list, enum_list, struct_list, text_list, data_list, list_list};".to_string()), )) } fn list_list_type_param(scope_map : &collections::hash_map::HashMap<u64, Vec<String>>, typ : schema_capnp::type_::Reader, is_reader: bool, lifetime_name: &str) -> String { use schema_capnp::type_; let module = if is_reader { "Reader" } else { "Builder" }; match typ.which() { Err(_) => panic!("unsupported type"), Ok(t) => { match t { type_::Void(()) | type_::Bool(()) | type_::Int8(()) | type_::Int16(()) | type_::Int32(()) | type_::Int64(()) | type_::Uint8(()) | type_::Uint16(()) | type_::Uint32(()) | type_::Uint64(()) | type_::Float32(()) | type_::Float64(()) => { format!("primitive_list::{}<{}, {}>", module, lifetime_name, prim_type_str(t)) } type_::Enum(en) => { let the_mod = scope_map[&en.get_type_id()].connect("::"); format!("enum_list::{}<{},{}>", module, lifetime_name, the_mod) } type_::Text(()) => { format!("text_list::{}<{}>", module, lifetime_name) } type_::Data(()) => { format!("data_list::{}<{}>", module, lifetime_name) } type_::Struct(st) => { format!("struct_list::{}<{lifetime}, 
{}::{}<{lifetime}>>", module, scope_map[&st.get_type_id()].connect("::"), module, lifetime = lifetime_name) } type_::List(t) => { let inner = list_list_type_param(scope_map, t.get_element_type().unwrap(), is_reader, lifetime_name); format!("list_list::{}<{}, {}>", module, lifetime_name, inner) } type_::AnyPointer(_) => { panic!("List(AnyPointer) is unsupported"); } type_::Interface(_i) => { panic!("unimplemented"); } } } } } fn prim_default (value : &schema_capnp::value::Reader) -> Option<String> { use schema_capnp::value; match value.which().unwrap() { value::Bool(false) | value::Int8(0) | value::Int16(0) | value::Int32(0) | value::Int64(0) | value::Uint8(0) | value::Uint16(0) | value::Uint32(0) | value::Uint64(0) | value::Float32(0.0) | value::Float64(0.0) => None, value::Bool(true) => Some(format!("true")), value::Int8(i) => Some(i.to_string()), value::Int16(i) => Some(i.to_string()), value::Int32(i) => Some(i.to_string()), value::Int64(i) => Some(i.to_string()), value::Uint8(i) => Some(i.to_string()), value::Uint16(i) => Some(i.to_string()), value::Uint32(i) => Some(i.to_string()), value::Uint64(i) => Some(i.to_string()), value::Float32(f) => Some(format!("{}u32", unsafe {::std::mem::transmute::<f32, u32>(f)}.to_string())), value::Float64(f) => Some(format!("{}u64", unsafe {::std::mem::transmute::<f64, u64>(f)}.to_string())), _ => {panic!()} } } fn getter_text (_node_map : &collections::hash_map::HashMap<u64, schema_capnp::node::Reader>, scope_map : &collections::hash_map::HashMap<u64, Vec<String>>, field : &schema_capnp::field::Reader, is_reader : bool) -> (String, FormattedText) { use schema_capnp::*; match field.which() { Err(_) => panic!("unrecognized field type"), Ok(field::Group(group)) => { let the_mod = scope_map[&group.get_type_id()].connect("::"); if is_reader { return (format!("{}::Reader<'a>", the_mod), Line("::capnp::traits::FromStructReader::new(self.reader)".to_string())); } else { return (format!("{}::Builder<'a>", the_mod), 
Line("::capnp::traits::FromStructBuilder::new(self.builder)".to_string())); } } Ok(field::Slot(reg_field)) => { let offset = reg_field.get_offset() as usize; let member = if is_reader { "reader" } else { "builder" }; let module = if is_reader { "Reader" } else { "Builder" }; let lifetime = if is_reader { "'a" } else {"'a"}; let module_with_var = if is_reader { "Reader<'a>" } else { "Builder<'a>" }; match tuple_result(reg_field.get_type().unwrap().which(), reg_field.get_default_value().unwrap().which()) { Ok((type_::Void(()), value::Void(()))) => { return ("()".to_string(), Line("()".to_string()))} Ok((type_::Bool(()), value::Bool(b))) => { if b { return ("bool".to_string(), Line(format!("self.{}.get_bool_field_mask({}, true)", member, offset))) } else { return ("bool".to_string(), Line(format!("self.{}.get_bool_field({})", member, offset))) } } Ok((type_::Int8(()), value::Int8(i))) => return common_case("i8", member, offset, i, 0), Ok((type_::Int16(()), value::Int16(i))) => return common_case("i16", member, offset, i, 0), Ok((type_::Int32(()), value::Int32(i))) => return common_case("i32", member, offset, i, 0), Ok((type_::Int64(()), value::Int64(i))) => return common_case("i64", member, offset, i, 0), Ok((type_::Uint8(()), value::Uint8(i))) => return common_case("u8", member, offset, i, 0), Ok((type_::Uint16(()), value::Uint16(i))) => return common_case("u16", member, offset, i, 0), Ok((type_::Uint32(()), value::Uint32(i))) => return common_case("u32", member, offset, i, 0), Ok((type_::Uint64(()), value::Uint64(i))) => return common_case("u64", member, offset, i, 0), Ok((type_::Float32(()), value::Float32(f))) => return common_case("f32", member, offset, unsafe { ::std::mem::transmute::<f32, u32>(f) }, 0), Ok((type_::Float64(()), value::Float64(f))) => return common_case("f64", member, offset, unsafe { ::std::mem::transmute::<f64, u64>(f) }, 0), Ok((type_::Text(()), _)) => { return (format!("Result<text::{}>", module_with_var), 
Line(format!("self.{}.get_pointer_field({}).get_text(::std::ptr::null(), 0)", member, offset))); } Ok((type_::Data(()), _)) => { return (format!("Result<data::{}>", module_with_var), Line(format!("self.{}.get_pointer_field({}).get_data(::std::ptr::null(), 0)", member, offset))); } Ok((type_::List(ot1), _)) => { let get_it = if is_reader { Line(format!( "::capnp::traits::FromPointerReader::get_from_pointer(&self.{}.get_pointer_field({}))", member, offset)) } else { Line(format!("::capnp::traits::FromPointerBuilder::get_from_pointer(self.{}.get_pointer_field({}))", member, offset)) }; match ot1.get_element_type().unwrap().which() { Err(_) => { panic!("unsupported type") } Ok(type_::Struct(st)) => { let the_mod = scope_map[&st.get_type_id()].connect("::"); return (format!("Result<struct_list::{}<{lifetime},{}::{}<{lifetime}>>>", module, the_mod, module, lifetime=lifetime), get_it); } Ok(type_::Enum(e)) => { let the_mod = scope_map[&e.get_type_id()].connect("::"); return (format!("Result<enum_list::{}<{},{}>>",module, lifetime, the_mod), get_it); } Ok(type_::List(t1)) => { let type_param = list_list_type_param(scope_map, t1.get_element_type().unwrap(), is_reader, lifetime); return (format!("Result<list_list::{}<{},{}>>", module, lifetime, type_param), get_it); } Ok(type_::Text(())) => { return (format!("Result<text_list::{}>", module_with_var), get_it); } Ok(type_::Data(())) => { return (format!("Result<data_list::{}>", module_with_var), get_it); } Ok(type_::Interface(_)) => {panic!("unimplemented") } Ok(type_::AnyPointer(_)) => {panic!("List(AnyPointer) is unsupported")} Ok(prim_type) => { return (format!("Result<primitive_list::{}<{},{}>>", module, lifetime, prim_type_str(prim_type)), get_it); } } } Ok((type_::Enum(en), _)) => { let scope = &scope_map[&en.get_type_id()]; let the_mod = scope.connect("::"); return // Enums don't have builders. 
(format!("::std::result::Result<{}, ::capnp::NotInSchema>", the_mod), Branch(vec!( Line(format!("::capnp::traits::FromU16::from_u16(self.{}.get_data_field::<u16>({}))", member, offset)) ))); } Ok((type_::Struct(st), _)) => { let the_mod = scope_map[&st.get_type_id()].connect("::"); let construct = if is_reader { Line(format!("::capnp::traits::FromPointerReader::get_from_pointer(&self.{}.get_pointer_field({}))", member, offset)) } else { Line(format!("::capnp::traits::FromPointerBuilder::get_from_pointer(self.{}.get_pointer_field({}))", member, offset)) }; return (format!("Result<{}::{}>", the_mod, module_with_var), construct); } Ok((type_::Interface(interface), _)) => { let the_mod = scope_map[&interface.get_type_id()].connect("::"); return (format!("Result<{}::Client>", the_mod), Line(format!("match self.{}.get_pointer_field({}).get_capability() {{ ::std::result::Result::Ok(c) => ::std::result::Result::Ok(FromClientHook::new(c)), ::std::result::Result::Err(e) => ::std::result::Result::Err(e)}}", member, offset))); } Ok((type_::AnyPointer(_), _)) => { return (format!("::capnp::any_pointer::{}<{}>", module, lifetime), Line(format!("::capnp::any_pointer::{}::new(self.{}.get_pointer_field({}))", module, member, offset))) } Err(_) => { // XXX should probably silently ignore, instead. 
panic!("unrecognized type") } _ => { panic!("default value was of wrong type"); } } } } fn common_case<T: PartialEq + ::std::fmt::Display>( typ: &str, member : &str, offset: usize, default : T, zero : T) -> (String, FormattedText) { let interior = if default == zero { Line(format!("self.{}.get_data_field::<{}>({})", member, typ, offset)) } else { Line(format!("self.{}.get_data_field_mask::<{typ}>({}, {})", member, offset, default, typ=typ)) }; return (typ.to_string(), interior); } } fn zero_fields_of_group(node_map : &collections::hash_map::HashMap<u64, schema_capnp::node::Reader>, node_id : u64 ) -> FormattedText { use schema_capnp::{node, field, type_}; match node_map[&node_id].which() { Ok(node::Struct(st)) => { let mut result = Vec::new(); if st.get_discriminant_count() != 0 { result.push( Line(format!("self.builder.set_data_field::<u16>({}, 0);", st.get_discriminant_offset()))); } let fields = st.get_fields().unwrap(); for field in fields.iter() { match field.which().unwrap() { field::Group(group) => { result.push(zero_fields_of_group(node_map, group.get_type_id())); } field::Slot(slot) => { let typ = slot.get_type().unwrap().which().unwrap(); match typ { type_::Void(()) => {} type_::Bool(()) => { let line = Line(format!("self.builder.set_bool_field({}, false);", slot.get_offset())); // PERF could dedup more efficiently if !result.contains(&line) { result.push(line) } } type_::Int8(()) | type_::Int16(()) | type_::Int32(()) | type_::Int64(()) | type_::Uint8(()) | type_::Uint16(()) | type_::Uint32(()) | type_::Uint64(()) | type_::Float32(()) | type_::Float64(()) | type_::Enum(_) => { let line = Line(format!("self.builder.set_data_field::<{0}>({1}, 0u8 as {0});", prim_type_str(typ), slot.get_offset())); // PERF could dedup more efficiently if !result.contains(&line) { result.push(line) } } type_::Struct(_) | type_::List(_) | type_::Text(()) | type_::Data(()) | type_::AnyPointer(_) | type_::Interface(_) // Is this the right thing to do for interfaces? 
=> { let line = Line(format!("self.builder.get_pointer_field({}).clear();", slot.get_offset())); // PERF could dedup more efficiently if !result.contains(&line) { result.push(line) } } } } } } return Branch(result); } _ => { panic!("expected a struct") } } } fn generate_setter(node_map : &collections::hash_map::HashMap<u64, schema_capnp::node::Reader>, scope_map : &collections::hash_map::HashMap<u64, Vec<String>>, discriminant_offset : u32, styled_name : &str, field :&schema_capnp::field::Reader) -> FormattedText { use schema_capnp::*; let mut setter_interior = Vec::new(); let mut setter_param = "value".to_string(); let mut initter_interior = Vec::new(); let mut initter_params = Vec::new(); let discriminant_value = field.get_discriminant_value(); if discriminant_value != field::NO_DISCRIMINANT { setter_interior.push( Line(format!("self.builder.set_data_field::<u16>({}, {});", discriminant_offset as usize, discriminant_value as usize))); initter_interior.push( Line(format!("self.builder.set_data_field::<u16>({}, {});", discriminant_offset as usize, discriminant_value as usize))); } let mut setter_lifetime_param = ""; let mut return_result = false; let (maybe_reader_type, maybe_builder_type) : (Option<String>, Option<String>) = match field.which() { Err(_) => panic!("unrecognized field type"), Ok(field::Group(group)) => { let scope = &scope_map[&group.get_type_id()]; let the_mod = scope.connect("::"); initter_interior.push(zero_fields_of_group(node_map, group.get_type_id())); initter_interior.push(Line(format!("::capnp::traits::FromStructBuilder::new(self.builder)"))); (None, Some(format!("{}::Builder<'a>", the_mod))) } Ok(field::Slot(reg_field)) => { fn common_case (typ: &str, offset : usize, reg_field : field::slot::Reader, setter_interior : &mut Vec<FormattedText> ) -> (Option<String>, Option<String>) { match prim_default(&reg_field.get_default_value().unwrap()) { None => { setter_interior.push(Line(format!("self.builder.set_data_field::<{}>({}, value);", typ, 
offset))); } Some(s) => { setter_interior.push( Line(format!("self.builder.set_data_field_mask::<{}>({}, value, {});", typ, offset, s))); } } (Some(typ.to_string()), None) }; let offset = reg_field.get_offset() as usize; match reg_field.get_type().unwrap().which() { Ok(type_::Void(())) => { setter_param = "_value".to_string(); (Some("()".to_string()), None) } Ok(type_::Bool(())) => { match prim_default(&reg_field.get_default_value().unwrap()) { None => { setter_interior.push(Line(format!("self.builder.set_bool_field({}, value);", offset))); } Some(s) => { setter_interior.push( Line(format!("self.builder.set_bool_field_mask({}, value, {});", offset, s))); } } (Some("bool".to_string()), None) } Ok(type_::Int8(())) => common_case("i8", offset, reg_field, &mut setter_interior), Ok(type_::Int16(())) => common_case("i16", offset, reg_field, &mut setter_interior), Ok(type_::Int32(())) => common_case("i32", offset, reg_field, &mut setter_interior), Ok(type_::Int64(())) => common_case("i64", offset, reg_field, &mut setter_interior), Ok(type_::Uint8(())) => common_case("u8", offset, reg_field, &mut setter_interior), Ok(type_::Uint16(())) => common_case("u16", offset, reg_field, &mut setter_interior), Ok(type_::Uint32(())) => common_case("u32", offset, reg_field, &mut setter_interior), Ok(type_::Uint64(())) => common_case("u64", offset, reg_field, &mut setter_interior), Ok(type_::Float32(())) => common_case("f32", offset, reg_field, &mut setter_interior), Ok(type_::Float64(())) => common_case("f64", offset, reg_field, &mut setter_interior), Ok(type_::Text(())) => { setter_interior.push(Line(format!("self.builder.get_pointer_field({}).set_text(value);", offset))); initter_interior.push(Line(format!("self.builder.get_pointer_field({}).init_text(size)", offset))); initter_params.push("size : u32"); (Some("text::Reader".to_string()), Some("text::Builder<'a>".to_string())) } Ok(type_::Data(())) => { 
setter_interior.push(Line(format!("self.builder.get_pointer_field({}).set_data(value);", offset))); initter_interior.push(Line(format!("self.builder.get_pointer_field({}).init_data(size)", offset))); initter_params.push("size : u32"); (Some("data::Reader".to_string()), Some("data::Builder<'a>".to_string())) } Ok(type_::List(ot1)) => { return_result = true; setter_interior.push( Line(format!("::capnp::traits::SetPointerBuilder::set_pointer_builder(self.builder.get_pointer_field({}), value)", offset))); initter_params.push("size : u32"); initter_interior.push( Line(format!("::capnp::traits::FromPointerBuilder::init_pointer(self.builder.get_pointer_field({}), size)", offset))); match ot1.get_element_type().unwrap().which() { Err(_) => panic!("unsupported type"), Ok(t1) => { match t1 { type_::Void(()) | type_::Bool(()) | type_::Int8(()) | type_::Int16(()) | type_::Int32(()) | type_::Int64(()) | type_::Uint8(()) | type_::Uint16(()) | type_::Uint32(()) | type_::Uint64(()) | type_::Float32(()) | type_::Float64(()) => { let type_str = prim_type_str(t1); (Some(format!("primitive_list::Reader<'a,{}>", type_str)), Some(format!("primitive_list::Builder<'a,{}>", type_str))) } type_::Enum(e) => { let id = e.get_type_id(); let scope = &scope_map[&id]; let the_mod = scope.connect("::"); let type_str = format!("{}", the_mod); (Some(format!("enum_list::Reader<'a,{}>", type_str)), Some(format!("enum_list::Builder<'a,{}>", type_str))) } type_::Struct(st) => { let id = st.get_type_id(); let scope = &scope_map[&id]; let the_mod = scope.connect("::"); (Some(format!("struct_list::Reader<'a,{}::Reader<'a>>", the_mod)), Some(format!("struct_list::Builder<'a,{}::Builder<'a>>", the_mod))) } type_::Text(()) => { (Some(format!("text_list::Reader")), Some(format!("text_list::Builder<'a>"))) } type_::Data(()) => { (Some(format!("data_list::Reader")), Some(format!("data_list::Builder<'a>"))) } type_::List(t1) => { let type_param = list_list_type_param(scope_map, t1.get_element_type().unwrap(), 
false, "'a"); setter_lifetime_param = "<'b>"; (Some(format!("list_list::Reader<'b, {}>", list_list_type_param(scope_map, t1.get_element_type().unwrap(), true, "'b"))), Some(format!("list_list::Builder<'a, {}>", type_param))) } type_::AnyPointer(_) => {panic!("List(AnyPointer) not supported")} type_::Interface(_) => { panic!("unimplemented") } } } } } Ok(type_::Enum(e)) => { let id = e.get_type_id(); let the_mod = scope_map[&id].connect("::"); setter_interior.push( Line(format!("self.builder.set_data_field::<u16>({}, value as u16)", offset))); (Some(format!("{}", the_mod)), None) } Ok(type_::Struct(st)) => { let the_mod = scope_map[&st.get_type_id()].connect("::"); return_result = true; setter_interior.push( Line(format!("::capnp::traits::SetPointerBuilder::set_pointer_builder(self.builder.get_pointer_field({}), value)", offset))); initter_interior.push( Line(format!("::capnp::traits::FromPointerBuilder::init_pointer(self.builder.get_pointer_field({}), 0)", offset))); (Some(format!("{}::Reader", the_mod)), Some(format!("{}::Builder<'a>", the_mod))) } Ok(type_::Interface(interface)) => { let the_mod = scope_map[&interface.get_type_id()].connect("::"); setter_interior.push( Line(format!("self.builder.get_pointer_field({}).set_capability(value.client.hook);", offset))); (Some(format!("{}::Client",the_mod)), None) } Ok(type_::AnyPointer(_)) => { initter_interior.push(Line(format!("let mut result = ::capnp::any_pointer::Builder::new(self.builder.get_pointer_field({}));", offset))); initter_interior.push(Line("result.clear();".to_string())); initter_interior.push(Line("result".to_string())); (None, Some("::capnp::any_pointer::Builder<'a>".to_string())) } Err(_) => { panic!("unrecognized type") } } } }; let mut result = Vec::new(); match maybe_reader_type { Some(reader_type) => { let return_type = if return_result { "-> Result<()>" } else { "" }; result.push(Line("#[inline]".to_string())); result.push(Line(format!("pub fn set_{}{}(&mut self, {} : {}) {} {{", styled_name, 
setter_lifetime_param, setter_param, reader_type, return_type))); result.push(Indent(Box::new(Branch(setter_interior)))); result.push(Line("}".to_string())); } None => {} } match maybe_builder_type { Some(builder_type) => { result.push(Line("#[inline]".to_string())); let args = initter_params.connect(", "); result.push(Line(format!("pub fn init_{}(self, {}) -> {} {{", styled_name, args, builder_type))); result.push(Indent(Box::new(Branch(initter_interior)))); result.push(Line("}".to_string())); } None => {} } return Branch(result); } // return (the 'Which' enum, the 'which()' accessor, typedef) fn generate_union(node_map : &collections::hash_map::HashMap<u64, schema_capnp::node::Reader>, scope_map : &collections::hash_map::HashMap<u64, Vec<String>>, discriminant_offset : u32, fields : &[schema_capnp::field::Reader], is_reader : bool) -> (FormattedText, FormattedText, FormattedText) { use schema_capnp::*; fn new_ty_param(ty_params : &mut Vec<String>) -> String { let result = format!("A{}", ty_params.len()); ty_params.push(result.clone()); result } let mut getter_interior = Vec::new(); let mut interior = Vec::new(); let mut enum_interior = Vec::new(); let mut ty_params = Vec::new(); let mut ty_args = Vec::new(); let doffset = discriminant_offset as usize; for field in fields.iter() { let dvalue = field.get_discriminant_value() as usize; let field_name = field.get_name().unwrap(); let enumerant_name = capitalize_first_letter(field_name); let (ty, get) = getter_text(node_map, scope_map, field, is_reader); getter_interior.push(Branch(vec!( Line(format!("{} => {{", dvalue)), Indent(Box::new(Line(format!("return ::std::result::Result::Ok({}(", enumerant_name.clone())))), Indent(Box::new(Indent(Box::new(get)))), Indent(Box::new(Line("));".to_string()))), Line("}".to_string()) ))); let ty1 = match field.which() { Ok(field::Group(_)) => { ty_args.push(ty); new_ty_param(&mut ty_params) } Ok(field::Slot(reg_field)) => { match reg_field.get_type().unwrap().which() { 
Ok(type_::Text(())) | Ok(type_::Data(())) | Ok(type_::List(_)) | Ok(type_::Struct(_)) | Ok(type_::AnyPointer(_)) => { ty_args.push(ty); new_ty_param(&mut ty_params) } Ok(type_::Interface(_)) => { ty } _ => ty } } _ => ty }; enum_interior.push(Line(format!("{}({}),", enumerant_name, ty1))); } let enum_name = format!("Which{}", if ty_params.len() > 0 { format!("<{}>", ty_params.connect(",")) } else {"".to_string()} ); getter_interior.push(Line("x => return ::std::result::Result::Err(::capnp::NotInSchema(x))".to_string())); interior.push( Branch(vec!(Line(format!("pub enum {} {{", enum_name)), Indent(Box::new(Branch(enum_interior))), Line("}".to_string())))); let result = Branch(interior); let field_name = if is_reader { "reader" } else { "builder" }; let concrete_type = format!("Which{}{}", if is_reader {"Reader"} else {"Builder"}, if ty_params.len() > 0 { "<'a>" } else {""}); let typedef = Line(format!("pub type {} = Which{};", concrete_type, if ty_args.len() > 0 {format!("<{}>", ty_args.connect(","))} else {"".to_string()})); let getter_result = Branch(vec!(Line("#[inline]".to_string()), Line(format!("pub fn which(self) -> ::std::result::Result<{}, ::capnp::NotInSchema> {{", concrete_type)), Indent(Box::new(Branch(vec!( Line(format!("match self.{}.get_data_field::<u16>({}) {{", field_name, doffset)), Indent(Box::new(Branch(getter_interior))), Line("}".to_string()))))), Line("}".to_string()))); // TODO set_which() for builders? 
return (result, getter_result, typedef); } fn generate_haser(discriminant_offset : u32, styled_name : &str, field :&schema_capnp::field::Reader, is_reader : bool) -> FormattedText { use schema_capnp::*; let mut result = Vec::new(); let mut interior = Vec::new(); let member = if is_reader { "reader" } else { "builder" }; let discriminant_value = field.get_discriminant_value(); if discriminant_value != field::NO_DISCRIMINANT { interior.push( Line(format!("if self.{}.get_data_field::<u16>({}) != {} {{ return false; }}", member, discriminant_offset as usize, discriminant_value as usize))); } match field.which() { Err(_) | Ok(field::Group(_)) => {}, Ok(field::Slot(reg_field)) => { match reg_field.get_type().unwrap().which() { Ok(type_::Text(())) | Ok(type_::Data(())) | Ok(type_::List(_)) | Ok(type_::Struct(_)) | Ok(type_::AnyPointer(_)) => { interior.push( Line(format!("!self.{}.get_pointer_field({}).is_null()", member, reg_field.get_offset()))); result.push( Line(format!("pub fn has_{}(&self) -> bool {{", styled_name))); result.push( Indent(Box::new(Branch(interior)))); result.push(Line("}".to_string())); } _ => {} } } } Branch(result) } fn generate_pipeline_getter(_node_map : &collections::hash_map::HashMap<u64, schema_capnp::node::Reader>, scope_map : &collections::hash_map::HashMap<u64, Vec<String>>, field : schema_capnp::field::Reader) -> FormattedText { use schema_capnp::{field, type_}; let name = field.get_name().unwrap(); match field.which() { Err(_) => panic!("unrecognized field type"), Ok(field::Group(group)) => { let the_mod = scope_map[&group.get_type_id()].connect("::");<|fim▁hole|> camel_to_snake_case(name), the_mod)), Indent( Box::new(Line("FromTypelessPipeline::new(self._typeless.noop())".to_string()))), Line("}".to_string()))); } Ok(field::Slot(reg_field)) => { match reg_field.get_type().unwrap().which() { Err(_) => panic!("unrecognized type"), Ok(type_::Struct(st)) => { let the_mod = scope_map[&st.get_type_id()].connect("::"); return Branch(vec!( 
Line(format!("pub fn get_{}(&self) -> {}::Pipeline {{", camel_to_snake_case(name), the_mod)), Indent(Box::new(Line( format!("FromTypelessPipeline::new(self._typeless.get_pointer_field({}))", reg_field.get_offset())))), Line("}".to_string()))); } Ok(type_::Interface(interface)) => { let the_mod = scope_map[&interface.get_type_id()].connect("::"); return Branch(vec!( Line(format!("pub fn get_{}(&self) -> {}::Client {{", camel_to_snake_case(name), the_mod)), Indent(Box::new(Line( format!("FromClientHook::new(self._typeless.get_pointer_field({}).as_cap())", reg_field.get_offset())))), Line("}".to_string()))); } _ => { return Branch(Vec::new()); } } } } } fn generate_node(node_map : &collections::hash_map::HashMap<u64, schema_capnp::node::Reader>, scope_map : &collections::hash_map::HashMap<u64, Vec<String>>, node_id : u64, node_name: &str) -> FormattedText { use schema_capnp::*; let mut output: Vec<FormattedText> = Vec::new(); let mut nested_output: Vec<FormattedText> = Vec::new(); let node_reader = &node_map[&node_id]; let nested_nodes = node_reader.get_nested_nodes().unwrap(); for nested_node in nested_nodes.iter() { let id = nested_node.get_id(); nested_output.push(generate_node(node_map, scope_map, id, &scope_map[&id].last().unwrap())); } match node_reader.which() { Ok(node::File(())) => { output.push(Branch(nested_output)); } Ok(node::Struct(struct_reader)) => { output.push(BlankLine); output.push(Line(format!("pub mod {} {{", node_name))); let mut preamble = Vec::new(); let mut builder_members = Vec::new(); let mut reader_members = Vec::new(); let mut union_fields = Vec::new(); let mut which_enums = Vec::new(); let mut pipeline_impl_interior = Vec::new(); let mut private_mod_interior = Vec::new(); let data_size = struct_reader.get_data_word_count(); let pointer_size = struct_reader.get_pointer_count(); let is_group = struct_reader.get_is_group(); let discriminant_count = struct_reader.get_discriminant_count(); let discriminant_offset = 
struct_reader.get_discriminant_offset(); preamble.push(generate_import_statements()); preamble.push(BlankLine); let fields = struct_reader.get_fields().unwrap(); for field in fields.iter() { let name = field.get_name().unwrap(); let styled_name = camel_to_snake_case(name); let discriminant_value = field.get_discriminant_value(); let is_union_field = discriminant_value != field::NO_DISCRIMINANT; if !is_union_field { pipeline_impl_interior.push(generate_pipeline_getter(node_map, scope_map, field)); let (ty, get) = getter_text(node_map, scope_map, &field, true); reader_members.push( Branch(vec!( Line("#[inline]".to_string()), Line(format!("pub fn get_{}(self) -> {} {{", styled_name, ty)), Indent(Box::new(get)), Line("}".to_string())))); let (ty_b, get_b) = getter_text(node_map, scope_map, &field, false); builder_members.push( Branch(vec!( Line("#[inline]".to_string()), Line(format!("pub fn get_{}(self) -> {} {{", styled_name, ty_b)), Indent(Box::new(get_b)), Line("}".to_string())))); } else { union_fields.push(field); } builder_members.push(generate_setter(node_map, scope_map, discriminant_offset, &styled_name, &field)); reader_members.push(generate_haser(discriminant_offset, &styled_name, &field, true)); builder_members.push(generate_haser(discriminant_offset, &styled_name, &field, false)); match field.which() { Ok(field::Group(group)) => { let id = group.get_type_id(); let text = generate_node(node_map, scope_map, id, &scope_map[&id].last().unwrap()); nested_output.push(text); } _ => { } } } if discriminant_count > 0 { let (which_enums1, union_getter, typedef) = generate_union(node_map, scope_map, discriminant_offset, &union_fields, true); which_enums.push(which_enums1); which_enums.push(typedef); reader_members.push(union_getter); let (_, union_getter, typedef) = generate_union(node_map, scope_map, discriminant_offset, &union_fields, false); which_enums.push(typedef); builder_members.push(union_getter); let mut reexports = String::new(); reexports.push_str("pub use 
self::Which::{"); let whichs : Vec<String> = union_fields.iter().map(|f| {capitalize_first_letter(f.get_name().unwrap())}).collect(); reexports.push_str(&whichs.connect(",")); reexports.push_str("};"); preamble.push(Line(reexports)); preamble.push(BlankLine); } let builder_struct_size = if is_group { Branch(Vec::new()) } else { Branch(vec!( Line("impl <'a> ::capnp::traits::HasStructSize for Builder<'a> {".to_string()), Indent(Box::new( Branch(vec!(Line("#[inline]".to_string()), Line("fn struct_size() -> layout::StructSize { _private::STRUCT_SIZE }".to_string()))))), Line("}".to_string()))) }; if !is_group { private_mod_interior.push( Line( "use capnp::private::layout;".to_string())); private_mod_interior.push( Line( format!("pub const STRUCT_SIZE : layout::StructSize = layout::StructSize {{ data : {}, pointers : {} }};", data_size as usize, pointer_size as usize))); } private_mod_interior.push( Line( format!("pub const TYPE_ID: u64 = {:#x};", node_id))); let from_pointer_builder_impl = if is_group { Branch(Vec::new()) } else { Branch(vec![ Line("impl <'a> ::capnp::traits::FromPointerBuilder<'a> for Builder<'a> {".to_string()), Indent( Box::new( Branch(vec!( Line("fn init_pointer(builder: ::capnp::private::layout::PointerBuilder<'a>, _size : u32) -> Builder<'a> {".to_string()), Indent(Box::new(Line("::capnp::traits::FromStructBuilder::new(builder.init_struct(_private::STRUCT_SIZE))".to_string()))), Line("}".to_string()), Line("fn get_from_pointer(builder: ::capnp::private::layout::PointerBuilder<'a>) -> Result<Builder<'a>> {".to_string()), Indent(Box::new(Line("::std::result::Result::Ok(::capnp::traits::FromStructBuilder::new(try!(builder.get_struct(_private::STRUCT_SIZE, ::std::ptr::null()))))".to_string()))), Line("}".to_string()))))), Line("}".to_string()), BlankLine]) }; let accessors = vec!( Branch(preamble), Line("#[derive(Clone, Copy)]".to_string()), Line("pub struct Reader<'a> { reader : layout::StructReader<'a> }".to_string()), BlankLine, Branch(vec!( 
Line("impl <'a> ::capnp::traits::HasTypeId for Reader<'a> {".to_string()), Indent(Box::new(Branch(vec!(Line("#[inline]".to_string()), Line("fn type_id() -> u64 { _private::TYPE_ID }".to_string()))))), Line("}".to_string()))), Line("impl <'a> ::capnp::traits::FromStructReader<'a> for Reader<'a> {".to_string()), Indent( Box::new(Branch(vec!( Line("fn new(reader: ::capnp::private::layout::StructReader<'a>) -> Reader<'a> {".to_string()), Indent(Box::new(Line("Reader { reader : reader }".to_string()))), Line("}".to_string()))))), Line("}".to_string()), BlankLine, Line("impl <'a> ::capnp::traits::FromPointerReader<'a> for Reader<'a> {".to_string()), Indent( Box::new(Branch(vec!( Line("fn get_from_pointer(reader: &::capnp::private::layout::PointerReader<'a>) -> Result<Reader<'a>> {".to_string()), Indent(Box::new(Line("::std::result::Result::Ok(::capnp::traits::FromStructReader::new(try!(reader.get_struct(::std::ptr::null()))))".to_string()))), Line("}".to_string()))))), Line("}".to_string()), BlankLine, Line("impl <'a, 'b : 'a> ::capnp::traits::CastableTo<Reader<'a>> for Reader<'b> {".to_string()), Indent( Box::new(Line("fn cast(self) -> Reader<'a> { Reader { reader : self.reader } }".to_string()))), Line("}".to_string()), BlankLine, Line("impl <'a> Reader<'a> {".to_string()), Indent( Box::new(Branch(vec![ Line("pub fn borrow<'b>(&'b self) -> Reader<'b> {".to_string()), Indent(Box::new(Line("Reader { reader : self.reader}".to_string()))), Line("}".to_string()), BlankLine, Line("pub fn total_size(&self) -> Result<::capnp::MessageSize> {".to_string()), Indent(Box::new(Line("self.reader.total_size()".to_string()))), Line("}".to_string())]))), Indent(Box::new(Branch(reader_members))), Line("}".to_string()), BlankLine, Line("pub struct Builder<'a> { builder : ::capnp::private::layout::StructBuilder<'a> }".to_string()), builder_struct_size, Branch(vec!( Line("impl <'a> ::capnp::traits::HasTypeId for Builder<'a> {".to_string()), 
Indent(Box::new(Branch(vec!(Line("#[inline]".to_string()), Line("fn type_id() -> u64 { _private::TYPE_ID }".to_string()))))), Line("}".to_string()))), Line("impl <'a> ::capnp::traits::FromStructBuilder<'a> for Builder<'a> {".to_string()), Indent( Box::new(Branch(vec!( Line("fn new(builder : ::capnp::private::layout::StructBuilder<'a>) -> Builder<'a> {".to_string()), Indent(Box::new(Line("Builder { builder : builder }".to_string()))), Line("}".to_string()))))), Line("}".to_string()), BlankLine, from_pointer_builder_impl, Line("impl <'a> ::capnp::traits::SetPointerBuilder<Builder<'a>> for Reader<'a> {".to_string()), Indent(Box::new(Line("fn set_pointer_builder<'b>(pointer : ::capnp::private::layout::PointerBuilder<'b>, value : Reader<'a>) -> Result<()> { pointer.set_struct(&value.reader) }".to_string()))), Line("}".to_string()), BlankLine, Line("impl <'a, 'b : 'a> ::capnp::traits::CastableTo<Builder<'a>> for Builder<'b> {".to_string()), Indent(Box::new(Line("fn cast(self) -> Builder<'a> { Builder { builder : self.builder } }".to_string()))), Line("}".to_string()), BlankLine, Line("impl <'a> Builder<'a> {".to_string()), Indent( Box::new(Branch(vec![ Line("pub fn as_reader(self) -> Reader<'a> {".to_string()), Indent(Box::new(Line("::capnp::traits::FromStructReader::new(self.builder.as_reader())".to_string()))), Line("}".to_string()), Line("pub fn borrow<'b>(&'b mut self) -> Builder<'b> {".to_string()), Indent(Box::new(Line("Builder { builder : self.builder}".to_string()))), Line("}".to_string()), BlankLine, Line("pub fn total_size(&self) -> Result<::capnp::MessageSize> {".to_string()), Indent(Box::new(Line("self.builder.as_reader().total_size()".to_string()))), Line("}".to_string()) ]))), Indent(Box::new(Branch(builder_members))), Line("}".to_string()), BlankLine, Line("pub struct Pipeline { _typeless : ::capnp::any_pointer::Pipeline }".to_string()), Line("impl FromTypelessPipeline for Pipeline {".to_string()), Indent( Box::new(Branch(vec!( Line("fn new(typeless : 
::capnp::any_pointer::Pipeline) -> Pipeline {".to_string()), Indent(Box::new(Line("Pipeline { _typeless : typeless }".to_string()))), Line("}".to_string()))))), Line("}".to_string()), Line("impl Pipeline {".to_string()), Indent(Box::new(Branch(pipeline_impl_interior))), Line("}".to_string()), Line("mod _private {".to_string()), Indent(Box::new(Branch(private_mod_interior))), Line("}".to_string()), ); output.push(Indent(Box::new(Branch(vec!(Branch(accessors), Branch(which_enums), Branch(nested_output)))))); output.push(Line("}".to_string())); } Ok(node::Enum(enum_reader)) => { let names = &scope_map[&node_id]; output.push(BlankLine); let mut members = Vec::new(); let mut match_branches = Vec::new(); let enumerants = enum_reader.get_enumerants().unwrap(); for ii in 0..enumerants.len() { let enumerant = capitalize_first_letter(enumerants.get(ii).get_name().unwrap()); members.push(Line(format!("{} = {},", enumerant, ii))); match_branches.push(Line(format!("{} => ::std::result::Result::Ok({}::{}),", ii, *names.last().unwrap(), enumerant))); } match_branches.push(Line("n => ::std::result::Result::Err(::capnp::NotInSchema(n)),".to_string())); output.push(Branch(vec!( Line("#[repr(u16)]".to_string()), Line("#[derive(Clone, Copy, PartialEq)]".to_string()), Line(format!("pub enum {} {{", *names.last().unwrap())), Indent(Box::new(Branch(members))), Line("}".to_string())))); output.push( Branch(vec!( Line(format!("impl ::capnp::traits::FromU16 for {} {{", *names.last().unwrap())), Indent(Box::new(Line("#[inline]".to_string()))), Indent( Box::new(Branch(vec![ Line(format!( "fn from_u16(value : u16) -> ::std::result::Result<{}, ::capnp::NotInSchema> {{", *names.last().unwrap())), Indent( Box::new(Branch(vec![ Line("match value {".to_string()), Indent(Box::new(Branch(match_branches))), Line("}".to_string()) ]))), Line("}".to_string())]))), Line("}".to_string()), Line(format!("impl ::capnp::traits::ToU16 for {} {{", *names.last().unwrap())), 
Indent(Box::new(Line("#[inline]".to_string()))), Indent( Box::new(Line("fn to_u16(self) -> u16 { self as u16 }".to_string()))), Line("}".to_string())))); output.push( Branch(vec!( Line(format!("impl ::capnp::traits::HasTypeId for {} {{", *names.last().unwrap())), Indent(Box::new(Line("#[inline]".to_string()))), Indent( Box::new(Line(format!("fn type_id() -> u64 {{ {:#x}u64 }}", node_id).to_string()))), Line("}".to_string())))); } Ok(node::Interface(interface)) => { let names = &scope_map[&node_id]; let mut client_impl_interior = Vec::new(); let mut server_interior = Vec::new(); let mut mod_interior = Vec::new(); let mut dispatch_arms = Vec::new(); let mut private_mod_interior = Vec::new(); private_mod_interior.push(Line(format!("pub const TYPE_ID: u64 = {:#x};", node_id))); mod_interior.push(Line ("#![allow(unused_variables)]".to_string())); mod_interior.push(Line("#![allow(unused_imports)]".to_string())); mod_interior.push( Line("use capnp::capability::{FromClientHook, Request, FromServer};".to_string())); mod_interior.push( Line("use capnp::private::capability::{ClientHook, ServerHook};".to_string())); mod_interior.push(Line("use capnp::capability;".to_string())); mod_interior.push(BlankLine); let methods = interface.get_methods().unwrap(); for ordinal in 0..methods.len() { let method = methods.get(ordinal); let name = method.get_name().unwrap(); method.get_code_order(); let params_id = method.get_param_struct_type(); let params_node = &node_map[&params_id]; let params_name = if params_node.get_scope_id() == 0 { let params_name = module_name(&format!("{}Params", name)); nested_output.push(generate_node(node_map, scope_map, params_id, &params_name)); params_name } else { scope_map[&params_node.get_id()].connect("::") }; let results_id = method.get_result_struct_type(); let results_node = node_map[&results_id]; let results_name = if results_node.get_scope_id() == 0 { let results_name = module_name(&format!("{}Results", name)); 
nested_output.push(generate_node(node_map, scope_map, results_id, &results_name)); results_name } else { scope_map[&results_node.get_id()].connect("::") }; dispatch_arms.push( Line(format!( "{} => server.{}(::capnp::private::capability::internal_get_typed_context(context)),", ordinal, camel_to_snake_case(name)))); mod_interior.push( Line(format!( "pub type {}Context<'a> = capability::CallContext<{}::Reader<'a>, {}::Builder<'a>>;", capitalize_first_letter(name), params_name, results_name))); server_interior.push( Line(format!( "fn {}<'a>(&mut self, {}Context<'a>);", camel_to_snake_case(name), capitalize_first_letter(name) ))); client_impl_interior.push( Line(format!("pub fn {}_request<'a>(&self) -> Request<{}::Builder<'a>,{}::Reader<'a>,{}::Pipeline> {{", camel_to_snake_case(name), params_name, results_name, results_name))); client_impl_interior.push(Indent( Box::new(Line(format!("self.client.new_call(_private::TYPE_ID, {}, None)", ordinal))))); client_impl_interior.push(Line("}".to_string())); method.get_annotations().unwrap(); } let mut base_dispatch_arms = Vec::new(); let server_base = { let mut base_traits = Vec::new(); let extends = interface.get_superclasses().unwrap(); for ii in 0..extends.len() { let base_id = extends.get(ii).get_id(); let the_mod = scope_map[&base_id].connect("::"); base_dispatch_arms.push( Line(format!( "0x{:x} => {}::ServerDispatch::<T>::dispatch_call_internal(&mut *self.server, method_id, context),", base_id, the_mod))); base_traits.push(format!("{}::Server", the_mod)); } if extends.len() > 0 { format!(": {}", base_traits.connect(" + ")) } else { "".to_string() } }; mod_interior.push(BlankLine); mod_interior.push(Line("pub struct Client{ pub client : ::capnp::private::capability::Client }".to_string())); mod_interior.push( Branch(vec!( Line("impl FromClientHook for Client {".to_string()), Indent(Box::new(Line("fn new(hook : Box<ClientHook+Send>) -> Client {".to_string()))), Indent(Box::new(Indent(Box::new(Line("Client { client : 
::capnp::private::capability::Client::new(hook) }".to_string()))))), Indent(Box::new(Line("}".to_string()))), Line("}".to_string())))); mod_interior.push( Branch(vec!( Line("pub struct ToClient<U>(pub U);".to_string()), Line("impl <T:ServerHook, U : Server + Send + 'static> FromServer<T, Client> for ToClient<U> {".to_string()), Indent(Box::new(Branch( vec!( Line("fn from_server(self, _hook : Option<T>) -> Client {".to_string()), Indent( Box::new(Line("Client { client : ServerHook::new_client(None::<T>, ::std::boxed::Box::new(ServerDispatch { server : ::std::boxed::Box::new(self.0)}))}".to_string()))), Line("}".to_string()))))), Line("}".to_string())))); mod_interior.push( Branch(vec!( Line("impl ::capnp::traits::HasTypeId for Client {".to_string()), Indent(Box::new(Line("#[inline]".to_string()))), Indent(Box::new(Line("fn type_id() -> u64 { _private::TYPE_ID }".to_string()))), Line("}".to_string())))); mod_interior.push( Branch(vec!( Line("impl Clone for Client {".to_string()), Indent(Box::new(Line("fn clone(&self) -> Client {".to_string()))), Indent(Box::new(Indent(Box::new(Line("Client { client : ::capnp::private::capability::Client::new(self.client.hook.copy()) }".to_string()))))), Indent(Box::new(Line("}".to_string()))), Line("}".to_string())))); mod_interior.push( Branch(vec!(Line("impl Client {".to_string()), Indent(Box::new(Branch(client_impl_interior))), Line("}".to_string())))); mod_interior.push(Branch(vec!(Line(format!("pub trait Server {} {{", server_base)), Indent(Box::new(Branch(server_interior))), Line("}".to_string())))); mod_interior.push(Branch(vec!(Line("pub struct ServerDispatch<T> {".to_string()), Indent(Box::new(Line("pub server : Box<T>,".to_string()))), Line("}".to_string())))); mod_interior.push( Branch(vec!( Line("impl <T : Server> ::capnp::capability::Server for ServerDispatch<T> {".to_string()), Indent(Box::new(Line("fn dispatch_call(&mut self, interface_id : u64, method_id : u16, context : 
capability::CallContext<::capnp::any_pointer::Reader, ::capnp::any_pointer::Builder>) {".to_string()))), Indent(Box::new(Indent(Box::new(Line("match interface_id {".to_string()))))), Indent(Box::new(Indent(Box::new(Indent( Box::new(Line("_private::TYPE_ID => ServerDispatch::<T>::dispatch_call_internal(&mut *self.server, method_id, context),".to_string()))))))), Indent(Box::new(Indent(Box::new(Indent(Box::new(Branch(base_dispatch_arms))))))), Indent(Box::new(Indent(Box::new(Indent(Box::new(Line("_ => {}".to_string()))))))), Indent(Box::new(Indent(Box::new(Line("}".to_string()))))), Indent(Box::new(Line("}".to_string()))), Line("}".to_string())))); mod_interior.push( Branch(vec!( Line("impl <T : Server> ServerDispatch<T> {".to_string()), Indent(Box::new(Line("pub fn dispatch_call_internal(server :&mut T, method_id : u16, context : capability::CallContext<::capnp::any_pointer::Reader, ::capnp::any_pointer::Builder>) {".to_string()))), Indent(Box::new(Indent(Box::new(Line("match method_id {".to_string()))))), Indent(Box::new(Indent(Box::new(Indent(Box::new(Branch(dispatch_arms))))))), Indent(Box::new(Indent(Box::new(Indent(Box::new(Line("_ => {}".to_string()))))))), Indent(Box::new(Indent(Box::new(Line("}".to_string()))))), Indent(Box::new(Line("}".to_string()))), Line("}".to_string())))); mod_interior.push( Branch(vec!( Line("pub mod _private {".to_string()), Indent(Box::new(Branch(private_mod_interior))), Line("}".to_string()), ))); mod_interior.push(Branch(vec!(Branch(nested_output)))); output.push(BlankLine); output.push(Line(format!("pub mod {} {{", *names.last().unwrap()))); output.push(Indent(Box::new(Branch(mod_interior)))); output.push(Line("}".to_string())); } Ok(node::Const(c)) => { let names = &scope_map[&node_id]; let styled_name = snake_to_upper_case(&names.last().unwrap()); let (typ, txt) = match tuple_result(c.get_type().unwrap().which(), c.get_value().unwrap().which()) { Ok((type_::Void(()), value::Void(()))) => ("()".to_string(), "()".to_string()), 
Ok((type_::Bool(()), value::Bool(b))) => ("bool".to_string(), b.to_string()), Ok((type_::Int8(()), value::Int8(i))) => ("i8".to_string(), i.to_string()), Ok((type_::Int16(()), value::Int16(i))) => ("i16".to_string(), i.to_string()), Ok((type_::Int32(()), value::Int32(i))) => ("i32".to_string(), i.to_string()), Ok((type_::Int64(()), value::Int64(i))) => ("i64".to_string(), i.to_string()), Ok((type_::Uint8(()), value::Uint8(i))) => ("u8".to_string(), i.to_string()), Ok((type_::Uint16(()), value::Uint16(i))) => ("u16".to_string(), i.to_string()), Ok((type_::Uint32(()), value::Uint32(i))) => ("u32".to_string(), i.to_string()), Ok((type_::Uint64(()), value::Uint64(i))) => ("u64".to_string(), i.to_string()), // float string formatting appears to be a bit broken currently, in Rust. Ok((type_::Float32(()), value::Float32(f))) => ("f32".to_string(), format!("{}f32", f.to_string())), Ok((type_::Float64(()), value::Float64(f))) => ("f64".to_string(), format!("{}f64", f.to_string())), Ok((type_::Text(()), value::Text(_t))) => { panic!() } Ok((type_::Data(()), value::Data(_d))) => { panic!() } Ok((type_::List(_t), value::List(_p))) => { panic!() } Ok((type_::Struct(_t), value::Struct(_p))) => { panic!() } Ok((type_::Interface(_t), value::Interface(()))) => { panic!() } Ok((type_::AnyPointer(_), value::AnyPointer(_pr))) => { panic!() } Err(_) => { panic!("unrecognized type") } _ => { panic!("type does not match value") } }; output.push( Line(format!("pub const {} : {} = {};", styled_name, typ, txt))); } Ok(node::Annotation( annotation_reader )) => { println!(" annotation node:"); if annotation_reader.get_targets_file() { println!(" targets file"); } if annotation_reader.get_targets_const() { println!(" targets const"); } // ... if annotation_reader.get_targets_annotation() { println!(" targets annotation"); } } Err(_) => () } Branch(output) } pub fn main<T : ::std::io::Read>(mut inp : T, out_dir : &::std::path::Path) -> ::capnp::Result<()> { //! 
Generate Rust code according to a `schema_capnp::code_generator_request` read from `inp`. use capnp::serialize; use std::borrow::ToOwned; use std::io::Write; let message = try!(serialize::read_message(&mut inp, capnp::message::ReaderOptions::new())); let request : schema_capnp::code_generator_request::Reader = try!(message.get_root()); let mut node_map = collections::hash_map::HashMap::<u64, schema_capnp::node::Reader>::new(); let mut scope_map = collections::hash_map::HashMap::<u64, Vec<String>>::new(); for node in try!(request.get_nodes()).iter() { node_map.insert(node.get_id(), node); } for requested_file in try!(request.get_requested_files()).iter() { let id = requested_file.get_id(); let mut filepath = out_dir.to_path_buf(); filepath.push(try!(requested_file.get_filename())); let imports = try!(requested_file.get_imports()); for import in imports.iter() { let importpath = ::std::path::Path::new(try!(import.get_name())); let root_name : String = format!("::{}_capnp", importpath.file_stem().unwrap().to_owned() .into_string().unwrap().replace("-", "_")); populate_scope_map(&node_map, &mut scope_map, vec!(root_name), import.get_id()); } let root_name : String = format!("{}_capnp", filepath.file_stem().unwrap().to_owned(). into_string().unwrap().replace("-", "_")); filepath.set_file_name(&format!("{}.rs", root_name)); let root_mod = format!("::{}", root_name); populate_scope_map(&node_map, &mut scope_map, vec!(root_mod), id); let lines = Branch(vec!( Line("// Generated by the capnpc-rust plugin to the Cap'n Proto schema compiler.".to_string()), Line("// DO NOT EDIT.".to_string()), Line(format!("// source: {}", try!(requested_file.get_filename()))), BlankLine, generate_node(&node_map, &scope_map, id, &root_name))); let text = stringify(&lines); // It would be simpler to use try! instead of a pattern match, but then the error message // would not include `filepath`. 
match ::std::fs::File::create(&filepath) { Ok(ref mut writer) => { try!(writer.write_all(text.as_bytes())); } Err(e) => { let _ = writeln!(&mut ::std::io::stderr(), "could not open file {:?} for writing: {}", filepath, e); return Err(::capnp::Error::Io(e)); } } } Ok(()) }<|fim▁end|>
return Branch(vec!(Line(format!("pub fn get_{}(&self) -> {}::Pipeline {{",
<|file_name|>index.test.js<|end_file_name|><|fim▁begin|>'use strict'; const assert = require('assert'); const app = require('../../../src/app'); describe('authority service', function() {<|fim▁hole|> it('registered the authorities service', () => { assert.ok(app.service('authorities')); }); });<|fim▁end|>
<|file_name|>test_resampling.py<|end_file_name|><|fim▁begin|>import unittest import numpy as np from numpy.testing import assert_array_equal from dgw.evaluation.resampling import extend_point, shrink_to_a_single_point class TestExtending(unittest.TestCase): def test_extend_point(self): a = np.array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) ans = np.array([0, 1, 2, 3, 4, 4, 4, 5, 6, 7, 8, 9, 10]) assert_array_equal(ans, extend_point(a, 4, 3)) # multi-dim a = np.array([[1, 2], [2, 3], [3, 4]]) ans = np.array([[1, 2], [2, 3], [2, 3], [3, 4]]) assert_array_equal(ans, extend_point(a, 1, 2)) def test_extend_point_left_boundary(self): a = np.array([0, 1, 2, 3, 4, 5, 6, 7, 8]) ans = np.array([0, 0, 0, 0, 1, 2, 3, 4, 5, 6, 7, 8]) assert_array_equal(ans, extend_point(a, 0, 4)) def test_extend_point_right_boundary(self): a = np.array([0, 1, 2, 3, 4, 5, 6, 7, 8]) ans = np.array([0, 1, 2, 3, 4, 5, 6, 7, 8, 8, 8, 8]) assert_array_equal(ans, extend_point(a, 8, 4)) def test_shrink_to_single_point(self): a = np.array([0, 1, 2, 3, 3, 3, 3, 4, 5, 6]) ans = np.array([0, 1, 2, 3, 4, 5, 6]) <|fim▁hole|> b = np.array([0, 1, 2, 3, 8, 9, 10, 4, 5, 6]) ans = np.array([0, 1, 2, np.mean([3, 8, 9, 10]), 4, 5, 6]) assert_array_equal(ans, shrink_to_a_single_point(b, 3, 4)) # multi-dim a = np.array([[1, 2], [2, 3], [2, 3], [3, 4]]) ans = np.array([[1, 2], [2, 3], [3, 4]]) assert_array_equal(ans, shrink_to_a_single_point(a, 1, 2)) def test_shrink_to_single_point_boundary(self): a = np.array([0, 1, 2, 3, 4, 5, 6, 6]) ans = np.array([0, 1, 2, 3, 4, 5, 6]) # Ignore points that go out of bound assert_array_equal(ans, shrink_to_a_single_point(a, 6, 4))<|fim▁end|>
assert_array_equal(ans, shrink_to_a_single_point(a, 3, 4))
<|file_name|>actionendpoint.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # Copyright 2015 Comcast Cable Communications Management, LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # End Copyright # An example action endpoint for rules with language="POST". This # example is NOT an action executor. Instead, it's just an endpoint # in the role of any external system that deals directly with JSON # bodies. # curl -d '{"likes":"tacos"}' http://localhost:6667/ from BaseHTTPServer import BaseHTTPRequestHandler,HTTPServer # import json PORT = 6667 def protest (response, message): response.send_response(200) response.send_header('Content-type','application/json') response.end_headers() response.wfile.write(message) class handler(BaseHTTPRequestHandler): def do_GET(self): protest(self, "You should POST with json.\n") return def do_POST(self): try: content_len = int(self.headers.getheader('content-length')) body = self.rfile.read(content_len) print 'body ', body self.send_response(200)<|fim▁hole|> self.send_header('Content-type','application/json') self.end_headers() response = '{"Got":%s}' % (body) self.wfile.write(response) except Exception as broke: protest(self, str(broke)) try: server = HTTPServer(('', PORT), handler) print 'Started example action endpoint on port ' , PORT server.serve_forever() except KeyboardInterrupt: print '^C received, shutting down example action endpoint on ', PORT server.socket.close()<|fim▁end|>
<|file_name|>PublicMementoControl.java<|end_file_name|><|fim▁begin|>package controllers; import play.mvc.*; public class PublicMementoControl extends Controller { public static Result getRandomMemento(String lang) { /** @TODO */ return TODO; } public static Result getMemento(Long mid) { /** @TODO */ return TODO;<|fim▁hole|> public static Result getMementoList(Long decade, String place, String lat, String lon, String rad, String lang) { /** @TODO */ return TODO; } }<|fim▁end|>
}
<|file_name|>error.rs<|end_file_name|><|fim▁begin|>use std::fmt; use std::error::Error; use super::Rule; #[derive(Debug, PartialEq, Clone)] pub struct ParseError { pub line: usize, pub col: usize, pub expected: Vec<Rule>, } <|fim▁hole|>impl fmt::Display for ParseError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if self.expected.is_empty() { write!(f, "no token expected at line {} col {}", self.line, self.col) } else { write!(f, "expected token(s): {} at line {} col {}", self.expected.iter().map(|r| format!("{}", r)).collect::<Vec<String>>().join(", "), self.line, self.col) } } } impl Error for ParseError { fn description(&self) -> &str { if self.expected.is_empty() { "no tokens expected" } else { "expected tokens which were not found" } } }<|fim▁end|>
<|file_name|>protractor.conf.js<|end_file_name|><|fim▁begin|>var path = require('path'), HtmlReporter = require('protractor-html-screenshot-reporter'); exports.config = { chromeDriver: 'node_modules/chromedriver/bin/chromedriver', // seleniumAddress: 'http://localhost:4444/wd/hub', // Boolean. If true, Protractor will connect directly to the browser Drivers // at the locations specified by chromeDriver and firefoxPath. Only Chrome // and Firefox are supported for direct connect. directConnect: true, // Use existing selenium local/remote // seleniumAddress: http://localhost:4444/wd/hub // When run without a command line parameter, all suites will run. If run // with --suite=login only the patterns matched by the specified suites will // run. // @todo specs: ['specs/aui-login.js'], // The timeout in milliseconds for each script run on the browser. This should // be longer than the maximum time your application needs to stabilize between // tasks. allScriptsTimeout: 20000, baseUrl: 'http://localhost:9010', multiCapabilities: [{ 'browserName': 'chrome' }, { 'browserName': 'firefox' }], onPrepare: function() { // Add a screenshot reporter and store screenshots to `result/screnshots`: jasmine.getEnv().addReporter(new HtmlReporter({ baseDirectory: './result/screenshots', takeScreenShotsOnlyForFailedSpecs: true, preserveDirectory: true, docTitle: 'E2E Result', docName: 'index.html', pathBuilder: function pathBuilder(spec, descriptions, results, capabilities) { var currentDate = new Date(), dateString = currentDate.getFullYear() + '-' + currentDate.getMonth() + '-' + currentDate.getDate(); <|fim▁hole|> } };<|fim▁end|>
return path.join(dateString, capabilities.caps_.browserName, descriptions.join('-')); } }));
<|file_name|>sparse.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # coding: utf-8 # pylint: disable=wildcard-import, unused-wildcard-import, too-many-lines """Sparse NDArray API of MXNet.""" from __future__ import absolute_import from __future__ import division try: from __builtin__ import slice as py_slice from __builtin__ import sum as py_sum except ImportError: from builtins import slice as py_slice from builtins import sum as py_sum import ctypes import warnings from array import array as native_array __all__ = ["_ndarray_cls", "csr_matrix", "row_sparse_array", "BaseSparseNDArray", "CSRNDArray", "RowSparseNDArray"] import numpy as np from ..base import NotSupportedForSparseNDArray from ..base import _LIB, numeric_types from ..base import c_array_buf, mx_real_t, integer_types from ..base import mx_uint, NDArrayHandle, check_call from ..context import Context from . import _internal from . 
import op try: from .gen_sparse import * # pylint: disable=redefined-builtin except ImportError: pass from ._internal import _set_ndarray_class from .ndarray import NDArray, _storage_type, _DTYPE_NP_TO_MX, _DTYPE_MX_TO_NP from .ndarray import _STORAGE_TYPE_STR_TO_ID, _STORAGE_TYPE_ROW_SPARSE, _STORAGE_TYPE_CSR from .ndarray import _STORAGE_TYPE_UNDEFINED, _STORAGE_TYPE_DEFAULT from .ndarray import zeros as _zeros_ndarray from .ndarray import array as _array try: import scipy.sparse as spsp except ImportError: spsp = None _STORAGE_AUX_TYPES = { 'row_sparse': [np.int64], 'csr': [np.int64, np.int64] } def _new_alloc_handle(stype, shape, ctx, delay_alloc, dtype, aux_types, aux_shapes=None): """Return a new handle with specified storage type, shape, dtype and context. Empty handle is only used to hold results Returns ------- handle A new empty ndarray handle """ hdl = NDArrayHandle() for aux_t in aux_types: if np.dtype(aux_t) != np.dtype("int64"): raise NotImplementedError("only int64 is supported for aux types") aux_type_ids = [int(_DTYPE_NP_TO_MX[np.dtype(aux_t).type]) for aux_t in aux_types] aux_shapes = [(0,) for aux_t in aux_types] if aux_shapes is None else aux_shapes aux_shape_lens = [len(aux_shape) for aux_shape in aux_shapes] aux_shapes = py_sum(aux_shapes, ()) num_aux = mx_uint(len(aux_types)) check_call(_LIB.MXNDArrayCreateSparseEx( ctypes.c_int(int(_STORAGE_TYPE_STR_TO_ID[stype])), c_array_buf(mx_uint, native_array('I', shape)), mx_uint(len(shape)), ctypes.c_int(ctx.device_typeid), ctypes.c_int(ctx.device_id), ctypes.c_int(int(delay_alloc)), ctypes.c_int(int(_DTYPE_NP_TO_MX[np.dtype(dtype).type])), num_aux, c_array_buf(ctypes.c_int, native_array('i', aux_type_ids)), c_array_buf(mx_uint, native_array('I', aux_shape_lens)), c_array_buf(mx_uint, native_array('I', aux_shapes)), ctypes.byref(hdl))) return hdl class BaseSparseNDArray(NDArray): """The base class of an NDArray stored in a sparse storage format. See CSRNDArray and RowSparseNDArray for more details. 
""" def __repr__(self): """Returns a string representation of the sparse array.""" shape_info = 'x'.join(['%d' % x for x in self.shape]) # The data content is not displayed since the array usually has big shape return '\n<%s %s @%s>' % (self.__class__.__name__, shape_info, self.context) def __iadd__(self, other): raise NotImplementedError() def __isub__(self, other): raise NotImplementedError() def __imul__(self, other): raise NotImplementedError() def __idiv__(self, other): raise NotImplementedError() def __itruediv__(self, other): raise NotImplementedError() def _sync_copyfrom(self, source_array): raise NotImplementedError() def _at(self, idx): raise NotSupportedForSparseNDArray(self._at, '[idx]', idx) def _slice(self, start, stop): raise NotSupportedForSparseNDArray(self._slice, None, start, stop) def reshape(self, shape): raise NotSupportedForSparseNDArray(self.reshape, None, shape) @property def size(self): # the `size` for a sparse ndarray is ambiguous, hence disabled. raise NotImplementedError() def _aux_type(self, i): """Data-type of the array's ith aux data. Returns ------- numpy.dtype This BaseSparseNDArray's aux data type. """ aux_type = ctypes.c_int() check_call(_LIB.MXNDArrayGetAuxType(self.handle, i, ctypes.byref(aux_type))) return _DTYPE_MX_TO_NP[aux_type.value] @property def _num_aux(self): """The number of aux data used to help store the sparse ndarray. """ return len(_STORAGE_AUX_TYPES[self.stype]) @property def _aux_types(self): """The data types of the aux data for the BaseSparseNDArray. """ aux_types = [] num_aux = self._num_aux for i in range(num_aux): aux_types.append(self._aux_type(i)) return aux_types def asnumpy(self): """Return a dense ``numpy.ndarray`` object with value copied from this array """ return self.tostype('default').asnumpy() def astype(self, dtype): """Returns a copy of the array after casting to a specified type. Parameters ---------- dtype : numpy.dtype or str The type of the returned array. 
Examples -------- >>> x = mx.nd.sparse.zeros('row_sparse', (2,3), dtype='float32') >>> y = x.astype('int32') >>> y.dtype <type 'numpy.int32'> """ res = zeros(shape=self.shape, ctx=self.context, dtype=dtype, stype=self.stype) self.copyto(res) return res def copyto(self, other): """Copies the value of this array to another array. Parameters ---------- other : NDArray or CSRNDArray or RowSparseNDArray or Context The destination array or context. Returns ------- NDArray or CSRNDArray or RowSparseNDArray The copied array. """ if isinstance(other, NDArray): if other.handle is self.handle: warnings.warn('You are attempting to copy an array to itself', RuntimeWarning) return return _internal._copyto(self, out=other) elif isinstance(other, Context): hret = _ndarray_cls(_new_alloc_handle(self.stype, self.shape, other, True, self.dtype, self._aux_types)) return _internal._copyto(self, out=hret) else: raise TypeError('copyto does not support type ' + str(type(other))) def check_format(self, full_check=True): """Check whether the NDArray format is valid. Parameters ---------- full_check : bool, optional If `True`, rigorous check, O(N) operations. Otherwise basic check, O(1) operations (default True). """ check_call(_LIB.MXNDArraySyncCheckFormat(self.handle, ctypes.c_bool(full_check))) def _data(self): """A deep copy NDArray of the data array associated with the BaseSparseNDArray. This function blocks. Do not use it in performance critical code. """ self.wait_to_read() hdl = NDArrayHandle() check_call(_LIB.MXNDArrayGetDataNDArray(self.handle, ctypes.byref(hdl))) return NDArray(hdl) def _aux_data(self, i): """ Get a deep copy NDArray of the i-th aux data array associated with the BaseSparseNDArray. This function blocks. Do not use it in performance critical code. 
""" self.wait_to_read() hdl = NDArrayHandle() check_call(_LIB.MXNDArrayGetAuxNDArray(self.handle, i, ctypes.byref(hdl))) return NDArray(hdl) # pylint: disable=abstract-method class CSRNDArray(BaseSparseNDArray): """A sparse representation of 2D NDArray in the Compressed Sparse Row format. A CSRNDArray represents an NDArray as three separate arrays: `data`, `indptr` and `indices`. It uses the CSR representation where the column indices for row i are stored in ``indices[indptr[i]:indptr[i+1]]`` and their corresponding values are stored in ``data[indptr[i]:indptr[i+1]]``. The column indices for a given row are expected to be sorted in ascending order. Duplicate column entries for the same row are not allowed. Example ------- >>> a = mx.nd.array([[0, 1, 0], [2, 0, 0], [0, 0, 0], [0, 0, 3]]) >>> a = a.tostype('csr') >>> a.data.asnumpy() array([ 1., 2., 3.], dtype=float32) >>> a.indices.asnumpy() array([1, 0, 2]) >>> a.indptr.asnumpy() array([0, 1, 2, 2, 3]) See Also -------- csr_matrix: Several ways to construct a CSRNDArray """ def __reduce__(self): return CSRNDArray, (None,), super(CSRNDArray, self).__getstate__() def __iadd__(self, other): (self + other).copyto(self) return self def __isub__(self, other): (self - other).copyto(self) return self def __imul__(self, other): (self * other).copyto(self) return self def __idiv__(self, other): (self / other).copyto(self) return self def __itruediv__(self, other): (self / other).copyto(self) return self def __getitem__(self, key): """x.__getitem__(i) <=> x[i] Returns a sliced view of this array. Parameters ---------- key : int or slice Indexing key. 
Examples -------- >>> indptr = np.array([0, 2, 3, 6]) >>> indices = np.array([0, 2, 2, 0, 1, 2]) >>> data = np.array([1, 2, 3, 4, 5, 6]) >>> a = mx.nd.sparse.csr_matrix((data, indices, indptr), shape=(3, 3)) >>> a.asnumpy() array([[ 1., 0., 2.], [ 0., 0., 3.], [ 4., 5., 6.]], dtype=float32) >>> a[1:2].asnumpy() array([[ 0., 0., 3.]], dtype=float32) >>> a[1].asnumpy() array([[ 0., 0., 3.]], dtype=float32) >>> a[-1].asnumpy() array([[ 4., 5., 6.]], dtype=float32) """ if isinstance(key, int): if key == -1: begin = self.shape[0] - 1 else: begin = key return op.slice(self, begin=begin, end=begin+1) if isinstance(key, py_slice): if key.step is not None: raise ValueError('CSRNDArray only supports continuous slicing on axis 0') if key.start is not None or key.stop is not None: begin = key.start if key.start else 0 end = key.stop if key.stop else self.shape[0] return op.slice(self, begin=begin, end=end) else: return self if isinstance(key, tuple): raise ValueError('Multi-dimension indexing is not supported') def __setitem__(self, key, value): """x.__setitem__(i, y) <=> x[i]=y Set self[key] to value. Only slice key [:] is supported. Parameters ---------- key : slice The indexing key. value : NDArray or CSRNDArray or numpy.ndarray The value to set. 
Examples -------- >>> src = mx.nd.sparse.zeros('csr', (3,3)) >>> src.asnumpy() array([[ 0., 0., 0.], [ 0., 0., 0.], [ 0., 0., 0.]], dtype=float32) >>> # assign CSRNDArray with same storage type >>> x = mx.nd.ones('row_sparse', (3,3)).tostype('csr') >>> x[:] = src >>> x.asnumpy() array([[ 1., 1., 1.], [ 1., 1., 1.], [ 1., 1., 1.]], dtype=float32) >>> # assign NDArray to CSRNDArray >>> x[:] = mx.nd.ones((3,3)) * 2 >>> x.asnumpy() array([[ 2., 2., 2.], [ 2., 2., 2.], [ 2., 2., 2.]], dtype=float32) """ if not self.writable: raise ValueError('Failed to assign to a readonly CSRNDArray') if isinstance(key, py_slice): if key.step is not None or key.start is not None or key.stop is not None: raise ValueError('Assignment with slice for CSRNDArray is not ' \ 'implmented yet.') if isinstance(value, NDArray): # avoid copying to itself if value.handle is not self.handle: value.copyto(self) elif isinstance(value, numeric_types): raise ValueError("Assigning numeric types to CSRNDArray is " \ "not implemented yet.") elif isinstance(value, (np.ndarray, np.generic)): # TODO(haibin/anisub) check scipy.sparse and use _sync_copy_from to # avoid the temporary copy warnings.warn('Assigning non-NDArray object to CSRNDArray is not efficient', RuntimeWarning) tmp = _array(value) tmp.copyto(self) else: raise TypeError('type %s not supported' % str(type(value))) else: assert(isinstance(key, (int, tuple))) raise Exception('CSRNDArray only supports [:] for assignment') @property def indices(self): """A deep copy NDArray of the indices array of the CSRNDArray. This generates a deep copy of the column indices of the current `csr` matrix. Returns ------- NDArray This CSRNDArray's indices array. """ return self._aux_data(1) @property def indptr(self): """A deep copy NDArray of the indptr array of the CSRNDArray. This generates a deep copy of the `indptr` of the current `csr` matrix. Returns ------- NDArray This CSRNDArray's indptr array. 
""" return self._aux_data(0) @property def data(self): """A deep copy NDArray of the data array of the CSRNDArray. This generates a deep copy of the `data` of the current `csr` matrix. Returns ------- NDArray This CSRNDArray's data array. """ return self._data() @indices.setter def indices(self, indices): raise NotImplementedError() @indptr.setter def indptr(self, indptr): raise NotImplementedError() @data.setter def data(self, data): raise NotImplementedError() def tostype(self, stype): """Return a copy of the array with chosen storage type. Returns ------- NDArray or CSRNDArray A copy of the array with the chosen storage stype """ if stype == 'row_sparse': raise ValueError("cast_storage from csr to row_sparse is not supported") return op.cast_storage(self, stype=stype) def copyto(self, other): """Copies the value of this array to another array. If ``other`` is a ``NDArray`` or ``CSRNDArray`` object, then ``other.shape`` and ``self.shape`` should be the same. This function copies the value from ``self`` to ``other``. If ``other`` is a context, a new ``CSRNDArray`` will be first created on the target context, and the value of ``self`` is copied. Parameters ---------- other : NDArray or CSRNDArray or Context The destination array or context. Returns ------- NDArray or CSRNDArray The copied array. If ``other`` is an ``NDArray`` or ``CSRNDArray``, then the return value and ``other`` will point to the same ``NDArray`` or ``CSRNDArray``. 
""" if isinstance(other, Context): return super(CSRNDArray, self).copyto(other) elif isinstance(other, NDArray): stype = other.stype if stype == 'default' or stype == 'csr': return super(CSRNDArray, self).copyto(other) else: raise TypeError('copyto does not support destination NDArray stype ' + str(stype)) else: raise TypeError('copyto does not support type ' + str(type(other))) def asscipy(self): """Returns a ``scipy.sparse.csr.csr_matrix`` object with value copied from this array Examples -------- >>> x = mx.nd.sparse.zeros('csr', (2,3)) >>> y = x.asscipy() >>> type(y) <type 'scipy.sparse.csr.csr_matrix'> >>> y <2x3 sparse matrix of type '<type 'numpy.float32'>' with 0 stored elements in Compressed Sparse Row format> """ data = self.data.asnumpy() indices = self.indices.asnumpy() indptr = self.indptr.asnumpy() if not spsp: raise ImportError("scipy is not available. \ Please check if the scipy python bindings are installed.") return spsp.csr_matrix((data, indices, indptr), shape=self.shape, dtype=self.dtype) # pylint: disable=abstract-method class RowSparseNDArray(BaseSparseNDArray): """A sparse representation of a set of NDArray row slices at given indices. A RowSparseNDArray represents a multidimensional NDArray using two separate arrays: `data` and `indices`. The number of dimensions has to be at least 2. - data: an NDArray of any dtype with shape [D0, D1, ..., Dn]. - indices: a 1-D int64 NDArray with shape [D0] with values sorted in ascending order. The `indices` stores the indices of the row slices with non-zeros, while the values are stored in `data`. The corresponding NDArray ``dense`` represented by RowSparseNDArray ``rsp`` has ``dense[rsp.indices[i], :, :, :, ...] 
= rsp.data[i, :, :, :, ...]`` >>> dense.asnumpy() array([[ 1., 2., 3.], [ 0., 0., 0.], [ 4., 0., 5.], [ 0., 0., 0.], [ 0., 0., 0.]], dtype=float32) >>> rsp = dense.tostype('row_sparse') >>> rsp.indices.asnumpy() array([0, 2], dtype=int64) >>> rsp.data.asnumpy() array([[ 1., 2., 3.], [ 4., 0., 5.]], dtype=float32) A RowSparseNDArray is typically used to represent non-zero row slices of a large NDArray of shape [LARGE0, D1, .. , Dn] where LARGE0 >> D0 and most row slices are zeros. RowSparseNDArray is used principally in the definition of gradients for operations that have sparse gradients (e.g. sparse dot and sparse embedding). See Also -------- row_sparse_array: Several ways to construct a RowSparseNDArray """ def __reduce__(self): return RowSparseNDArray, (None,), super(RowSparseNDArray, self).__getstate__() def __iadd__(self, other): (self + other).copyto(self) return self def __isub__(self, other): (self - other).copyto(self) return self def __imul__(self, other): (self * other).copyto(self) return self def __idiv__(self, other): (self / other).copyto(self) return self def __itruediv__(self, other): (self / other).copyto(self) return self def __getitem__(self, key): """x.__getitem__(i) <=> x[i] Returns a sliced view of this array. Parameters ---------- key : slice Indexing key. Examples -------- >>> x = mx.nd.sparse.zeros('row_sparse', (2, 3)) >>> x[:].asnumpy() array([[ 0., 0., 0.], [ 0., 0., 0.]], dtype=float32) """ if isinstance(key, int): raise Exception("__getitem__ with int key is not implemented for RowSparseNDArray yet") if isinstance(key, py_slice): if key.step is not None or key.start is not None or key.stop is not None: raise Exception('RowSparseNDArray only supports [:] for __getitem__') else: return self if isinstance(key, tuple): raise ValueError('Multi-dimension indexing is not supported') def __setitem__(self, key, value): """x.__setitem__(i, y) <=> x[i]=y Set self[key] to value. Only slice key [:] is supported. 
Parameters ---------- key : slice The indexing key. value : NDArray or numpy.ndarray The value to set. Examples -------- >>> src = mx.nd.row_sparse([[1, 0, 2], [4, 5, 6]], [0, 2], (3,3)) >>> src.asnumpy() array([[ 1., 0., 2.], [ 0., 0., 0.], [ 4., 5., 6.]], dtype=float32) >>> # assign RowSparseNDArray with same storage type >>> x = mx.nd.sparse.zeros('row_sparse', (3,3)) >>> x[:] = src >>> x.asnumpy() array([[ 1., 0., 2.], [ 0., 0., 0.], [ 4., 5., 6.]], dtype=float32) >>> # assign NDArray to RowSparseNDArray >>> x[:] = mx.nd.ones((3,3)) >>> x.asnumpy() array([[ 1., 1., 1.], [ 1., 1., 1.], [ 1., 1., 1.]], dtype=float32) """ if not self.writable: raise ValueError('Failed to assign to a readonly RowSparseNDArray') if isinstance(key, py_slice): if key.step is not None or key.start is not None or key.stop is not None: raise ValueError('Assignment with slice for RowSparseNDArray ' \ 'is not implmented yet.') if isinstance(value, NDArray): # avoid copying to itself if value.handle is not self.handle: value.copyto(self) elif isinstance(value, numeric_types): raise ValueError("Assigning numeric types to RowSparseNDArray " \ "is not implemented yet.") elif isinstance(value, (np.ndarray, np.generic)): warnings.warn('Assigning non-NDArray object to RowSparseNDArray is not efficient', RuntimeWarning) tmp = _array(value) tmp.copyto(self) else: raise TypeError('type %s not supported' % str(type(value))) else: assert(isinstance(key, (int, tuple))) raise TypeError('RowSparseNDArray only supports [:] for assignment') @property def indices(self): """A deep copy NDArray of the indices array of the RowSparseNDArray. This generates a deep copy of the row indices of the current `row_sparse` matrix. Returns ------- NDArray This RowSparseNDArray's indices array. """ return self._aux_data(0) @property def data(self): """A deep copy NDArray of the data array of the RowSparseNDArray. This generates a deep copy of the `data` of the current `row_sparse` matrix. 
Returns ------- NDArray This RowSparseNDArray's data array. """ return self._data() @indices.setter def indices(self, indices): raise NotImplementedError() @data.setter def data(self, data): raise NotImplementedError() def tostype(self, stype): """Return a copy of the array with chosen storage type. Returns ------- NDArray or RowSparseNDArray A copy of the array with the chosen storage stype """ if stype == 'csr': raise ValueError("cast_storage from row_sparse to csr is not supported") return op.cast_storage(self, stype=stype) def copyto(self, other): """Copies the value of this array to another array. If ``other`` is a ``NDArray`` or ``RowSparseNDArray`` object, then ``other.shape`` and ``self.shape`` should be the same. This function copies the value from ``self`` to ``other``. If ``other`` is a context, a new ``RowSparseNDArray`` will be first created on the target context, and the value of ``self`` is copied. Parameters ---------- other : NDArray or RowSparseNDArray or Context The destination array or context. Returns ------- NDArray or RowSparseNDArray The copied array. If ``other`` is an ``NDArray`` or ``RowSparseNDArray``, then the return value and ``other`` will point to the same ``NDArray`` or ``RowSparseNDArray``. """ if isinstance(other, Context): return super(RowSparseNDArray, self).copyto(other) elif isinstance(other, NDArray): stype = other.stype if stype == 'default' or stype == 'row_sparse': return super(RowSparseNDArray, self).copyto(other) else: raise TypeError('copyto does not support destination NDArray stype ' + str(stype)) else: raise TypeError('copyto does not support type ' + str(type(other))) def retain(self, *args, **kwargs): """Convenience fluent method for :py:func:`retain`. The arguments are the same as for :py:func:`retain`, with this array as data. """ return retain(self, *args, **kwargs) def _prepare_src_array(source_array, dtype): """Prepare `source_array` so that it can be used to construct NDArray. 
`source_array` is converted to a `np.ndarray` if it's neither an `NDArray` \ nor an `np.ndarray`. """ if not isinstance(source_array, NDArray) and not isinstance(source_array, np.ndarray): try: source_array = np.array(source_array, dtype=dtype) except: raise TypeError('values must be array like object') return source_array def _prepare_default_dtype(src_array, dtype): """Prepare the value of dtype if `dtype` is None. If `src_array` is an NDArray, numpy.ndarray or scipy.sparse.csr.csr_matrix, return src_array.dtype. float32 is returned otherwise.""" if dtype is None: if isinstance(src_array, (NDArray, np.ndarray)):<|fim▁hole|> elif spsp and isinstance(src_array, spsp.csr.csr_matrix): dtype = src_array.dtype else: dtype = mx_real_t return dtype def _check_shape(s1, s2): """check s1 == s2 if both are not None""" if s1 and s2 and s1 != s2: raise ValueError("Shape mismatch detected. " + str(s1) + " v.s. " + str(s2)) def csr_matrix(arg1, shape=None, ctx=None, dtype=None): """Creates a `CSRNDArray`, an 2D array with compressed sparse row (CSR) format. The CSRNDArray can be instantiated in several ways: - csr_matrix(D): to construct a CSRNDArray with a dense 2D array ``D`` - **D** (*array_like*) - An object exposing the array interface, an object whose \ `__array__` method returns an array, or any (nested) sequence. - **ctx** (*Context, optional*) - Device context \ (default is the current default context). - **dtype** (*str or numpy.dtype, optional*) - The data type of the output array. \ The default dtype is ``D.dtype`` if ``D`` is an NDArray or numpy.ndarray, \ float32 otherwise. - csr_matrix(S) to construct a CSRNDArray with a sparse 2D array ``S`` - **S** (*CSRNDArray or scipy.sparse.csr.csr_matrix*) - A sparse matrix. - **ctx** (*Context, optional*) - Device context \ (default is the current default context). - **dtype** (*str or numpy.dtype, optional*) - The data type of the output array. \ The default dtype is ``S.dtype``. 
- csr_matrix((M, N)) to construct an empty CSRNDArray with shape ``(M, N)`` - **M** (*int*) - Number of rows in the matrix - **N** (*int*) - Number of columns in the matrix - **ctx** (*Context, optional*) - Device context \ (default is the current default context). - **dtype** (*str or numpy.dtype, optional*) - The data type of the output array. \ The default dtype is float32. - csr_matrix((data, indices, indptr)) to construct a CSRNDArray based on the definition of compressed sparse row format \ using three separate arrays, \ where the column indices for row i are stored in ``indices[indptr[i]:indptr[i+1]]`` \ and their corresponding values are stored in ``data[indptr[i]:indptr[i+1]]``. \ The column indices for a given row are expected to be **sorted in ascending order.** \ Duplicate column entries for the same row are not allowed. - **data** (*array_like*) - An object exposing the array interface, which \ holds all the non-zero entries of the matrix in row-major order. - **indices** (*array_like*) - An object exposing the array interface, which \ stores the column index for each non-zero element in ``data``. - **indptr** (*array_like*) - An object exposing the array interface, which \ stores the offset into ``data`` of the first non-zero element number of each \ row of the matrix. - **shape** (*tuple of int, optional*) - The shape of the array. The default \ shape is inferred from the indices and indptr arrays. - **ctx** (*Context, optional*) - Device context \ (default is the current default context). - **dtype** (*str or numpy.dtype, optional*) - The data type of the output array. \ The default dtype is ``data.dtype`` if ``data`` is an NDArray or numpy.ndarray, \ float32 otherwise. - csr_matrix((data, (row, col))) to construct a CSRNDArray based on the COOrdinate format \ using three seperate arrays, \ where ``row[i]`` is the row index of the element, \ ``col[i]`` is the column index of the element \ and ``data[i]`` is the data corresponding to the element. 
All the missing \ elements in the input are taken to be zeroes. - **data** (*array_like*) - An object exposing the array interface, which \ holds all the non-zero entries of the matrix in COO format. - **row** (*array_like*) - An object exposing the array interface, which \ stores the row index for each non zero element in ``data``. - **col** (*array_like*) - An object exposing the array interface, which \ stores the col index for each non zero element in ``data``. - **shape** (*tuple of int, optional*) - The shape of the array. The default \ shape is inferred from the ``row`` and ``col`` arrays. - **ctx** (*Context, optional*) - Device context \ (default is the current default context). - **dtype** (*str or numpy.dtype, optional*) - The data type of the output array. \ The default dtype is float32. Parameters ---------- arg1: tuple of int, tuple of array_like, array_like, CSRNDArray, scipy.sparse.csr_matrix, \ scipy.sparse.coo_matrix, tuple of int or tuple of array_like The argument to help instantiate the csr matrix. See above for further details. shape : tuple of int, optional The shape of the csr matrix. ctx: Context, optional Device context (default is the current default context). dtype: str or numpy.dtype, optional The data type of the output array. Returns ------- CSRNDArray A `CSRNDArray` with the `csr` storage representation. Example ------- >>> a = mx.nd.sparse.csr_matrix(([1, 2, 3], [1, 0, 2], [0, 1, 2, 2, 3]), shape=(4, 3)) >>> a.asnumpy() array([[ 0., 1., 0.], [ 2., 0., 0.], [ 0., 0., 0.], [ 0., 0., 3.]], dtype=float32) See Also -------- CSRNDArray : MXNet NDArray in compressed sparse row format. 
""" # construct a csr matrix from (M, N) or (data, indices, indptr) if isinstance(arg1, tuple): arg_len = len(arg1) if arg_len == 2: # construct a sparse csr matrix from # scipy coo matrix if input format is coo if isinstance(arg1[1], tuple) and len(arg1[1]) == 2: data, (row, col) = arg1 if isinstance(data, NDArray): data = data.asnumpy() if isinstance(row, NDArray): row = row.asnumpy() if isinstance(col, NDArray): col = col.asnumpy() coo = spsp.coo_matrix((data, (row, col)), shape=shape) _check_shape(coo.shape, shape) csr = coo.tocsr() return array(csr, ctx=ctx, dtype=dtype) else: # empty matrix with shape _check_shape(arg1, shape) return empty('csr', arg1, ctx=ctx, dtype=dtype) elif arg_len == 3: # data, indices, indptr return _csr_matrix_from_definition(arg1[0], arg1[1], arg1[2], shape=shape, ctx=ctx, dtype=dtype) else: raise ValueError("Unexpected length of input tuple: " + str(arg_len)) else: # construct a csr matrix from a sparse / dense one if isinstance(arg1, CSRNDArray) or (spsp and isinstance(arg1, spsp.csr.csr_matrix)): # construct a csr matrix from scipy or CSRNDArray _check_shape(arg1.shape, shape) return array(arg1, ctx=ctx, dtype=dtype) elif isinstance(arg1, RowSparseNDArray): raise ValueError("Unexpected input type: RowSparseNDArray") else: # construct a csr matrix from a dense one # prepare default ctx and dtype since mx.nd.array doesn't use default values # based on source_array dtype = _prepare_default_dtype(arg1, dtype) # create dns array with provided dtype. 
ctx is not passed since copy across # ctx requires dtype to be the same dns = _array(arg1, dtype=dtype) if ctx is not None and dns.context != ctx: dns = dns.as_in_context(ctx) _check_shape(dns.shape, shape) return dns.tostype('csr') def _csr_matrix_from_definition(data, indices, indptr, shape=None, ctx=None, dtype=None, indices_type=None, indptr_type=None): """Create a `CSRNDArray` based on data, indices and indptr""" storage_type = 'csr' # context ctx = Context.default_ctx if ctx is None else ctx # types dtype = _prepare_default_dtype(data, dtype) indptr_type = _STORAGE_AUX_TYPES[storage_type][0] if indptr_type is None else indptr_type indices_type = _STORAGE_AUX_TYPES[storage_type][1] if indices_type is None else indices_type # prepare src array and types data = _prepare_src_array(data, dtype) indptr = _prepare_src_array(indptr, indptr_type) indices = _prepare_src_array(indices, indices_type) # TODO(junwu): Convert data, indptr, and indices to mxnet NDArrays # if they are not for now. 
In the future, we should provide a c-api # to accept np.ndarray types to copy from to result.data and aux_data if not isinstance(data, NDArray): data = _array(data, ctx, dtype) if not isinstance(indptr, NDArray): indptr = _array(indptr, ctx, indptr_type) if not isinstance(indices, NDArray): indices = _array(indices, ctx, indices_type) if shape is None: if indices.shape[0] == 0: raise ValueError('invalid shape') shape = (len(indptr) - 1, op.max(indices).asscalar() + 1) # verify shapes aux_shapes = [indptr.shape, indices.shape] if data.ndim != 1 or indptr.ndim != 1 or indices.ndim != 1 or \ indptr.shape[0] == 0 or len(shape) != 2: raise ValueError('invalid shape') result = CSRNDArray(_new_alloc_handle(storage_type, shape, ctx, False, dtype, [indptr_type, indices_type], aux_shapes)) check_call(_LIB.MXNDArraySyncCopyFromNDArray(result.handle, data.handle, ctypes.c_int(-1))) check_call(_LIB.MXNDArraySyncCopyFromNDArray(result.handle, indptr.handle, ctypes.c_int(0))) check_call(_LIB.MXNDArraySyncCopyFromNDArray(result.handle, indices.handle, ctypes.c_int(1))) return result def row_sparse_array(arg1, shape=None, ctx=None, dtype=None): """Creates a `RowSparseNDArray`, a multidimensional row sparse array with a set of \ tensor slices at given indices. The RowSparseNDArray can be instantiated in several ways: - row_sparse_array(D): to construct a RowSparseNDArray with a dense ndarray ``D`` - **D** (*array_like*) - An object exposing the array interface, an object whose \ `__array__` method returns an array, or any (nested) sequence. - **ctx** (*Context, optional*) - Device context \ (default is the current default context). - **dtype** (*str or numpy.dtype, optional*) - The data type of the output array. \ The default dtype is ``D.dtype`` if ``D`` is an NDArray or numpy.ndarray, \ float32 otherwise. - row_sparse_array(S) to construct a RowSparseNDArray with a sparse ndarray ``S`` - **S** (*RowSparseNDArray*) - A sparse ndarray. 
- **ctx** (*Context, optional*) - Device context \ (default is the current default context). - **dtype** (*str or numpy.dtype, optional*) - The data type of the output array. \ The default dtype is ``S.dtype``. - row_sparse_array((D0, D1 .. Dn)) to construct an empty RowSparseNDArray with shape ``(D0, D1, ... Dn)`` - **D0, D1 .. Dn** (*int*) - The shape of the ndarray - **ctx** (*Context, optional*) - Device context \ (default is the current default context). - **dtype** (*str or numpy.dtype, optional*) - The data type of the output array. \ The default dtype is float32. - row_sparse_array((data, indices)) to construct a RowSparseNDArray based on the definition of row sparse format \ using two separate arrays, \ where the `indices` stores the indices of the row slices with non-zeros, while the values are stored in `data`. The corresponding NDArray ``dense`` represented by RowSparseNDArray ``rsp`` has \ ``dense[rsp.indices[i], :, :, :, ...] = rsp.data[i, :, :, :, ...]`` The row indices for are expected to be **sorted in ascending order.** \ - **data** (*array_like*) - An object exposing the array interface, which \ holds all the non-zero row slices of the array. - **indices** (*array_like*) - An object exposing the array interface, which \ stores the row index for each row slice with non-zero elements. - **shape** (*tuple of int, optional*) - The shape of the array. The default \ shape is inferred from the indices and indptr arrays. - **ctx** (*Context, optional*) - Device context \ (default is the current default context). - **dtype** (*str or numpy.dtype, optional*) - The data type of the output array. \ The default dtype is float32. Parameters ---------- arg1: NDArray, numpy.ndarray, RowSparseNDArray, tuple of int or tuple of array_like The argument to help instantiate the row sparse ndarray. See above for further details. shape : tuple of int, optional The shape of the row sparse ndarray. 
ctx : Context, optional Device context (default is the current default context). dtype : str or numpy.dtype, optional The data type of the output array. Returns ------- RowSparseNDArray An `RowSparseNDArray` with the `row_sparse` storage representation. Example ------- >>> a = mx.nd.sparse.row_sparse_array(([[1, 2], [3, 4]], [1, 4]), shape=(6, 2)) >>> a.asnumpy() array([[ 0., 0.], [ 1., 2.], [ 0., 0.], [ 0., 0.], [ 3., 4.], [ 0., 0.]], dtype=float32) See Also -------- RowSparseNDArray : MXNet NDArray in row sparse format. """ # construct a row sparse array from (D0, D1 ..) or (data, indices) if isinstance(arg1, tuple): arg_len = len(arg1) if arg_len < 2: raise ValueError("Unexpected length of input tuple: " + str(arg_len)) elif arg_len > 2: # empty ndarray with shape _check_shape(arg1, shape) return empty('row_sparse', arg1, ctx=ctx, dtype=dtype) else: # len(arg1) = 2, is either shape or (data, indices) if isinstance(arg1[0], integer_types) and isinstance(arg1[1], integer_types): # empty ndarray with shape _check_shape(arg1, shape) return empty('row_sparse', arg1, ctx=ctx, dtype=dtype) else: # data, indices, indptr return _row_sparse_ndarray_from_definition(arg1[0], arg1[1], shape=shape, ctx=ctx, dtype=dtype) else: # construct a row sparse ndarray from a dense / sparse array if isinstance(arg1, RowSparseNDArray): # construct a row sparse ndarray from RowSparseNDArray _check_shape(arg1.shape, shape) return array(arg1, ctx=ctx, dtype=dtype) elif isinstance(arg1, CSRNDArray): raise ValueError("Unexpected input type: CSRNDArray") else: # construct a csr matrix from a dense one # prepare default dtype since mx.nd.array doesn't use default values # based on source_array dtype = _prepare_default_dtype(arg1, dtype) # create dns array with provided dtype. 
ctx is not passed since copy across # ctx requires dtype to be the same dns = _array(arg1, dtype=dtype) if ctx is not None and dns.context != ctx: dns = dns.as_in_context(ctx) _check_shape(dns.shape, shape) return dns.tostype('row_sparse') def _row_sparse_ndarray_from_definition(data, indices, shape=None, ctx=None, dtype=None, indices_type=None): """Create a `RowSparseNDArray` based on data and indices""" storage_type = 'row_sparse' # context ctx = Context.default_ctx if ctx is None else ctx # types dtype = _prepare_default_dtype(data, dtype) indices_type = _STORAGE_AUX_TYPES[storage_type][0] if indices_type is None else indices_type # prepare src array and types data = _prepare_src_array(data, dtype) indices = _prepare_src_array(indices, indices_type) # TODO(junwu): Convert data, indptr, and indices to mxnet NDArrays # if they are not for now. In the future, we should provide a c-api # to accept np.ndarray types to copy from to result.data and aux_data if not isinstance(data, NDArray): data = _array(data, ctx, dtype) if not isinstance(indices, NDArray): indices = _array(indices, ctx, indices_type) if shape is None: num_indices = indices.shape[0] if num_indices == 0: raise ValueError('invalid shape') dim0 = indices[num_indices - 1].asscalar() + 1 shape = (dim0, ) + data.shape[1:] # verify shapes if data.ndim != len(shape) or indices.ndim != 1 or np.prod(shape[1:]) == 0: raise ValueError("invalid shape") result = RowSparseNDArray(_new_alloc_handle(storage_type, shape, ctx, False, dtype, [indices_type], [indices.shape])) check_call(_LIB.MXNDArraySyncCopyFromNDArray(result.handle, data.handle, ctypes.c_int(-1))) check_call(_LIB.MXNDArraySyncCopyFromNDArray(result.handle, indices.handle, ctypes.c_int(0))) return result def _ndarray_cls(handle, writable=True, stype=_STORAGE_TYPE_UNDEFINED): if stype == _STORAGE_TYPE_UNDEFINED: stype = _storage_type(handle) if stype == _STORAGE_TYPE_DEFAULT: return NDArray(handle, writable=writable) elif stype == _STORAGE_TYPE_CSR: 
return CSRNDArray(handle, writable=writable) elif stype == _STORAGE_TYPE_ROW_SPARSE: return RowSparseNDArray(handle, writable=writable) else: raise Exception("unknown storage type: %s"%stype) _set_ndarray_class(_ndarray_cls) def zeros(stype, shape, ctx=None, dtype=None, **kwargs): """Return a new array of given shape and type, filled with zeros. Parameters ---------- stype: string The storage type of the empty array, such as 'row_sparse', 'csr', etc shape : int or tuple of int The shape of the empty array ctx : Context, optional An optional device context (default is the current default context) dtype : str or numpy.dtype, optional An optional value type (default is `float32`) Returns ------- RowSparseNDArray or CSRNDArray A created array Examples -------- >>> mx.nd.sparse.zeros('csr', (1,2)) <CSRNDArray 1x2 @cpu(0)> >>> mx.nd.sparse.zeros('row_sparse', (1,2), ctx=mx.cpu(), dtype='float16').asnumpy() array([[ 0., 0.]], dtype=float16) """ if stype == 'default': return _zeros_ndarray(shape, ctx=ctx, dtype=dtype, **kwargs) if ctx is None: ctx = Context.default_ctx dtype = mx_real_t if dtype is None else dtype if stype == 'row_sparse' or stype == 'csr': aux_types = _STORAGE_AUX_TYPES[stype] else: raise ValueError("unknown storage type" + stype) out = _ndarray_cls(_new_alloc_handle(stype, shape, ctx, True, dtype, aux_types)) return _internal._zeros(shape=shape, ctx=ctx, dtype=dtype, out=out, **kwargs) def empty(stype, shape, ctx=None, dtype=None): """Returns a new array of given shape and type, without initializing entries. Parameters ---------- stype: string The storage type of the empty array, such as 'row_sparse', 'csr', etc shape : int or tuple of int The shape of the empty array. ctx : Context, optional An optional device context (default is the current default context). dtype : str or numpy.dtype, optional An optional value type (default is `float32`). Returns ------- CSRNDArray or RowSparseNDArray A created array. 
""" if isinstance(shape, int): shape = (shape, ) if ctx is None: ctx = Context.default_ctx if dtype is None: dtype = mx_real_t assert(stype is not None) if stype == 'csr' or stype == 'row_sparse': return zeros(stype, shape, ctx=ctx, dtype=dtype) else: raise Exception("unknown stype : " + str(stype)) def array(source_array, ctx=None, dtype=None): """Creates a sparse array from any object exposing the array interface. Parameters ---------- source_array : RowSparseNDArray, CSRNDArray or scipy.sparse.csr.csr_matrix The source sparse array ctx : Context, optional The default context is ``source_array.context`` if ``source_array`` is an NDArray. \ The current default context otherwise. dtype : str or numpy.dtype, optional The data type of the output array. The default dtype is ``source_array.dtype`` if `source_array` is an `NDArray`, `numpy.ndarray` or `scipy.sparse.csr.csr_matrix`, \ `float32` otherwise. Returns ------- RowSparseNDArray or CSRNDArray An array with the same contents as the `source_array`. 
Examples -------- >>> import scipy.sparse as spsp >>> csr = spsp.csr_matrix((2, 100)) >>> mx.nd.sparse.array(csr) <CSRNDArray 2x100 @cpu(0)> >>> mx.nd.sparse.array(mx.nd.sparse.zeros('csr', (3, 2))) <CSRNDArray 3x2 @cpu(0)> >>> mx.nd.sparse.array(mx.nd.sparse.zeros('row_sparse', (3, 2))) <RowSparseNDArray 3x2 @cpu(0)> """ ctx = Context.default_ctx if ctx is None else ctx if isinstance(source_array, NDArray): assert(source_array.stype != 'default'), \ "Please use `tostype` to create RowSparseNDArray or CSRNDArray from an NDArray" # prepare dtype and ctx based on source_array, if not provided dtype = _prepare_default_dtype(source_array, dtype) # if both dtype and ctx are different from source_array, we cannot copy directly if source_array.dtype != dtype and source_array.context != ctx: arr = empty(source_array.stype, source_array.shape, dtype=dtype) arr[:] = source_array arr = arr.as_in_context(ctx) else: arr = empty(source_array.stype, source_array.shape, dtype=dtype, ctx=ctx) arr[:] = source_array return arr elif spsp and isinstance(source_array, spsp.csr.csr_matrix): # TODO(haibin) implement `_sync_copy_from` with scipy csr object to reduce a copy # preprocess scipy csr to canonical form csr = source_array.sorted_indices() csr.sum_duplicates() dtype = _prepare_default_dtype(source_array, dtype) return csr_matrix((csr.data, csr.indices, csr.indptr), shape=csr.shape, \ dtype=dtype, ctx=ctx) elif isinstance(source_array, (np.ndarray, np.generic)): raise ValueError("Please use mx.nd.array to create an NDArray with source_array of type ", type(source_array)) else: raise ValueError("Unexpected source_array type: ", type(source_array))<|fim▁end|>
dtype = src_array.dtype
<|file_name|>funcoes.js<|end_file_name|><|fim▁begin|>// Global Vars to set var musicas = new Array(11); musicas[0] = 0; // Wheel A musicas[1] = 0; // Whell B musicas[2] = "0;"; // A1 musicas[3] = "0;"; // A2 musicas[4] = "0;"; // A3 musicas[5] = "0;"; // A4 musicas[6] = "0;"; // B1 musicas[7] = "0;"; // B2 musicas[8] = "0;"; // B3 musicas[9] = "0;"; // B4 musicas[10] = 0; // Sings function ativa_facebook(){ alert('Aguarde...'); FB.api('/me', function(response) { // console.log(response); // NORMAL ACTION $.post('getUser.php', { facebookId: response.id}, function(data){ if(data.success){ //INSERE APENAS BATIDA $.post('salva_som.php?opc=1', { m1: musicas[0], m2: musicas[1], m3: musicas[2]+musicas[3]+musicas[4]+musicas[5]+musicas[6]+musicas[7]+musicas[8]+musicas[9], m4: musicas[10], usuario: data.usuario }, function(data){ if(data.success){ var image = Math.floor((Math.random()*3)+1); FB.api('/me/feed', 'post', { message: 'Sinta o sabor da minha batida no FLAVOR DJ: o gerador de som exclusivo do BH DANCE FESTIVAL. BH Dance Festival. A CIDADE NA PISTA.', link: 'https://apps.facebook.com/flavordj/?minhaBatida='+data.batida, picture: 'https://lit-castle-9930.herokuapp.com/img/share/flavor'+image+'.jpg' }, function(response) { if (!response || response.error) { alert('Error occured'); } else { alert('Sua batida foi compartilhada com sucesso!'); } }); } }, 'json'); }else{ //INSERE BATIDA E USUARIO $.post('salva_som.php?opc=2', { m1: musicas[0], m2: musicas[1], m3: musicas[2]+musicas[3]+musicas[4]+musicas[5]+musicas[6]+musicas[7]+musicas[8]+musicas[9], m4: musicas[10], facebookId: response.id, nome: response.name, email: response.email, sexo: response.gender, cidade: '' }, function(data){ if(data.success){ var image = Math.floor((Math.random()*3)+1); FB.api('/me/feed', 'post', { message: 'Sinta o sabor da minha batida no FLAVOR DJ: o gerador de som exclusivo do BH DANCE FESTIVAL. BH Dance Festival. 
A CIDADE NA PISTA.', link: 'https://apps.facebook.com/flavordj/?minhaBatida='+data.batida, picture: 'https://lit-castle-9930.herokuapp.com/img/share/flavor'+image+'.jpg' }, function(response) { if (!response || response.error) { alert('Error occured'); } else { alert('Sua batida foi compartilhada com sucesso!'); } }); } }, 'json'); } }, 'json'); }); } function computa_voto(batida){ FB.api('/me', function(response) { //console.log(response); // NORMAL ACTION $.post('getVoto.php', { facebookId: response.id}, function(data){ if(data.success){ alert('Você já votou em uma batida, obrigado por participar!'); }else{ //INSERE NO BD $.post('computa_voto.php', { facebookId: response.id, batida: batida }, function(data){ if(data.success){ } }, 'json'); } }, 'json'); }); } function login() { alert('Você ainda não tem o aplicativo do Flavor DJ. Instale-o primeiro para compartilhar sua batida.'); FB.login(function(response) { if (response.authResponse) { ativa_facebook(); } else { } }, {scope: 'email, publish_stream'}); } function desativaetp1(){ $('.audio1').jPlayer("stop"); $('.audio2').jPlayer("stop"); $('.audio3').jPlayer("stop"); $('.audio4').jPlayer("stop"); musicas[0] = "0;"; $('.etapa1, .guide .etapa1 div, .etapa1 li').removeClass('ativo'); $('.etapa1').css('z-index', 2); } function desativaetp2(){ $('.audio5').jPlayer("stop"); $('.audio6').jPlayer("stop"); $('.audio7').jPlayer("stop"); $('.audio8').jPlayer("stop"); musicas[1] = "0;"; $('.etapa2, .guide .etapa2 div, .etapa2 li').removeClass('ativo'); $('.etapa2').css('z-index', 2); } function desativaetpr(idPlayer, cod){ musicas[cod] = "0;"; $('.audio'+idPlayer).jPlayer("stop"); $('.etapa3').css('z-index', 2); } function desativaetp5(){ $('.audio17').jPlayer("stop"); $('.audio18').jPlayer("stop"); $('.audio19').jPlayer("stop"); $('.audio20').jPlayer("stop"); musicas[10] = "0;"; $('.etapa5, .guide .etapa5 div, .etapa5 li').removeClass('ativo'); $('.etapa5').css('z-index', 2); } function ativa_anima(){ $('.whel1 
div.a').delay(300).animate({ height: '0px' }, 1000); $('.whel1 div.b').delay(300).animate({ height: '0px' }, 1000, function(){ $('.whel2 div.a').delay(300).animate({ width: '0px' }, 1000); $('.whel2 div.b').delay(300).animate({ width: '0px' }, 1000); }); } $(document).ready(function(){ //login_start(); $('.etapa1').click(function(){ if($(this).hasClass('ativo')){ desativaetp1(); }else{ desativaetp1(); $(this).addClass('ativo'); $(this).css('z-index', 1); var audioPlayer = $(this).attr('href'); musicas[0] = audioPlayer; $('.guide .etapa1 div.p'+audioPlayer).addClass('ativo'); $('.visor .etapa1 li.p'+audioPlayer).addClass('ativo'); $(".audio"+audioPlayer).jPlayer("play", 0); } return false; }) $('.etapa2').click(function(){ if($(this).hasClass('ativo')){ desativaetp2(); }else{ desativaetp2(); $(this).addClass('ativo'); $(this).css('z-index', 1); var audioPlayer = $(this).attr('href'); musicas[1] = audioPlayer; $('.guide .etapa2 div.p'+audioPlayer).addClass('ativo'); $('.visor .etapa2 li.p'+audioPlayer).addClass('ativo'); $(".audio"+audioPlayer).jPlayer("play", 0); } return false; }) $('.etapa3').click(function(){ var audioPlayer = $(this).attr('href'); var codigo = $(this).data('codigo'); if($(this).hasClass('ativo')){ desativaetpr(audioPlayer,codigo); $('.guide .etapa3 div.p'+audioPlayer).removeClass('ativo'); $('.visor .etapa3 li.p'+audioPlayer).removeClass('ativo'); $(this).removeClass('ativo'); }else{ $(this).addClass('ativo'); $('.guide .etapa3 div.p'+audioPlayer).addClass('ativo'); $('.visor .etapa3 li.p'+audioPlayer).addClass('ativo'); $(this).css('z-index', 1); musicas[codigo] = audioPlayer+";"; $(".audio"+audioPlayer).jPlayer("play", 0); } return false; }) $('.etapa4').click(function(){ var audioPlayer = $(this).attr('href'); var cod = $(this).data('codigo'); if($(this).hasClass('ativo')){ desativaetpr(audioPlayer, cod); $('.guide .etapa4 div.p'+audioPlayer).removeClass('ativo'); $('.visor .etapa4 li.p'+audioPlayer).removeClass('ativo'); 
$(this).removeClass('ativo'); }else{ $(this).addClass('ativo'); $('.guide .etapa4 div.p'+audioPlayer).addClass('ativo'); $('.visor .etapa4 li.p'+audioPlayer).addClass('ativo'); musicas[cod] = audioPlayer+";"; $(".audio"+audioPlayer).jPlayer("play", 0); } return false; }) $('.etapa5').click(function(){ if($(this).hasClass('ativo')){ desativaetp5(); }else{ desativaetp5(); $(this).addClass('ativo'); var audioPlayer = $(this).attr('href'); musicas[10] = audioPlayer; $('.guide .etapa5 div.p'+audioPlayer).addClass('ativo'); $('.visor .etapa5 li.p'+audioPlayer).addClass('ativo'); $(".audio"+audioPlayer).jPlayer("play", 0); } return false; }) $(".audio1").jPlayer({ ready: function (event) { $(this).jPlayer("setMedia", { oga: "https://lit-castle-9930.herokuapp.com/sounds/pct1/1.ogg", mp3: "https://lit-castle-9930.herokuapp.com/sounds/pct1/1.mp3" } )}, swfPath: "js", supplied: "oga, mp3", wmode: "window", loop: true, preload: 'auto' }); $(".audio2").jPlayer({ ready: function (event) { $(this).jPlayer("setMedia", { oga: "https://lit-castle-9930.herokuapp.com/sounds/pct1/2.ogg", mp3: "https://lit-castle-9930.herokuapp.com/sounds/pct1/2.mp3" } )}, swfPath: "js", supplied: "oga, mp3", wmode: "window", loop: true, preload: 'auto' }); $(".audio3").jPlayer({ ready: function (event) { $(this).jPlayer("setMedia", { oga: "https://lit-castle-9930.herokuapp.com/sounds/pct1/3.ogg", mp3: "https://lit-castle-9930.herokuapp.com/sounds/pct1/3.mp3" } )}, swfPath: "js", supplied: "oga, mp3", wmode: "window", loop: true, preload: 'auto' }); $(".audio4").jPlayer({ ready: function (event) { $(this).jPlayer("setMedia", { oga: "https://lit-castle-9930.herokuapp.com/sounds/pct1/4.ogg", mp3: "https://lit-castle-9930.herokuapp.com/sounds/pct1/4.mp3" } )}, swfPath: "js", supplied: "oga, mp3", wmode: "window", loop: true, preload: 'auto' }); $(".audio5").jPlayer({ ready: function (event) { $(this).jPlayer("setMedia", { oga: "https://lit-castle-9930.herokuapp.com/sounds/pct2/1.ogg", mp3: 
"https://lit-castle-9930.herokuapp.com/sounds/pct2/1.mp3" } )}, swfPath: "js", supplied: "oga, mp3", wmode: "window", loop: true, preload: 'auto' }); $(".audio6").jPlayer({ ready: function (event) { $(this).jPlayer("setMedia", { oga: "https://lit-castle-9930.herokuapp.com/sounds/pct2/2.ogg", mp3: "https://lit-castle-9930.herokuapp.com/sounds/pct2/2.mp3" } )}, swfPath: "js", supplied: "oga, mp3", wmode: "window", loop: true, preload: 'auto' }); $(".audio7").jPlayer({ ready: function (event) { $(this).jPlayer("setMedia", { oga: "https://lit-castle-9930.herokuapp.com/sounds/pct2/3.ogg", mp3: "https://lit-castle-9930.herokuapp.com/sounds/pct2/3.mp3" } )}, swfPath: "js", supplied: "oga, mp3", wmode: "window", loop: true, preload: 'auto' }); $(".audio8").jPlayer({ ready: function (event) { $(this).jPlayer("setMedia", { oga: "https://lit-castle-9930.herokuapp.com/sounds/pct2/4.ogg", mp3: "https://lit-castle-9930.herokuapp.com/sounds/pct2/4.mp3" } )}, swfPath: "js", supplied: "oga, mp3", wmode: "window", loop: true, preload: 'auto' }); $(".audio9").jPlayer({ ready: function (event) { $(this).jPlayer("setMedia", { oga: "https://lit-castle-9930.herokuapp.com/sounds/pct3/1.ogg", mp3: "https://lit-castle-9930.herokuapp.com/sounds/pct3/1.mp3" } )}, swfPath: "js", supplied: "oga, mp3", wmode: "window", loop: true, preload: 'auto' }); $(".audio10").jPlayer({ ready: function (event) { $(this).jPlayer("setMedia", { oga: "https://lit-castle-9930.herokuapp.com/sounds/pct3/2.ogg", mp3: "https://lit-castle-9930.herokuapp.com/sounds/pct3/2.mp3" } )}, swfPath: "js", supplied: "oga, mp3", wmode: "window", loop: true, preload: 'auto' }); $(".audio11").jPlayer({ ready: function (event) { $(this).jPlayer("setMedia", { oga: "https://lit-castle-9930.herokuapp.com/sounds/pct3/3.ogg", mp3: "https://lit-castle-9930.herokuapp.com/sounds/pct3/3.mp3" } )}, swfPath: "js", supplied: "oga, mp3", wmode: "window", loop: true, preload: 'auto' }); $(".audio12").jPlayer({ ready: function (event) { 
$(this).jPlayer("setMedia", { oga: "https://lit-castle-9930.herokuapp.com/sounds/pct3/4.ogg", mp3: "https://lit-castle-9930.herokuapp.com/sounds/pct3/4.mp3" } )}, swfPath: "js", supplied: "oga, mp3", wmode: "window", loop: true, preload: 'auto' }); $(".audio13").jPlayer({ ready: function (event) { $(this).jPlayer("setMedia", { oga: "https://lit-castle-9930.herokuapp.com/sounds/pct4/1.ogg", mp3: "https://lit-castle-9930.herokuapp.com/sounds/pct4/1.mp3" } )}, swfPath: "js", supplied: "oga, mp3", wmode: "window", loop: true, preload: 'auto' }); $(".audio14").jPlayer({ ready: function (event) { $(this).jPlayer("setMedia", { oga: "https://lit-castle-9930.herokuapp.com/sounds/pct4/2.ogg", mp3: "https://lit-castle-9930.herokuapp.com/sounds/pct4/2.mp3" } )}, swfPath: "js", supplied: "oga, mp3", wmode: "window", loop: true, preload: 'auto' }); $(".audio15").jPlayer({ ready: function (event) { $(this).jPlayer("setMedia", { oga: "https://lit-castle-9930.herokuapp.com/sounds/pct4/3.ogg", mp3: "https://lit-castle-9930.herokuapp.com/sounds/pct4/3.mp3" } )}, swfPath: "js", supplied: "oga, mp3", wmode: "window", loop: true, preload: 'auto' }); $(".audio16").jPlayer({ ready: function (event) { $(this).jPlayer("setMedia", { oga: "https://lit-castle-9930.herokuapp.com/sounds/pct4/4.ogg", mp3: "https://lit-castle-9930.herokuapp.com/sounds/pct4/4.mp3" } )}, swfPath: "js", supplied: "oga, mp3", wmode: "window", loop: true, preload: 'auto' }); $(".audio17").jPlayer({ ready: function (event) { $(this).jPlayer("setMedia", { oga: "https://lit-castle-9930.herokuapp.com/sounds/pct5/1.ogg", mp3: "https://lit-castle-9930.herokuapp.com/sounds/pct5/1.mp3" } )}, swfPath: "js", supplied: "oga, mp3", wmode: "window", loop: true, preload: 'auto' }); $(".audio18").jPlayer({ ready: function (event) { $(this).jPlayer("setMedia", { oga: "https://lit-castle-9930.herokuapp.com/sounds/pct5/2.ogg", mp3: "https://lit-castle-9930.herokuapp.com/sounds/pct5/2.mp3" } )}, swfPath: "js", supplied: "oga, mp3", 
wmode: "window", loop: true, preload: 'auto' }); $(".audio19").jPlayer({ ready: function (event) { $(this).jPlayer("setMedia", { oga: "https://lit-castle-9930.herokuapp.com/sounds/pct5/3.ogg", mp3: "https://lit-castle-9930.herokuapp.com/sounds/pct5/3.mp3" } )}, swfPath: "js", supplied: "oga, mp3", wmode: "window", loop: true, preload: 'auto' }); $(".audio20").jPlayer({ ready: function (event) { $(this).jPlayer("setMedia", { oga: "https://lit-castle-9930.herokuapp.com/sounds/pct5/4.ogg", mp3: "https://lit-castle-9930.herokuapp.com/sounds/pct5/4.mp3" } )}, swfPath: "js", supplied: "oga, mp3", wmode: "window", loop: true, preload: 'auto' }); <|fim▁hole|> function login_start() { //alert('Você ainda não tem o aplicativo do Flavor DJ. Instale-o primeiro para compartilhar sua batida.'); FB.login(function(response) { if (response.authResponse) { // ativa_facebook(); } else { } }, {scope: 'email, publish_stream'}); } // Share var cont = 0 var i = 0; $('a.share').click(function(){ cont = 0; for(i = 0; i<11; i++){ if((musicas[i] == "0;") || (musicas[i] == 0)){ cont++; } } if(cont == 11){ alert('Você precisa selecionar pelo menos um ingrediente para sua batida.'); }else{ FB.getLoginStatus(function(response) { // console.log(response); if (response.status === 'connected') { // NORMAL ACTION ativa_facebook(); } else if (response.status === 'not_authorized') { login(); window.location.reload(); } else { login(); window.location.reload(); } }); } return false; }); $('.votarBatida').click(function(){ var batida = $(this).attr('href'); FB.getLoginStatus(function(response) { if (response.status === 'connected') { // NORMAL ACTION computa_voto(batida); } else if (response.status === 'not_authorized') { FB.login(function(response) { if (response.authResponse) { // NORMAL ACTION computa_voto(batida); } else { // console.log('Sua batida não foi compartilhada.'); } }, {scope: 'email, publish_stream'}); } else { FB.login(function(response) { if (response.authResponse) { // NORMAL ACTION 
computa_voto(batida); } else { //console.log('Sua batida não foi compartilhada.'); } }, {scope: 'email, publish_stream'}); } }); }); });<|fim▁end|>
// Preload the page's images with queryLoader2 and run the wheel intro
// animation once everything has finished loading.
// BUG FIX: the original wrote `onLoadComplete: ativa_anima()`, which
// calls the function immediately (firing the animation before loading
// completes) and hands the plugin its `undefined` return value instead
// of a callback. Pass the function reference, not its result.
$('body').queryLoader2({
    onLoadComplete: ativa_anima
});
<|file_name|>services.py<|end_file_name|><|fim▁begin|>from model.flyweight import Flyweight from model.static.database import database class Service(Flyweight): def __init__(self,service_id): #prevents reinitializing if "_inited" in self.__dict__: return self._inited = None<|fim▁hole|> cursor = database.get_cursor( "select * from staServices where serviceID={};".format( self.service_id)) row = cursor.fetchone() self.service_name = row["serviceName"] self.description = row["description"] cursor.close()<|fim▁end|>
#prevents reinitializing self.service_id = service_id
<|file_name|>flaskApp.py<|end_file_name|><|fim▁begin|>from flask import Flask, jsonify, request, render_template, make_response from datetime import datetime from elasticsearch import Elasticsearch<|fim▁hole|> @app.route('/') def hello_world(): return render_template( 'index.html' ) @app.route('/buscar', methods = ["POST"]) def buscar(): buscado = request.form['buscado'] resultado = realizar_busqueda_2(buscado) return jsonify({ 'resultado': resultado }) def realizar_busqueda_2(buscado): bodyQuery2 = { "query": { "match": { "Title": { "query": buscado, "fuzziness": "AUTO", "boost" : 2.0, "prefix_length" : 1, "max_expansions": 100, #"minimum_should_match" : 10, "operator": "and" } } }, "highlight": { "fields": { "Title": {}, "Plot": {"fragment_size": 300, "number_of_fragments": 3} }, # Permite el hightlight sobre campos que no se han hecho query # como Plot en este ejemplo "require_field_match": False } } res = es.search(index="prueba-index", body= bodyQuery2) print("Got %d Hits:" % res['hits']['total']) # Uso el [0] porque solo hay 1 hit, si hubiese mas, pues habria mas campos # de la lista, habria que usar el for de arriba para sacar el highlight de # cada uno de la lista # print res['hits']['hits'][0]['highlight'] resultado = [] for hit in res['hits']['hits']: resultado.append(hit['highlight']) return resultado def realizar_busqueda(buscado): bodyQuery = { "query": { "match": { "Director": { "query": buscado, "fuzziness": "AUTO", "operator": "and" } } }, "highlight": { "fields": { "Title": {}, "Plot": {} } } } res = es.search(index="prueba-index", body= bodyQuery) print("Got %d Hits:" % res['hits']['total']) resultado = [] for hit in res['hits']['hits']: resultado.append("%(Title)s" % hit["_source"]) return resultado def realizar_busqueda_3(buscado): bodyQuery = { "query": { "regexp":{ "Title": buscado +".*" } }, "highlight": { "fields": { "Title": {}, "Plot": {"fragment_size": 300, "number_of_fragments": 3}, "Director": {} }, # Permite el hightlight sobre campos 
que no se han hecho query # como Plot en este ejemplo "require_field_match": False } } res = es.search(index="prueba-index", body= bodyQuery) print("Got %d Hits:" % res['hits']['total']) resultado = [] for hit in res['hits']['hits']: resultado.append(hit['highlight']) return resultado def realizar_busqueda_4(buscado): bodyQuery2 = { "query": { "bool": { "should": [ { "match": { "Title": { "query": buscado + ".*", "fuzziness": "AUTO", "prefix_length" : 1, "operator": "and" } }}, { "match": { "Plot": { "query": buscado, "fuzziness": 2, "prefix_length" : 1, "operator": "and" } } }, { "match": { "Genres": { "query": buscado, "fuzziness": "AUTO", "prefix_length" : 1, "operator": "and" } }}, { "match": { "Director": { "query": buscado, "fuzziness": "AUTO", "prefix_length" : 1, "operator": "and" } }}, { "match": { "Writer": { "query": buscado, "fuzziness": "AUTO", "prefix_length" : 1, "operator": "and" } }}, { "match": { "Cast": { "query": buscado, "fuzziness": "AUTO", "prefix_length" : 1, "operator": "and" } }}, { "match": { "Country": { "query": buscado, "fuzziness": "AUTO", "prefix_length" : 1, "operator": "and" } }}, { "match": { "Language": { "query": buscado, "fuzziness": "AUTO", "prefix_length" : 1, "operator": "and" } }}, { "match": { "Rating": { "query": buscado, "fuzziness": "AUTO", "prefix_length" : 1, "operator": "and" } }}, ] } }, "highlight": { "fields": { "Title": {}, "Plot": {}, "Director": {} }, # Permite el hightlight sobre campos que no se han hecho query # como Plot en este ejemplo "require_field_match": False } } res = es.search(index="prueba-index", body= bodyQuery) print("Got %d Hits:" % res['hits']['total']) resultado = [] for hit in res['hits']['hits']: resultado.append(hit['highlight']) return resultado if __name__ == '__main__': app.run(debug=True)<|fim▁end|>
# Elasticsearch client created with the library's default connection
# settings; all realizar_busqueda_* helpers issue their searches through it.
es = Elasticsearch()
# Flask application object that the @app.route decorators attach to.
app = Flask(__name__)
<|file_name|>HTMLTextAreaElement.js<|end_file_name|><|fim▁begin|>"use strict"; const conversions = require("webidl-conversions"); const utils = require("./utils.js"); const HTMLElement = require("./HTMLElement.js"); const impl = utils.implSymbol; function HTMLTextAreaElement() { throw new TypeError("Illegal constructor"); } Object.setPrototypeOf(HTMLTextAreaElement.prototype, HTMLElement.interface.prototype); Object.setPrototypeOf(HTMLTextAreaElement, HTMLElement.interface); HTMLTextAreaElement.prototype.select = function select() { if (!this || !module.exports.is(this)) { throw new TypeError("Illegal invocation"); } return this[impl].select(); }; HTMLTextAreaElement.prototype.setRangeText = function setRangeText(replacement) { if (!this || !module.exports.is(this)) { throw new TypeError("Illegal invocation"); } if (arguments.length < 1) { throw new TypeError( "Failed to execute 'setRangeText' on 'HTMLTextAreaElement': 1 argument required, but only " + arguments.length + " present." ); } const args = []; for (let i = 0; i < arguments.length && i < 4; ++i) { args[i] = arguments[i]; } args[0] = conversions["DOMString"](args[0], { context: "Failed to execute 'setRangeText' on 'HTMLTextAreaElement': parameter 1" }); return this[impl].setRangeText(...args); }; HTMLTextAreaElement.prototype.setSelectionRange = function setSelectionRange(start, end) { if (!this || !module.exports.is(this)) { throw new TypeError("Illegal invocation"); } if (arguments.length < 2) { throw new TypeError( "Failed to execute 'setSelectionRange' on 'HTMLTextAreaElement': 2 arguments required, but only " + arguments.length + " present." 
); } const args = []; for (let i = 0; i < arguments.length && i < 3; ++i) { args[i] = arguments[i]; } args[0] = conversions["unsigned long"](args[0], { context: "Failed to execute 'setSelectionRange' on 'HTMLTextAreaElement': parameter 1" }); args[1] = conversions["unsigned long"](args[1], { context: "Failed to execute 'setSelectionRange' on 'HTMLTextAreaElement': parameter 2" }); if (args[2] !== undefined) { args[2] = conversions["DOMString"](args[2], { context: "Failed to execute 'setSelectionRange' on 'HTMLTextAreaElement': parameter 3" }); } return this[impl].setSelectionRange(...args); }; Object.defineProperty(HTMLTextAreaElement.prototype, "autocomplete", { get() { const value = this.getAttribute("autocomplete"); return value === null ? "" : value; }, set(V) { V = conversions["DOMString"](V, { context: "Failed to set the 'autocomplete' property on 'HTMLTextAreaElement': The provided value" }); this.setAttribute("autocomplete", V); }, enumerable: true, configurable: true }); Object.defineProperty(HTMLTextAreaElement.prototype, "autofocus", { get() { return this.hasAttribute("autofocus"); }, set(V) { V = conversions["boolean"](V, { context: "Failed to set the 'autofocus' property on 'HTMLTextAreaElement': The provided value" }); if (V) { this.setAttribute("autofocus", ""); } else { this.removeAttribute("autofocus"); } }, enumerable: true, configurable: true }); Object.defineProperty(HTMLTextAreaElement.prototype, "cols", { get() { return this[impl].cols; }, set(V) { V = conversions["unsigned long"](V, { context: "Failed to set the 'cols' property on 'HTMLTextAreaElement': The provided value" }); this[impl].cols = V; }, enumerable: true, configurable: true }); Object.defineProperty(HTMLTextAreaElement.prototype, "dirName", { get() { const value = this.getAttribute("dirName"); return value === null ? 
"" : value; }, set(V) { V = conversions["DOMString"](V, { context: "Failed to set the 'dirName' property on 'HTMLTextAreaElement': The provided value" }); this.setAttribute("dirName", V); }, enumerable: true, configurable: true }); Object.defineProperty(HTMLTextAreaElement.prototype, "disabled", { get() { return this.hasAttribute("disabled"); }, set(V) { V = conversions["boolean"](V, { context: "Failed to set the 'disabled' property on 'HTMLTextAreaElement': The provided value" }); if (V) { this.setAttribute("disabled", ""); } else { this.removeAttribute("disabled"); } }, enumerable: true, configurable: true }); Object.defineProperty(HTMLTextAreaElement.prototype, "form", { get() { return utils.tryWrapperForImpl(this[impl].form); }, enumerable: true, configurable: true }); Object.defineProperty(HTMLTextAreaElement.prototype, "inputMode", { get() { const value = this.getAttribute("inputMode"); return value === null ? "" : value; }, set(V) { V = conversions["DOMString"](V, { context: "Failed to set the 'inputMode' property on 'HTMLTextAreaElement': The provided value" }); this.setAttribute("inputMode", V); }, enumerable: true, configurable: true }); Object.defineProperty(HTMLTextAreaElement.prototype, "maxLength", { get() { const value = parseInt(this.getAttribute("maxLength")); return isNaN(value) || value < -2147483648 || value > 2147483647 ? 0 : value; }, set(V) { V = conversions["long"](V, { context: "Failed to set the 'maxLength' property on 'HTMLTextAreaElement': The provided value" }); this.setAttribute("maxLength", String(V)); }, enumerable: true, configurable: true }); Object.defineProperty(HTMLTextAreaElement.prototype, "minLength", { get() { const value = parseInt(this.getAttribute("minLength")); return isNaN(value) || value < -2147483648 || value > 2147483647 ? 
0 : value; }, set(V) { V = conversions["long"](V, { context: "Failed to set the 'minLength' property on 'HTMLTextAreaElement': The provided value" }); this.setAttribute("minLength", String(V)); }, enumerable: true, configurable: true }); Object.defineProperty(HTMLTextAreaElement.prototype, "name", { get() { const value = this.getAttribute("name"); return value === null ? "" : value; }, set(V) { V = conversions["DOMString"](V, { context: "Failed to set the 'name' property on 'HTMLTextAreaElement': The provided value" }); this.setAttribute("name", V); }, enumerable: true, configurable: true }); Object.defineProperty(HTMLTextAreaElement.prototype, "placeholder", { get() { const value = this.getAttribute("placeholder"); return value === null ? "" : value; }, set(V) { V = conversions["DOMString"](V, { context: "Failed to set the 'placeholder' property on 'HTMLTextAreaElement': The provided value" }); this.setAttribute("placeholder", V); }, enumerable: true, configurable: true }); Object.defineProperty(HTMLTextAreaElement.prototype, "readOnly", { get() { return this.hasAttribute("readOnly"); }, set(V) { V = conversions["boolean"](V, { context: "Failed to set the 'readOnly' property on 'HTMLTextAreaElement': The provided value" }); if (V) { this.setAttribute("readOnly", ""); } else { this.removeAttribute("readOnly"); } }, enumerable: true, configurable: true }); Object.defineProperty(HTMLTextAreaElement.prototype, "required", { get() { return this.hasAttribute("required"); }, set(V) { V = conversions["boolean"](V, { context: "Failed to set the 'required' property on 'HTMLTextAreaElement': The provided value" }); if (V) { this.setAttribute("required", ""); } else { this.removeAttribute("required"); } }, enumerable: true, configurable: true }); Object.defineProperty(HTMLTextAreaElement.prototype, "rows", { get() { return this[impl].rows; }, set(V) { V = conversions["unsigned long"](V, { context: "Failed to set the 'rows' property on 'HTMLTextAreaElement': The provided 
value" }); this[impl].rows = V; }, enumerable: true, configurable: true }); Object.defineProperty(HTMLTextAreaElement.prototype, "wrap", { get() { const value = this.getAttribute("wrap"); return value === null ? "" : value; }, set(V) { V = conversions["DOMString"](V, { context: "Failed to set the 'wrap' property on 'HTMLTextAreaElement': The provided value" }); this.setAttribute("wrap", V); }, enumerable: true, configurable: true }); Object.defineProperty(HTMLTextAreaElement.prototype, "type", { get() { return this[impl].type; }, enumerable: true, configurable: true }); Object.defineProperty(HTMLTextAreaElement.prototype, "defaultValue", { get() { return this[impl].defaultValue; }, set(V) { V = conversions["DOMString"](V, { context: "Failed to set the 'defaultValue' property on 'HTMLTextAreaElement': The provided value" }); this[impl].defaultValue = V; }, enumerable: true, configurable: true }); Object.defineProperty(HTMLTextAreaElement.prototype, "value", { get() { return this[impl].value; }, set(V) { V = conversions["DOMString"](V, { context: "Failed to set the 'value' property on 'HTMLTextAreaElement': The provided value", treatNullAsEmptyString: true }); this[impl].value = V; }, enumerable: true, configurable: true }); Object.defineProperty(HTMLTextAreaElement.prototype, "textLength", { get() { return this[impl].textLength; }, enumerable: true, configurable: true }); Object.defineProperty(HTMLTextAreaElement.prototype, "selectionStart", { get() { return this[impl].selectionStart; }, set(V) { if (V === null || V === undefined) { V = null; } else { V = conversions["unsigned long"](V, { context: "Failed to set the 'selectionStart' property on 'HTMLTextAreaElement': The provided value" }); } this[impl].selectionStart = V; }, enumerable: true, configurable: true }); Object.defineProperty(HTMLTextAreaElement.prototype, "selectionEnd", { get() { return this[impl].selectionEnd; }, set(V) { if (V === null || V === undefined) { V = null; } else { V = 
conversions["unsigned long"](V, { context: "Failed to set the 'selectionEnd' property on 'HTMLTextAreaElement': The provided value" }); } this[impl].selectionEnd = V; }, enumerable: true, configurable: true }); Object.defineProperty(HTMLTextAreaElement.prototype, "selectionDirection", { get() { return this[impl].selectionDirection; }, set(V) { if (V === null || V === undefined) { V = null; } else { V = conversions["DOMString"](V, { context: "Failed to set the 'selectionDirection' property on 'HTMLTextAreaElement': The provided value" }); } this[impl].selectionDirection = V; }, enumerable: true, configurable: true }); Object.defineProperty(HTMLTextAreaElement.prototype, Symbol.toStringTag, { value: "HTMLTextAreaElement", writable: false, enumerable: false, configurable: true }); const iface = { mixedInto: [], is(obj) { if (obj) { if (obj[impl] instanceof Impl.implementation) { return true; }<|fim▁hole|> if (obj instanceof module.exports.mixedInto[i]) { return true; } } } return false; }, isImpl(obj) { if (obj) { if (obj instanceof Impl.implementation) { return true; } const wrapper = utils.wrapperForImpl(obj); for (let i = 0; i < module.exports.mixedInto.length; ++i) { if (wrapper instanceof module.exports.mixedInto[i]) { return true; } } } return false; }, convert(obj, { context = "The provided value" } = {}) { if (module.exports.is(obj)) { return utils.implForWrapper(obj); } throw new TypeError(`${context} is not of type 'HTMLTextAreaElement'.`); }, create(constructorArgs, privateData) { let obj = Object.create(HTMLTextAreaElement.prototype); this.setup(obj, constructorArgs, privateData); return obj; }, createImpl(constructorArgs, privateData) { let obj = Object.create(HTMLTextAreaElement.prototype); this.setup(obj, constructorArgs, privateData); return utils.implForWrapper(obj); }, _internalSetup(obj) { HTMLElement._internalSetup(obj); }, setup(obj, constructorArgs, privateData) { if (!privateData) privateData = {}; privateData.wrapper = obj; 
this._internalSetup(obj); Object.defineProperty(obj, impl, { value: new Impl.implementation(constructorArgs, privateData), writable: false, enumerable: false, configurable: true }); obj[impl][utils.wrapperSymbol] = obj; }, interface: HTMLTextAreaElement, expose: { Window: { HTMLTextAreaElement: HTMLTextAreaElement } } }; module.exports = iface; const Impl = require("../nodes/HTMLTextAreaElement-impl.js");<|fim▁end|>
for (let i = 0; i < module.exports.mixedInto.length; ++i) {
<|file_name|>vec.rs<|end_file_name|><|fim▁begin|>#[no_std]; #[no_core]; use zero; pub trait OwnedVector<T> { unsafe fn push_fast(&mut self, t: T); unsafe fn len(&self) -> uint; unsafe fn set_len(&mut self, newlen: uint); unsafe fn as_mut_buf<U>(&self, f: &fn(*mut T, uint) -> U) -> U; unsafe fn data(&self) -> *u8; }<|fim▁hole|> fill: uint, alloc: uint, data: T } impl<T> OwnedVector<T> for ~[T] { //FIXME: Does not check to see if we have space // See: https://github.com/mozilla/rust/blob/master/src/libstd/vec.rs#L1317 unsafe fn push_fast(&mut self, t: T) { let repr: **mut Vec<u8> = zero::transmute(self); let fill = (**repr).fill; (**repr).fill += zero::size_of::<T>(); let p = &(**repr).data as *u8 as uint; let mut i = 0; while i < zero::size_of::<T>() { *((p+fill+i) as *mut u8) = *((&t as *T as uint + i) as *mut u8); i += 1; } } unsafe fn len(&self) -> uint { let repr: **Vec<u8> = zero::transmute(self); ((**repr).fill / zero::size_of::<T>()) as uint } unsafe fn set_len(&mut self, newlen: uint) { let repr: **mut Vec<u8> = zero::transmute(self); (**repr).fill = zero::size_of::<T>() * newlen; } unsafe fn as_mut_buf<U>(&self, f: &fn(*mut T, uint) -> U) -> U { let repr: **mut Vec<T> = zero::transmute(self); f(&mut (**repr).data as *mut T, (**repr).fill / zero::size_of::<T>()) } unsafe fn data(&self) -> *u8 { let repr: **mut Vec<u8> = zero::transmute(self); &(**repr).data as *u8 } }<|fim▁end|>
pub struct Vec<T> {
<|file_name|>lzw.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python import sys stderr = sys.stderr ## LZWDecoder ## class LZWDecoder(object): debug = 0 def __init__(self, fp): self.fp = fp self.buff = 0 self.bpos = 8 self.nbits = 9 self.table = None self.prevbuf = None return def readbits(self, bits): v = 0 while 1: # the number of remaining bits we can get from the current buffer. r = 8-self.bpos if bits <= r: # |-----8-bits-----| # |-bpos-|-bits-| | # | |----r----| v = (v<<bits) | ((self.buff>>(r-bits)) & ((1<<bits)-1)) self.bpos += bits break else: # |-----8-bits-----| # |-bpos-|---bits----... # | |----r----| v = (v<<r) | (self.buff & ((1<<r)-1)) bits -= r x = self.fp.read(1) if not x: raise EOFError self.buff = ord(x) self.bpos = 0 return v def feed(self, code): x = '' if code == 256: self.table = [ chr(c) for c in xrange(256) ] # 0-255 self.table.append(None) # 256 self.table.append(None) # 257 self.prevbuf = '' self.nbits = 9 elif code == 257: pass elif not self.prevbuf: x = self.prevbuf = self.table[code] else: if code < len(self.table): x = self.table[code] self.table.append(self.prevbuf+x[0]) else: self.table.append(self.prevbuf+self.prevbuf[0]) x = self.table[code] l = len(self.table) if l == 511: self.nbits = 10 elif l == 1023: self.nbits = 11 elif l == 2047: self.nbits = 12 self.prevbuf = x return x def run(self): while 1: try: code = self.readbits(self.nbits) except EOFError: break x = self.feed(code) yield x if self.debug:<|fim▁hole|> (self.nbits, code, x, self.table[258:])) return def main(argv): import StringIO input = '\x80\x0b\x60\x50\x22\x0c\x0c\x85\x01' fp = StringIO.StringIO(input) expected = '\x2d\x2d\x2d\x2d\x2d\x41\x2d\x2d\x2d\x42' LZWDecoder.debug = 1 output = ''.join(LZWDecoder(fp).run()) print (input, expected, output) print output == expected return 0 if __name__ == '__main__': sys.exit(main(sys.argv))<|fim▁end|>
print >>stderr, ('nbits=%d, code=%d, output=%r, table=%r' %
<|file_name|>checker.py<|end_file_name|><|fim▁begin|># Create your views here. import socket from pyasn1.error import PyAsn1Error import requests from .heartbleed import test_heartbleed from .models import Check try: from OpenSSL.SSL import Error as SSLError except ImportError: # In development, we might not have OpenSSL - it's only needed for SNI class SSLError(Exception): pass class SecurityChecker(object): def run_check(self, url): self.session = requests.session() self.session.headers = [('User-agent', "Sasha's pony checkup - http://ponycheckup.com/")] try: homepage = self.session.get(url, timeout=7) check_record = Check(url=url) check_record.hsts_header_found = self.check_supports_hsts(url) check_record.xframe_header_found = True if 'X-Frame-Options' in homepage.headers else False check_record.supports_https = self.check_supports_https(url) check_record.heartbleed_vuln = self.check_heartbleed_vuln(url) (check_record.admin_found, check_record.admin_forces_https) = self.check_admin(url) (check_record.login_found, check_record.login_forces_https) = self.check_login(url) check_record.allows_trace = self.check_trace(url) check_record.runs_debug = self.check_runs_debug(url) check_record.csrf_cookie_found = True if self.find_csrf_cookie() else False session_cookie = self.find_session_cookie() if session_cookie: check_record.session_cookie_found = True check_record.session_cookie_secure = session_cookie.secure check_record.session_cookie_httponly = session_cookie.has_nonstandard_attr('httponly') else: check_record.session_cookie_found = False check_record.update_recommendation_count() check_record.save() return check_record except (requests.RequestException, SSLError, PyAsn1Error) as error: return error def check_supports_https(self, url): try: self.session.get(url.replace("http", "https"), timeout=7) except: return False return True def check_heartbleed_vuln(self, url): try: url = url.replace("http://", "").replace("/", "") return bool(test_heartbleed(url)) except 
socket.error: return False def check_supports_hsts(self, url): try: ssltest = self.session.get(url.replace("http", "https"), timeout=7) except: return False return 'Strict-Transport-Security' in ssltest.headers def check_runs_debug(self, url): data = self.session.get(url+"/[][][][][]-this-tries-to-trigger-404....", timeout=7) return "You're seeing this error because you have <code>DEBUG = True</code>" in data.content def check_trace(self, url): response = self.session.request('TRACE', url, timeout=7) return 'Content-Type' in response.headers and response.headers['Content-Type'] == "message/http" def check_admin(self, url): response = self.session.get(url + "/admin", timeout=7) if response.status_code == 404: return (False, None) data = response.content.lower()<|fim▁hole|> def check_login(self, url): response = self.session.get(url + "/accounts/login", timeout=7) if response.status_code == 404: response = self.session.get(url + "/login", timeout=7) if response.status_code == 404: return (False, None) return (True, self._response_used_https(response)) def _response_used_https(self, response): return response.url[:5] == "https" def find_session_cookie(self): for cookie in self.session.cookies: if cookie.name == 'sessionid': return cookie return False def find_csrf_cookie(self): for cookie in self.session.cookies: if cookie.name == 'csrftoken': return cookie return False<|fim▁end|>
admin_found = '"id_username"' in data and ("csrfmiddlewaretoken" in data or "Django" in data or "__admin_media_prefix__" in data) return (admin_found, self._response_used_https(response))
<|file_name|>step3.rs<|end_file_name|><|fim▁begin|>extern crate lisp; mod eval; use lisp::eval::env; use lisp::util::interner::Interner; #[test] fn env() { let ref mut interner = Interner::new(); let ref mut env = env::default(interner); eval::eq("(+ 1 2)", "3", env, interner); eval::eq("(/ (- (+ 5 (* 2 3)) 3) 4)", "2", env, interner); } #[test] fn def() { let ref mut interner = Interner::new(); let ref mut env = env::default(interner); eval::eq("(def! x 3)", "3", env, interner); eval::eq("(def! x 4)", "4", env, interner); eval::eq("x", "4", env, interner); eval::eq("(def! y (+ 1 7))", "8", env, interner); eval::eq("y", "8", env, interner); } #[test] fn let_() { let ref mut interner = Interner::new(); let ref mut env = env::default(interner); // from previous test<|fim▁hole|> eval::eq("x", "4", env, interner); eval::eq("(let* (z (+ 2 3)) (+ 1 z))", "6", env, interner); eval::eq("(let* (p (+ 2 3) q (+ 2 p)) (+ p q))", "12", env, interner); } #[test] fn outer() { let ref mut interner = Interner::new(); let ref mut env = env::default(interner); eval::eq("(def! a 4)", "4", env, interner); eval::eq("(let* (q 9) q)", "9", env, interner); eval::eq("(let* (q 9) a)", "4", env, interner); eval::eq("(let* (z 2) (let* (q 9) a))", "4", env, interner); } #[test] fn vector() { let ref mut interner = Interner::new(); let ref mut env = env::default(interner); eval::eq("(let* [z 9] z)", "9", env, interner); eval::eq("(let* [p (+ 2 3) q (+ 2 p)] (+ p q))", "12", env, interner); eval::eq("(let* (a 5 b 6) [3 4 a [b 7] 8])", "[3 4 5 [6 7] 8]", env, interner); }<|fim▁end|>
eval::eq("(def! x 4)", "4", env, interner); eval::eq("(let* (z 9) z)", "9", env, interner); eval::eq("(let* (x 9) x)", "9", env, interner);
<|file_name|>dynamicQueriesRule.ts<|end_file_name|><|fim▁begin|>/** * @license * Copyright Google LLC All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ import {Replacement, RuleFailure, Rules} from 'tslint'; import * as ts from 'typescript'; import {identifyDynamicQueryNodes, removeOptionsParameter, removeStaticFlag} from '../dynamic-queries/util'; const RULE_NAME = 'dynamic-queries'; const FAILURE_MESSAGE = 'The static flag defaults to false, so setting it false manually is unnecessary.'; /** * TSLint rule that removes the `static` flag from dynamic queries. */ export class Rule extends Rules.TypedRule { applyWithProgram(sourceFile: ts.SourceFile, program: ts.Program): RuleFailure[] { const printer = ts.createPrinter(); const failures: RuleFailure[] = []; const result = identifyDynamicQueryNodes(program.getTypeChecker(), sourceFile); result.removeProperty.forEach(node => { failures.push(new RuleFailure( sourceFile, node.getStart(), node.getEnd(), FAILURE_MESSAGE, RULE_NAME, new Replacement( node.getStart(), node.getWidth(), printer.printNode(ts.EmitHint.Unspecified, removeStaticFlag(node), sourceFile)))); }); result.removeParameter.forEach(node => { failures.push(new RuleFailure( sourceFile, node.getStart(), node.getEnd(), FAILURE_MESSAGE, RULE_NAME, new Replacement( node.getStart(), node.getWidth(), printer.printNode(<|fim▁hole|> ts.EmitHint.Unspecified, removeOptionsParameter(node), sourceFile)))); }); return failures; } }<|fim▁end|>
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>extern crate syn; #[macro_use] extern crate quote; extern crate proc_macro; use proc_macro::TokenStream; use std::iter; use syn::*; use quote::Tokens; struct Context { location_ident: Ident, location_expr: Expr, location_type: Ty, deref_self_expr: Expr, } impl Context { pub fn new() -> Self { Context { location_ident: Ident::from("location"), location_expr: Expr::from(ExprKind::Path(None, Path::from("location"))), location_type: Ty::Path(None, Path { global: false, segments: vec![PathSegment { ident: Ident::from("Option"), parameters: PathParameters::AngleBracketed(AngleBracketedParameterData { lifetimes: vec![], types: vec![Ty::Path(None, Path::from("Span"))], bindings: vec![] }) }] }), deref_self_expr: Expr::from(ExprKind::Unary( UnOp::Deref, Box::new(Expr::from(ExprKind::Path(None, Path::from("self")))) )) } } fn expand_tracking_data(&self, path: Path, data: &VariantData, mutability: Mutability) -> Arm { let location_pat = Pat::Ident(BindingMode::ByRef(mutability), self.location_ident.clone(), None); let expr = self.location_expr.clone(); let (pat, expr) = match *data { VariantData::Struct(_) => ( Pat::Struct(path, vec![FieldPat { ident: self.location_ident.clone(), pat: Box::new(location_pat), is_shorthand: true }], true), if data.fields().iter().any(|field| field.ident.as_ref() == Some(&self.location_ident) && field.ty == self.location_type) { expr } else { panic!("Struct does not containt `location: Option<Span>`") } ), VariantData::Tuple(ref fields) => ( Pat::TupleStruct(path, { let mut v = Vec::with_capacity(fields.len()); v.push(location_pat); v.extend(iter::repeat(Pat::Wild).take(fields.len() - 1)); v }, None), if data.fields()[0].ty == self.location_type { expr } else { Expr::from(ExprKind::MethodCall( Ident::from(if mutability == Mutability::Immutable { "tracking_ref" } else { "tracking_mut" }), vec![], vec![expr] )) } ), VariantData::Unit => panic!("Empty unit is not trackable") }; Arm { attrs: vec![], 
pats: vec![pat], guard: None, body: Box::new(expr) } } fn expand_tracking(&self, ast: &MacroInput, mutability: Mutability) -> Tokens { // Helper is provided for handling complex generic types correctly and effortlessly let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl(); let (impl_name, method) = if mutability == Mutability::Immutable { (Ident::from("TrackingRef"), quote! { tracking_ref(&self) -> &Option<Span> }) } else { (Ident::from("TrackingMut"), quote! { tracking_mut(&mut self) -> &mut Option<Span> }) }; // Used in the quasi-quotation below as `#name` let name = &ast.ident; let body = Expr::from(ExprKind::Match( Box::new(self.deref_self_expr.clone()), match ast.body { Body::Struct(ref data) => { vec![self.expand_tracking_data(Path::from(name.clone()), data, mutability)] }, Body::Enum(ref variants) => { variants.iter().map(|var| { let path = Path { global: false, segments: vec![ PathSegment::from(name.clone()), PathSegment::from(var.ident.clone()) ] }; self.expand_tracking_data(path, &var.data, mutability) }).collect() } } )); quote! 
{ // The generated impl impl #impl_generics #impl_name for #name #ty_generics #where_clause { fn #method { #body } } } } fn expand_untrack_data(&self, path: Path, data: &VariantData) -> Arm { let (pat, idents) = match *data { VariantData::Struct(ref fields) => { let mut field_pats = Vec::with_capacity(fields.len()); let mut idents = Vec::with_capacity(fields.len()); for field in fields { let ident = field.ident.as_ref().unwrap(); field_pats.push(FieldPat { ident: ident.clone(), pat: Box::new(Pat::Ident(BindingMode::ByRef(Mutability::Mutable), ident.clone(), None)), is_shorthand: true }); idents.push(ident.clone()); } (Pat::Struct(path, field_pats, false), idents) }, VariantData::Tuple(ref fields) => { let mut pats = Vec::with_capacity(fields.len()); let mut idents = Vec::with_capacity(fields.len()); for i in 0..fields.len() { let ident = Ident::from(format!("_f{}", i)); pats.push(Pat::Ident(BindingMode::ByRef(Mutability::Mutable), ident.clone(), None)); idents.push(ident); } (Pat::TupleStruct(path, pats, None), idents) }, VariantData::Unit => { (Pat::Path(None, path), vec![]) } }; let expr = Expr::from(ExprKind::Block(BlockCheckMode::Default, Block { stmts: idents.into_iter().map(|ident| { Stmt::Semi(Box::new(Expr::from(ExprKind::MethodCall( Ident::from("untrack"), vec![], vec![Expr::from(ExprKind::Path(None, Path::from(ident)))] )))) }).collect() })); Arm { attrs: vec![], pats: vec![pat], guard: None, body: Box::new(expr) } } pub fn expand_tracking_ref(&self, ast: &MacroInput) -> Tokens { self.expand_tracking(ast, Mutability::Immutable) } pub fn expand_tracking_mut(&self, ast: &MacroInput) -> Tokens { self.expand_tracking(ast, Mutability::Mutable) } pub fn expand_untrack(&self, ast: &MacroInput) -> Tokens { let mut generics = ast.generics.clone(); let bound = TyParamBound::Trait(PolyTraitRef { bound_lifetimes: vec![], trait_ref: Path::from("Untrack") }, TraitBoundModifier::None); for ty in &mut generics.ty_params {<|fim▁hole|> } let (impl_generics, ty_generics, 
where_clause) = generics.split_for_impl(); let name = &ast.ident; let body = Expr::from(ExprKind::Match( Box::new(self.deref_self_expr.clone()), match ast.body { Body::Struct(ref data) => { vec![self.expand_untrack_data(Path::from(name.clone()), data)] }, Body::Enum(ref variants) => { variants.iter().map(|var| { let path = Path { global: false, segments: vec![ PathSegment::from(name.clone()), PathSegment::from(var.ident.clone()) ] }; self.expand_untrack_data(path, &var.data) }).collect() } } )); quote! { // The generated impl impl #impl_generics Untrack for #name #ty_generics #where_clause { fn untrack(&mut self) { #body } } } } } macro_rules! register_tracking_derive { ($for_trait:ident, $method:ident) => { #[proc_macro_derive($for_trait)] pub fn $method(input: TokenStream) -> TokenStream { let source = input.to_string(); let context = Context::new(); // Parse the string representation into a syntax tree let ast = parse_macro_input(&source).unwrap(); // Build the output, possibly using quasi-quotation let expanded = context.$method(&ast); // Parse back to a token stream and return it expanded.parse().unwrap() } } } register_tracking_derive!(TrackingRef, expand_tracking_ref); register_tracking_derive!(TrackingMut, expand_tracking_mut); register_tracking_derive!(Untrack, expand_untrack);<|fim▁end|>
ty.bounds.push(bound.clone());
<|file_name|>cards.py<|end_file_name|><|fim▁begin|>import functools from . import ( constants, utils, ) class Card(): def __init__(self, kind=None, strength=None, value=None, verbose=None, **kwargs): if kind is None: raise(TypeError("Missing required 'kind' argument.")) self.kind = kind self.strength = strength self.value = value self.verbose = verbose if verbose is not None else kind super().__init__(**kwargs) def __valid_comparision(self, arg): return hasattr(arg, "kind") and hasattr(arg, "strength") _valid_comparision = __valid_comparision def __lt__(self, value): if not self.__valid_comparision(value): return NotImplemented if self.strength is not None: if value.strength is not None: return self.strength < value.strength else: return False elif value.strength is not None: return True return self.kind < value.kind def __str__(self): return self.kind class SimpleCard(Card): def __init__(self, colour=None, kind=None, strength=None, **kwargs): if colour is None: raise(TypeError("Missing required 'colour' argument.")) self.colour = colour if kind is None: if strength is not None: kind = str(strength) super().__init__(kind=kind, strength=strength, **kwargs) def __valid_comparision(self, arg): if super()._valid_comparision(arg): if hasattr(arg, "colour") and (arg.colour is not None): if arg.strength is not None: return True return False _valid_comparision = __valid_comparision def __lt__(self, value): if not self.__valid_comparision(value): return super().__lt__(value) if self.strength < value.strength: return True if self.strength == value.strength: return self.colour < value.colour return False def __eq__(self, value): if not self._valid_comparision(value): return False if (self.strength == value.strength) and (self.colour == value.colour): return True<|fim▁hole|> return self.kind + self.colour[0] class MahJongg(Card): def __init__(self): super().__init__(kind='1', strength=1) class Dragon(Card): def __init__(self): super().__init__(kind='R', value=25, 
verbose="Dragon") class Pheonix(Card): def __init__(self): super().__init__(kind='P', value=-25, verbose="Pheonix") class Dog(Card): def __init__(self): super().__init__(kind="D", verbose="Dog")<|fim▁end|>
def __str__(self):
<|file_name|>mouse-dragdrop-cursor.component.ts<|end_file_name|><|fim▁begin|>/* * @license * Copyright Hôpitaux Universitaires de Genève. All Rights Reserved. * * Use of this source code is governed by an Apache-2.0 license that can be * found in the LICENSE file at https://github.com/DSI-HUG/dejajs-components/blob/master/LICENSE */ <|fim▁hole|>import { Component, ElementRef, ViewChild, ViewEncapsulation } from '@angular/core'; import { Destroy } from '@deja-js/component/core'; import { Position } from '@deja-js/component/core/graphics'; import { BehaviorSubject, combineLatestWith, delay, filter, takeUntil, tap } from 'rxjs'; import { IDragCursorInfos } from './mouse-drag-cursor-infos.interface'; import { DejaMouseDragDropService } from './mouse-dragdrop.service'; @Component({ encapsulation: ViewEncapsulation.None, selector: 'deja-mouse-dragdrop-cursor', styleUrls: [ './mouse-dragdrop-cursor.component.scss' ], templateUrl: './mouse-dragdrop-cursor.component.html' }) export class DejaMouseDragDropCursorComponent extends Destroy { @ViewChild('block', { static: true }) private icon: ElementRef<HTMLElement>; @ViewChild('content', { static: true }) private content: ElementRef<HTMLElement>; private position$ = new BehaviorSubject<Position>(null); private cursor$ = new BehaviorSubject<IDragCursorInfos>(null); private currentCursor: IDragCursorInfos; public constructor(elementRef: ElementRef, private dragDropService: DejaMouseDragDropService) { super(); const element = elementRef.nativeElement as HTMLElement; this.position$.pipe( takeUntil(this.destroyed$) ).subscribe(pos => { element.style.left = pos ? `${pos.left}px` : '-1000px'; element.style.top = pos ? 
`${pos.top}px` : '-1000px'; }); // Hide this.cursor$.pipe( filter(cursor => !cursor), tap(cursor => { if (this.currentCursor) { if (this.contentElement) { this.contentElement.style.opacity = '0'; } if (this.iconElement) { this.iconElement.style.opacity = '0'; } } this.currentCursor = cursor; }), delay(300), takeUntil(this.destroyed$) ).subscribe(() => { this.position$.next(null); element.style.display = 'none'; }); // Show this.cursor$.pipe( filter(cursor => !!cursor), tap(cursor => { element.style.display = ''; if (this.contentElement) { this.contentElement.style.opacity = '0'; } if (this.iconElement) { this.iconElement.style.opacity = '0'; } this.currentCursor = cursor; }), filter(cursor => !cursor.className || cursor.className !== 'hidden'), tap(cursor => { if (cursor.html) { element.className = cursor.className; if (this.contentElement) { this.contentElement.innerHTML = cursor.html; this.contentElement.style.width = `${cursor.width || 48}px`; this.contentElement.style.height = `${cursor.height || 48}px`; } } else if (this.iconElement) { this.iconElement.style.opacity = '1'; } }), delay(1), takeUntil(this.destroyed$) ).subscribe(cursor => { if (!!cursor.html && this.contentElement) { this.contentElement.style.opacity = '1'; } }); this.dragDropService.dragCursor$.pipe( combineLatestWith(this.dragDropService.dropCursor$), takeUntil(this.destroyed$) ).subscribe(([dragCursor, dropCursor]) => { const cursor = (dragCursor || dropCursor) && { className: dropCursor?.className || dragCursor?.className, html: dropCursor?.html || dragCursor?.html || (dropCursor && dragCursor?.originalHtml), width: dropCursor?.width || dragCursor?.width, height: dropCursor?.height || dragCursor?.height, position: dragCursor?.position, originalEvent: dragCursor?.originalEvent } as IDragCursorInfos; if (cursor?.html !== this.currentCursor?.html || cursor?.className !== this.currentCursor?.className || cursor?.width !== this.currentCursor?.width || cursor?.height !== 
this.currentCursor?.height) { // Update Content this.cursor$.next(cursor); } else { // Update only Position this.position$.next(cursor?.position); } }); } private get iconElement(): HTMLElement { return this.icon?.nativeElement; } private get contentElement(): HTMLElement { return this.content?.nativeElement; } }<|fim▁end|>
<|file_name|>serialization.py<|end_file_name|><|fim▁begin|>from __future__ import division, print_function import json from collections import Iterable, OrderedDict, namedtuple import numpy as np from six import string_types def isnamedtuple(obj): """Heuristic check if an object is a namedtuple.""" return isinstance(obj, tuple) \ and hasattr(obj, "_fields") \ and hasattr(obj, "_asdict") \ and callable(obj._asdict)<|fim▁hole|> if isinstance(data, list): return [serialize(val) for val in data] if isinstance(data, OrderedDict): return {"py/collections.OrderedDict": [[serialize(k), serialize(v)] for k, v in data.items()]} if isnamedtuple(data): return {"py/collections.namedtuple": { "type": type(data).__name__, "fields": list(data._fields), "values": [serialize(getattr(data, f)) for f in data._fields]}} if isinstance(data, dict): if all(isinstance(k, str) for k in data): return {k: serialize(v) for k, v in data.items()} return {"py/dict": [[serialize(k), serialize(v)] for k, v in data.items()]} if isinstance(data, tuple): return {"py/tuple": [serialize(val) for val in data]} if isinstance(data, set): return {"py/set": [serialize(val) for val in data]} if isinstance(data, np.ndarray): return {"py/numpy.ndarray": { "values": data.tolist(), "dtype": str(data.dtype)}} raise TypeError("Type %s not data-serializable" % type(data)) def restore(dct): if "py/dict" in dct: return dict(dct["py/dict"]) if "py/tuple" in dct: return tuple(dct["py/tuple"]) if "py/set" in dct: return set(dct["py/set"]) if "py/collections.namedtuple" in dct: data = dct["py/collections.namedtuple"] return namedtuple(data["type"], data["fields"])(*data["values"]) if "py/numpy.ndarray" in dct: data = dct["py/numpy.ndarray"] return np.array(data["values"], dtype=data["dtype"]) if "py/collections.OrderedDict" in dct: return OrderedDict(dct["py/collections.OrderedDict"]) return dct def data_to_json(data): return json.dumps(serialize(data)) def json_to_data(s): return json.loads(s, 
object_hook=restore)<|fim▁end|>
def serialize(data): if data is None or isinstance(data, (bool, int, float, str, string_types)): return data
<|file_name|>asymmetric.spec.js<|end_file_name|><|fim▁begin|>var axios = require("axios"); var expect = require("chai").expect; var MockAdapter = require("../src"); describe("MockAdapter asymmetric matchers", function () { var instance; var mock; beforeEach(function () { instance = axios.create(); mock = new MockAdapter(instance);<|fim▁hole|> it("mocks a post request with a body matching the matcher", function () { mock .onPost("/anyWithBody", { asymmetricMatch: function (actual) { return actual.params === "1"; }, }) .reply(200); return instance .post("/anyWithBody", { params: "1" }) .then(function (response) { expect(response.status).to.equal(200); }); }); it("mocks a post request with a body not matching the matcher", function () { mock .onPost("/anyWithBody", { asymmetricMatch: function (actual) { return actual.params === "1"; }, }) .reply(200); return instance .post("/anyWithBody", { params: "2" }) .catch(function (error) { expect(error.message).to.eq("Request failed with status code 404"); }); }); });<|fim▁end|>
});
<|file_name|>cluster.py<|end_file_name|><|fim▁begin|>""" Run on cluster """ import argparse import os import itertools import networkx as nx import pandas as pd from . import compare_cases def generate_run(graph, iterations, epsilon_control, epsilon_damage, out_dir, nodes=None, mem=6000, runtime=120, activate=''): """ Generate bash scripts for an array run in qsub/bsub cluster environments<|fim▁hole|> ``graph`` (string): can be either "regular", "scalefree", or the path to a GraphML file. ``nodes`` must be given if graph is regular or scalefree. Other default parameters as specified in the corresponding ``run_`` functions in compare_cases.py are used, and cannot be overriden here. ``activate`` (string): additional commands to execute before calling sandpile (e.g. activating a virtualenv) """ if graph == 'regular' or graph == 'scalefree': assert nodes is not None runs = [i for i in itertools.product(epsilon_control, epsilon_damage)] name = out_dir.replace("/", "_") df_runs = pd.DataFrame(runs, columns=['epsilon_control', 'epsilon_damage']) df_runs.to_csv(os.path.join(out_dir, 'iterations.csv')) strings = ['#!/bin/sh\ncase "$1" in\n'] for index, run in enumerate(runs): e1, ed = run if nodes: nodes_string = '--nodes={}'.format(nodes) else: nodes_string = '' run_string = ('{idx}) {act}\n' 'sandpile {idx} {G} {i} {e1} {ed} {nodes}\n' ';;\n'.format(idx=index + 1, G=graph, i=iterations, e1=e1, ed=ed, nodes=nodes_string, act=activate)) strings.append(run_string) strings.append('esac') bsub_run_str = ('#!/bin/sh\n' '#BSUB -J {name}[1-{to}]\n' '#BSUB -R "rusage[mem={mem}]"\n' '#BSUB -n 1\n' '#BSUB -W {runtime}\n' '#BSUB -o logs/run_%I.log\n\n'.format(name=name, to=index + 1, mem=mem, runtime=runtime)) bsub_run_str += './array_run.sh ${LSB_JOBINDEX}\n' qsub_run_str = ('#!/bin/sh\n' '#$ -t 1-{to}\n' '#$ -N {name}\n' '#$ -j y -o logs/run_$TASK_ID.log\n' '#$ -l mem_total={mem:.1f}G\n' '#$ -cwd\n'.format(name=name, to=index + 1, mem=mem / 1000)) qsub_run_str += './array_run.sh 
${SGE_TASK_ID}\n' with open(os.path.join(out_dir, 'array_run.sh'), 'w') as f: for l in strings: f.write(l + '\n') with open(os.path.join(out_dir, 'run_bsub.sh'), 'w') as f: f.write(bsub_run_str + '\n') with open(os.path.join(out_dir, 'run_qsub.sh'), 'w') as f: f.write(qsub_run_str + '\n') with open(os.path.join(out_dir, 'prep.sh'), 'w') as f: f.write('chmod +x *.sh\n') f.write('mkdir logs\n') f.write('mkdir results\n') def main(): parser = argparse.ArgumentParser(description='Run model.') parser.add_argument('run_id', metavar='run_id', type=int) parser.add_argument('graph', metavar='graph', type=str) parser.add_argument('iterations', metavar='iterations', type=int) parser.add_argument('epsilon_control', metavar='epsilon_control', type=float) parser.add_argument('epsilon_damage', metavar='epsilon_damage', type=float) parser.add_argument('--nodes', metavar='nodes', type=int) args = parser.parse_args() if args.graph == 'regular': runner = compare_cases.run_regular elif args.graph == 'scalefree': runner = compare_cases.run_scalefree else: runner = compare_cases.run_on_graph G = nx.read_graphml(args.graph) G = G.to_undirected() # Force undirected if runner == compare_cases.run_on_graph: result = runner(G=G, iterations=args.iterations, epsilon_control=args.epsilon_control, epsilon_damage=args.epsilon_damage) else: result = runner(nodes=args.nodes, iterations=args.iterations, epsilon_control=args.epsilon_control, epsilon_damage=args.epsilon_damage) (uncontrolled, controlled, df, costs) = result df.to_csv('results/cascades_{:0>4d}.csv'.format(args.run_id)) with open('results/costs_{:0>4d}.csv'.format(args.run_id), 'w') as f: f.write(str(costs[0]) + '\n') f.write(str(costs[1]) + '\n') if __name__ == '__main__': main()<|fim▁end|>
<|file_name|>main.py<|end_file_name|><|fim▁begin|>from couchpotato.core.downloaders.base import Downloader, StatusList from couchpotato.core.helpers.encoding import tryUrlencode, ss from couchpotato.core.helpers.variable import cleanHost, mergeDicts from couchpotato.core.logger import CPLog from couchpotato.environment import Env from datetime import timedelta from urllib2 import URLError import json import traceback log = CPLog(__name__) class Sabnzbd(Downloader): type = ['nzb'] def download(self, data = {}, movie = {}, filedata = None): log.info('Sending "%s" to SABnzbd.', data.get('name')) req_params = { 'cat': self.conf('category'), 'mode': 'addurl', 'nzbname': self.createNzbName(data, movie), } if filedata: if len(filedata) < 50: log.error('No proper nzb available: %s', (filedata)) return False # If it's a .rar, it adds the .rar extension, otherwise it stays .nzb nzb_filename = self.createFileName(data, filedata, movie) req_params['mode'] = 'addfile' else: req_params['name'] = data.get('url') try: if req_params.get('mode') is 'addfile': sab_data = self.call(req_params, params = {'nzbfile': (ss(nzb_filename), filedata)}, multipart = True) else: sab_data = self.call(req_params) except URLError: log.error('Failed sending release, probably wrong HOST: %s', traceback.format_exc(0)) return False except: log.error('Failed sending release, use API key, NOT the NZB key: %s', traceback.format_exc(0)) return False log.debug('Result from SAB: %s', sab_data) if sab_data.get('status') and not sab_data.get('error'): log.info('NZB sent to SAB successfully.') if filedata: return self.downloadReturnId(sab_data.get('nzo_ids')[0]) else: return True else: log.error('Error getting data from SABNZBd: %s', sab_data) return False def getAllDownloadStatus(self): log.debug('Checking SABnzbd download status.') # Go through Queue try: queue = self.call({ 'mode': 'queue', }) except: log.error('Failed getting queue: %s', traceback.format_exc(1)) return False # Go through history items try: 
history = self.call({ 'mode': 'history', 'limit': 15, }) except: log.error('Failed getting history json: %s', traceback.format_exc(1)) return False statuses = StatusList(self) # Get busy releases for item in queue.get('slots', []): statuses.append({ 'id': item['nzo_id'], 'name': item['filename'], 'original_status': item['status'],<|fim▁hole|> }) # Get old releases for item in history.get('slots', []): status = 'busy' if item['status'] == 'Failed' or (item['status'] == 'Completed' and item['fail_message'].strip()): status = 'failed' elif item['status'] == 'Completed': status = 'completed' statuses.append({ 'id': item['nzo_id'], 'name': item['name'], 'status': status, 'original_status': item['status'], 'timeleft': str(timedelta(seconds = 0)), 'folder': item['storage'], }) return statuses def removeFailed(self, item): log.info('%s failed downloading, deleting...', item['name']) try: self.call({ 'mode': 'history', 'name': 'delete', 'del_files': '1', 'value': item['id'] }, use_json = False) except: log.error('Failed deleting: %s', traceback.format_exc(0)) return False return True def call(self, request_params, use_json = True, **kwargs): url = cleanHost(self.conf('host')) + 'api?' + tryUrlencode(mergeDicts(request_params, { 'apikey': self.conf('api_key'), 'output': 'json' })) data = self.urlopen(url, timeout = 60, show_error = False, headers = {'User-Agent': Env.getIdentifier()}, **kwargs) if use_json: d = json.loads(data) if d.get('error'): log.error('Error getting data from SABNZBd: %s', d.get('error')) return {} return d.get(request_params['mode']) or d else: return data<|fim▁end|>
'timeleft': item['timeleft'] if not queue['paused'] else -1,
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright 2015-2017 Parity Technologies (UK) Ltd. // This file is part of Parity. // Parity is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // Parity is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // You should have received a copy of the GNU General Public License // along with Parity. If not, see <http://www.gnu.org/licenses/>.<|fim▁hole|>#[cfg(feature = "with-syntex")] include!(concat!(env!("OUT_DIR"), "/lib.rs")); #[cfg(not(feature = "with-syntex"))] include!("lib.rs.in");<|fim▁end|>
<|file_name|>testcases.js<|end_file_name|><|fim▁begin|>"use strict"; /* global describe it before */ const { assert } = require("chai"); const Database = require("./lib/database"); const testcases = (env) => { describe("Basic operations", () => { before(async () => { await Database.start(); }); it("should list zero objects", async () => { const list = await env.client.list("thing"); assert.isArray(list); assert.equal(list.length, 0); }); it("should create one object", async () => { const thing = await env.client.create("thing", { _id: "1", thingy: "Hello" }); assert.equal(thing.type, "scom.thing"); assert.equal(thing.thingy, "Hello"); }); it("should list one object", async () => { const list = await env.client.list("thing"); assert.isArray(list); assert.equal(list.length, 1); }); it("should update one object", async () => { const thing = await env.client.update("thing", "1", { thingy: "World" }); <|fim▁hole|> assert.equal(thing.type, "scom.thing"); assert.equal(thing.thingy, "World"); }); it("should delete one object", async () => { const thing = await env.client.remove("thing", "1"); assert.equal(thing.type, "scom.thing"); assert.equal(thing.thingy, "World"); }); it("should list zero objects", async () => { const list = await env.client.list("thing"); assert.isArray(list); assert.equal(list.length, 0); }); }); describe("Action operations", () => { before(async () => { await Database.start(); }); it("should create an object and tag it", async () => { await env.client.create("thing", { _id: "1", thingy: "Hello" }); const thing2 = await env.client.tag("thing", "1", { tag: [ "tag1", "tag2" ] }); assert.equal(thing2.type, "scom.thing"); assert.equal(thing2.thingy, "Hello"); assert.deepEqual(thing2.tags, [ "tag1", "tag2" ]); }); it("should get error with invalid action", async () => { try { await env.client.stuff("thing", "1", { 1: 0 }); assert(false, "Should have thrown"); } catch (error) { assert.equal(error.status, 400); } }); }); describe("Getter operations", () => 
{ before(async () => { await Database.start(); }); it("should create an object and get something on it", async () => { await env.client.create("thing", { _id: "1", thingy: "Hello" }); const value = await env.client.thingy("thing", "1"); assert.equal(value, "Hello"); }); it("should get error with invalid getter", async () => { try { await env.client.stuff("thing", "1"); assert(false, "Should have thrown"); } catch (error) { assert.equal(error.status, 400); } }); }); }; module.exports = testcases;<|fim▁end|>
<|file_name|>ckeyboard.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ This file is part of coffeedatabase. coffeedatabase is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. coffeedatabase is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with coffeedatabase.. If not, see <http://www.gnu.org/licenses/>. """ # system import readline import datetime import configparser # coffeedatabase from lib import cuser from lib import cpayment from lib import citem from lib import cdatabase from lib import cprice from lib import cbalance # Completer Class # For further reference please see # https://stackoverflow.com/questions/7821661/how-to-code-autocompletion-in-python class MyCompleter(object): # Custom completer def __init__(self, options): self.options = sorted(options) def complete(self, text, state): if state == 0: # on first trigger, build possible matches if text: # cache matches (entries that start with entered text) self.matches = [s for s in self.options if text in s] else: # no text entered, all matches possible self.matches = self.options[:] # return match indexed by state try: return self.matches[state] except IndexError: return None class ckeyboard: def __init__(self): # First, load the config config = configparser.ConfigParser() config.sections() config.read('config.ini') if not ('FILENAME' in config) or not ('LIST' in config): print("Broken config file \"config.ini\".") raise self.fileUser = config['FILENAME']['fileUser'] self.filePayment = config['FILENAME']['filePayment'] self.fileItem = config['FILENAME']['fileItem'] self.fileMarks = 
config['FILENAME']['fileMarks'] self.filePrice = config['FILENAME']['filePrice'] self.inactiveMonths = config['LIST']['inactiveMonths'] self.fileTemplateBalanceMonth = config['FILENAME']['fileTemplateBalanceMonth'] self.fileOutBalanceMonth = config['FILENAME']['fileOutBalanceMonth'] self.fileTemplateListMonth = config['FILENAME']['fileTemplateListMonth'] self.fileOutListMonth = config['FILENAME']['fileOutListMonth'] self.fileOutFolder = config['FILENAME']['fileOutFolder'] if (self.fileUser == "") or \ (self.filePayment == "") or \ (self.fileMarks == "") or \ (self.filePrice == "") or \ (self.fileItem == ""): print("Broken config file \"config.ini\".") raise # create databases, if they do not exist. database = cdatabase.cdatabase(self.fileUser, self.filePayment, self.fileItem, self.fileMarks, self.filePrice) self.user = cuser.cuser(self.fileUser, self.inactiveMonths) self.payment = cpayment.cpayment(self.filePayment, self.user) self.item = citem.citem(self.fileItem, self.fileMarks, self.user) self.price = cprice.cprice(self.filePrice, self.item) self.balance = cbalance.cbalance(self.user, self.payment, self.price, self.item, self.inactiveMonths, self.fileTemplateBalanceMonth, self.fileOutBalanceMonth, self.fileTemplateListMonth, self.fileOutListMonth, self.fileOutFolder) def inputStandard(self, valueDescription, valueStandard): """ Displays an input field, nicely formatted. If valueDescription contains \"Name\" or \"name\", autocompletion for the name database will be activated. valueDescription: List of description for input values. valueStandard: List of standard values. 
""" if not len(valueDescription) == len(valueStandard): print("Input vector", valueDescription, "has not the same length as standard value vector", valueStandard) raise counter = 0 for description in valueDescription: if description.lower() == "status": # display special user input field print("New status:") print("1 - active") print("2 - auto") print("3 - inactive") textInput = input(str(description) + " [" + valueStandard[counter] + "]: ") if textInput == "": textInput = valueStandard[counter] if textInput == "1" or textInput == "active": valueStandard[counter] = "active" elif textInput == "2" or textInput == "auto": valueStandard[counter] = "auto" elif textInput == "3" or textInput == "inactive": valueStandard[counter] = "inactive" else: print("The input " + str(textInput) + " was not understood. Please use 1, 2, or 3, active, auto, or inactive.") raise else: if not valueStandard[counter] == "": textInput = input(str(description) + " [" + valueStandard[counter] + "]: ") else: textInput = input(str(description) + ": ") if not textInput == "": valueStandard[counter] = textInput counter += 1 return valueStandard def userAdd(self): """ Adds a user to the user database """ userDescription = ["Name", "Mail"] userStandard = ["", "[email protected]"] inputUser = self.inputStandard(userDescription, userStandard) inputUser.append("active") self.user.userAdd(inputUser) # Make a dummy payment now = datetime.datetime.now() year = now.strftime("%Y") month = now.strftime("%m") day = now.strftime("%d") user = self.user.getRowByName(inputUser[1], 1) payment = [user[0], year, month, day, 0] self.payment.paymentAdd(payment) # Make dummy marks mark = [user[0], year, month, day, 0] for _marks in self.item.marks: _marks.marksAdd(mark) return 0 def userChangeInfo(self): """ Displays user information and allows to change them. 
""" user = self.getRowByTextname(self.user.getNamelist(), self.user) # remove id userId = user[0] del user[0] print("") userDescription = ["Name", "Mail", "Status"] inputUser = self.inputStandard(userDescription, user) # add user id inputUser.insert(0, userId) # save in database self.user.setUser(inputUser) return 0 def paymentAdd(self): """ Adds a payment to the payment database """ user = self.getRowByTextname(self.user.getNamelist(), self.user) # create dates now = datetime.datetime.now() year = now.strftime("%Y") month = now.strftime("%m") day = now.strftime("%d") payment1 = [user[0], int(year), int(month), int(day)] print("") userDescription = ["Payment"] payment2 = [""] inputUser = self.inputStandard(userDescription, payment2) # fill payment payment = payment1 + payment2 # save in database self.payment.paymentAdd(payment) # print new balance self.payment.getDataBinMonth() self.balance.getDataBinMonth() self.balance.getBalance(user[0]) return 0 def itemAdd(self): """ Adds a user to the user database """ itemDescription = ["Name", "Unit"] itemStandard = ["Coffee", "per cup"] inputItem = self.inputStandard(itemDescription, itemStandard) inputItem.append("active") self.item.itemAdd(inputItem) return 0 def itemChangeInfo(self): """ Displays item information and allows to change them. """ item = self.getRowByTextname(self.item.getColumn(1), self.item) # remove id itemId = item[0] del item[0] print("") itemDescription = ["Name", "Unit", "Status"] inputItem = self.inputStandard(itemDescription, item) # add item id inputItem.insert(0, itemId) # save in database self.item.setItem(inputItem) return 0 def getRowByTextname(self, array, database): """ Displays a name field and returns row. array: Array used for auto completion in text input field. database: Reference to database class, e.g. self.item, self.user, ... 
""" completer = MyCompleter(array) readline.set_completer(completer.complete) readline.parse_and_bind('tab: complete') print("Search in item database:") inputText = input("Name: ") return database.getRowByName(inputText, 1) def marksAdd(self): """ Adds marks to the marks database """ # create dates now = datetime.datetime.now() year = now.strftime("%Y") month = now.strftime("%m") day = now.strftime("%d") self.item.data # get user user = self.getRowByTextname(self.user.getNamelist(), self.user) # get item list markDescription = [] markDefault = [] for row in self.item.data: if str(row[3]) == "active": markDescription.append(row[1]) markDefault.append("0") # query user input print("") inputMark = self.inputStandard(markDescription, markDefault) # create array for cmark class markArray = [[0 for x in range(0)] for x in range(0)] counter = 0 for row in self.item.data: if str(row[3]) == "active": markArray.append([user[0], int(year), int(month), int(day), int(inputMark[counter])]) counter += 1 else: markArray.append([user[0], int(year), int(month), int(day), 0]) # save in database self.item.marksAdd(markArray) return 0 def marksAddAll(self): """ Adds marks to the marks database for all active users """ # This list holds all our active and auto active users userActive = self.user.getIdByStatus("active") # Check for auto active users in payment and marks userAuto = self.user.getIdByStatus("auto") userAutoM = self.payment.getIdDataBinMonthActive(self.inactiveMonths) for marks in self.item.marks: userAutoT = marks.getIdDataBinMonthActive(self.inactiveMonths) userAutoM = userAutoM + userAutoT userAutoM = list(set(userAutoM)) # which user is active in last n months and auto active? 
userAuto = list(set(userAuto).intersection(userAutoM)) # merge both lists userActive = userActive + userAuto # remove double entries userActive = list(set(userActive)) # remove inactive users userInactive = self.user.getIdByStatus("inactive") userInactive = list(set(userActive).intersection(userInactive)) userActive = [x for x in userActive if x not in userInactive] # sort userActive.sort() # create dates now = datetime.datetime.now() year = int(now.strftime("%Y")) month = int(now.strftime("%m")) day = int(now.strftime("%d")) # This is done usually in the following month, meaning we need to adapt the date to last month month -= 1 day = 1 if month == 0: month = 12 for userId in userActive: user = self.user.getRowById(userId) print("\n", user[1]) # get item list markDescription = [] markDefault = [] for row in self.item.data: if str(row[3]) == "active": markDescription.append(row[1]) markDefault.append("0") # query user input print("") inputMark = self.inputStandard(markDescription, markDefault) # create array for cmark class markArray = [[0 for x in range(0)] for x in range(0)] counter = 0 for row in self.item.data: if str(row[3]) == "active": markArray.append([user[0], int(year), int(month), int(day), int(inputMark[counter])]) counter += 1 else: markArray.append([user[0], int(year), int(month), int(day), 0]) # save in database self.item.marksAdd(markArray) return 0 def priceAdd(self): """ Adds a price the price database """ priceDescription = [] priceStandard = [] itemId = [] priceOld = [[0 for x in range(0)] for x in range(0)] # acquiere old prices, save as [itemId, price] for row in self.price.dataBinMonth: if len(row) >= 2: for x in range(0, len(row)-1): if not float(row[-1-x]) == 0: priceOld.append([row[0], str(row[-1-x])]) break # create input fields for row in self.item.data: priceDescription.append(str(row[1]) + " " + str(row[2])) priceOldAdded = False for row1 in priceOld: if row[0] == row1[0]: priceStandard.append(row1[1]) priceOldAdded = True if not 
priceOldAdded: priceStandard.append("0") itemId.append(row[0]) inputPrice= self.inputStandard(priceDescription, priceStandard) # create dates now = datetime.datetime.now() year = now.strftime("%Y") month = now.strftime("%m") day = now.strftime("%d") counter = 0 for row in itemId: self.price.priceAdd([row, year, month, day, inputPrice[counter]]) counter += 1 return 0 def priceFill(self): """ Checks the marks database and matches marks with prices. If a price does not exist, it is requested and added to the price database. """ itemId=0 for row in self.item.data: print ("Checking for item " + str(row[1])) # Check for marks self.item.marks[itemId].getDataBinMonth() marks = self.item.marks[itemId].dataBinMonthHeader # Check for prices pricesH = self.price.dataBinMonthHeader pricesF = self.price.dataBinMonth prices = [] <|fim▁hole|> prices = rowId del prices[0] # If Id was not found, we create an empty array if len(prices) == 0: if len(pricesF) >= 1: prices = [0 for x in range(len(pricesF[0])-1)] # Find missing prices in Header for mark in marks: priceFound = False for price in pricesH: if mark == price: priceFound = True if not priceFound: pricesH.append(mark) prices.append(0) # Find empty prices priceMissing = [[0 for x in range(0)] for x in range(0)] counter = 0 for price in prices: if price == 0: priceMissing.append(pricesH[counter]) counter += 1 # Request user input for missing prices princeLatest = "0" for price in priceMissing: priceDescription = ["Enter price for " + str(row[1]) + " for year " + str(price[0]) + " and month " + str(price[1])] priceStandard = [princeLatest] inputPrice= self.inputStandard(priceDescription, priceStandard) princeLatest = inputPrice[0] # save prices self.price.priceAdd([row[0], price[0], price[1], 1, str(inputPrice[0])]) itemId += 1 return 0 def balanceExportPDF(self): """ Compute the balance """ # create dates now = datetime.datetime.now() year = int(now.strftime("%Y")) month = int(now.strftime("%m")) dateDescription = ["Year", 
"Month"] dateStandard = [str(year), str(month)] inputDate = self.inputStandard(dateDescription, dateStandard) # create balance class self.balance.exportMonthPDF(inputDate[0], inputDate[1], 1) def listExportPDF(self): """ Compute the name list """ # create dates now = datetime.datetime.now() year = int(now.strftime("%Y")) month = int(now.strftime("%m")) dateDescription = ["Year", "Month"] dateStandard = [str(year), str(month)] inputDate = self.inputStandard(dateDescription, dateStandard) # create balance class self.balance.exportMonthListPDF(inputDate[0], inputDate[1], 1) def balanceCheck(self): """ Prints a users balance """ user = self.getRowByTextname(self.user.getNamelist(), self.user) # print balance self.balance.getBalance(user[0]) return 0<|fim▁end|>
# Find Id in pricesF for rowId in pricesF: if rowId[0] == row[0]:
<|file_name|>Vector4.js<|end_file_name|><|fim▁begin|>/** * @author Richard Davey <[email protected]> * @copyright 2019 Photon Storm Ltd. * @license {@link https://opensource.org/licenses/MIT|MIT License} */ // Adapted from [gl-matrix](https://github.com/toji/gl-matrix) by toji // and [vecmath](https://github.com/mattdesl/vecmath) by mattdesl var Class = require('../utils/Class'); /** * @classdesc * A representation of a vector in 4D space. * * A four-component vector. * * @class Vector4 * @memberof Phaser.Math * @constructor * @since 3.0.0 * * @param {number} [x] - The x component. * @param {number} [y] - The y component. * @param {number} [z] - The z component. * @param {number} [w] - The w component. */ var Vector4 = new Class({ initialize: function Vector4 (x, y, z, w) { /** * The x component of this Vector. * * @name Phaser.Math.Vector4#x * @type {number} * @default 0 * @since 3.0.0 */ this.x = 0; /** * The y component of this Vector. * * @name Phaser.Math.Vector4#y * @type {number} * @default 0 * @since 3.0.0 */ this.y = 0; /** * The z component of this Vector. * * @name Phaser.Math.Vector4#z * @type {number} * @default 0 * @since 3.0.0 */ this.z = 0; /** * The w component of this Vector. * * @name Phaser.Math.Vector4#w * @type {number} * @default 0 * @since 3.0.0 */ this.w = 0; if (typeof x === 'object') { this.x = x.x || 0; this.y = x.y || 0; this.z = x.z || 0; this.w = x.w || 0; } else { this.x = x || 0; this.y = y || 0; this.z = z || 0; this.w = w || 0; } }, /** * Make a clone of this Vector4. * * @method Phaser.Math.Vector4#clone * @since 3.0.0 * * @return {Phaser.Math.Vector4} A clone of this Vector4. */ clone: function () { return new Vector4(this.x, this.y, this.z, this.w); }, /** * Copy the components of a given Vector into this Vector. * * @method Phaser.Math.Vector4#copy * @since 3.0.0 * * @param {Phaser.Math.Vector4} src - The Vector to copy the components from. * * @return {Phaser.Math.Vector4} This Vector4. 
*/ copy: function (src) { this.x = src.x; this.y = src.y; this.z = src.z || 0; this.w = src.w || 0; return this; }, /** * Check whether this Vector is equal to a given Vector. * * Performs a strict quality check against each Vector's components. * * @method Phaser.Math.Vector4#equals * @since 3.0.0 * * @param {Phaser.Math.Vector4} v - The vector to check equality with. * * @return {boolean} A boolean indicating whether the two Vectors are equal or not. */ equals: function (v) { return ((this.x === v.x) && (this.y === v.y) && (this.z === v.z) && (this.w === v.w)); }, /** * Set the `x`, `y`, `z` and `w` components of the this Vector to the given `x`, `y`, `z` and `w` values. * * @method Phaser.Math.Vector4#set * @since 3.0.0 * * @param {(number|object)} x - The x value to set for this Vector, or an object containing x, y, z and w components. * @param {number} y - The y value to set for this Vector. * @param {number} z - The z value to set for this Vector. * @param {number} w - The z value to set for this Vector. * * @return {Phaser.Math.Vector4} This Vector4. */ set: function (x, y, z, w) { if (typeof x === 'object') { this.x = x.x || 0; this.y = x.y || 0; this.z = x.z || 0; this.w = x.w || 0; } else { this.x = x || 0; this.y = y || 0; this.z = z || 0; this.w = w || 0; } return this; }, /** * Add a given Vector to this Vector. Addition is component-wise. * * @method Phaser.Math.Vector4#add * @since 3.0.0 * * @param {(Phaser.Math.Vector2|Phaser.Math.Vector3|Phaser.Math.Vector4)} v - The Vector to add to this Vector. * * @return {Phaser.Math.Vector4} This Vector4. */ add: function (v) { this.x += v.x; this.y += v.y; this.z += v.z || 0; this.w += v.w || 0; return this; }, /** * Subtract the given Vector from this Vector. Subtraction is component-wise. * * @method Phaser.Math.Vector4#subtract * @since 3.0.0 * * @param {(Phaser.Math.Vector2|Phaser.Math.Vector3|Phaser.Math.Vector4)} v - The Vector to subtract from this Vector. 
* * @return {Phaser.Math.Vector4} This Vector4. */ subtract: function (v) { this.x -= v.x; this.y -= v.y; this.z -= v.z || 0; this.w -= v.w || 0; return this; }, /** * Scale this Vector by the given value. * * @method Phaser.Math.Vector4#scale * @since 3.0.0 * * @param {number} scale - The value to scale this Vector by. * * @return {Phaser.Math.Vector4} This Vector4. */ scale: function (scale) { this.x *= scale; this.y *= scale; this.z *= scale; this.w *= scale; return this; }, /** * Calculate the length (or magnitude) of this Vector. * * @method Phaser.Math.Vector4#length * @since 3.0.0 * * @return {number} The length of this Vector. */ length: function () { var x = this.x; var y = this.y; var z = this.z; var w = this.w; return Math.sqrt(x * x + y * y + z * z + w * w); }, /** * Calculate the length of this Vector squared. * * @method Phaser.Math.Vector4#lengthSq * @since 3.0.0 * * @return {number} The length of this Vector, squared. */ lengthSq: function () { var x = this.x; var y = this.y; var z = this.z; var w = this.w; return x * x + y * y + z * z + w * w; }, /** * Normalize this Vector. * * Makes the vector a unit length vector (magnitude of 1) in the same direction. * * @method Phaser.Math.Vector4#normalize * @since 3.0.0 * * @return {Phaser.Math.Vector4} This Vector4. */ normalize: function () { var x = this.x; var y = this.y; var z = this.z; var w = this.w; var len = x * x + y * y + z * z + w * w; if (len > 0) { len = 1 / Math.sqrt(len); this.x = x * len; this.y = y * len; this.z = z * len; this.w = w * len; } return this; }, /** * Calculate the dot product of this Vector and the given Vector. * * @method Phaser.Math.Vector4#dot * @since 3.0.0 * * @param {Phaser.Math.Vector4} v - The Vector4 to dot product with this Vector4. * * @return {number} The dot product of this Vector and the given Vector. */ dot: function (v) { return this.x * v.x + this.y * v.y + this.z * v.z + this.w * v.w; }, /** * Linearly interpolate between this Vector and the given Vector. 
* * Interpolates this Vector towards the given Vector. * * @method Phaser.Math.Vector4#lerp * @since 3.0.0 * * @param {Phaser.Math.Vector4} v - The Vector4 to interpolate towards. * @param {number} [t=0] - The interpolation percentage, between 0 and 1. * * @return {Phaser.Math.Vector4} This Vector4. */ lerp: function (v, t) { if (t === undefined) { t = 0; } var ax = this.x; var ay = this.y; var az = this.z; var aw = this.w; this.x = ax + t * (v.x - ax); this.y = ay + t * (v.y - ay); this.z = az + t * (v.z - az); this.w = aw + t * (v.w - aw); return this; }, /** * Perform a component-wise multiplication between this Vector and the given Vector. * * Multiplies this Vector by the given Vector. * * @method Phaser.Math.Vector4#multiply * @since 3.0.0 * * @param {(Phaser.Math.Vector2|Phaser.Math.Vector3|Phaser.Math.Vector4)} v - The Vector to multiply this Vector by. * * @return {Phaser.Math.Vector4} This Vector4. */ multiply: function (v) { this.x *= v.x; this.y *= v.y; this.z *= v.z || 1; this.w *= v.w || 1; return this; }, /** * Perform a component-wise division between this Vector and the given Vector. * * Divides this Vector by the given Vector. * * @method Phaser.Math.Vector4#divide * @since 3.0.0 * * @param {(Phaser.Math.Vector2|Phaser.Math.Vector3|Phaser.Math.Vector4)} v - The Vector to divide this Vector by. * * @return {Phaser.Math.Vector4} This Vector4. */ divide: function (v) { this.x /= v.x; this.y /= v.y; this.z /= v.z || 1; this.w /= v.w || 1; return this; }, /** * Calculate the distance between this Vector and the given Vector. * * @method Phaser.Math.Vector4#distance * @since 3.0.0 * * @param {(Phaser.Math.Vector2|Phaser.Math.Vector3|Phaser.Math.Vector4)} v - The Vector to calculate the distance to. * * @return {number} The distance from this Vector to the given Vector. 
*/ distance: function (v) { var dx = v.x - this.x; var dy = v.y - this.y; var dz = v.z - this.z || 0; var dw = v.w - this.w || 0; return Math.sqrt(dx * dx + dy * dy + dz * dz + dw * dw); }, /** * Calculate the distance between this Vector and the given Vector, squared. * * @method Phaser.Math.Vector4#distanceSq * @since 3.0.0 * * @param {(Phaser.Math.Vector2|Phaser.Math.Vector3|Phaser.Math.Vector4)} v - The Vector to calculate the distance to. * * @return {number} The distance from this Vector to the given Vector, squared. */ distanceSq: function (v) { var dx = v.x - this.x; var dy = v.y - this.y; var dz = v.z - this.z || 0; var dw = v.w - this.w || 0; return dx * dx + dy * dy + dz * dz + dw * dw; }, /** * Negate the `x`, `y`, `z` and `w` components of this Vector. * * @method Phaser.Math.Vector4#negate<|fim▁hole|> * @since 3.0.0 * * @return {Phaser.Math.Vector4} This Vector4. */ negate: function () { this.x = -this.x; this.y = -this.y; this.z = -this.z; this.w = -this.w; return this; }, /** * Transform this Vector with the given Matrix. * * @method Phaser.Math.Vector4#transformMat4 * @since 3.0.0 * * @param {Phaser.Math.Matrix4} mat - The Matrix4 to transform this Vector4 with. * * @return {Phaser.Math.Vector4} This Vector4. */ transformMat4: function (mat) { var x = this.x; var y = this.y; var z = this.z; var w = this.w; var m = mat.val; this.x = m[0] * x + m[4] * y + m[8] * z + m[12] * w; this.y = m[1] * x + m[5] * y + m[9] * z + m[13] * w; this.z = m[2] * x + m[6] * y + m[10] * z + m[14] * w; this.w = m[3] * x + m[7] * y + m[11] * z + m[15] * w; return this; }, /** * Transform this Vector with the given Quaternion. * * @method Phaser.Math.Vector4#transformQuat * @since 3.0.0 * * @param {Phaser.Math.Quaternion} q - The Quaternion to transform this Vector with. * * @return {Phaser.Math.Vector4} This Vector4. */ transformQuat: function (q) { // TODO: is this really the same as Vector3? 
// Also, what about this: http://molecularmusings.wordpress.com/2013/05/24/a-faster-quaternion-vector-multiplication/ // benchmarks: http://jsperf.com/quaternion-transform-vec3-implementations var x = this.x; var y = this.y; var z = this.z; var qx = q.x; var qy = q.y; var qz = q.z; var qw = q.w; // calculate quat * vec var ix = qw * x + qy * z - qz * y; var iy = qw * y + qz * x - qx * z; var iz = qw * z + qx * y - qy * x; var iw = -qx * x - qy * y - qz * z; // calculate result * inverse quat this.x = ix * qw + iw * -qx + iy * -qz - iz * -qy; this.y = iy * qw + iw * -qy + iz * -qx - ix * -qz; this.z = iz * qw + iw * -qz + ix * -qy - iy * -qx; return this; }, /** * Make this Vector the zero vector (0, 0, 0, 0). * * @method Phaser.Math.Vector4#reset * @since 3.0.0 * * @return {Phaser.Math.Vector4} This Vector4. */ reset: function () { this.x = 0; this.y = 0; this.z = 0; this.w = 0; return this; } }); // TODO: Check if these are required internally, if not, remove. Vector4.prototype.sub = Vector4.prototype.subtract; Vector4.prototype.mul = Vector4.prototype.multiply; Vector4.prototype.div = Vector4.prototype.divide; Vector4.prototype.dist = Vector4.prototype.distance; Vector4.prototype.distSq = Vector4.prototype.distanceSq; Vector4.prototype.len = Vector4.prototype.length; Vector4.prototype.lenSq = Vector4.prototype.lengthSq; module.exports = Vector4;<|fim▁end|>
<|file_name|>dbscanplot.py<|end_file_name|><|fim▁begin|>import requests import time import dblayer from sklearn.cluster import DBSCAN import plotly<|fim▁hole|>import plotly.graph_objs as go import pandas as pd import numpy as np import random import testfile # Create random colors in list color_list = [] def generate_color(ncluster): for i in range(ncluster): color = '#{:02x}{:02x}{:02x}'.format(*map(lambda x: random.randint(0, 255), range(ncluster))) color_list.append(color) def showLatLongInCluster(data): # Run the DBSCAN from sklearn dbscan = DBSCAN(eps=2, min_samples=5, metric='euclidean', algorithm='auto').fit(data) cluster_labels = dbscan.labels_ n_clusters = len(set(cluster_labels)) - (1 if -1 in cluster_labels else 0) generate_color(n_clusters) plot_data = [] # get the cluster for i in range(n_clusters): ds = data[np.where(cluster_labels == i)] clustername = "Cluster " + str(i + 1) trace = go.Scattergeo(lon=ds[:,0], lat=ds[:,1],mode='markers',marker=dict(color=color_list[i], size=5), name=clustername) plot_data.append(trace) layout = go.Layout(showlegend=False, title='Earthquakes In North and South America', titlefont=dict(family='Courier New, monospace',size=20,color='#7f7f7f'), geo=dict(scope=('north america', 'south america'), projection=dict(type='orthographic',rotation=dict(lon=-60)), showland=True, landcolor='#191919', showcountries=True, showocean=True, oceancolor='rgb(217,217,255)', showframe=False, ), xaxis=dict(showgrid=False, zeroline=False), yaxis=dict(showgrid=False, zeroline=False)) fig = go.Figure(data=plot_data, layout=layout) div = plotly.offline.plot(fig, include_plotlyjs=True, output_type='div') return div def mkLatLong(): #### TME: Get start time start_time = time.time() #### sess = requests.Session() dbobj=dblayer.classDBLayer() projection = [{"$project": {"_id": 0, "mag": "$properties.mag", "depth": {"$arrayElemAt": ["$geometry.coordinates", 2]}, "longitude": {"$arrayElemAt": ["$geometry.coordinates", 0]}, "latitude": {"$arrayElemAt": 
["$geometry.coordinates", 1]}}}] df = pd.DataFrame(list(dbobj.doaggregate(projection))) df = df[['longitude', 'latitude']].copy() #### TME: Elapsed time taken to read data from MongoDB fileobj = testfile.classFileWrite() elapsed = time.time() - start_time fileobj.writeline() str1 = str(elapsed) + " secs required to read " + str(df['latitude'].count()) + " records from database." fileobj.writelog("Reading Longitude and Latitude") fileobj.writelog(str1) #### #### TME: Get start time start_time = time.time() #### div = showLatLongInCluster(df.values) response = """<html><title></title><head><meta charset=\"utf8\"> </head> <body>""" + div + """</body> </html>""" dbobj.closedb() #### TME: Elapsed time taken to cluster and plot data elapsed = time.time() - start_time fileobj.writeline() str1 = "Time taken: " + str(elapsed) fileobj.writelog("Applying DBSCAN clustering and plotting its output") fileobj.writelog(str1) fileobj.writeline() fileobj.closefile() #### return response<|fim▁end|>
<|file_name|>resourcetiming-decompression.js<|end_file_name|><|fim▁begin|>// // resourcetiming-decompression.js // // Decompresses ResourceTiming data compressed via resourcetiming-compression.js. // // See http://nicj.net/compressing-resourcetiming/ // // https://github.com/nicjansma/resourcetiming-compression.js // (function(window) { "use strict"; // save old ResourceTimingDecompression object for noConflict() var root; var previousObj; if (typeof window !== "undefined") { root = window; previousObj = root.ResourceTimingDecompression; } // model var ResourceTimingDecompression = {}; // // Functions // /** * Returns the index of the first value in the array such that it is * greater or equal to x. * The search is performed using binary search and the array is assumed * to be sorted in ascending order. * * @param {array} arr haystack * @param {any} x needle * @param {function} by transform function (optional) * * @returns {number} the desired index or arr.length if x is more than all values. */ ResourceTimingDecompression.searchSortedFirst = function(arr, x, by) { if (!arr || arr.length === 0) { return -1; } function ident(a) { return a; } by = by || ident; x = by(x); var min = -1; var max = arr.length; var m = 0; while (min < (max - 1)) { m = (min + max) >>> 1; if (by(arr[m]) < x) { min = m; } else { max = m; } } return max; }; /** * Returns the index of the last value in the array such that is it less * than or equal to x. * The search is performed using binary search and the array is assumed * to be sorted in ascending order. * * @param {array} arr haystack * @param {any} x needle * @param {function} by transform function (optional) * * @returns {number} the desired index or -1 if x is less than all values. 
*/ ResourceTimingDecompression.searchSortedLast = function(arr, x, by) { if (!arr || arr.length === 0) { return -1; } function ident(a) { return a; } by = by || ident; x = by(x); var min = -1; var max = arr.length; var m = 0; while (min < (max - 1)) { m = (min + max) >>> 1; if (x < by(arr[m])) { max = m; } else { min = m; } } return min; }; /** * Changes the value of ResourceTimingDecompression back to its original value, returning * a reference to the ResourceTimingDecompression object. * * @returns {object} Original ResourceTimingDecompression object */ ResourceTimingDecompression.noConflict = function() { root.ResourceTimingDecompression = previousObj; return ResourceTimingDecompression; }; /** * Initiator type map */ ResourceTimingDecompression.INITIATOR_TYPES = { "other": 0, "img": 1, "link": 2, "script": 3, "css": 4, "xmlhttprequest": 5, "html": 6, // IMAGE element inside a SVG "image": 7, "beacon": 8, "fetch": 9 }; /** * Dimension name map */ ResourceTimingDecompression.DIMENSION_NAMES = { "height": 0, "width": 1, "y": 2, "x": 3, "naturalHeight": 4, "naturalWidth": 5 }; /** * Script mask map */ ResourceTimingDecompression.SCRIPT_ATTRIBUTES = { "scriptAsync": 1, "scriptDefer": 2, "scriptBody": 4 }; /** * Returns a map with key/value pairs reversed. * * @param {object} origMap Map we want to reverse. * * @returns {object} New map with reversed mappings. */ ResourceTimingDecompression.getRevMap = function(origMap) { var revMap = {}; for (var key in origMap) { if (origMap.hasOwnProperty(key)) { revMap[origMap[key]] = key; } } return revMap; }; /** * Reverse initiator type map */ ResourceTimingDecompression.REV_INITIATOR_TYPES = ResourceTimingDecompression. getRevMap(ResourceTimingDecompression.INITIATOR_TYPES); /** * Reverse dimension name map */ ResourceTimingDecompression.REV_DIMENSION_NAMES = ResourceTimingDecompression. 
getRevMap(ResourceTimingDecompression.DIMENSION_NAMES); /** * Reverse script attribute map */ ResourceTimingDecompression.REV_SCRIPT_ATTRIBUTES = ResourceTimingDecompression. getRevMap(ResourceTimingDecompression.SCRIPT_ATTRIBUTES); // Any ResourceTiming data time that starts with this character is not a time, // but something else (like dimension data) var SPECIAL_DATA_PREFIX = "*"; // Dimension data special type var SPECIAL_DATA_DIMENSION_TYPE = "0"; var SPECIAL_DATA_DIMENSION_PREFIX = SPECIAL_DATA_PREFIX + SPECIAL_DATA_DIMENSION_TYPE; // Dimension data special type var SPECIAL_DATA_SIZE_TYPE = "1"; // Dimension data special type var SPECIAL_DATA_SCRIPT_TYPE = "2"; // Regular Expression to parse a URL var HOSTNAME_REGEX = /^(https?:\/\/)([^\/]+)(.*)/; /** * Decompresses a compressed ResourceTiming trie * * @param {object} rt ResourceTiming trie * @param {string} prefix URL prefix for the current node * * @returns {ResourceTiming[]} ResourceTiming array */ ResourceTimingDecompression.decompressResources = function(rt, prefix) { var resources = []; // Dimension data for resources. var dimensionData; prefix = prefix || ""; for (var key in rt) { // skip over inherited properties if (!rt.hasOwnProperty(key)) { continue; } var node = rt[key]; var nodeKey = prefix + key; // strip trailing pipe, which is used to designate a node that is a prefix for // other nodes but has resTiming data if (nodeKey.indexOf("|", nodeKey.length - 1) !== -1) { nodeKey = nodeKey.substring(0, nodeKey.length - 1); } if (typeof node === "string") { // add all occurences var timings = node.split("|"); if (timings.length === 0) { continue; } // Make sure we reset the dimensions before each new resource. 
dimensionData = undefined; if (this.isDimensionData(timings[0])) { dimensionData = this.decompressDimension(timings[0]); // Remove the dimension data from our timings array timings = timings.splice(1); } // end-node for (var i = 0; i < timings.length; i++) { var resourceData = timings[i]; if (resourceData.length > 0 && resourceData[0] === SPECIAL_DATA_PREFIX) { // dimensions or sizes for this resource continue; } // Decode resource and add dimension data to it. resources.push( this.addDimension( this.decodeCompressedResource(resourceData, nodeKey), dimensionData ) ); } } else { // continue down var nodeResources = this.decompressResources(node, nodeKey); resources = resources.concat(nodeResources); } } return resources; }; /* * Checks that the input contains dimension information. * * @param {string} resourceData The string we want to check. * * @returns boolean True if resourceData starts with SPECIAL_DATA_DIMENSION_PREFIX, false otherwise. */ ResourceTimingDecompression.isDimensionData = function(resourceData) { return resourceData && resourceData.substring(0, SPECIAL_DATA_DIMENSION_PREFIX.length) === SPECIAL_DATA_DIMENSION_PREFIX; }; /** * Extract height, width, y and x from a string. * * @param {string} resourceData A string containing dimension data. * * @returns {object} Dimension data with keys defined by DIMENSION_NAMES. */ ResourceTimingDecompression.decompressDimension = function(resourceData) { var dimensions, i; var dimensionData = {}; // If the string does not contain dimension information, do nothing. if (!this.isDimensionData(resourceData)) { return dimensionData; } // Remove special prefix resourceData = resourceData.substring(SPECIAL_DATA_DIMENSION_PREFIX.length); dimensions = resourceData.split(","); // The data should contain at least height/width. if (dimensions.length < 2) { return dimensionData; } // Base 36 decode and assign to correct keys of dimensionData. 
for (i = 0; i < dimensions.length; i++) { if (dimensions[i] === "") { dimensionData[this.REV_DIMENSION_NAMES[i]] = 0; } else { dimensionData[this.REV_DIMENSION_NAMES[i]] = parseInt(dimensions[i], 36); } } return dimensionData; }; /** * Adds dimension data to the given resource. * * @param {object} resource The resource we want to edit. * @param {object} dimensionData The dimension data we want to add. * * @returns {object} The resource with added dimensions. */ ResourceTimingDecompression.addDimension = function(resource, dimensionData) { // If the resource or data are not defined, do nothing. if (!resource || !dimensionData) { return resource; } // Add all the dimensions to our resource. for (var key in this.DIMENSION_NAMES) { if (this.DIMENSION_NAMES.hasOwnProperty(key) && dimensionData.hasOwnProperty(key)) { resource[key] = dimensionData[key]; } } return resource; }; /** * Compute a list of cells based on the start/end times of the * given array of resources. * The returned list of cells is sorted in chronological order. * * @param {array} rts array of resource timings. * * @returns {array} Array of cells. */ ResourceTimingDecompression.getSortedCells = function(rts) { // We have exactly 2 events per resource (start and end). // var cells = new Array(rts.length * 2); var cells = []; for (var i = 0; i < rts.length; i++) { // Ignore resources with duration <= 0 if (rts[i].responseEnd <= rts[i].startTime) { continue; } // Increment on resource start cells.push({ ts: rts[i].startTime, val: 1.0 }); // Decrement on resource end cells.push({ ts: rts[i].responseEnd, val: -1.0 }); } // Sort in chronological order cells.sort(function(x, y) { return x.ts - y.ts;<|fim▁hole|> }); return cells; }; /** * Add contributions to the array of cells. * * @param {array} cells array of cells that need contributions. * * @returns {array} Array of cells with their contributions. 
*/ ResourceTimingDecompression.addCellContributions = function(cells) { var tot = 0.0; var incr = 0.0; var deleteIdx = []; var currentSt = cells[0].ts; var cellLen = cells.length; var c = {}; for (var i = 0; i < cellLen; i++) { c = cells[i]; // The next timestamp is the same. // We don't want to have cells of duration 0, so // we aggregate them. if ((i < (cellLen - 1)) && (cells[i + 1].ts === c.ts)) { cells[i + 1].val += c.val; deleteIdx.push(i); continue; } incr = c.val; if (tot > 0) { // divide time delta by number of active resources. c.val = (c.ts - currentSt) / tot; } currentSt = c.ts; tot += incr; } // Delete timestamps that don't delimit cells. for (i = deleteIdx.length - 1; i >= 0; i--) { cells.splice(deleteIdx[i], 1); } return cells; }; /** * Sum the contributions of a single resource based on an array of cells. * * @param {array} cells Array of cells with their contributions. * @param {ResourceTiming} rt a single resource timing object. * * @returns {number} The total contribution for that resource. */ ResourceTimingDecompression.sumContributions = function(cells, rt) { if (!rt || typeof rt.startTime === "undefined" || typeof rt.responseEnd === "undefined") { return 0.0; } var startTime = rt.startTime + 1; var responseEnd = rt.responseEnd; function getTs(x) { return x.ts; } // Find indices of cells that were affected by our resource. var low = this.searchSortedFirst(cells, {ts: startTime}, getTs); var up = this.searchSortedLast(cells, {ts: responseEnd}, getTs); var tot = 0.0; // Sum contributions across all those cells for (var i = low; i <= up; i++) { tot += cells[i].val; } return tot; }; /** * Adds contribution scores to all resources in the array. * * @param {array} rts array of resource timings. * * @returns {array} Array of resource timings with their contributions. */ ResourceTimingDecompression.addContribution = function(rts) { if (!rts || rts.length === 0) { return rts; } // Get cells in chronological order. 
var cells = this.getSortedCells(rts); // We need at least two cells and they need to begin // with a start event. Furthermore, the last timestamp // should be > 0. if (cells.length < 2 || cells[0].val < 1.0 || cells[cells.length - 1].ts <= 0 ) { return rts; } // Compute each cell's contribution. this.addCellContributions(cells); // Total load time for this batch of resources. var loadTime = cells[cells.length - 1].ts; for (var i = 0; i < rts.length; i++) { // Compute the contribution of each resource. // Normalize by total load time. rts[i].contribution = this.sumContributions(cells, rts[i]) / loadTime; } return rts; }; /** * Determines the initiatorType from a lookup * * @param {number} index Initiator type index * * @returns {string} initiatorType, or "other" if not known */ ResourceTimingDecompression.getInitiatorTypeFromIndex = function(index) { if (this.REV_INITIATOR_TYPES.hasOwnProperty(index)) { return this.REV_INITIATOR_TYPES[index]; } else { return "other"; } }; /** * Decodes a compressed ResourceTiming data string * * @param {string} data Compressed timing data * @param {string} url URL * * @returns {ResourceTiming} ResourceTiming pseudo-object (containing all of the properties of a * ResourceTiming object) */ ResourceTimingDecompression.decodeCompressedResource = function(data, url) { if (!data || !url) { return {}; } url = ResourceTimingDecompression.reverseHostname(url); var initiatorType = parseInt(data[0], 10); data = data.length > 1 ? data.split(SPECIAL_DATA_PREFIX) : []; var timings = data.length > 0 && data[0].length > 1 ? data[0].substring(1).split(",") : []; var sizes = data.length > 1 ? data[1] : ""; var specialData = data.length > 1 ? data[1] : ""; // convert all timings from base36 for (var i = 0; i < timings.length; i++) { if (timings[i] === "") { // startTime being 0 timings[i] = 0; } else { // de-base36 timings[i] = parseInt(timings[i], 36); } } // special case timestamps var startTime = timings.length >= 1 ? 
timings[0] : 0; // fetchStart is either the redirectEnd time, or startTime var fetchStart = timings.length < 10 ? startTime : this.decodeCompressedResourceTimeStamp(timings, 9, startTime); // all others are offset from startTime var res = { name: url, initiatorType: this.getInitiatorTypeFromIndex(initiatorType), startTime: startTime, redirectStart: this.decodeCompressedResourceTimeStamp(timings, 9, startTime) > 0 ? startTime : 0, redirectEnd: this.decodeCompressedResourceTimeStamp(timings, 9, startTime), fetchStart: fetchStart, domainLookupStart: this.decodeCompressedResourceTimeStamp(timings, 8, startTime), domainLookupEnd: this.decodeCompressedResourceTimeStamp(timings, 7, startTime), connectStart: this.decodeCompressedResourceTimeStamp(timings, 6, startTime), secureConnectionStart: this.decodeCompressedResourceTimeStamp(timings, 5, startTime), connectEnd: this.decodeCompressedResourceTimeStamp(timings, 4, startTime), requestStart: this.decodeCompressedResourceTimeStamp(timings, 3, startTime), responseStart: this.decodeCompressedResourceTimeStamp(timings, 2, startTime), responseEnd: this.decodeCompressedResourceTimeStamp(timings, 1, startTime) }; res.duration = res.responseEnd > 0 ? (res.responseEnd - res.startTime) : 0; // decompress resource size data if (sizes.length > 0) { this.decompressSpecialData(specialData, res); } return res; }; /** * Decodes a timestamp from a compressed RT array * * @param {number[]} timings ResourceTiming timings * @param {number} idx Index into array * @param {number} startTime NavigationTiming The Resource's startTime * * @returns {number} Timestamp, or 0 if unknown or missing */ ResourceTimingDecompression.decodeCompressedResourceTimeStamp = function(timings, idx, startTime) { if (timings && timings.length >= (idx + 1)) { if (timings[idx] !== 0) { return timings[idx] + startTime; } } return 0; }; /** * Decompresses script load type into the specified resource. * * @param {string} compressed String with a single integer. 
* @param {ResourceTiming} resource ResourceTiming object. * @returns {ResourceTiming} ResourceTiming object with decompressed script type. */ ResourceTimingDecompression.decompressScriptType = function(compressed, resource) { var data = parseInt(compressed, 10); if (!resource) { resource = {}; } for (var key in this.SCRIPT_ATTRIBUTES) { if (this.SCRIPT_ATTRIBUTES.hasOwnProperty(key)) { resource[key] = (data & this.SCRIPT_ATTRIBUTES[key]) === this.SCRIPT_ATTRIBUTES[key]; } } return resource; }; /** * Decompresses size information back into the specified resource * * @param {string} compressed Compressed string * @param {ResourceTiming} resource ResourceTiming bject * @returns {ResourceTiming} ResourceTiming object with decompressed sizes */ ResourceTimingDecompression.decompressSize = function(compressed, resource) { var split, i; if (typeof resource === "undefined") { resource = {}; } split = compressed.split(","); for (i = 0; i < split.length; i++) { if (split[i] === "_") { // special non-delta value split[i] = 0; } else { // fill in missing numbers if (split[i] === "") { split[i] = 0; } // convert back from Base36 split[i] = parseInt(split[i], 36); if (i > 0) { // delta against first number split[i] += split[0]; } } } // fill in missing if (split.length === 1) { // transferSize is a delta from encodedSize split.push(split[0]); } if (split.length === 2) { // decodedSize is a delta from encodedSize split.push(split[0]); } // re-add attributes to the resource resource.encodedBodySize = split[0]; resource.transferSize = split[1]; resource.decodedBodySize = split[2]; return resource; }; /** * Decompresses special data such as resource size or script type into the given resource. 
* * @param {string} compressed Compressed string * @param {ResourceTiming} resource ResourceTiming object * @returns {ResourceTiming} ResourceTiming object with decompressed special data */ ResourceTimingDecompression.decompressSpecialData = function(compressed, resource) { var dataType; if (!compressed || compressed.length === 0) { return resource; } dataType = compressed[0]; compressed = compressed.substring(1); if (dataType === SPECIAL_DATA_SIZE_TYPE) { resource = this.decompressSize(compressed, resource); } else if (dataType === SPECIAL_DATA_SCRIPT_TYPE) { resource = this.decompressScriptType(compressed, resource); } return resource; }; /** * Reverse the hostname portion of a URL * * @param {string} url a fully-qualified URL * @returns {string} the input URL with the hostname portion reversed, if it can be found */ ResourceTimingDecompression.reverseHostname = function(url) { return url.replace(HOSTNAME_REGEX, function(m, p1, p2, p3) { // p2 is everything after the first `://` and before the next `/` // which includes `<username>:<password>@` and `:<port-number>`, if present return p1 + ResourceTimingDecompression.reverseString(p2) + p3; }); }; /** * Reverse a string * * @param {string} i a string * @returns {string} the reversed string */ ResourceTimingDecompression.reverseString = function(i) { var l = i.length, o = ""; while (l--) { o += i[l]; } return o; }; // // Export to the appropriate location // if (typeof define === "function" && define.amd) { // // AMD / RequireJS // define([], function() { return ResourceTimingDecompression; }); } else if (typeof module !== "undefined" && module.exports) { // // Node.js // module.exports = ResourceTimingDecompression; } else if (typeof root !== "undefined") { // // Browser Global // root.ResourceTimingDecompression = ResourceTimingDecompression; } }(typeof window !== "undefined" ? window : undefined));<|fim▁end|>
<|file_name|>query_list.ts<|end_file_name|><|fim▁begin|>import {ListWrapper, MapWrapper} from 'angular2/src/facade/collection'; import {getSymbolIterator} from 'angular2/src/facade/lang'; import {Observable, EventEmitter} from 'angular2/src/facade/async'; /** * An unmodifiable list of items that Angular keeps up to date when the state * of the application changes. * * The type of object that {@link QueryMetadata} and {@link ViewQueryMetadata} provide. * * Implements an iterable interface, therefore it can be used in both ES6 * javascript `for (var i of items)` loops as well as in Angular templates with * `*ngFor="#i of myList"`. * * Changes can be observed by subscribing to the changes `Observable`. * * NOTE: In the future this class will implement an `Observable` interface. * * ### Example ([live demo](http://plnkr.co/edit/RX8sJnQYl9FWuSCWme5z?p=preview)) * ```typescript * @Component({...}) * class Container { * constructor(@Query(Item) items: QueryList<Item>) { * items.changes.subscribe(_ => console.log(items.length)); * } * } * ``` */ export class QueryList<T> { private _results: Array<T> = []; private _emitter = new EventEmitter(); get changes(): Observable<any> { return this._emitter; } get length(): number { return this._results.length; } get first(): T { return ListWrapper.first(this._results); } get last(): T { return ListWrapper.last(this._results); } /** * returns a new array with the passed in function applied to each element. */ map<U>(fn: (item: T) => U): U[] { return this._results.map(fn); } /** * returns a filtered array. */ filter(fn: (item: T) => boolean): T[] { return this._results.filter(fn); } /** * returns a reduced value. 
*/ reduce<U>(fn: (acc: U, item: T) => U, init: U): U { return this._results.reduce(fn, init); } /** * converts QueryList into an array */ toArray(): T[] { return ListWrapper.clone(this._results); } [getSymbolIterator()](): any { return this._results[getSymbolIterator()](); } toString(): string { return this._results.toString(); } <|fim▁hole|> /** @internal */ notifyOnChanges(): void { this._emitter.emit(this); } }<|fim▁end|>
/** * @internal */ reset(res: T[]): void { this._results = res; }
<|file_name|>VelocityDriver.java<|end_file_name|><|fim▁begin|>package io.github.dantesun.petclinic.data.velocity; <|fim▁hole|>import org.apache.ibatis.mapping.BoundSql; import org.apache.ibatis.mapping.MappedStatement; import org.apache.ibatis.mapping.SqlSource; import org.apache.ibatis.parsing.XNode; import org.apache.ibatis.scripting.LanguageDriver; import org.apache.ibatis.session.Configuration; import org.apache.ibatis.type.Alias; import org.mybatis.scripting.velocity.Driver; /** * Created by dsun on 15/2/22. */ @Alias("velocity") public class VelocityDriver implements LanguageDriver { private Driver driverImpl = new Driver(); @Override public ParameterHandler createParameterHandler(MappedStatement mappedStatement, Object parameterObject, BoundSql boundSql) { return driverImpl.createParameterHandler(mappedStatement, parameterObject, boundSql); } @Override public SqlSource createSqlSource(Configuration configuration, XNode script, Class<?> parameterType) { return createSqlSource(configuration, script.getNode().getTextContent(), parameterType); } @Override public SqlSource createSqlSource(Configuration configuration, String script, Class<?> parameterType) { if (parameterType == null) { parameterType = Object.class; } return new VelocitySqlSource(configuration, script, parameterType); } }<|fim▁end|>
import org.apache.ibatis.executor.parameter.ParameterHandler;
<|file_name|>bitboard.rs<|end_file_name|><|fim▁begin|>// use std::num::Int; pub type Bitboard = u32; pub trait BitMove { fn up_left(&self) -> Self; fn up_right(&self) -> Self; fn down_left(&self) -> Self; fn down_right(&self) -> Self; fn is(&self) -> bool; } pub const S: [Bitboard; 32] = [ 1 << 18, 1 << 12, 1 << 6, 1 << 0, 1 << 19, 1 << 13, 1 << 7, 1 << 1, 1 << 26, 1 << 20, 1 << 14, 1 << 8, 1 << 27, 1 << 21, 1 << 15, 1 << 9, 1 << 2, 1 << 28, 1 << 22, 1 << 16, 1 << 3, 1 << 29, 1 << 23, 1 << 17, 1 << 10, 1 << 4, 1 << 30, 1 << 24, 1 << 11, 1 << 05, 1 << 31, 1 << 25]; pub const BP_INIT: Bitboard = S[0] | S[1] | S[2] | S[3] | S[4] | S[5] | S[6] | S[7] | S[8] | S[9] | S[10] | S[11]; pub const WP_INIT: Bitboard = S[20] | S[21] | S[22] | S[23] | S[24] | S[25] | S[26] | S[27] | S[28] | S[29] | S[30] | S[31]; pub const ROW_1: Bitboard = S[0] | S[1] | S[2] | S[3]; pub const ROW_2: Bitboard = S[4] | S[5] | S[6] | S[7]; pub const ROW_7: Bitboard = S[24] | S[25] | S[26] | S[27]; pub const ROW_8: Bitboard = S[28] | S[29] | S[30] | S[31]; pub const CAN_UPLEFT: Bitboard = !(S[0] | S[8] | S[16] | S[24] | ROW_8); pub const CAN_UPRIGHT: Bitboard = !(S[7] | S[15] | S[23] | S[31] | ROW_8); pub const CAN_DOWNLEFT: Bitboard = !(S[0] | S[8] | S[16] | S[24] | ROW_1); pub const CAN_DOWNRIGHT: Bitboard = !(S[7] | S[15] | S[23] | S[31] | ROW_1); #[derive(Clone, PartialEq, Show, Copy)] pub struct Move { pub src: u16, pub dst: u16, pub jump: bool, __dummy: () } #[derive(Clone, PartialEq, Show, Copy)] pub enum Direction{ UpLeft, UpRight, DownLeft, DownRight } impl Move { pub fn new(src: u16, dst: u16, jump: bool) -> Move { assert!(src < 32); assert!(dst < 32); Move { src: src, dst: dst, jump: jump, __dummy: () } } pub fn calc_direction(&self) -> Option<Direction> { let (src, dst) = (S[self.src as usize], S[self.dst as usize]); if src.up_left() ^ dst == 0 { Some(Direction::UpLeft) } else if src.up_right() ^ dst == 0 { Some(Direction::UpRight) } else if src.down_right() ^ dst == 0 { 
Some(Direction::DownRight) } else if src.down_left() ^ dst == 0 { Some(Direction::DownLeft) } else { None } } } #[derive(Clone, Copy)] pub enum Cell { Empty = 0, Pw, Pb, Kw, Kb } pub const CELLTABLE: [&'static str; 5] = [ "---", "WHI", "BLK", "KWH", "KBK" ]; #[derive(Show, Clone, Copy, PartialEq)] pub enum MoveCode { Success, VoidPiece, IllegalMove, WrongPiece, Quit, InputFail } impl BitMove for Bitboard { #[inline] fn up_left(&self) -> Bitboard { self.rotate_left(7) } <|fim▁hole|> #[inline] fn up_right(&self) -> Bitboard { self.rotate_left(1) } #[inline] fn down_left(&self) -> Bitboard { self.rotate_right(1) } #[inline] fn down_right(&self) -> Bitboard { self.rotate_right(7) } #[inline] fn is(&self) -> bool { *self != 0 } } // Maps from Bitboard indicating a position to the number // of that position itself pub fn bbumap(b: Bitboard) -> Bitboard { S.iter().position(|&x| x == b).unwrap() as u32 } #[inline] pub fn high_bit(mut board: Bitboard) -> Bitboard { board |= board >> 1; board |= board >> 2; board |= board >> 4; board |= board >> 8; board |= board >> 16; board - (board >> 1) } #[cfg(test)] mod test { use super::*; #[test] #[should_panic(expected = "assertion failed")] fn test_move_validation() { let _ = Move::new(0, 100, false); } #[test] fn test_move_validation2() { let _ = Move::new(1,2,false); let _ = Move::new(31,0,false); } #[test] fn test_direction_calculation() { let m = Move::new(9, 12, true); assert_eq!(m.calc_direction(), Some(Direction::UpLeft)); let m = Move::new(9, 13, false); assert_eq!(m.calc_direction(), Some(Direction::UpRight)); let m = Move::new(21, 17, false); assert_eq!(m.calc_direction(), Some(Direction::DownLeft)); let m = Move::new(21, 18, false); assert_eq!(m.calc_direction(), Some(Direction::DownRight)); let m = Move::new(21, 12, false); assert_eq!(m.calc_direction(), None); } }<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># # adapters/tensorflow/imagenet/__init__.py - a service adapter for the tensorflow ImageNet pre-trained graph # # Copyright (c) 2018 SingularityNET # # Distributed under the MIT software license, see LICENSE file. # import base64 import logging import os from pathlib import Path from typing import List import tensorflow as tf from adapters.tensorflow.imagenet.node_lookup import NodeLookup from sn_agent.job.job_descriptor import JobDescriptor from sn_agent.ontology import Service from sn_agent.service_adapter import ServiceManager, ServiceAdapterABC IMAGENET_CLASSIFIER_ID = 'deadbeef-aaaa-bbbb-cccc-111111111102' logger = logging.getLogger(__name__) FLAGS = None AGENT_DIRECTORY = Path(__file__).parent CHECK_ACCURACY = False MINIMUM_SCORE = 0.20 class TensorflowImageNet(ServiceAdapterABC): type_name = "TensorflowImageNet" def __init__(self, app, service: Service, required_services: List[Service] = None): super().__init__(app, service, required_services) if not service.node_id == IMAGENET_CLASSIFIER_ID: raise RuntimeError("TensorflowImageNet cannot perform service %s", service.node_id) def example_job(self): bucket_image_64 = '/9j/4AAQSkZJRgABAQAAAQABAAD/2wCEAAkGBxAQEBUQEhAQFRUQEg8SEA8VEBUQFRAPFRUWFhUVFRUYHSggGBolGxUVITEhJSkr' \ + 'Li4uFx8zODMtNygtLisBCgoKDg0OFxAQGi0lHR0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0t' \ + 'LS0tLf/AABEIAOEA4QMBEQACEQEDEQH/xAAbAAEAAgMBAQAAAAAAAAAAAAAAAgMBBQYEB//EAEEQAAIBAgIHBAgEBAMJAAAAAAAB' \ + 'AgMRBCEFBhIxQVFhInGBkRMyUmKhscHRFEJykkOC4fAHU/EVFhcjM1RjotL/xAAaAQEBAQEBAQEAAAAAAAAAAAAAAQIDBQQG/8QA' \ + 'LBEBAQABAwMEAQMDBQAAAAAAAAECAwQREiExBRNBUSIyYaEVcZEjUoGx8P/aAAwDAQACEQMRAD8A+4gAAAAAAAAAAAAAAAAAAAAA' \ + 'AAAAAAAAAAAAAAAAAAAAAAAGGwK54iC3zgu+SQZuUnyqlpGgt9an++P3Ce5h9xB6Xw6/jU/3Icp72n9xH/bOG/zqfmOU97T+4ktL' \ + 'Yd/xqf7kOV97D7iyOPovdVp/vQX3MftdGtF7pRfc0w1zEwoAAAAAAAAAAAAAAAAAGB4cTpahT9apG/Jdp/AcuWWthj5rUYvXGhD1' \ + 
'Yyl1bUUTl8+e+wx8NHjNfJ7o7Ee5bT+Ji6kj5M/Uvpp8RrhiJfxZ+D2fkZurHy5eoZ35a+tpytPfOT75NmLquN3Wd+Xnljqj4k9y' \ + 'se7lUPxM+bJ11OvI9NLmx106qyq0uZeunVU44qfTyLNStddWLGy5R8mX3KvuVNaSa/L5SaNTUX37HqpadnHdOqu6dy+5HWbrKfNb' \ + 'DDa211urS/njf7l647Y7/P7bWhrtUXrRpyXFrJ/34GuX0Y+oWeY22E1woT9aMo9cpL7l5fRhvtPLy3GF0pQq+pUi3yvZ+TK+nHVw' \ + 'y8V7A6AAAAAAAAACutWjCLlKSilvbdglsk5rntJa1Rjf0aT9+WS8FvZOXx6m8k/S43S+tk5ZObl03R8kc8tSR5etv7e3LncRpepP' \ + 'jZHK6tvh5+e5zyeV1JPe2c7bXHqt81mMQ1IsiiNyJpBuRJIKzYqslVkDJRkDDRSwsE4EgSAVJTZeqi+li5R4m5m3jqWN5ozWatTy' \ + 'VR29mXaXk/odJlK+zS3mePy6zR2tlOeVWOy/bXaj4rejXL0dPeY5fqdDSqRklKLTT3NO6ZX1yy94mFAAAAB4tK6Rhh4bUs2/Vjxk' \ + '/sHPV1ZpzmuC0zpac71Kksluj+WPcjNvDyNfXt/LKuN0hpOVR5OyPmz1LfDydTWyz/s8KObinFERZFBqRZFB0iaDcSRVZQVkqpAZ' \ + 'KoBkoACgACAAAB6cNinF2e7mdMc/tvDPh0miNLVKDvB3i/Wpt5SXTk+p1eho6+WHjw77AYyFamqkHk+HGL4p9TT1sM5nOY9IbAAA' \ + 'DgNN4x1a0pcItxiuUVkZeTr59WdcbrLXe0ocErvx3HDWvw8nd5flMWlSOD5OVkYkE0inCaDcTQbiSDSSCsoqshWSjIVlFADJQAAA' \ + 'AAoBGANto6V4dzt4HfC9n06V5xdVqXi3GtKlwnFyt70ePk/gbj0dln+VxdqV6QAAwwPmld9uX6n8zLxc/wBVaHWLCOVqqz2Vsz7u' \ + 'DOOrjz3fBu9Pn840iifM+LhJILwykGuEkF4SQaSTCs3Cs3KMoKyVWbgCjJVZuAAFAIBWQAGYQcnZK7e5Fk5JLe0bmhS2IqPLe+b4' \ + 'n0YziPqxnTOG31OntY1JflhUb8rfUsvd9uynOdr6GaeoAAAHz3WvASoVnNLsVM4vk+MSeHj7zTuGfVPFamlXUvqh5fNMpWuxmiE8' \ + '6dl7j3fyv6Hz56PPeOGe3+cf8NVUpOLtJNPk1Y+e42eXzXGztUSIAZuFZuFLgSTKrNwrNyqzcDJeVZuUAMlAAAuBko9NHBSlv7K6' \ + '7/I3MLXTHTte6nCFNcube9naSYusmOEavHaYTexTzu7bS+S5kuXPhjrud4xfSNQ9Ayw1L0tVWqVUuy98Ib7Pq978DWM4e5tdH28O' \ + '7qjT6gAAApxeGhVg6c4qUZb0/wC8mGc8JlOK+eaf1Vq4dupSvUp78vXguq4rqjnZZ4eLuNlnp/lh3jS0cY1vLMvt8uOpXpvGas0p' \ + 'Lz/0LZK6czKd3kq6JhL1W49PWXxzOWWjjfDnlt8b4eKromot2zLudvgzldDKeHHLQyjy1MPOO+El4HO4ZTzHO45TzFW0ZY5EwvLK' \ + 'YXlJMKkmGmUUZKrJRkBcqrIUpPdF+RqY2rxaujgpvku9m5p5NTCr4YCK9aXgsjc0p8tTTnzU51qNL2V14/c1+OK9WGLWYzWOK9RN' \ + '9dyMXV+mbq2+I8GDo4vSFRUqcXLaf6YR6tmPyya09DPVvEfVtUNRaWCtVqtVay3St2Kb9xPj1fwO+OPD2dts8dLve9dgbfaAAAAA' \ + 
'Bhq+QHznTujYbcuzZpvNZXMWPJ3Whjzzw0FTDOLyf0Jw864WeF2D9NUqRpRW1KbtFO2fiWWtYdeWUxnmul/3XxKhtP0V0rumpv5t' \ + 'WNd3oTZanHPZop4iKbjJ7Mou0oyi04vk7GeY+TLKS8XtUJOnLe6T77fVDtWfxv0rlg6T/hx8GvoydGN+Gbp4X4QejqXsyXc2S6WH' \ + '0ntYfSD0bS9/zZn2cE9rBB4Cnzl5/wBB7OKe3ix+Dpe1Lz/oPaxTow+2fw9Dm/Me3gvTgbGHX+rL06cT/THXw8eEfmPwh16cQlpW' \ + 'jHdbwRfcxie/hPDz1dPxW5Nmbqxn378R4q2sEuCS78zN1anuZ14aukq89zfgrE6sqcW+arhhJzzlJLx2n8CdNaxwjY4HRlNO7W1+' \ + 'rd5I1MY+jTwj6XqHhO05WSUI2StazfLwud8Y9jbY8R2xp9QAAAAAAAByms1Dtt81czXybnHlyWKgHlZx7dUpxjio34xmovlJoR12' \ + 'Vk1py7v0hp7jgdddF1FV/FRjeEk4VWt8WvVk1y4XOWU4vLyPUtC3jUx/5cVV0nTU3G0nsuzkkmr9M8znbHk3BNY+m+L8mZ6oxeyf' \ + '4te38y9X7s9V+0ZYz3/ix1funVkreNXtrzJz+7POorljF7a8xynGorli17a8yL0ZqpYqPtfML7eSqWJjzfkRqaVQ9OuUvgg3NNly' \ + 'V7KL8X9jcxi8RJdy8i8SKupRvnfeWNeXrpQK1G1wUBH1aUfStS7KEo8XsvwzR2j2dGcYulK6gAAAAAAAGl1kpXipcrolcdac4uJx' \ + 'sM2R5GpGuldO6bTWafJh895l5jf6L1nltRhWtZ5elW9Pg5fcSvQ0N/eZjqf5dWlFx2Wk4yWaeakmbep2sfK9dtVPwsvS0l/ypPL/' \ + 'AMbfB/RnzamHDwt9tbpXrx/Tf4crFWODzbeXogWOVZcCpMlM6Rmx0map0yNzJBwDXKLiVeWNkcrynShdmse9S1ZTjmdWeV+zcWKu' \ + 'oQaQxnZqPZCJXad22wECx9ejHdatVLVUuacfhf6HSeXr4eHWmmwAAAAAAADx6Xp7VKXTMVnOcxwWOhmZeTqzu1NVB8mTzzRHKt/q' \ + '3p30dqNV9jdCb/I+T6fIsr0NnvOn8M/DrqtONSLpzSlGSs4vNNM3e717JlOL4r5ZrZqxLCS24XlRk+zLe4P2ZfRny6mnw/Ob3ZXQ' \ + 'vVj+m/w51HJ59WRYjFjNioqnAljcqqUTLpKg0GpUWiryspqyOuMS1ZSiWEWRWaJle8bxeqKOjT10Vcjtj3brRsc78s/I1i9DRjpN' \ + 'AVO3CXvr4v8AqXGvR0/Dujo2AAAAAAAARqw2otc00BwGk6dm+hl5evj3aWsg+HJ5ZkcqpkiMV0eren9m1Gs+zup1G/V92T5dTUye' \ + 'ls95x+Gfj4rqa0Yzi4TSlGSs4vNNGr3evZMpxfD5xrTqrLDt1aV5Ut7W+VLv5rqfNnp8d4/P730+6f56ffH/AKcucXlpKReU4ZNI' \ + 'rnEzY3KqaI2wo3yEnKrZRtY7xEopIvDUTptN9UZuPLeL0QefmbbnPL24ZB3wjpNB4dTlsvds1JS/TGLb+hqPS0I2eiE1GHfH5oYe' \ + 'H26c7O/OjYAAAAAAAAA43WCjapJdb+eZmvh3OLmcQg83J46gcclMjLnVTDLf6C1gdO1Ks24boz3uHR818izJ6O033R+Gp4+3Vqom' \ + 'uDTW/emmbe3LLOzkNY9UFO9XDJJ75Udyf6OT6HHPT+Y8nd+mzLnPS8/ThatOUW4yTTi7OLVmn1R89nDw8sbjeL5RUhyzwncqcITj' \ + 
'yHDUqdKnbM3jjw0lNXRpVdN8GaIzh09pkbx8vXDj3FdI2OFW4Pr046LRTtRrz5whRj+qpLP/ANYsvPavQ0vFbnA07OCW/aj80akf' \ + 'bjOI7c2oAAAAAAAAA53Welmpc1byJXz7idnG4qJHk5x4agcMlEiVzqqRlmq2GWx0RpueH7L7VPjDjHrF8O4sy4fVtt7lo9r3x/8A' \ + 'eHX4TGwqx26crrjwcXya4M3Ly9/S1sNXHqwrx6Y0PQxS7atNerVjlJd/NGcsJk57ja6evPynf7cFprV2vhrtrbhwqxzVveX5T58s' \ + 'Li8HcbHU0e/mfbUJmI+PhJ3NeGWVOxZViamjcrcqFWPFGotidGpffv8AmWxrHJ6oR3+BHaYvbTqpfcza+rGyOj1cTqUllaPpJz/U' \ + '7KMX3JJ+bNYd3o7Wc48us0NQ2qyfCF5Pv3L++h1j7XTFQAAAAAAAAAavWGlelf2X8H/aJXPVnOLhcZEjyNSNbUQfNk88kRyqqSIz' \ + 'VciM1XIjFZw2KqUpbcJOL+DXJriizsunrZ6WXVheK6fR2sVOpaNS0Jc/ySffwNzLl7u29Swz7Z9r/DcqbXVPxTRXp9q0ukNXMNWb' \ + 'lFeim+MV2W+sfsYuE8vh1/T9LU7ztf2c7pDVvEUs9j0kfah2suq3ozca8nW9P1dPxOZ+zR1cO1/8vJnO4/T4rjZ5UNNGfDLO0WZN' \ + 'cl0amZ2WxqpL+2Lk6zLjw6PQur1as1OrFwp79lq0p+HBFmNr0tvtM871Z9p9O2pUowiopJJKySO0nD2ZJJxHS6Hwvo6d2u1PN9Fw' \ + 'Roe8AAAAAAAAAAox1PapyXuvzWYS+Hz7HQzMvJ1se7VVUHx5PNNEc7FUkRiqpEYquRGaqkg51XJBHrwOlq1HKMrx9iWcfDl4FmVj' \ + '69Deauj+m9vqt/hNY6M8pp03z9aPnvRrqj2NH1LTy7Zdr/DbUayktqE1Jc07mnoY545TnG8o4mjTqf8AUpQl1az8ycSs56OGf6py' \ + '1tbVvCT3bcPHaXxM3CPkz9N0cvHZ4p6l0nurtfyoz7T576Tj8ZUhqRS415PuSRPaJ6Tj85VtMBq5hKLUlHaks1KT2rPotyNTTkfX' \ + 'pbHS07zJ3bb06TUVvbSS4tvodH19o3+i9FbHbqWcuEeEPuyq2oQAAAAAAAAAAAHCaYo7M5Lk2jLztxj3aGsg8/J5ZkcqokRiqZkY' \ + 'quQYquRGKrkGVbCosKzTqyi7xk4vmnYOuOeWN/G8NhQ1gxMPzqS96KfxLMq+zDf62Pzz/d7aWtUvzUovuk18y9T6sfU8vnFctaY/' \ + '5L/ei9Tr/Up/tRlrM/y0l4y+xOtb6h9YqpaZrTyuor3Vb4vMnVWLutTJvdT6DniYN52bm289y+9jWLvoTm930g2+0AAAAAAAAAAA' \ + 'ADldZ6Nql/aSf0M18m4xcniFmHl5PFUI4V55krnVUiMVXIM1CRGKrYRWwItBpFoNRGwWFg3Ekg6RbBEdI9mGiV203fag0O1OfsxU' \ + 'fFu/0OmL1NvO3LtDT6QAAAAAAAAAAAANJrNRvCMuV0SuOvOcXEYuOZHkak7tfVQfPk80iOVVMyygwxVciM1BhlWwqLQWItEa+WA0' \ + 'FaiaQbi2miOuL3YZFj6dOPp2peH2cNte3JvwWS+TOserozjFvyuoAAAAAAAAAAAAHi0xS2qMuma8CVnOc4uAx0cyPH1p3auqg+TK' \ + 'PLMjlVUiMq5EZqEgxVciIgwiLDTFgsYsG4ykGolEjcW00V1xjYYSOZY+vSnd9e0VQ9HQpw9mEb99rv4nV6+M4kj1hQAAAAAAAAAA' \ + 
'AAI1I3TXNNAfPtK0XGTT4Noy8rcY8VpKyD4co8s0RxqmRGKrkRmq5BmoMjKLC8otBUbBYxYNMoNROKI6RbTQdcY3ur2EdavTppb5' \ + 'JvpFZt+SN4vv2+POUfWUdHpsgAAAAAAAAAAAAAAc5rVo5temity7a6e0Svk3OnzOqOIxCI8nJ46hHCqJEYqtkYVsMoMjKDQGGg0i' \ + 'GoMNCI1EkHSL6Yd8Y+nakaFdGn6aorTqpWT3wp8F3vf5HXGPX2+n048105p9AAAAAAAAAAAAAAABhoDjtY9W5K9WirrfKmt8esea' \ + '6E4ebudrf1Yf4cbVVjLy8o88iOdVyRGKraDKLRGUGgIhYiGoEagkGonTi20km23ZJZtvkkHbCc3iPoeqOqDg1XxKW0s6dF57L4Sn' \ + '16cDpjj9vW2+26fyydubfcAAAAAAAAAAAAAAAAAADT6Y1doYntNbM/8AMjx71uZOHza21w1fPlxek9UsTSu4x9JHnDfbrHf5GbHl' \ + '6ux1MfHdz1anKLtJNPk1Z+TMvhyxs8qmgxUGiM1BoIjYLGLBqRKlRlN7MYyk+CScm/BEdccMsvEdJonUfFVrOolRjzlnLwivrY1M' \ + 'a+/S2GeXfLs7zQerWHwmcI7U+NWWcvD2fA6THh6elt8NPxO7cldwAAAAAAAAAAAAAAAAAAAAACjEYSnUVp04S/VFMM5YY5eY02J1' \ + 'Nwc81CUP0zaXk7memPly2Ojl8NfU/wAP6D3Vqq8Iv6Dpcb6Zp/dU/wDDyn/3E/2L7k6Gf6Xh9raX+HuHXrVar7tmP0L0xvH03Tnm' \ + '1sMNqZgYb6Tn+ubfwVkOmO2Oy0cfhusLg6VJWp04QXuxUfkafTjhjj4i8NAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' \ + 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//2Q=='; cup_image_64 = '/9j/4AAQSkZJRgABAQAAAQABAAD/2wCEAAkGBxAPDw0PDQ0PDg0PDQ0PDQ0PDQ8ODQ0NFRIWFhUSExUYHyghGB4lJxMWITEhJSor' \ + 'Li4uGB8zODMsNygtLisBCgoKDQ0NDxAPFSsdFRktLSs4LCsrKysrKysrKzcrLSsrKy0tKzcrKysrKysrKystNy0rKysrKysrKysr' \ + 'KysrK//AABEIAOEA4QMBIgACEQEDEQH/xAAcAAEAAQUBAQAAAAAAAAAAAAAAAQIDBAUGBwj/xABAEAEAAgECAgcEBAoLAQAAAAAA' \ + 'AQIDBBEFIQYHEhMxQWFRcZGxMkKBghQiI3KDkqGissEIF1NUYpOjwtHh8BX/xAAWAQEBAQAAAAAAAAAAAAAAAAAAAQL/xAAWEQEB' \ + 'AQAAAAAAAAAAAAAAAAAAEQH/2gAMAwEAAhEDEQA/APcQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' \ + 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' \ + 'AEJavjHGKabsRe1K3vv2O3aKVnbbfn9scgbRG7jtV0g1XjSKxWfCa0i1fjzYF+OaqfHPMfmxWPkD0Dc3ee//AEs0/SzXn70q6ay0' \ + '+N7frSlHf7m7iseaZ+tPxlk4r+s/GVHWG7n8NLz4Tf8AWmGTSuSNvyto+9MiVuBqq58sfW329tYU243XHMRmmkbzFYmLecztHKRW' \ + '3EQkAAAAAAAAAAAAAABby56UibXvWlY8bWmKxH2y13SPjWPRae+fJtO3KlZtFe3f2b+UcpmZ8oiXzv0g41rOOavutPGfUT2prWkX' \ + 
'tGGN5+rjjlWvrbefOdvAH0Nk6T6Cv0tdp4/TUl551q8c0mpppq6fU481/wAtE1pbteVZ5uMp1McVmsTP4LW08+xOptvHpvFWB0g6' \ + 'DcQ4Vp41GpjT1xfhGPfusk5LTNpiu2+0bRyhUYmk4jk0/b7jJOObREb18vWPZ4s7F0y1lYiLXrl287d5Fp98xZps/jLGmVSOjv03' \ + '1EzMzHYjs8opett7b+M9uszt6Ndbp7xKsztfBNd+W+Cm+3q02SWLkNV0tesjicf3f/Jj/lcjrO4p7dNH6H/tyMoQdl/Wjxfy1GGv' \ + 'l+LhhTbrH4raPxtZbf8Aw1rVx6uAdFm6Wa/J9PWZ538fykx8mz6LZb5dXpu8vbJPfYtptabfXj2uQxO06vcXb12jr7dRh393bjcH' \ + '0qECKAAAAAAAAAAAAAA8O/pBcbvXLp9JWZikaeMt9vC1r3tG3+nHxbrqI4TSmnz55rE5N6Ui3nG9Ztaft3j4Of8A6RXDLxl0urrE' \ + '93fDGG0+UXpe1o+MX/Y3HUXxuk0y6e1oi2SKZMUT52rE1vX38o+Erg9ecV1xaLvuC62IjnSMeSPSa3iXaxLXdI9H3+j1WKY37eDJ' \ + 'G3rtvHyQfLVrdqtbR9albfGIY9pXcNdsVYnxpNqT762mv8li0tIoySxrr1pWLoKJQlAEKoUK4BfxPSeqLS95r8M+VO1f3bVnb+Tz' \ + 'XD4va+ozQ721OaY5Ux0pE+tpmZ/hQevwAKAAAAAAAAAAAAAA03Szo7i4lpMulz8otzx5Ij8bFlj6N4/94TL5u4jwniHANVtel+xF' \ + '+1iy07Vcd4ieVsd48J9PH5vqpj63RYs9Jx58VMuO0bWpkrF6z9kg8e4P14460rXV6bJa8RtN68pn37bxLbf138PmOeDUe6YiGx4t' \ + '1P8AC802tipl0lp/sctrUj3UvvEfY4/inUXkjedJrcWSee1NRinHM/epv8io4HUWre2ovjiYx31WovjiY2mMd8k2pvHumGtyOy4l' \ + '0O1ugwXnXY6xvetaXpkjJS0RTaNp8fKPGHH6iNpUY9lmy5ZasCiUJlAJhMKd1UAyNNG9ofS/VTwzuOG4rTG1s8zln29nwr8v2vn7' \ + 'olwy2q1enwUjnkyRX3R5z9kRMvqzSYK4sePHSNqY6UpWPZWsbQir4jc3BIjdIAAAAAAAAAAAAAAAANV0m4VGs0ubBPjakzjn2ZI5' \ + '1l8x8b0VsOTJS9drVtatonymJ2fWEw8f65OjG141uGm9cnLP2a8q5IjlafZv84XDXit1qWTnpsxpEUShMqQSrqt7tl0f4Xk1eoxY' \ + 'MUb3yWiPbFY352n0jxQeudRnAezGXX5K7bTOHTzPtmI7do+O3xevd40PCNPTS4MOnxRtjxUiseU2nztPrM82dXNuK2PeJ7xg1uuR' \ + 'YGZFlcSxK2X6SC8IhIAAAAAAAAAAAAAACm9ItExaImJ8YmN4mFQDlOK9XfCtVM2y6GlbT9fDfJgn9yYhodR1M8Jn6P4VT0jUzaP3' \ + 'ol6SpsDyfN1K6D6up1cfexz/ALWPHUvoonnqdVMe/HH8nrlqLU4geZYOqDhlfpVz5PztRav8OzpODdEtJo940unri35WtG83mPW0' \ + 'zM/tdT3SYxA11NIvV07N7tVFAY1MK53a92U9kFqtF2tVUQkBKEgAAAAAAAAAAAAAAAAI2SApmDZUAp2TskBGxskBAkBAkAAAAAAA' \ + 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB//2Q==' return [ { "input_type": "attached", "input_data": { "images": [bucket_image_64, 
cup_image_64], "image_types": ['jpg', 'jpg'] }, "output_type": "attached" } ] def post_load_initialize(self, service_manager: ServiceManager): # Load the graph model. graph_path = os.path.join(AGENT_DIRECTORY, 'model_data', 'classify_image_graph_def.pb') with tf.gfile.FastGFile(graph_path, 'rb') as f: self.graph_def = tf.GraphDef() self.graph_def.ParseFromString(f.read()) tf.import_graph_def(self.graph_def, name='') # Create our long-running Tensorflow session self.session = tf.Session() # Save the softmax tensor which will run the model. self.softmax_tensor = self.session.graph.get_tensor_by_name('softmax:0') # Creates node ID --> English string lookup. self.node_lookup = NodeLookup() def perform(self, job: JobDescriptor): # Process the items in the job. A single job may include a request to classify # many different images. Each item, in turn, may include an array of images. results = [] for job_item in job: # Make sure the input type is one we can handle... input_type = job_item['input_type'] if input_type != 'attached': logger.error("BAD input dict %s", str(job_item)) raise RuntimeError("TensorflowImageNet - job item 'input_type' must be 'attached'.") # Get the images to classify, while making sure our job item dict is of the appropriate format. input_data = job_item['input_data'] if input_data is None: raise RuntimeError("TensorflowImageNet - job item 'input_data' must be defined.") images_to_classify = input_data.get('images') if images_to_classify is None: raise RuntimeError("TensorflowImageNet - job item 'input_data' missing 'images'") image_types = input_data.get('image_types') if image_types is None: raise RuntimeError("TensorflowImageNet - job item 'input_data' missing 'image_types'") # Clear the predictions for the new job item. predictions = [] prediction_confidences = [] # Classify all the images for this job item. 
for image, image_type in zip(images_to_classify, image_types): binary_image = base64.b64decode(image) if (image_type == 'jpeg' or image_type == 'jpg'): decoder_key = 'DecodeJpeg/contents:0' elif (image_type == 'png'): decoder_key = 'DecodeJpeg/contents:0' elif (image_type == 'gif'): decoder_key = 'DecodeGif/contents:0' raise RuntimeError("TensorflowImageNet - cannot decode gif images") elif (image_type == 'bmp'): decoder_key = 'DecodeBmp/contents:0' raise RuntimeError("TensorflowImageNet - cannot decode bmp images") else: decoder_key = 'DecodeJpeg/contents:0' logger.warn("Missing image type {0}".format(image_type))<|fim▁hole|> raw_predictions = self.session.run(self.softmax_tensor, {decoder_key: binary_image}) logger.debug("classifying '{0}' image".format(image_type)) # Pull the predicted scorces out of the raw predictions. predicted_scores = raw_predictions[0] # Sort and strip off the top 5 predictions. top_predictions = predicted_scores.argsort()[-5:][::-1] image_predictions = [] image_scores = [] for predicted_node_id in top_predictions: # Get a text description for the top predicted node. description = self.node_lookup.id_to_string(predicted_node_id) # Cast to a float so JSON can serialize it. Normal Tensorflow float32 are not serializable. score = float(predicted_scores[predicted_node_id]) logger.debug(" prediction = '{0}', score = {1}".format(description, score)) # Add only those that exceed our minimum score to the predictions and scores lists. if (score > MINIMUM_SCORE): image_predictions.append(description) image_scores.append(score) # Append the filtered predictions and scores for this image. predictions.append(image_predictions) prediction_confidences.append(image_scores) # Add the job results to our combined results array for all job items. single_job_result = { 'predictions': predictions, 'confidences': prediction_confidences, } results.append(single_job_result) return results<|fim▁end|>
<|file_name|>karma.conf.js<|end_file_name|><|fim▁begin|>// Karma configuration // http://karma-runner.github.io/0.12/config/configuration-file.html // Generated on 2015-05-11 using // generator-karma 0.8.3 module.exports = function(config) { 'use strict'; config.set({ // enable / disable watching file and executing tests whenever any file changes autoWatch: true, // base path, that will be used to resolve files and exclude basePath: '../', // testing framework to use (jasmine/mocha/qunit/...) frameworks: ['jasmine'], // list of files / patterns to load in the browser files: [ 'app/scripts/**/*.js', 'test/mock/**/*.js', 'test/spec/**/*.js' ], // list of files / patterns to exclude exclude: [], // web server port port: 8080, <|fim▁hole|> // - ChromeCanary // - Firefox // - Opera // - Safari (only Mac) // - PhantomJS // - IE (only Windows) browsers: [ 'PhantomJS' ], // Which plugins to enable plugins: [ 'karma-phantomjs-launcher', 'karma-jasmine' ], // Continuous Integration mode // if true, it capture browsers, run tests and exit singleRun: false, colors: true, // level of logging // possible values: LOG_DISABLE || LOG_ERROR || LOG_WARN || LOG_INFO || LOG_DEBUG logLevel: config.LOG_INFO, // Uncomment the following lines if you are using grunt's server to run the tests // proxies: { // '/': 'http://localhost:9000/' // }, // URL root prevent conflicts with the site root // urlRoot: '_karma_' }); };<|fim▁end|>
// Start these browsers, currently available: // - Chrome
<|file_name|>run_tests.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python """Execute the tests for bs_tools. The golden test outputs are generated by the script generate_outputs.sh. You have to give the root paths to the source and the binaries as arguments to the program. These are the paths to the directory that contains the 'projects' directory. Usage: run_tests.py SOURCE_ROOT_PATH BINARY_ROOT_PATH """ import logging import os.path import sys # Automagically add util/py_lib to PYTHONPATH environment variable. path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..', 'util', 'py_lib')) sys.path.insert(0, path) import seqan.app_tests as app_tests def main(source_base, binary_base): """Main entry point of the script.""" print 'Executing test for bs_tools' print '=========================' print ############################################################## ### Casbar ############################################################## ph = app_tests.TestPathHelper( source_base, binary_base, 'apps/bs_tools/tests') # tests dir # ============================================================ # Auto-detect the binary path. # ============================================================ path_to_bisar = app_tests.autolocateBinary( binary_base, 'apps/bs_tools', 'bisar') path_to_casbar = app_tests.autolocateBinary( binary_base, 'apps/bs_tools', 'casbar') # ============================================================ # Built TestConf list. # ============================================================ # Build list with TestConf objects, analoguely to how the output # was generated in generate_outputs.sh. conf_list = [] ph.outFile('-') # To ensure that the out path is set. 
transforms = [ app_tests.ReplaceTransform(os.path.join(ph.source_base_path, 'apps/bs_tools/tests') + os.sep, '', right=True), app_tests.ReplaceTransform(ph.temp_dir + os.sep, '', right=True), app_tests.RegexpReplaceTransform(r'\tVN:[^\t]*', r'\tVN:VERSION', right=True, left=True) ] # We prepare a list of transforms to apply to the output files. This is # used to strip the input/output paths from the programs' output to # make it more canonical and host independent. # Transforms for SAM output format only. Make VN field of @PG header canonical. #sam_transforms = [app_tests.RegexpReplaceTransform(r'\tVN:[^\t]*', r'\tVN:VERSION', right=True, left=True)] # ============================================================ # se # ============================================================ # App TestConf objects to conf_list, just like this for each # test you want to run. # 0 conf = app_tests.TestConf( program=path_to_bisar, redir_stdout=ph.outFile('other.stdout'), args=['-e3', str(4), '-e4', str(5), #-e3 4 -e4 5 '-o', ph.outFile('reads_se_N6000_0.CT_GA.verified.sam'), ph.inFile('reads_se_N6000.CT_GA.sam'), ph.inFile('hg18_chr21_3000.fa'), ph.inFile('reads_se_N6000.fastq')], to_diff=[#(ph.inFile('STDOUT_FILE'), #ph.outFile('STDOUT_FILE')), (ph.inFile('reads_se_N6000_0.CT_GA.verified.sam'), ph.outFile('reads_se_N6000_0.CT_GA.verified.sam'), transforms)]) conf_list.append(conf) # 1 conf = app_tests.TestConf( program=path_to_bisar, redir_stdout=ph.outFile('other.stdout'), args=['-gas', str(-4.5), '-ges', str(-2.0), '-der', str(0.001), '-bsc', str(0.99), '-gmr', str(0.5), '-i', str(0.8), '-rn', str(0.001), '-pms', str(0.9), '-e3', str(4), '-e4', str(5), # -gas -4.5 -ges -2.0 -der 0.001 -bsc 0.99 -gmr 0.5 -i 0.8 -rn 0.001 -pms 0.9 -mq 0 -e3 4 -e4 5 '-o', ph.outFile('reads_se_N6000_1.CT_GA.verified.sam'), ph.inFile('reads_se_N6000.CT_GA.sam'), ph.inFile('hg18_chr21_3000.fa'), ph.inFile('reads_se_N6000.fastq')], to_diff=[#(ph.inFile('STDOUT_FILE'), #ph.outFile('STDOUT_FILE')), 
(ph.inFile('reads_se_N6000_1.CT_GA.verified.sam'), ph.outFile('reads_se_N6000_1.CT_GA.verified.sam'), transforms)]) conf_list.append(conf) # 2 conf = app_tests.TestConf( program=path_to_bisar, redir_stdout=ph.outFile('other.stdout'), args=['-nse', '-nsi', '-nsd', '-gas', str(-4.5), '-ges', str(-2.0), '-der', str(0.001), '-bsc', str(0.99), '-gmr', str(0.5), '-i', str(0.8), '-rn', str(0.001), '-pms', str(0.9), '-e3', str(4), '-e4', str(5), # -nse -nsi -nsd -gas -4.5 -ges -2.0 -der 0.001 -bsc 0.99 -gmr 0.5 -i 0.8 -rn 0.001 -pms 0.9 -mq 0 -e3 4 -e4 5 '-o', ph.outFile('reads_se_N6000_2.CT_GA.verified.sam'), ph.inFile('reads_se_N6000.CT_GA.sam'), ph.inFile('hg18_chr21_3000.fa'), ph.inFile('reads_se_N6000.fastq')], to_diff=[(ph.inFile('reads_se_N6000_2.CT_GA.verified.sam'), ph.outFile('reads_se_N6000_2.CT_GA.verified.sam'), transforms)]) conf_list.append(conf) # 3 conf = app_tests.TestConf( program=path_to_bisar, redir_stdout=ph.outFile('other.stdout'), args=['-nse', '-nsi', '-nsd', '-gas', str(-4.5), '-ges', str(-2.0), '-der', str(0.001), '-bsc', str(0.99), '-gmr', str(0.2), '-i', str(0.8), '-rn', str(0.001), '-pms', str(0.9), '-e3', str(4), '-e4', str(5), # -nse -nsi -nsd -gas -4.5 -ges -2.0 -der 0.001 -bsc 0.99 -gmr 0.2 -i 0.8 -rn 0.001 -pms 0.9 -mq 0 -e3 4 -e4 5 '-o', ph.outFile('reads_se_N6000_3.CT_GA.verified.sam'), ph.inFile('reads_se_N6000.CT_GA.sam'), ph.inFile('hg18_chr21_3000.fa'), ph.inFile('reads_se_N6000.fastq')], to_diff=[(ph.inFile('reads_se_N6000_3.CT_GA.verified.sam'), ph.outFile('reads_se_N6000_3.CT_GA.verified.sam'), transforms)]) conf_list.append(conf) # 4 conf = app_tests.TestConf( program=path_to_bisar, redir_stdout=ph.outFile('other.stdout'), args=['-nse', '-nsi', '-nsd', '-gas', str(-4.5), '-ges', str(-2.0), '-der', str(0.001), '-bsc', str(0.99), '-gmr', str(0.8), '-i', str(0.8), '-rn', str(0.001), '-pms', str(0.9), '-e3', str(4), '-e4', str(5), # -nse -nsi -nsd -gas -4.5 -ges -2.0 -der 0.001 -bsc 0.99 -gmr 0.8 -i 0.8 -rn 0.001 -pms 0.9 -mq 0 -e3 
4 -e4 5 '-o', ph.outFile('reads_se_N6000_4.CT_GA.verified.sam'), ph.inFile('reads_se_N6000.CT_GA.sam'), ph.inFile('hg18_chr21_3000.fa'), ph.inFile('reads_se_N6000.fastq')], to_diff=[(ph.inFile('reads_se_N6000_4.CT_GA.verified.sam'), ph.outFile('reads_se_N6000_4.CT_GA.verified.sam'), transforms)]) conf_list.append(conf) # ============================================================ # pe # ============================================================ # 0 conf = app_tests.TestConf( program=path_to_bisar, redir_stdout=ph.outFile('other.stdout'), args=['-e3', str(4), '-e4', str(5), #-e3 4 -e4 5 '-o', ph.outFile('reads_pe_N6000_0.CT_GA.verified.sam'), ph.inFile('reads_pe_N6000.CT_GA.sam'), ph.inFile('hg18_chr21_3000.fa'), ph.inFile('reads_pe_N6000.L.fastq'), ph.inFile('reads_pe_N6000.R.fastq')], to_diff=[(ph.inFile('reads_pe_N6000_0.CT_GA.verified.sam'), ph.outFile('reads_pe_N6000_0.CT_GA.verified.sam'), transforms)]) conf_list.append(conf) ############################################################## ### Casbar ############################################################## # 0 conf = app_tests.TestConf( program=path_to_casbar, redir_stdout=ph.outFile('other.stdout'), args=['-nec', '-mc', str(6), '-msc', str(5), '-mpc', str(0.5), '-hes', str(0.005), '-o', ph.outFile('snps_se_0.vcf'), '-b', ph.outFile('meths_se_0.bed'), ph.inFile('hg18_chr21_3000.fa'), ph.inFile('reads_se_N6000_2.CT_GA.verified.pos_so.sam')], to_diff=[(ph.inFile('snps_se_0.vcf'), ph.outFile('snps_se_0.vcf')), (ph.inFile('meths_se_0.bed'), ph.outFile('meths_se_0.bed'))]) conf_list.append(conf) # 1 conf = app_tests.TestConf(<|fim▁hole|> redir_stdout=ph.outFile('other.stdout'), args=['-nec', '-mc', str(2), '-msc', str(3), '-mpc', str(0.5), '-hes', str(0.005), '-o', ph.outFile('snps_se_1.vcf'), '-b', ph.outFile('meths_se_1.bed'), ph.inFile('hg18_chr21_3000.fa'), ph.inFile('reads_se_N6000_2.CT_GA.verified.pos_so.sam')], to_diff=[(ph.inFile('snps_se_1.vcf'), ph.outFile('snps_se_1.vcf')), 
(ph.inFile('meths_se_1.bed'), ph.outFile('meths_se_1.bed'))]) conf_list.append(conf) # ============================================================ # pe # ============================================================ # 0 conf = app_tests.TestConf( program=path_to_casbar, redir_stdout=ph.outFile('other.stdout'), args=['-nec', '-mc', str(6), '-msc', str(5), '-mpc', str(0.5), '-hes', str(0.005), '-o', ph.outFile('snps_pe_0.vcf'), '-b', ph.outFile('meths_pe_0.bed'), ph.inFile('hg18_chr21_3000.fa'), ph.inFile('reads_pe_N6000_0.CT_GA.verified.pos_so.sam')], to_diff=[(ph.inFile('snps_pe_0.vcf'), ph.outFile('snps_pe_0.vcf')), (ph.inFile('meths_pe_0.bed'), ph.outFile('meths_pe_0.bed'))]) conf_list.append(conf) # ============================================================ # Execute the tests. # ============================================================ failures = 0 for conf in conf_list: # Output to the user. print ' '.join([os.path.basename(conf.program)] + conf.args) res = app_tests.runTest(conf) if res: print 'OK' else: failures += 1 print 'FAILED' # Cleanup. ph.deleteTempDir() print '==============================' print ' total tests: %d' % len(conf_list) print ' failed tests: %d' % failures print 'successful tests: %d' % (len(conf_list) - failures) print '==============================' # Compute and return return code. return failures != 0 if __name__ == '__main__': sys.exit(app_tests.main(main))<|fim▁end|>
program=path_to_casbar,
<|file_name|>axum.rs<|end_file_name|><|fim▁begin|>use axum::{ body::{boxed, Full}, handler::Handler, http::{header, StatusCode, Uri}, response::{Html, IntoResponse, Response}, routing::{get, Router}, }; use mime_guess; use rust_embed::RustEmbed; use std::net::SocketAddr; #[tokio::main] async fn main() { // Define our app routes, including a fallback option for anything not matched. let app = Router::new() .route("/", get(index_handler)) .route("/index.html", get(index_handler)) .route("/dist/*file", static_handler.into_service()) .fallback(get(not_found)); // Start listening on the given address. let addr = SocketAddr::from(([127, 0, 0, 1], 3000)); println!("listening on {}", addr); axum::Server::bind(&addr).serve(app.into_make_service()).await.unwrap(); } // We use static route matchers ("/" and "/index.html") to serve our home // page. async fn index_handler() -> impl IntoResponse { static_handler("/index.html".parse::<Uri>().unwrap()).await } // We use a wildcard matcher ("/dist/*file") to match against everything // within our defined assets directory. This is the directory on our Asset // struct below, where folder = "examples/public/". async fn static_handler(uri: Uri) -> impl IntoResponse { let mut path = uri.path().trim_start_matches('/').to_string(); if path.starts_with("dist/") { path = path.replace("dist/", ""); } StaticFile(path) } // Finally, we use a fallback route for anything that didn't match. 
async fn not_found() -> Html<&'static str> { Html("<h1>404</h1><p>Not Found</p>") } #[derive(RustEmbed)] #[folder = "examples/public/"] struct Asset; pub struct StaticFile<T>(pub T); impl<T> IntoResponse for StaticFile<T> where T: Into<String>, { fn into_response(self) -> Response { let path = self.0.into(); <|fim▁hole|> let mime = mime_guess::from_path(path).first_or_octet_stream(); Response::builder().header(header::CONTENT_TYPE, mime.as_ref()).body(body).unwrap() } None => Response::builder().status(StatusCode::NOT_FOUND).body(boxed(Full::from("404"))).unwrap(), } } }<|fim▁end|>
match Asset::get(path.as_str()) { Some(content) => { let body = boxed(Full::from(content.data));
<|file_name|>sqrt.rs<|end_file_name|><|fim▁begin|>use {Style, Sign, Float}; impl Float { pub fn sqrt(mut self) -> Float { self.debug_assert_valid(); let prec = self.prec; match self.style { Style::NaN => Float::nan(prec), Style::Infinity => { match self.sign { Sign::Pos => Float::inf(prec, Sign::Pos), Sign::Neg => Float::nan(prec), } } Style::Zero => Float::zero_(prec, self.sign), Style::Normal => { if self.sign == Sign::Neg { return Float::nan(prec); } // use this instead of % 2 to get the right sign // (should be 0 or 1, even if exp is negative) let c = self.exp & 1; let exp = (self.exp - c) / 2; // we compute sqrt(m1 * 2**c * 2**(p + 1)) to ensure // we get the full significand, and the rounding bit, // and can use the remainder to check for sticky bits. let shift = prec as usize + 1 + c as usize; self.signif <<= shift; let (mut sqrt, rem) = self.signif.sqrt_rem().unwrap(); let bits = sqrt.bit_length(); assert!(bits >= prec + 1); let unshift = bits - prec; let ulp_bit = sqrt.bit(unshift); let half_ulp_bit = sqrt.bit(unshift - 1); let has_trailing_ones = rem != 0 || sqrt.trailing_zeros() < unshift - 1; sqrt >>= unshift as usize; let mut ret = Float { prec: prec, sign: Sign::Pos, exp: exp,<|fim▁hole|> }; if half_ulp_bit && (ulp_bit || has_trailing_ones) { ret.add_ulp(); } ret } } } }<|fim▁end|>
signif: sqrt, style: Style::Normal,
<|file_name|>progressLinear.js<|end_file_name|><|fim▁begin|>/*! * Angular Material Design * https://github.com/angular/material * @license MIT * v1.1.0-rc3-master-30e6657 */ (function( window, angular, undefined ){ "use strict"; /** * @ngdoc module * @name material.components.progressLinear * @description Linear Progress module! */ angular.module('material.components.progressLinear', [ 'material.core' ]) .directive('mdProgressLinear', MdProgressLinearDirective); /** * @ngdoc directive * @name mdProgressLinear * @module material.components.progressLinear * @restrict E * * @description * The linear progress directive is used to make loading content * in your app as delightful and painless as possible by minimizing * the amount of visual change a user sees before they can view * and interact with content. * * Each operation should only be represented by one activity indicator * For example: one refresh operation should not display both a * refresh bar and an activity circle. * * For operations where the percentage of the operation completed * can be determined, use a determinate indicator. They give users * a quick sense of how long an operation will take. * * For operations where the user is asked to wait a moment while * something finishes up, and it’s not necessary to expose what's * happening behind the scenes and how long it will take, use an * indeterminate indicator. * * @param {string} md-mode Select from one of four modes: determinate, indeterminate, buffer or query. * * Note: if the `md-mode` value is set as undefined or specified as 1 of the four (4) valid modes, then `indeterminate` * will be auto-applied as the mode. * * Note: if not configured, the `md-mode="indeterminate"` will be auto injected as an attribute. If `value=""` is also specified, however, * then `md-mode="determinate"` would be auto-injected instead. * @param {number=} value In determinate and buffer modes, this number represents the percentage of the primary progress bar. 
Default: 0 * @param {number=} md-buffer-value In the buffer mode, this number represents the percentage of the secondary progress bar. Default: 0 * @param {boolean=} ng-disabled Determines whether to disable the progress element. * * @usage * <hljs lang="html"> * <md-progress-linear md-mode="determinate" value="..."></md-progress-linear> * * <md-progress-linear md-mode="determinate" ng-value="..."></md-progress-linear> * * <md-progress-linear md-mode="indeterminate"></md-progress-linear> * * <md-progress-linear md-mode="buffer" value="..." md-buffer-value="..."></md-progress-linear> * * <md-progress-linear md-mode="query"></md-progress-linear> * </hljs> */ function MdProgressLinearDirective($mdTheming, $mdUtil, $log) { var MODE_DETERMINATE = "determinate"; var MODE_INDETERMINATE = "indeterminate"; var MODE_BUFFER = "buffer"; var MODE_QUERY = "query"; var DISABLED_CLASS = "_md-progress-linear-disabled"; return { restrict: 'E', template: '<div class="_md-container">' + '<div class="_md-dashed"></div>' + '<div class="_md-bar _md-bar1"></div>' + '<div class="_md-bar _md-bar2"></div>' + '</div>', compile: compile }; function compile(tElement, tAttrs, transclude) { tElement.attr('aria-valuemin', 0); tElement.attr('aria-valuemax', 100); tElement.attr('role', 'progressbar'); return postLink;<|fim▁hole|> $mdTheming(element); var lastMode; var isDisabled = attr.hasOwnProperty('disabled'); var toVendorCSS = $mdUtil.dom.animator.toCss; var bar1 = angular.element(element[0].querySelector('._md-bar1')); var bar2 = angular.element(element[0].querySelector('._md-bar2')); var container = angular.element(element[0].querySelector('._md-container')); element .attr('md-mode', mode()) .toggleClass(DISABLED_CLASS, isDisabled); validateMode(); watchAttributes(); /** * Watch the value, md-buffer-value, and md-mode attributes */ function watchAttributes() { attr.$observe('value', function(value) { var percentValue = clamp(value); element.attr('aria-valuenow', percentValue); if (mode() != 
MODE_QUERY) animateIndicator(bar2, percentValue); }); attr.$observe('mdBufferValue', function(value) { animateIndicator(bar1, clamp(value)); }); attr.$observe('disabled', function(value) { if (value === true || value === false) { isDisabled = value; } else { isDisabled = angular.isDefined(value); } element.toggleClass(DISABLED_CLASS, !!isDisabled); }); attr.$observe('mdMode',function(mode){ if (lastMode) container.removeClass( lastMode ); switch( mode ) { case MODE_QUERY: case MODE_BUFFER: case MODE_DETERMINATE: case MODE_INDETERMINATE: container.addClass( lastMode = "_md-mode-" + mode ); break; default: container.addClass( lastMode = "_md-mode-" + MODE_INDETERMINATE ); break; } }); } /** * Auto-defaults the mode to either `determinate` or `indeterminate` mode; if not specified */ function validateMode() { if ( angular.isUndefined(attr.mdMode) ) { var hasValue = angular.isDefined(attr.value); var mode = hasValue ? MODE_DETERMINATE : MODE_INDETERMINATE; var info = "Auto-adding the missing md-mode='{0}' to the ProgressLinear element"; //$log.debug( $mdUtil.supplant(info, [mode]) ); element.attr("md-mode",mode); attr.mdMode = mode; } } /** * Is the md-mode a valid option? */ function mode() { var value = (attr.mdMode || "").trim(); if ( value ) { switch(value) { case MODE_DETERMINATE: case MODE_INDETERMINATE: case MODE_BUFFER: case MODE_QUERY: break; default: value = MODE_INDETERMINATE; break; } } return value; } /** * Manually set CSS to animate the Determinate indicator based on the specified * percentage value (0-100). */ function animateIndicator(target, value) { if ( isDisabled || !mode() ) return; var to = $mdUtil.supplant("translateX({0}%) scale({1},1)", [ (value-100)/2, value/100 ]); var styles = toVendorCSS({ transform : to }); angular.element(target).css( styles ); } } /** * Clamps the value to be between 0 and 100. * @param {number} value The value to clamp. 
* @returns {number} */ function clamp(value) { return Math.max(0, Math.min(value || 0, 100)); } } MdProgressLinearDirective.$inject = ["$mdTheming", "$mdUtil", "$log"]; })(window, window.angular);<|fim▁end|>
} function postLink(scope, element, attr) {
<|file_name|>connections.py<|end_file_name|><|fim▁begin|>""" This module implements connections for MySQLdb. Presently there is only one class: Connection. Others are unlikely. However, you might want to make your own subclasses. In most cases, you will probably override Connection.default_cursor with a non-standard Cursor class. """ from MySQLdb import cursors from _mysql_exceptions import Warning, Error, InterfaceError, DataError, \ DatabaseError, OperationalError, IntegrityError, InternalError, \ NotSupportedError, ProgrammingError import types, _mysql import re def defaulterrorhandler(connection, cursor, errorclass, errorvalue): """ If cursor is not None, (errorclass, errorvalue) is appended to cursor.messages; otherwise it is appended to connection.messages. Then errorclass is raised with errorvalue as the value. You can override this with your own error handler by assigning it to the instance. """ error = errorclass, errorvalue if cursor: cursor.messages.append(error) else: connection.messages.append(error) del cursor del connection raise errorclass, errorvalue re_numeric_part = re.compile(r"^(\d+)") def numeric_part(s): """Returns the leading numeric part of a string. >>> numeric_part("20-alpha") 20 >>> numeric_part("foo") >>> numeric_part("16b") 16 """ m = re_numeric_part.match(s) if m: return int(m.group(1)) return None class Connection(_mysql.connection): """MySQL Database Connection Object""" default_cursor = cursors.Cursor def __init__(self, *args, **kwargs): """ Create a connection to the database. It is strongly recommended that you only use keyword parameters. Consult the MySQL C API documentation for more information. 
host string, host to connect user string, user to connect as passwd string, password to use db string, database to use port integer, TCP/IP port to connect to unix_socket string, location of unix_socket to use conv conversion dictionary, see MySQLdb.converters connect_timeout number of seconds to wait before the connection attempt fails. compress if set, compression is enabled named_pipe if set, a named pipe is used to connect (Windows only) init_command command which is run once the connection is created read_default_file file from which default client values are read read_default_group configuration group to use from the default file cursorclass class object, used to create cursors (keyword only) use_unicode If True, text-like columns are returned as unicode objects using the connection's character set. Otherwise, text-like columns are returned as strings. columns are returned as normal strings. Unicode objects will always be encoded to the connection's character set regardless of this setting. charset If supplied, the connection character set will be changed to this character set (MySQL-4.1 and newer). This implies use_unicode=True. sql_mode If supplied, the session SQL mode will be changed to this setting (MySQL-4.1 and newer). For more details and legal values, see the MySQL documentation. client_flag integer, flags to use or 0 (see MySQL docs or constants/CLIENTS.py) ssl dictionary or mapping, contains SSL connection parameters; see the MySQL documentation for more details (mysql_ssl_set()). If this is set, and the client does not support SSL, NotSupportedError will be raised. local_infile integer, non-zero enables LOAD LOCAL INFILE; zero disables autocommit If False (default), autocommit is disabled. If True, autocommit is enabled. If None, autocommit isn't set and server default is used. There are a number of undocumented, non-standard methods. See the documentation for the MySQL C API for some hints on what they do. 
""" from MySQLdb.constants import CLIENT, FIELD_TYPE from MySQLdb.converters import conversions from weakref import proxy kwargs2 = kwargs.copy() if 'conv' in kwargs: conv = kwargs['conv'] else: conv = conversions conv2 = {} for k, v in conv.items(): if isinstance(k, int) and isinstance(v, list): conv2[k] = v[:] else: conv2[k] = v kwargs2['conv'] = conv2 cursorclass = kwargs2.pop('cursorclass', self.default_cursor) charset = kwargs2.pop('charset', '') if charset: use_unicode = True else: use_unicode = False use_unicode = kwargs2.pop('use_unicode', use_unicode) sql_mode = kwargs2.pop('sql_mode', '') client_flag = kwargs.get('client_flag', 0) client_version = tuple([ numeric_part(n) for n in _mysql.get_client_info().split('.')[:2] ]) if client_version >= (4, 1): client_flag |= CLIENT.MULTI_STATEMENTS if client_version >= (5, 0): client_flag |= CLIENT.MULTI_RESULTS kwargs2['client_flag'] = client_flag # PEP-249 requires autocommit to be initially off autocommit = kwargs2.pop('autocommit', False) super(Connection, self).__init__(*args, **kwargs2) self.cursorclass = cursorclass self.encoders = dict([ (k, v) for k, v in conv.items() if type(k) is not int ]) self._server_version = tuple([ numeric_part(n) for n in self.get_server_info().split('.')[:2] ]) db = proxy(self) def _get_string_literal(): def string_literal(obj, dummy=None): return db.string_literal(obj) return string_literal def _get_unicode_literal(): def unicode_literal(u, dummy=None): <|fim▁hole|> return db.literal(u.encode(unicode_literal.charset)) return unicode_literal def _get_string_decoder(): def string_decoder(s): return s.decode(string_decoder.charset) return string_decoder string_literal = _get_string_literal() self.unicode_literal = unicode_literal = _get_unicode_literal() self.string_decoder = string_decoder = _get_string_decoder() if not charset: charset = self.character_set_name() self.set_character_set(charset) if sql_mode: self.set_sql_mode(sql_mode) if use_unicode: 
self.converter[FIELD_TYPE.STRING].append((None, string_decoder)) self.converter[FIELD_TYPE.VAR_STRING].append((None, string_decoder)) self.converter[FIELD_TYPE.VARCHAR].append((None, string_decoder)) self.converter[FIELD_TYPE.BLOB].append((None, string_decoder)) self.encoders[types.StringType] = string_literal self.encoders[types.UnicodeType] = unicode_literal self._transactional = self.server_capabilities & CLIENT.TRANSACTIONS if self._transactional: if autocommit is not None: self.autocommit(autocommit) self.messages = [] def autocommit(self, on): on = bool(on) if self.get_autocommit() != on: _mysql.connection.autocommit(self, on) def cursor(self, cursorclass=None): """ Create a cursor on which queries may be performed. The optional cursorclass parameter is used to create the Cursor. By default, self.cursorclass=cursors.Cursor is used. """ return (cursorclass or self.cursorclass)(self) def __enter__(self): if self.get_autocommit(): self.query("BEGIN") return self.cursor() def __exit__(self, exc, value, tb): if exc: self.rollback() else: self.commit() def literal(self, o): """ If o is a single object, returns an SQL literal as a string. If o is a non-string sequence, the items of the sequence are converted and returned as a sequence. Non-standard. For internal use; do not use this in your applications. """ return self.escape(o, self.encoders) def begin(self): """Explicitly begin a connection. Non-standard. DEPRECATED: Will be removed in 1.3. Use an SQL BEGIN statement instead.""" from warnings import warn warn("begin() is non-standard and will be removed in 1.3", DeprecationWarning, 2) self.query("BEGIN") if not hasattr(_mysql.connection, 'warning_count'): def warning_count(self): """Return the number of warnings generated from the last query. 
This is derived from the info() method.""" from string import atoi info = self.info() if info: return atoi(info.split()[-1]) else: return 0 def set_character_set(self, charset): """Set the connection character set to charset. The character set can only be changed in MySQL-4.1 and newer. If you try to change the character set from the current value in an older version, NotSupportedError will be raised.""" if charset == "utf8mb4": py_charset = "utf8" else: py_charset = charset if self.character_set_name() != charset: try: super(Connection, self).set_character_set(charset) except AttributeError: if self._server_version < (4, 1): raise NotSupportedError("server is too old to set charset") self.query('SET NAMES %s' % charset) self.store_result() self.string_decoder.charset = py_charset self.unicode_literal.charset = py_charset def set_sql_mode(self, sql_mode): """Set the connection sql_mode. See MySQL documentation for legal values.""" if self._server_version < (4, 1): raise NotSupportedError("server is too old to set sql_mode") self.query("SET SESSION sql_mode='%s'" % sql_mode) self.store_result() def show_warnings(self): """Return detailed information about warnings as a sequence of tuples of (Level, Code, Message). This is only supported in MySQL-4.1 and up. If your server is an earlier version, an empty sequence is returned.""" if self._server_version < (4,1): return () self.query("SHOW WARNINGS") r = self.store_result() warnings = r.fetch_row(0) return warnings Warning = Warning Error = Error InterfaceError = InterfaceError DatabaseError = DatabaseError DataError = DataError OperationalError = OperationalError IntegrityError = IntegrityError InternalError = InternalError ProgrammingError = ProgrammingError NotSupportedError = NotSupportedError errorhandler = defaulterrorhandler<|fim▁end|>
<|file_name|>frontend.py<|end_file_name|><|fim▁begin|># -*-Python-*- ################################################################################ # # File: frontend.py # RCS: $Header: $ # Description: frontend: # responsibility: # init backend # init processors # handle two query types: # 1) metadata # response: metadata from backend and processors # 2) informational # response: proccess(proc(query))(backend(info(query))) # Author: Staal Vinterbo # Created: Wed May 8 16:28:56 2013 # Modified: Sun Jun 23 14:31:31 2013 (Staal Vinterbo) staal@mats # Language: Python # Package: N/A # Status: Experimental #<|fim▁hole|># # frontend.py is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # frontend.py is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. 
# # You should have received a copy of the GNU General Public License # along with frontend.py; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # ################################################################################ from backend import init_backend, query_backend def init_frontend(database, processors, reinit=False): if len(processors) == 0: raise Exception('Failed to initialize frontend: no processors given.') try: if reinit: backend = reinit_backend(backend) else: backend = init_backend(database) except Exception as e: raise Exception('Could not initialize backend: ' + str(e)) pdict = {} for (k,v) in processors.items(): pdict[k] = v['meta'] meta = dict(backend['meta']) meta['processors'] = pdict return {'backend' : backend, 'processors' : processors, 'meta' : meta} def handle_query(frontend, eps, query): if eps <= 0: raise Exception('Privacy risk must be positive.') try: (ddesc, proc) = query (pname, parms) = proc (dname, sel, pro) = ddesc except Exception as e: raise Exception('Malformed data query.') # check if data set exists and if processor is allowed if dname not in frontend['backend']['meta']['datasets'].keys(): raise Exception('Requested data set not available.') if pname not in frontend['backend']['meta']['datasets'][dname]['processors']: raise Exception('Requested information not appropriate for data set.') try: proc = frontend['processors'][pname] except Exception as e: raise Exception('Could not find query type: ' + str(e)) try: if proc.has_key('query_edit'): parms += [('orig_query', {'predicate' :sel, 'attributes' : pro})] (sel, pro) = proc['query_edit'](sel, pro) ddesc = (dname, sel, pro) except Exception as e: raise Exception('Query edit failed: ' + str(e)) try: res = query_backend(frontend['backend'], ddesc) except Exception as e: raise Exception('Data query failed: ' + str(e)) try: pres = proc['f'](eps, parms, res) except Exception as e: raise Exception('Information 
processing failed: ' + str(e)) return pres<|fim▁end|>
# (c) Copyright 2013, Staal Vinterbo, all rights reserved.
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # agile-analytics documentation build configuration file, created by # sphinx-quickstart on Fri Jun 17 13:58:53 2016. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out<|fim▁hole|># If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # # import os # import sys # sys.path.insert(0, os.path.abspath('.')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. # # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.viewcode', 'sphinx.ext.githubpages', ] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] source_suffix = '.rst' # The encoding of source files. # # source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'agile-analytics' copyright = u'2016, Chris Heisel' author = u'Chris Heisel' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = u'0.1' # The full version, including alpha/beta/rc tags. 
release = u'0.1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: # # today = '' # # Else, today_fmt is used as the format for a strftime call. # # today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This patterns also effect to html_static_path and html_extra_path exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] # The reST default role (used for this markup: `text`) to use for all # documents. # # default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. # # add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). # # add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. # # show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. # keep_warnings = False # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = True # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # html_theme = 'alabaster' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. 
# # html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] # The name for this set of Sphinx documents. # "<project> v<release> documentation" by default. # # html_title = u'agile-analytics v0.1' # A shorter title for the navigation bar. Default is the same as html_title. # # html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. # # html_logo = None # The name of an image file (relative to this directory) to use as a favicon of # the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # # html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. # # html_extra_path = [] # If not None, a 'Last updated on:' timestamp is inserted at every page # bottom, using the given strftime format. # The empty string is equivalent to '%b %d, %Y'. # # html_last_updated_fmt = None # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. # # html_use_smartypants = True # Custom sidebar templates, maps document names to template names. # # html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. # # html_additional_pages = {} # If false, no module index is generated. # # html_domain_indices = True # If false, no index is generated. # # html_use_index = True # If true, the index is split into individual pages for each letter. # # html_split_index = False # If true, links to the reST sources are added to the pages. 
# # html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. # # html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. # # html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. # # html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). # html_file_suffix = None # Language to be used for generating the HTML full-text search index. # Sphinx supports the following languages: # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh' # # html_search_language = 'en' # A dictionary with options for the search language support, empty by default. # 'ja' uses this config value. # 'zh' user can custom change `jieba` dictionary path. # # html_search_options = {'type': 'default'} # The name of a javascript file (relative to the configuration directory) that # implements a search results scorer. If empty, the default will be used. # # html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. htmlhelp_basename = 'agile-analyticsdoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # # 'preamble': '', # Latex figure (float) alignment # # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). 
latex_documents = [ (master_doc, 'agile-analytics.tex', u'agile-analytics Documentation', u'Chris Heisel', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. # # latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. # # latex_use_parts = False # If true, show page references after internal links. # # latex_show_pagerefs = False # If true, show URL addresses after external links. # # latex_show_urls = False # Documents to append as an appendix to all manuals. # # latex_appendices = [] # If false, no module index is generated. # # latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ (master_doc, 'agile-analytics', u'agile-analytics Documentation', [author], 1) ] # If true, show URL addresses after external links. # # man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ (master_doc, 'agile-analytics', u'agile-analytics Documentation', author, 'agile-analytics', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. # # texinfo_appendices = [] # If false, no module index is generated. # # texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. # # texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. # # texinfo_no_detailmenu = False<|fim▁end|>
# serve to show the default.
<|file_name|>marker_selection_test.js<|end_file_name|><|fim▁begin|>jasmine.DEFAULT_TIMEOUT_INTERVAL = 1000 * 50; // 50 seconds describe('marker_selection_test', function () { it('all_aml', function (done) { var dataset; var referenceDataset; var promises = []; promises.push(morpheus.DatasetUtil.read('test_files/all_aml_train.gct').done(function (d) { dataset = d; })); promises.push(morpheus.DatasetUtil.read('test_files/aml_aml_train_marker_selection.gct').done(function (d) { referenceDataset = d; }));<|fim▁hole|> new morpheus.MarkerSelection().execute({ project: project, input: { background: false, permutations: 1000, number_of_markers: 0, field: 'id', metric: morpheus.SignalToNoise.toString(), class_a: ['AML_12', 'AML_13', 'AML_14', 'AML_16', 'AML_20', 'AML_1', 'AML_2', 'AML_3', 'AML_5', 'AML_6', 'AML_7'], class_b: ['ALL_19769_B-cell', 'ALL_23953_B-cell', 'ALL_28373_B-cell', 'ALL_9335_B-cell', 'ALL_9692_B-cell', 'ALL_14749_B-cell', 'ALL_17281_B-cell', 'ALL_19183_B-cell', 'ALL_20414_B-cell', 'ALL_21302_B-cell', 'ALL_549_B-cell', 'ALL_17929_B-cell', 'ALL_20185_B-cell', 'ALL_11103_B-cell', 'ALL_18239_B-cell', 'ALL_5982_B-cell', 'ALL_7092_B-cell', 'ALL_R11_B-cell', 'ALL_R23_B-cell', 'ALL_16415_T-cell', 'ALL_19881_T-cell', 'ALL_9186_T-cell', 'ALL_9723_T-cell', 'ALL_17269_T-cell', 'ALL_14402_T-cell', 'ALL_17638_T-cell', 'ALL_22474_T-cell'] } }); // compare metadata fields var vector = dataset.getRowMetadata().getByName('p_value'); var referenceVector = referenceDataset.getRowMetadata().getByName('p-value'); for (var i = 0, size = vector.size(); i < size; i++) { expect(vector.getValue(i)).toBeCloseTo(referenceVector.getValue(i), 0.001); } var vector = dataset.getRowMetadata().getByName('FDR(BH)'); var referenceVector = referenceDataset.getRowMetadata().getByName('FDR(BH)'); for (var i = 0, size = vector.size(); i < size; i++) { expect(vector.getValue(i)).toBeCloseTo(referenceVector.getValue(i), 0.001); } var vector = dataset.getRowMetadata().getByName('Signal to noise'); 
var referenceVector = referenceDataset.getRowMetadata().getByName('Signal to noise'); for (var i = 0, size = vector.size(); i < size; i++) { expect(vector.getValue(i)).toBeCloseTo(referenceVector.getValue(i), 0.001); } done(); }); }); });<|fim▁end|>
Promise.all(promises).then(function () { var project = new morpheus.Project(dataset);