prompt
large_stringlengths
70
991k
completion
large_stringlengths
0
1.02k
<|file_name|>test_account_rest_permissions.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # This file is part of EUDAT B2Share. # Copyright (C) 2017 University of Tuebingen, CERN, CSC, KTH. # # B2Share is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License as # published by the Free Software Foundation; either version 2 of the # License, or (at your option) any later version. # # B2Share is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with B2Share; if not, write to the Free Software Foundation, Inc., # 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. # # In applying this license, CERN does not # waive the privileges and immunities granted to it by virtue of its status # as an Intergovernmental Organization or submit itself to any jurisdiction. 
"""Test permissions of user account REST API.""" import json from invenio_db import db from flask import url_for from invenio_accounts.models import User from invenio_oauth2server.models import Token from invenio_oauth2server import current_oauth2server from b2share_unit_tests.helpers import create_user def test_accounts_search_permission(app, test_users, test_community, login_user): """Test permission of listing user accounts.""" def account_search(user, expected_code): headers = [('Content-Type', 'application/json'), ('Accept', 'application/json')] with app.app_context(): url = url_for('invenio_accounts_rest.users_list') if user: scopes = current_oauth2server.scope_choices() allowed_token = Token.create_personal( 'allowed_token', user.id, scopes=[s[0] for s in scopes] ) # application authentication token header headers.append(('Authorization', 'Bearer {}'.format(allowed_token.access_token))) with app.test_client() as client: if user is not None: login_user(user, client)<|fim▁hole|> # anonymous users can't list accounts account_search(None, 401) # authenticated users can't list other users' account account_search(test_users['normal'], 403) # community members cannot list all users' accounts account_search(test_community.member, 403) # community admins can list all users account_search(test_community.admin, 200) # admin is allowed to list all accounts account_search(test_users['admin'], 200) def test_account_read_permission(app, test_users, test_community, login_user): """Test permission of listing user accounts.""" with app.app_context(): read_user = create_user('read_user') url = url_for('invenio_accounts_rest.user', user_id=read_user.id) db.session.commit() headers = [('Content-Type', 'application/json'), ('Accept', 'application/json')] def account_read(user, expected_code): with app.test_client() as client: if user is not None: login_user(user, client) res = client.get(url, headers=headers) assert res.status_code == expected_code # anonymous users can't read 
accounts account_read(None, 401) # authenticated users can't read other users' account account_read(test_users['normal'], 403) # community members cannot read other users' account account_read(test_community.member, 403) # users can read their own account account_read(read_user, 200) # community admins can list all users account_read(test_community.admin, 200) # admin is allowed to read all accounts account_read(test_users['admin'], 200) def test_account_activation_permission(app, test_users, test_community, login_user): """Test deactivating a user account.""" counter = [0] def account_update(user, expected_code, modified_user=None): def account_update_sub(patch_content, content_type): with app.app_context(): if modified_user is None: test_user = create_user( 'test_user{}'.format(counter[0])) else: test_user = modified_user counter[0] += 1 url = url_for( 'invenio_accounts_rest.user', user_id=test_user.id, ) db.session.commit() headers = [('Content-Type', content_type), ('Accept', 'application/json')] with app.test_client() as client: if user is not None: login_user(user, client) res = client.patch(url, headers=headers, data=json.dumps(patch_content)) assert res.status_code == expected_code # test with a simple JSON account_update_sub({'active': False}, 'application/json') # test with a JSON patch account_update_sub([{ 'op': 'replace', 'path': '/active','value': False }], 'application/json-patch+json') # anonymous users can't activate/deactivate accounts account_update(None, 401) # authenticated users can't activate/deactivate other users' account account_update(test_users['normal'], 403) # users can't deactivate their own accounts account_update(test_users['normal'], 403, test_users['normal']) # admin is allowed to activate/deactivate accounts account_update(test_users['admin'], 200) def test_account_roles_search_permission(app, test_users, test_community, login_user): """Test permission of listing user accounts.""" with app.app_context(): read_user = 
create_user('read_user') url = url_for('invenio_accounts_rest.user_roles_list', user_id=read_user.id) db.session.commit() headers = [('Content-Type', 'application/json'), ('Accept', 'application/json')] def roles_read(user, expected_code): with app.test_client() as client: if user is not None: login_user(user, client) res = client.get(url, headers=headers) assert res.status_code == expected_code # anonymous users can't read other users' roles roles_read(None, 401) # any authenticated user can read other users' roles roles_read(test_users['normal'], 200)<|fim▁end|>
res = client.get(url, headers=headers) assert res.status_code == expected_code
<|file_name|>index.tsx<|end_file_name|><|fim▁begin|>import * as React from "react"; import { VirtualFarmBotProps } from "../../interfaces"; import { BooleanSetting } from "../../../../session_keys"; import { BotFigure } from "./bot_figure"; import { BotTrail } from "./bot_trail"; import { BotPeripherals } from "./bot_peripherals"; import { NegativePositionLabel } from "./negative_position_labels"; export function VirtualFarmBot(props: VirtualFarmBotProps) { const { mapTransformProps, plantAreaOffset, peripherals, eStopStatus, getConfigValue } = props; const displayTrail = !!getConfigValue(BooleanSetting.display_trail); const encoderFigure = !!getConfigValue(BooleanSetting.encoder_figure); return <g id="virtual-farmbot"> <NegativePositionLabel position={props.botLocationData.position} mapTransformProps={mapTransformProps} plantAreaOffset={plantAreaOffset} /> <BotPeripherals position={props.botLocationData.position} mapTransformProps={mapTransformProps}<|fim▁hole|> plantAreaOffset={plantAreaOffset} peripherals={peripherals} getConfigValue={getConfigValue} /> <BotFigure name={"motor-position"} position={props.botLocationData.position} mapTransformProps={mapTransformProps} plantAreaOffset={plantAreaOffset} eStopStatus={eStopStatus} /> {encoderFigure && <BotFigure name={"encoder-position"} position={props.botLocationData.scaled_encoders} mapTransformProps={mapTransformProps} plantAreaOffset={plantAreaOffset} />} {displayTrail && <BotTrail position={props.botLocationData.position} mapTransformProps={mapTransformProps} peripherals={peripherals} />} </g>; }<|fim▁end|>
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/* Copyright (c) 2016 Saurav Sachidanand Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ /*! This is a Rust library that taps into functionality that enables hardware-accelerated execution of virtual machines on OS X. It binds to the `Hypervisor` framework on OS X, and exposes a safe Rust interface through the `hypervisor` module, and an unsafe foreign function interface through the `hypervisor::ffi` module. To use this library, you need * OS X Yosemite (10.10), or newer * an Intel processor with the VT-x feature set that includes Extended Page Tables (EPT) and Unrestricted Mode. 
To verify this, run and expect the following in your Terminal: ```shell $ sysctl kern.hv_support kern.hv_support: 1 ``` !*/ extern crate libc; extern crate core; #[allow(non_camel_case_types)] pub mod ffi; pub mod consts; use self::core::fmt; use libc::*; use self::ffi::*; /// Error returned after every call pub enum Error { /// Success Success, /// Error Error, /// Busy Busy, /// Bad argument BadArg, /// No resources NoRes, /// No device NoDev, /// Unsupported Unsupp } impl fmt::Debug for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { Error::Success => write!(f, "Success"), Error::Error => write!(f, "Error"), Error::Busy => write!(f, "Busy"), Error::BadArg => write!(f, "Bad argument"), Error::NoRes => write!(f, "No resources"), Error::NoDev => write!(f, "No device"), Error::Unsupp => write!(f, "Unsupported"), } } } // Returns an Error for a hv_return_t fn match_error_code(code: hv_return_t) -> Error { match code { HV_SUCCESS => Error::Success, HV_BUSY => Error::Busy, HV_BAD_ARGUMENT => Error::BadArg, HV_NO_RESOURCES => Error::NoRes, HV_NO_DEVICE => Error::NoDev, HV_UNSUPPORTED => Error::Unsupp, _ => Error::Error } } /// Creates a VM instance for the current Mach task pub fn create_vm() -> Error { match_error_code(unsafe { hv_vm_create(HV_VM_DEFAULT) }) } /// Destroys the VM instance associated with the current Mach task pub fn destroy_vm() -> Error { match_error_code(unsafe { hv_vm_destroy() }) } /// Guest physical memory region permissions pub enum MemPerm { /// Read Read, /// Write (implies read) Write, /// Execute Exec, /// Execute and write (implies read) ExecAndWrite, /// Execute and read ExecAndRead } #[allow(non_snake_case)] #[inline(always)] fn match_MemPerm(mem_perm: &MemPerm) -> uint64_t { match mem_perm { &MemPerm::Read => HV_MEMORY_READ, &MemPerm::Write => HV_MEMORY_WRITE | HV_MEMORY_READ, &MemPerm::Exec => HV_MEMORY_EXEC, &MemPerm::ExecAndWrite => HV_MEMORY_EXEC | HV_MEMORY_WRITE | HV_MEMORY_READ, &MemPerm::ExecAndRead => 
HV_MEMORY_EXEC | HV_MEMORY_READ, } } /// Maps a region in the virtual address space of the current Mach task into the guest physical /// address space of the virutal machine pub fn map_mem(mem: &[u8], gpa: u64, mem_perm: &MemPerm) -> Error { match_error_code(unsafe { hv_vm_map( mem.as_ptr() as *const c_void, gpa as hv_gpaddr_t, mem.len() as size_t, match_MemPerm(mem_perm) ) }) } /// Unmaps a region in the guest physical address space of the virutal machine pub fn unmap_mem(gpa: u64, size: usize) -> Error { match_error_code(unsafe { hv_vm_unmap(gpa as hv_gpaddr_t, size as size_t) }) } /// Modifies the permissions of a region in the guest physical address space of the virtual /// machine pub fn protect_mem(gpa: u64, size: usize, mem_perm: &MemPerm) -> Error { match_error_code(unsafe { hv_vm_protect(gpa as hv_gpaddr_t, size as size_t, match_MemPerm(mem_perm)) }) } /// Synchronizes the guest Timestamp-Counters (TSC) across all vCPUs /// /// * `tsc` Guest TSC value pub fn sync_tsc(tsc: u64) -> Error { match_error_code(unsafe { hv_vm_sync_tsc(tsc as uint64_t) }) } /// Forces an immediate VMEXIT of a set of vCPUs /// /// * `vcpu_ids` Array of vCPU IDs pub fn interrupt_vcpus(vcpu_ids: &[u32]) -> Error { match_error_code(unsafe { hv_vcpu_interrupt(vcpu_ids.as_ptr(), vcpu_ids.len() as c_uint) }) } /// Virtual CPU #[allow(non_camel_case_types)] pub struct vCPU { /// Virtual CPU ID pub id: u32 } /// x86 architectural register #[allow(non_camel_case_types)] #[derive(Clone)] #[repr(C)] pub enum x86Reg { RIP, RFLAGS, RAX, RCX, RDX, RBX, RSI, RDI, RSP, RBP, R8, R9, R10, R11, R12, R13, R14, R15, CS, SS, DS, ES, FS, GS, IDT_BASE, IDT_LIMIT, GDT_BASE, GDT_LIMIT, LDTR, LDT_BASE, LDT_LIMIT, LDT_AR, TR, TSS_BASE, TSS_LIMIT, TSS_AR, CR0, CR1, CR2,<|fim▁hole|> DR0, DR1, DR2, DR3, DR4, DR5, DR6, DR7, TPR, XCR0, REGISTERS_MAX, } impl vCPU { /// Creates a vCPU instance for the current thread pub fn new() -> Result<vCPU, Error> { let mut vcpuid: hv_vcpuid_t = 0; let error = 
match_error_code(unsafe { hv_vcpu_create(&mut vcpuid, HV_VCPU_DEFAULT) }); match error { Error::Success => Ok(vCPU { id: vcpuid as u32 }), _ => Err(error) } } /// Destroys the vCPU instance associated with the current thread pub fn destroy(&self) -> Error { match_error_code(unsafe { hv_vcpu_destroy(self.id as hv_vcpuid_t) }) } /// Executes the vCPU pub fn run(&self) -> Error { match_error_code(unsafe { hv_vcpu_run(self.id as hv_vcpuid_t) }) } /// Forces an immediate VMEXIT of the vCPU pub fn interrupt(&self) -> Error { match_error_code(unsafe { hv_vcpu_interrupt(&(self.id), 1 as c_uint) }) } /// Returns the cumulative execution time of the vCPU in nanoseconds pub fn exec_time(&self) -> Result<u64, Error> { let mut exec_time: uint64_t = 0; let error = match_error_code(unsafe { hv_vcpu_get_exec_time(self.id, &mut exec_time) }); match error { Error::Success => Ok(exec_time as u64), _ => Err(error) } } /// Forces flushing of cached vCPU state pub fn flush(&self) -> Error { match_error_code(unsafe { hv_vcpu_flush(self.id as hv_vcpuid_t) }) } /// Invalidates the translation lookaside buffer (TLB) of the vCPU pub fn invalidate_tlb(&self) -> Error { match_error_code(unsafe { hv_vcpu_invalidate_tlb(self.id as hv_vcpuid_t) }) } /// Enables an MSR to be used natively by the VM pub fn enable_native_msr(&self, msr: u32, enable: bool) -> Error { match_error_code(unsafe { hv_vcpu_enable_native_msr(self.id as hv_vcpuid_t, msr as uint32_t, enable) }) } /// Returns the current value of an MSR of the vCPU pub fn read_msr(&self, msr: u32) -> Result<u64, Error> { let mut value: uint64_t = 0; let error = match_error_code(unsafe { hv_vcpu_read_msr(self.id as hv_vcpuid_t, msr as uint32_t, &mut value) }); match error { Error::Success => Ok(value as u64), _ => Err(error) } } /// Set the value of an MSR of the vCPU pub fn write_msr(&self, msr: u32, value: u64) -> Error { match_error_code(unsafe { hv_vcpu_write_msr(self.id as hv_vcpuid_t, msr as uint32_t, &(value as uint64_t)) }) } /// 
Returns the current value of an architectural x86 register /// of the vCPU pub fn read_register(&self, reg: &x86Reg) -> Result<u64, Error> { let mut value: uint64_t = 0; let error = match_error_code(unsafe { hv_vcpu_read_register(self.id as hv_vcpuid_t, (*reg).clone(), &mut value) }); match error { Error::Success => Ok(value as u64), _ => Err(error) } } /// Sets the value of an architectural x86 register of the vCPU pub fn write_register(&self, reg: &x86Reg, value: u64) -> Error { match_error_code(unsafe { hv_vcpu_write_register(self.id as hv_vcpuid_t, (*reg).clone(), value as uint64_t) }) } /// Returns the current value of a VMCS field of the vCPU pub fn read_vmcs(&self, field: u32) -> Result<u64, Error> { let mut value: uint64_t = 0; let error = match_error_code(unsafe { hv_vmx_vcpu_read_vmcs(self.id as hv_vcpuid_t, field as uint32_t, &mut value) }); match error { Error::Success => Ok(value as u64), _ => Err(error) } } /// Sets the value of a VMCS field of the vCPU pub fn write_vmcs(&self, field: u32, value: u64) -> Error { match_error_code(unsafe { hv_vmx_vcpu_write_vmcs(self.id as hv_vcpuid_t, field as uint32_t, value as uint64_t) }) } /// Sets the address of the guest APIC for the vCPU in the /// guest physical address space of the VM pub fn set_apic_addr(&self, gpa: u64) -> Error { match_error_code(unsafe { hv_vmx_vcpu_set_apic_address(self.id as hv_vcpuid_t, gpa as uint64_t) }) } /// Reads the current architectural x86 floating point and SIMD state of the vCPU pub fn read_fpstate(&self, buffer: &mut [u8]) -> Error { match_error_code(unsafe { hv_vcpu_read_fpstate(self.id as hv_vcpuid_t, buffer.as_mut_ptr() as *mut c_void, buffer.len() as size_t) }) } /// Sets the architectural x86 floating point and SIMD state of the vCPU pub fn write_fpstate(&self, buffer: &[u8]) -> Error { match_error_code(unsafe { hv_vcpu_write_fpstate(self.id as hv_vcpuid_t, buffer.as_ptr() as *const c_void, buffer.len() as size_t) }) } } impl fmt::Debug for vCPU { fn fmt(&self, f: &mut 
fmt::Formatter) -> fmt::Result { write!(f, "vCPU ID: {}", (*self).id) } } /// VMX cabability #[allow(non_camel_case_types)] #[derive(Clone)] #[repr(C)] pub enum VMXCap { /// Pin-based VMX capabilities PINBASED = 0, /// Primary proc-based VMX capabilities PROCBASED = 1, /// Secondary proc-based VMX capabilities PROCBASED2 = 2, /// VM-entry VMX capabilities ENTRY = 3, /// VM-exit VMX capabilities EXIT = 4, /// VMX preemption timer frequency PREEMPTION_TIMER = 32, } /// Reads a VMX capability of the host processor pub fn read_vmx_cap(vmx_cap: &VMXCap) -> Result<u64, Error> { let mut value: uint64_t = 0; let error = match_error_code(unsafe { hv_vmx_read_capability((*vmx_cap).clone(), &mut value) }); match error { Error::Success => Ok(value as u64), _ => Err(error) } }<|fim▁end|>
CR3, CR4,
<|file_name|>homework2-4_csv_ex.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # coding: utf-8 #copyRight by heibanke import csv import re <|fim▁hole|>csvfile = open('beijing_jt.csv','r') reader = csv.reader(csvfile) # reader.next() only can use in py2 next(reader) jt_info = next(reader) print(jt_info[1].decode('utf-8')) csvfile.close() # convert stations info format station_pattern = (r'(?P<number>[0-9]+)\s(?P<name>\D+)') station_list = [] stations = re.findall(station_pattern,jt_info[-1].decode('utf-8')) for tmp in stations: print(tmp[0],tmp[1].strip()) station_list.append(tmp[1].strip()) result={} result[jt_info[1]]=station_list print(result)<|fim▁end|>
<|file_name|>receipt.go<|end_file_name|><|fim▁begin|>// Copyright 2014 The go-ethereum Authors // This file is part of the go-ethereum library. // // The go-ethereum library is free software: you can redistribute it and/or modify // it under the terms of the GNU Lesser General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // The go-ethereum library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Lesser General Public License for more details. // // You should have received a copy of the GNU Lesser General Public License // along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>. package types import ( "bytes"<|fim▁hole|> "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/core/state" "github.com/ethereum/go-ethereum/rlp" ) type Receipt struct { PostState []byte CumulativeGasUsed *big.Int Bloom Bloom TxHash common.Hash ContractAddress common.Address logs state.Logs GasUsed *big.Int } func NewReceipt(root []byte, cumulativeGasUsed *big.Int) *Receipt { return &Receipt{PostState: common.CopyBytes(root), CumulativeGasUsed: new(big.Int).Set(cumulativeGasUsed)} } func (self *Receipt) SetLogs(logs state.Logs) { self.logs = logs } func (self *Receipt) Logs() state.Logs { return self.logs } func (self *Receipt) EncodeRLP(w io.Writer) error { return rlp.Encode(w, []interface{}{self.PostState, self.CumulativeGasUsed, self.Bloom, self.logs}) } func (self *Receipt) DecodeRLP(s *rlp.Stream) error { var r struct { PostState []byte CumulativeGasUsed *big.Int Bloom Bloom TxHash common.Hash ContractAddress common.Address Logs state.Logs GasUsed *big.Int } if err := s.Decode(&r); err != nil { return err } self.PostState, self.CumulativeGasUsed, self.Bloom, self.TxHash, self.ContractAddress, self.logs, 
self.GasUsed = r.PostState, r.CumulativeGasUsed, r.Bloom, r.TxHash, r.ContractAddress, r.Logs, r.GasUsed return nil } type ReceiptForStorage Receipt func (self *ReceiptForStorage) EncodeRLP(w io.Writer) error { storageLogs := make([]*state.LogForStorage, len(self.logs)) for i, log := range self.logs { storageLogs[i] = (*state.LogForStorage)(log) } return rlp.Encode(w, []interface{}{self.PostState, self.CumulativeGasUsed, self.Bloom, self.TxHash, self.ContractAddress, storageLogs, self.GasUsed}) } func (self *Receipt) RlpEncode() []byte { bytes, err := rlp.EncodeToBytes(self) if err != nil { fmt.Println("TMP -- RECEIPT ENCODE ERROR", err) } return bytes } func (self *Receipt) Cmp(other *Receipt) bool { if bytes.Compare(self.PostState, other.PostState) != 0 { return false } return true } func (self *Receipt) String() string { return fmt.Sprintf("receipt{med=%x cgas=%v bloom=%x logs=%v}", self.PostState, self.CumulativeGasUsed, self.Bloom, self.logs) } type Receipts []*Receipt func (self Receipts) RlpEncode() []byte { bytes, err := rlp.EncodeToBytes(self) if err != nil { fmt.Println("TMP -- RECEIPTS ENCODE ERROR", err) } return bytes } func (self Receipts) Len() int { return len(self) } func (self Receipts) GetRlp(i int) []byte { return common.Rlp(self[i]) }<|fim▁end|>
"fmt" "io" "math/big"
<|file_name|>test_parsable.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- # # name: test_parsable.py # author: Harold Bradley III # email: [email protected] # created on: 01/16/2016<|fim▁hole|>A unit test for ext_pylib file module's Parsable mixin class. """ import pytest from . import utils from ext_pylib.files import File, Parsable class ParsableFile(Parsable, File): """Dummy class extending Parsable and File.""" FILE = """This is a sample file. This is a sample file. This is a sample file. DocumentRoot /var/www/google.com This is a sample file. DEBUG = True SECURE = False DocumentRoot /var/www/example.com LIST = first_item LIST = second_item """ EMPTY_FILE = '' def test_parsable_parse_with_existing_attribute(): """Test Parsable setup_parsing() method on an existing attribute.""" parsable = ParsableFile() parsable.existing = 'already exists' # pylint: disable=attribute-defined-outside-init with pytest.raises(AttributeError): parsable.setup_parsing({'existing' : '*'}) def test_parsable_setup_parsing(): """Test Parsable setup_parsing() method.""" the_file = Parsable() Parsable.read = utils.mock_read_data the_file.data = FILE the_file.setup_parsing({ 'htdocs' : ('DocumentRoot (.*)',), 'debug' : 'DEBUG = (.*)', 'secure' : ('SECURE[ ]*=[ ]*([^ \n]*)', 'SECURE = {0}'), 'speed' : ('SPEED[ ]*=[ ]*([^ \n]*)', 'SPEED = {0}'), 'list' : ('LIST[ ]*=[ ]*([^ \n]*)', 'LIST = {0}'), }) assert the_file.htdocs[0] == '/var/www/google.com' assert the_file.htdocs[1] == '/var/www/example.com' assert the_file.debug == 'True' assert the_file.secure == 'False' the_file.secure = 'True' assert the_file.secure == 'True' assert the_file.speed is None the_file.speed = 'fastest' assert the_file.speed == 'fastest' the_file.speed = 'fastest' # setting it more than once with the same value # shouldn't affect the number of times it is added. 
assert isinstance(the_file.speed, str) \ or isinstance(the_file.speed, unicode) # Shouldn't be a list, checking unicode # for Python 2 support. assert len(the_file.list) == 2 # Should be a list def test_parsable_setup_parsing_on_empty_file(): """Test Parsable setup_paring() using an empty file.""" the_file = Parsable() Parsable.read = utils.mock_read_data the_file.data = EMPTY_FILE the_file.setup_parsing({ 'htdocs' : ('DocumentRoot (.*)', 'DocumentRoot {0}'), 'secure' : ('SECURE[ ]*=[ ]*([^ \n]*)', 'SECURE = {0}'), }) assert the_file.htdocs is None the_file.htdocs = '/var/www/google.com' assert the_file.htdocs == '/var/www/google.com' assert the_file.secure is None the_file.secure = 'True' assert the_file.secure == 'True'<|fim▁end|>
# # pylint: disable=invalid-name,no-member """
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 # -*- coding: utf-8 -*- import sys import inselect REQUIREMENTS = [ # TODO How to specify OpenCV? 'cv2>=3.1.0', 'numpy>=1.11.1,<1.12', 'Pillow>=3.4.2,<3.5', 'python-dateutil>=2.6.0,<2.7', 'pytz>=2016.7', 'PyYAML>=3.12,<3.2', 'schematics>=1.1.1,<1.2', 'scikit-learn>=0.18.1,<0.19', 'scipy>=0.18.1,<0.19', 'unicodecsv>=0.14.1,<0.15', ] SCRIPTS = ('export_metadata', 'ingest', 'read_barcodes', 'save_crops', 'segment') setup_data = { 'name': 'inselect', 'version': inselect.__version__, 'author': (u'Lawrence Hudson, Alice Heaton, Pieter Holtzhausen, ' u'Stéfan van der Walt'), 'author_email': '[email protected]', 'maintainer': 'Lawrence Hudson', 'maintainer_email': '[email protected]', 'url': 'https://github.com/NaturalHistoryMuseum/inselect/', 'license': 'Modified BSD', 'description': inselect.__doc__, 'long_description': inselect.__doc__, 'packages': [ 'inselect', 'inselect.gui', 'inselect.gui.plugins', 'inselect.gui.views', 'inselect.gui.views.boxes', 'inselect.lib', 'inselect.lib.templates', 'inselect.scripts', ], 'include_package_data': True, 'test_suite': 'inselect.tests', 'scripts': ['inselect/scripts/{0}.py'.format(script) for script in SCRIPTS], 'install_requires': REQUIREMENTS, 'extras_require': { 'gui': [ 'ExifRead>=2.1.2', 'humanize>=0.5.1', 'psutil>=5.0.0', 'PyQt5>=5.6.0' ], 'barcodes': ['gouda>=0.1.13', 'pylibdmtx>=0.1.6', 'pyzbar>=0.1.3'], 'windows': ['pywin32>=220'], 'development': ['coveralls>=1.1', 'mock>=2.0.0', 'nose>=1.3.7'], }, 'entry_points': { 'gui_scripts': ['inselect = inselect.gui.app:main'], 'console_scripts': ['{0} = inselect.scripts.{0}:main'.format(script) for script in SCRIPTS], }, 'classifiers': [ 'Development Status :: 4 - Beta', 'Topic :: Utilities', 'Topic :: Scientific/Engineering :: Bio-Informatics' 'Programming Language :: Python :: 3.5', ], } def setuptools_setup(): """setuptools setup""" from setuptools import setup setup(**setup_data) def 
_qt_files(site_packages): """Returns a list of tuples (src, dest) of Qt dependencies to be installed. Elements are instances of Path. site_packages should be an instance of Path to the site-packages directory. IF we leave cx_Freeze to do its thing then the entirety of PyQt5, Qt5 and uic are included in the installer. The only way to avoid horrible bloat is to hand-tune which files we include. This whole system is fucked beyond belief. """ from pathlib import Path return [ # Qt DLLs ( site_packages.joinpath('PyQt5/Qt/bin').joinpath(dep), dep ) for dep in ('Qt5Core.dll', 'Qt5Gui.dll', 'Qt5Widgets.dll') ] + [ # Qt plugins ( site_packages.joinpath('PyQt5/Qt/plugins/platforms').joinpath(dep), Path('platforms').joinpath(dep) ) for dep in ('qwindows.dll',) ] + [ # PyQt extension modules ( site_packages.joinpath('PyQt5').joinpath(dep), Path('PyQt5').joinpath(dep) ) for dep in ('__init__.py', 'Qt.pyd', 'QtCore.pyd', 'QtGui.pyd', 'QtWidgets.pyd') ] def cx_setup(): """cx_Freeze setup. Used for building Windows installers""" import scipy from pathlib import Path from distutils.sysconfig import get_python_lib from cx_Freeze import setup, Executable from pylibdmtx import pylibdmtx from pyzbar import pyzbar # Useful paths environment_root = Path(sys.executable).parent site_packages = Path(get_python_lib()) project_root = Path(__file__).parent # Files as tuples (source, dest) include_files = [ # Evil, evil, evil # cx_Freeze breaks pywintypes and pythoncom on Python 3.5 # https://bitbucket.org/anthony_tuininga/cx_freeze/issues/194/error-with-frozen-executable-using-35-and (site_packages.joinpath('win32/lib/pywintypes.py'), 'pywintypes.py'), (site_packages.joinpath('pythoncom.py'), 'pythoncom.py'), # Binary dependencies that are not detected (environment_root.joinpath('Library/bin/mkl_core.dll'), 'mkl_core.dll'), (environment_root.joinpath('Library/bin/mkl_intel_thread.dll'), 'mkl_intel_thread.dll'), (environment_root.joinpath('Library/bin/libiomp5md.dll'), 'libiomp5md.dll'), # 
Stylesheet (project_root.joinpath('inselect/gui/inselect.qss'), 'inselect.qss'), ] + [ # DLLs that are not detected because they are loaded by ctypes (dep._name, Path(dep._name).name)<|fim▁hole|> # Convert instances of Path to strs include_files = [(str(source), str(dest)) for source, dest in include_files] # Directories as strings include_files += [ # Fixes scipy freeze # http://stackoverflow.com/a/32822431/1773758 str(Path(scipy.__file__).parent), ] # Packages to exclude. exclude_packages = [ str(p.relative_to(site_packages)).replace('\\', '.') for p in site_packages.rglob('*/tests') ] setup( name=setup_data['name'], version=setup_data['version'], options={ 'build_exe': { 'packages': setup_data.get('packages', []) + [ 'urllib', 'sklearn.neighbors', 'win32com.gen_py', 'win32timezone', ], 'excludes': [ # '_bz2', # Required by sklearn '_decimal', '_elementtree', '_hashlib', '_lzma', '_ssl', 'curses', 'distutils', 'email', 'http', 'lib2to3', 'mock', 'nose', 'PyQt5', # 'pydoc', # Required by sklearn 'tcl', 'Tkinter', 'ttk', 'Tkconstants', # 'unittest', # Required by numpy.core.multiarray 'win32com.HTML', 'win32com.test', 'win32evtlog', 'win32pdh', 'win32trace', 'win32ui', 'win32wnet', 'xml', 'xmlrpc', 'inselect.tests', ] + exclude_packages, 'includes': [ ], 'include_files': include_files, 'include_msvcr': True, 'optimize': 2, }, 'bdist_msi': { 'upgrade_code': '{fe2ed61d-cd5e-45bb-9d16-146f725e522f}' } }, executables=[ Executable( script='inselect/scripts/inselect.py', targetName='inselect.exe', icon='icons/inselect.ico', base='Win32GUI', shortcutName='Inselect', # See http://stackoverflow.com/a/15736406 shortcutDir='ProgramMenuFolder' ) ] + [ Executable( script='inselect/scripts/{0}.py'.format(script), targetName='{0}.exe'.format(script), icon='icons/inselect.ico', base='Console' ) for script in SCRIPTS ], ) if (3, 5) <= sys.version_info: if 'bdist_msi' in sys.argv: cx_setup() else: setuptools_setup() else: sys.exit('Only Python >= 3.5 is supported')<|fim▁end|>
for dep in pylibdmtx.EXTERNAL_DEPENDENCIES + pyzbar.EXTERNAL_DEPENDENCIES ] + _qt_files(site_packages)
<|file_name|>BasicPublishMethodHandler.java<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2005-2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.andes.server.handler; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.andes.AMQException; import org.wso2.andes.amqp.AMQPUtils; import org.wso2.andes.exchange.ExchangeDefaults; import org.wso2.andes.framing.AMQShortString; import org.wso2.andes.framing.BasicPublishBody; import org.wso2.andes.framing.abstraction.MessagePublishInfo; import org.wso2.andes.protocol.AMQConstant; import org.wso2.andes.server.AMQChannel; import org.wso2.andes.server.exchange.Exchange; import org.wso2.andes.server.protocol.AMQProtocolSession; import org.wso2.andes.server.state.AMQStateManager; import org.wso2.andes.server.state.StateAwareMethodListener; import org.wso2.andes.server.virtualhost.VirtualHost; public class BasicPublishMethodHandler implements StateAwareMethodListener<BasicPublishBody> { private static final Log _logger = LogFactory.getLog(BasicPublishMethodHandler.class); private static final BasicPublishMethodHandler _instance = new BasicPublishMethodHandler(); public static BasicPublishMethodHandler getInstance() { return _instance; } private BasicPublishMethodHandler() { } public void methodReceived(AMQStateManager stateManager, BasicPublishBody body, int channelId) throws AMQException { 
AMQProtocolSession session = stateManager.getProtocolSession(); if (_logger.isDebugEnabled()) { _logger.debug("Publish received on channel " + channelId); } AMQShortString exchangeName = body.getExchange();<|fim▁hole|> // TODO: check the delivery tag field details - is it unique across the broker or per subscriber? if (exchangeName == null) { exchangeName = ExchangeDefaults.DEFAULT_EXCHANGE_NAME; } VirtualHost vHost = session.getVirtualHost(); Exchange exch = vHost.getExchangeRegistry().getExchange(exchangeName); // if the exchange does not exist we raise a channel exception if (exch == null) { throw body.getChannelException(AMQConstant.NOT_FOUND, "Unknown exchange name"); } else { // The partially populated BasicDeliver frame plus the received route body // is stored in the channel. Once the final body frame has been received // it is routed to the exchange. AMQChannel channel = session.getChannel(channelId); if (channel == null) { throw body.getChannelNotFoundException(channelId); } MessagePublishInfo info = session.getMethodRegistry().getProtocolVersionMethodConverter().convertToInfo(body); if (ExchangeDefaults.TOPIC_EXCHANGE_NAME.equals(exchangeName) && AMQPUtils.isWildCardDestination(info.getRoutingKey().toString())) { throw body.getChannelException(AMQConstant.INVALID_ROUTING_KEY, "Publishing messages to a wildcard " + "destination is not allowed"); } info.setExchange(exchangeName); channel.setPublishFrame(info, exch); } } }<|fim▁end|>
<|file_name|>auto_copy_daemon.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- """ The daemon that calls auto_copy.py uppon optical disc insertion """ import signal import sys import time sys.path.append('/usr/local/bin') import auto_copy SIGNAL_RECEIVED = False def run_daemon(config): """ Run the damon <|fim▁hole|> time.sleep(1) global SIGNAL_RECEIVED if SIGNAL_RECEIVED: auto_copy.auto_copy(config) SIGNAL_RECEIVED = False def signal_handler(dump1, dump2): global SIGNAL_RECEIVED SIGNAL_RECEIVED = True if __name__ == "__main__": main_config = auto_copy.read_config('/etc/auto_copy.yml') auto_copy.setup_logging(main_config) run_daemon(main_config)<|fim▁end|>
config: configParser object """ signal.signal(signal.SIGUSR1, signal_handler) while True:
<|file_name|>t411.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals, division, absolute_import from builtins import * # pylint: disable=unused-import, redefined-builtin from flexget import options, plugin from flexget.event import event from flexget.terminal import console from flexget.manager import Session try: from flexget.plugins.internal.api_t411 import (T411Proxy) except: raise plugin.DependencyError(issued_by='cli_series', missing='api_t411', message='Torrent411 commandline interface not loaded') def do_cli(manager, options): """ Dispach cli action<|fim▁hole|> :param manager: :param options: :return: """ if options.t411_action == 'list-cats': print_categories(parent_category_name=options.category) elif options.t411_action == 'add-auth': add_credential(username=options.username, password=options.password) elif options.t411_action == 'list-auth': pass elif options.t411_action == 'list-terms': print_terms(category_name=options.category, term_type_name=options.type) def add_credential(username, password): """ Add (or update) credential into database :param username: :param password: :return: """ proxy = T411Proxy() is_new = proxy.add_credential(username=username, password=password) if is_new: console('Credential successfully added') else: console('Credential successfully updated') def print_terms(category_name=None, term_type_name=None): proxy = T411Proxy() proxy.set_credential() formatting_main = '%-60s %-5s %-5s' formatting_sub = ' %-55s %-5s %-5s' console(formatting_main % ('Name', 'PID', 'ID')) if term_type_name: console("Not yet implemented !") else: with Session() as session: categories = proxy.find_categories(category_name=category_name, is_sub_category=True, session=session) for category in categories: console(formatting_main % (category.name, category.parent_id, category.id)) for term_type in category.term_types: console(formatting_main % (term_type.name, '', term_type.id)) for term in term_type.terms: console(formatting_sub % 
(term.name, term_type.id, term.id)) def print_categories(parent_category_name=None): """ Print category and its sub-categories :param parent_category_name: if None, all categories will be displayed :return: """ proxy = T411Proxy() proxy.set_credential() with Session() as session: if parent_category_name is None: categories = proxy.main_categories(session=session) else: categories = proxy.find_categories(parent_category_name, session=session) formatting_main = '%-30s %-5s %-5s' formatting_sub = ' %-25s %-5s %-5s' console(formatting_main % ('Category name', 'PID', 'ID')) for category in categories: console(formatting_main % (category.name, category.parent_id, category.id)) for sub_category in category.sub_categories: console(formatting_sub % (sub_category.name, sub_category.parent_id, sub_category.id)) @event('options.register') def register_parser_arguments(): # Register the command parser = options.register_command('t411', do_cli, help='view and manipulate the Torrent411 plugin database') # Set up our subparsers action_parsers = parser.add_subparsers(title='actions', metavar='<action>', dest='t411_action') auth_parser = action_parsers.add_parser('add-auth', help='authorize Flexget to access your Torrent411 account') auth_parser.add_argument('username', metavar='<username>', help='Your t411 username') auth_parser.add_argument('password', metavar='<password>', help='Your t411 password') list_categories_parser = action_parsers.add_parser('list-cats', help='list available categories on Torrent411') list_categories_parser.add_argument('category', nargs='?', metavar='<category>', help='limit list to all, main or sub categories (default: %(default)s)') list_terms = action_parsers.add_parser('list-terms', help='list available terms usable on Torrent411') list_terms.add_argument('--category', help='show terms only for this category') list_terms.add_argument('--type', help='show terms only for this term type')<|fim▁end|>
<|file_name|>BookmarkList-mocha.js<|end_file_name|><|fim▁begin|>/*global beforeEach, describe, it,*/ /*eslint no-unused-expressions: 0*/ 'use strict'; require('./testdom')('<html><body></body></html>'); var expect = require('chai').expect; var React = require('react/addons'); var TestUtils = React.addons.TestUtils; var BookmarkList = require('./../../react/component/BookmarkList'); var BookmarkListItem = require('./../../react/component/BookmarkListItem'); describe('BookmarkList', function() { var component; beforeEach(function() { <|fim▁hole|> component = TestUtils.renderIntoDocument(<BookmarkList />); }); it('renders a dom element', function() { expect(component.getDOMNode().tagName.toLowerCase()).to.be.not.empty; }); describe('with bookmarks', function() { var bookmarks, items; beforeEach(function() { bookmarks = [1, 23, 42, 678].map(v => ({ url: `http://some-url.com/${v}`, tags: `#static #${v}` })); component.setProps({bookmarks: bookmarks}); items = TestUtils.scryRenderedComponentsWithType(component, BookmarkListItem); }); it('displays a BookmarkListItem for each data element', function() { expect(items).to.have.length.of(bookmarks.length); }); it('passes the url as children to the BookmarkListItem', function() { items.forEach((item, idx) => expect(item.props.children).to.equal(bookmarks[idx].url)); }); it('passes the tags via the tags attribute to the BookmarkListItem', function() { items.forEach((item, idx) => expect(item.props.tags).to.equal(bookmarks[idx].tags)); }); }); });<|fim▁end|>
<|file_name|>testUtils.ts<|end_file_name|><|fim▁begin|>import { ICompilerOptions } from '../compilerOptions/interfaces'; import { transformModule } from './core/transformModule'; import { generate } from './generator/generator'; import { ASTNode } from './interfaces/AST'; import { ITransformer, ITransformerCommon } from './interfaces/ITransformer'; import { ITransformerRequireStatementCollection } from './interfaces/ITransformerRequireStatements'; import { ImportType } from './interfaces/ImportType'; import { parseTypeScript } from './parser'; import { ISerializableTransformationContext } from './transformer'; function cleanupForTest(node) { delete node.loc; delete node.raw; delete node.range; } export function initCommonTransform(props: { code: string; compilerOptions?: ICompilerOptions; jsx?: boolean; props?: ISerializableTransformationContext;<|fim▁hole|> transformers: Array<ITransformer>; }) { const requireStatementCollection: ITransformerRequireStatementCollection = []; function onRequireCallExpression(importType: ImportType, statement: ASTNode) { // making sure we have haven't emitted the same property twice if (!statement['emitted']) { Object.defineProperty(statement, 'emitted', { enumerable: false, value: true }); cleanupForTest(statement.arguments[0]); cleanupForTest(statement); cleanupForTest(statement.callee); requireStatementCollection.push({ importType, statement }); } } const ast = parseTypeScript(props.code, { jsx: props.jsx }); const userProps: ISerializableTransformationContext = props.props || {}; userProps.compilerOptions = props.compilerOptions || {}; const visitorProps: ITransformerCommon = { onRequireCallExpression, transformationContext: userProps }; const tranformers = []; for (const t of props.transformers) { if (t.commonVisitors) { tranformers.push(t.commonVisitors(visitorProps)); } } transformModule({ root: ast, transformers: tranformers }); const res = generate(ast, {}); return { code: res, requireStatementCollection }; }<|fim▁end|>
<|file_name|>ClusterFactory.java<|end_file_name|><|fim▁begin|>package org.eggermont.hm.cluster; import cern.colt.matrix.DoubleFactory1D; import cern.colt.matrix.DoubleMatrix1D; import cern.colt.matrix.DoubleMatrix2D; public class ClusterFactory { private final DoubleMatrix2D x; private final DoubleMatrix1D blocks; private final DoubleMatrix1D vMin; private final DoubleMatrix1D vMax; private final int ndof; public ClusterFactory(int d, int nidx, int ndof) { this.ndof = ndof; this.blocks = DoubleFactory1D.dense.make(d * nidx); this.x = blocks.like2D(nidx, d); this.vMin = DoubleFactory1D.dense.make(d); this.vMax = DoubleFactory1D.dense.make(d); }<|fim▁hole|> }<|fim▁end|>
<|file_name|>chip8.rs<|end_file_name|><|fim▁begin|>use std::io::Read; use std::fmt; extern crate rand; use self::rand::Rng; use instruction::{Opcode, Instruction}; use enum_primitive::FromPrimitive; const RAM_SIZE: usize = 4096; const GPR_COUNT: usize = 16; const NUMBER_OF_KEYS: usize = 16; // 60Hz const TIME_STEP: f32 = 1.0 / 60.0; const FONT_SET: [u8; 80] = [ 0xf0, 0x90, 0x90, 0x90, 0xf0, // 0 0x20, 0x60, 0x20, 0x20, 0x70, // 1 0xf0, 0x10, 0xf0, 0x80, 0xf0, // 2 0xf0, 0x10, 0xf0, 0x10, 0xf0, // 3 0x90, 0x90, 0xf0, 0x10, 0x10, // 4 0xf0, 0x80, 0xf0, 0x10, 0xf0, // 5 0xf0, 0x80, 0xf0, 0x90, 0xf0, // 6 0xf0, 0x10, 0x20, 0x40, 0x40, // 7 0xf0, 0x90, 0xf0, 0x90, 0xf0, // 8 0xf0, 0x90, 0xf0, 0x10, 0xf0, // 9 0xf0, 0x90, 0xf0, 0x90, 0x90, // A 0xe0, 0x90, 0xe0, 0x90, 0xe0, // B 0xf0, 0x80, 0x80, 0x80, 0x80, // C 0xe0, 0x90, 0x90, 0x90, 0xe0, // D 0xf0, 0x80, 0xf0, 0x80, 0xf0, // E 0xf0, 0x80, 0xf0, 0x80, 0x80, // F ]; pub struct CHIP8 { ram: Vec<u8>, // General Purpose Registers v: Vec<u8>, // Index Register i: u16, // Program Counter pc: u16, // Pixels pub gfx: Vec<u8>, // Timers delay_timer: u8, sound_timer: u8, // Stack & Stack Pointer stack: Vec<u16>, // Input state pub key: Vec<u8>, jmp: bool,<|fim▁hole|> pub draw: bool, pub done: bool, } impl fmt::Display for CHIP8 { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "PC: {}\nInstruction: {}\nRegisters: {:?}\nDelay Timer: {}\nSound Timer: {}\nI: {}", self.pc, self.instruction, self.v, self.delay_timer, self.sound_timer, self.i) } } impl CHIP8 { pub fn tick(&mut self, dt: f32) { self.jmp = false; self.draw = false; self.instruction.parse(self.ram[self.pc as usize], self.ram[self.pc as usize + 1]); // println!("{}", self); match Opcode::from_u16(self.instruction.opcode).unwrap() { Opcode::sys | Opcode::noop => {}, Opcode::cls => self.cls(), Opcode::ret => self.ret(), Opcode::jmp => self.jmp(), Opcode::call => self.call(), Opcode::se_vb => self.se_vb(), Opcode::sne_vb => self.sne_vb(), Opcode::se_vv => 
self.se_vv(), Opcode::ld_vb => self.ld_vb(), Opcode::add_vb => self.add_vb(), Opcode::ld_vv => self.ld_vv(), Opcode::or => self.or(), Opcode::and => self.and(), Opcode::xor => self.xor(), Opcode::add_vv => self.add_vv(), Opcode::sub => self.sub(), Opcode::shr => self.shr(), Opcode::subn => self.subn(), Opcode::shl => self.shl(), Opcode::sne_vv => self.sne_vv(), Opcode::ld_i => self.ld_i(), Opcode::jmp_v => self.jmp_v(), Opcode::rnd => self.rnd(), Opcode::drw => self.drw(), Opcode::skp => self.skp(), Opcode::sknp => self.sknp(), Opcode::ld_vdt => self.ld_vdt(), Opcode::ld_vk => self.ld_vk(), Opcode::ld_dtv => self.ld_dtv(), Opcode::ld_stv => self.ld_stv(), Opcode::add_iv => self.add_iv(), Opcode::ld_fv => self.ld_fv(), Opcode::ld_bv => self.ld_bv(), Opcode::ld_iv => self.ld_iv(), Opcode::ld_vi => self.ld_vi(), _ => panic!("Unrecognized opcode {:X}", self.instruction.opcode), } if !self.jmp { self.pc += 2; } self.time_acc += dt; while self.time_acc >= TIME_STEP { if self.delay_timer > 0 { self.delay_timer -= 1; } if self.sound_timer > 0 { self.sound_timer -= 1; } self.time_acc -= TIME_STEP; } } fn cls(&mut self) { self.gfx = vec![0; (::SCREEN_WIDTH * ::SCREEN_HEIGHT) as usize]; } fn ret(&mut self) { self.pc = self.stack.pop().unwrap(); } fn jmp(&mut self) { self.pc = self.instruction.nnn; self.jmp = true; } fn call(&mut self) { self.stack.push(self.pc); self.pc = self.instruction.nnn; self.jmp = true; } fn se_vb(&mut self) { if self.v[self.instruction.x] == self.instruction.kk { self.pc += 2; } } fn sne_vb(&mut self) { if self.v[self.instruction.x] != self.instruction.kk { self.pc += 2; } } fn se_vv(&mut self) { if self.v[self.instruction.x] == self.v[self.instruction.y] { self.pc += 2; } } fn ld_vb(&mut self) { self.v[self.instruction.x] = self.instruction.kk; } fn add_vb(&mut self) { self.v[self.instruction.x] = self.v[self.instruction.x].wrapping_add(self.instruction.kk); } fn ld_vv(&mut self) { self.v[self.instruction.x] = self.v[self.instruction.y]; } fn or(&mut 
self) { self.v[self.instruction.x] |= self.v[self.instruction.y]; } fn and(&mut self) { self.v[self.instruction.x] &= self.v[self.instruction.y]; } fn xor(&mut self) { self.v[self.instruction.x] ^= self.v[self.instruction.y]; } fn add_vv(&mut self) { let x = self.v[self.instruction.x] as u16; let y = self.v[self.instruction.y] as u16; let res = x + y; // println!("add_vv result: {}", res); self.v[0xF] = (res > 255) as u8; self.v[self.instruction.x] = res as u8; // println!("{} + {}; VF is {}", self.v[self.instruction.y], self.v[self.instruction.x], self.v[0xF]); } fn sub(&mut self) { // println!("sub x:{} y:{}", self.v[self.instruction.x], self.v[self.instruction.y]) self.v[0xF] = (self.v[self.instruction.x] > self.v[self.instruction.y]) as u8; self.v[self.instruction.x] = self.v[self.instruction.x].wrapping_sub(self.v[self.instruction.y]); // println!("{} - {}; VF is {}", self.v[self.instruction.x], self.v[self.instruction.y], self.v[0xF]); } fn shr(&mut self) { self.v[0xf] = ((self.v[self.instruction.x] & 0xF0) >> 7 == 1) as u8; self.v[self.instruction.x] = self.v[self.instruction.x] / 2; } fn subn(&mut self) { self.v[0xF] = (self.v[self.instruction.y] > self.v[self.instruction.x]) as u8; self.v[self.instruction.x] = self.v[self.instruction.y].wrapping_sub(self.v[self.instruction.x]); // println!("{} - {}; VF is {}", self.v[self.instruction.y], self.v[self.instruction.x], self.v[0xF]); } fn shl(&mut self) { self.v[0xf] = (self.v[self.instruction.x] & 0xF == 1) as u8; self.v[self.instruction.x] *= 2; } fn sne_vv(&mut self) { if self.v[self.instruction.x] != self.v[self.instruction.y] { self.pc += 2; } } fn ld_i(&mut self) { self.i = self.instruction.nnn; } fn jmp_v(&mut self) { self.pc = (self.v[0] as u16) + self.instruction.nnn; self.jmp = true; } fn rnd(&mut self) { let mut rng = rand::thread_rng(); self.v[self.instruction.x] = rng.gen::<u8>() & self.instruction.kk; } fn drw(&mut self) { let x = self.v[self.instruction.x] as usize; let y = 
self.v[self.instruction.y] as usize; let height = self.instruction.n as usize; self.v[0xF] = 0; for yline in 0..height { let pixel = self.ram[(self.i + yline as u16) as usize]; for xline in 0..8 { if pixel & (0x80 >> xline) != 0 { let mut index = (x + xline + (y + yline) * 64) as u16; index = if index >= 64*32 { 64*32-1 } else { index }; if self.gfx[index as usize] == 1 { self.v[0xF] = 1; } self.gfx[index as usize] ^= 1; } } } self.draw = true; } fn skp(&mut self) { if self.key[self.v[self.instruction.x] as usize] == 1 { self.pc += 2; } } fn sknp(&mut self) { if self.key[self.v[self.instruction.x] as usize] == 0 { self.pc += 2; } } fn ld_vdt(&mut self) { self.v[self.instruction.x] = self.delay_timer; } fn ld_vk(&mut self) { let mut key_pressed = false; for i in 0..self.key.len() { if self.key[i] == 1 { self.v[self.instruction.x] = i as u8; key_pressed = true; } } if !key_pressed { self.pc -= 2; } } fn ld_dtv(&mut self) { self.delay_timer = self.v[self.instruction.x]; } fn ld_stv(&mut self) { self.sound_timer = self.v[self.instruction.x]; } fn add_iv(&mut self) { self.i += self.v[self.instruction.x] as u16; } fn ld_fv(&mut self) { self.i = (self.v[self.instruction.x] * 5) as u16; } fn ld_bv(&mut self) { self.ram[self.i as usize] = self.v[self.instruction.x] / 100; self.ram[self.i as usize + 1] = (self.v[self.instruction.x] / 10) % 10; self.ram[self.i as usize + 2] = (self.v[self.instruction.x] % 100) % 10; } fn ld_iv(&mut self) { for i in 0usize..self.instruction.x + 1 { self.ram[self.i as usize + i] = self.v[i]; } } fn ld_vi(&mut self) { for i in 0usize..self.instruction.x + 1 { self.v[i] = self.ram[self.i as usize + i]; } } pub fn new(r: &mut Read) -> CHIP8 { let mut ram = vec![0; RAM_SIZE]; // initialize font set for i in 0..80 { ram[i] = FONT_SET[i]; } let mut rom: Vec<u8> = Vec::new(); r.read_to_end(&mut rom).unwrap(); for i in 0..rom.len() { ram[i + 512] = rom[i]; } CHIP8 { ram: ram, v: vec![0; GPR_COUNT], i: 0, pc: 0x200, gfx: vec![0; (::SCREEN_WIDTH * 
::SCREEN_HEIGHT) as usize], delay_timer: 0, sound_timer: 0, stack: Vec::new(), key: vec![0; NUMBER_OF_KEYS], jmp: false, time_acc: 0.0, instruction: Instruction::new(), draw: false, done: false, } } }<|fim▁end|>
time_acc: f32, instruction: Instruction,
<|file_name|>script.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2009-2010 Satoshi Nakamoto // Copyright (c) 2009-2012 The Bitcoin developers // Copyright (c) 2013-2014 Bongger Developers // Distributed under the MIT/X11 software license, see the accompanying // file COPYING or http://www.opensource.org/licenses/mit-license.php. #include <boost/foreach.hpp> #include <boost/tuple/tuple.hpp> using namespace std; using namespace boost; #include "script.h" #include "keystore.h" #include "bignum.h" #include "key.h" #include "main.h" #include "sync.h" #include "util.h" bool CheckSig(vector<unsigned char> vchSig, const vector<unsigned char> &vchPubKey, const CScript &scriptCode, const CTransaction& txTo, unsigned int nIn, int nHashType, int flags); typedef vector<unsigned char> valtype; static const valtype vchFalse(0); static const valtype vchZero(0); static const valtype vchTrue(1, 1); static const CBigNum bnZero(0); static const CBigNum bnOne(1); static const CBigNum bnFalse(0); static const CBigNum bnTrue(1); static const size_t nMaxNumSize = 4; CBigNum CastToBigNum(const valtype& vch) { if (vch.size() > nMaxNumSize) throw runtime_error("CastToBigNum() : overflow"); // Get rid of extra leading zeros return CBigNum(CBigNum(vch).getvch()); } bool CastToBool(const valtype& vch) { for (unsigned int i = 0; i < vch.size(); i++) { if (vch[i] != 0) { // Can be negative zero if (i == vch.size()-1 && vch[i] == 0x80) return false; return true; } } return false; } // // Script is a stack machine (like Forth) that evaluates a predicate // returning a bool indicating valid or not. There are no loops. 
// #define stacktop(i) (stack.at(stack.size()+(i))) #define altstacktop(i) (altstack.at(altstack.size()+(i))) static inline void popstack(vector<valtype>& stack) { if (stack.empty()) throw runtime_error("popstack() : stack empty"); stack.pop_back(); } const char* GetTxnOutputType(txnouttype t) { switch (t) { case TX_NONSTANDARD: return "nonstandard"; case TX_PUBKEY: return "pubkey"; case TX_PUBKEYHASH: return "pubkeyhash"; case TX_SCRIPTHASH: return "scripthash"; case TX_MULTISIG: return "multisig"; } return NULL; } const char* GetOpName(opcodetype opcode) { switch (opcode) { // push value case OP_0 : return "0"; case OP_PUSHDATA1 : return "OP_PUSHDATA1"; case OP_PUSHDATA2 : return "OP_PUSHDATA2"; case OP_PUSHDATA4 : return "OP_PUSHDATA4"; case OP_1NEGATE : return "-1"; case OP_RESERVED : return "OP_RESERVED"; case OP_1 : return "1"; case OP_2 : return "2"; case OP_3 : return "3"; case OP_4 : return "4"; case OP_5 : return "5"; case OP_6 : return "6"; case OP_7 : return "7"; case OP_8 : return "8"; case OP_9 : return "9"; case OP_10 : return "10"; case OP_11 : return "11"; case OP_12 : return "12"; case OP_13 : return "13"; case OP_14 : return "14"; case OP_15 : return "15"; case OP_16 : return "16"; // control case OP_NOP : return "OP_NOP"; case OP_VER : return "OP_VER"; case OP_IF : return "OP_IF"; case OP_NOTIF : return "OP_NOTIF"; case OP_VERIF : return "OP_VERIF"; case OP_VERNOTIF : return "OP_VERNOTIF"; case OP_ELSE : return "OP_ELSE"; case OP_ENDIF : return "OP_ENDIF"; case OP_VERIFY : return "OP_VERIFY"; case OP_RETURN : return "OP_RETURN"; // stack ops case OP_TOALTSTACK : return "OP_TOALTSTACK"; case OP_FROMALTSTACK : return "OP_FROMALTSTACK"; case OP_2DROP : return "OP_2DROP"; case OP_2DUP : return "OP_2DUP"; case OP_3DUP : return "OP_3DUP"; case OP_2OVER : return "OP_2OVER"; case OP_2ROT : return "OP_2ROT"; case OP_2SWAP : return "OP_2SWAP"; case OP_IFDUP : return "OP_IFDUP"; case OP_DEPTH : return "OP_DEPTH"; case OP_DROP : return "OP_DROP"; case 
OP_DUP : return "OP_DUP"; case OP_NIP : return "OP_NIP"; case OP_OVER : return "OP_OVER"; case OP_PICK : return "OP_PICK"; case OP_ROLL : return "OP_ROLL"; case OP_ROT : return "OP_ROT"; case OP_SWAP : return "OP_SWAP"; case OP_TUCK : return "OP_TUCK"; // splice ops case OP_CAT : return "OP_CAT"; case OP_SUBSTR : return "OP_SUBSTR"; case OP_LEFT : return "OP_LEFT"; case OP_RIGHT : return "OP_RIGHT"; case OP_SIZE : return "OP_SIZE"; // bit logic case OP_INVERT : return "OP_INVERT"; case OP_AND : return "OP_AND"; case OP_OR : return "OP_OR"; case OP_XOR : return "OP_XOR"; case OP_EQUAL : return "OP_EQUAL"; case OP_EQUALVERIFY : return "OP_EQUALVERIFY"; case OP_RESERVED1 : return "OP_RESERVED1"; case OP_RESERVED2 : return "OP_RESERVED2"; // numeric case OP_1ADD : return "OP_1ADD"; case OP_1SUB : return "OP_1SUB"; case OP_2MUL : return "OP_2MUL"; case OP_2DIV : return "OP_2DIV"; case OP_NEGATE : return "OP_NEGATE"; case OP_ABS : return "OP_ABS"; case OP_NOT : return "OP_NOT"; case OP_0NOTEQUAL : return "OP_0NOTEQUAL"; case OP_ADD : return "OP_ADD"; case OP_SUB : return "OP_SUB"; case OP_MUL : return "OP_MUL"; case OP_DIV : return "OP_DIV"; case OP_MOD : return "OP_MOD"; case OP_LSHIFT : return "OP_LSHIFT"; case OP_RSHIFT : return "OP_RSHIFT"; case OP_BOOLAND : return "OP_BOOLAND"; case OP_BOOLOR : return "OP_BOOLOR"; case OP_NUMEQUAL : return "OP_NUMEQUAL"; case OP_NUMEQUALVERIFY : return "OP_NUMEQUALVERIFY"; case OP_NUMNOTEQUAL : return "OP_NUMNOTEQUAL"; case OP_LESSTHAN : return "OP_LESSTHAN"; case OP_GREATERTHAN : return "OP_GREATERTHAN"; case OP_LESSTHANOREQUAL : return "OP_LESSTHANOREQUAL"; case OP_GREATERTHANOREQUAL : return "OP_GREATERTHANOREQUAL"; case OP_MIN : return "OP_MIN"; case OP_MAX : return "OP_MAX"; case OP_WITHIN : return "OP_WITHIN"; // crypto case OP_RIPEMD160 : return "OP_RIPEMD160"; case OP_SHA1 : return "OP_SHA1"; case OP_SHA256 : return "OP_SHA256"; case OP_HASH160 : return "OP_HASH160"; case OP_HASH256 : return "OP_HASH256"; case 
OP_CODESEPARATOR : return "OP_CODESEPARATOR"; case OP_CHECKSIG : return "OP_CHECKSIG"; case OP_CHECKSIGVERIFY : return "OP_CHECKSIGVERIFY"; case OP_CHECKMULTISIG : return "OP_CHECKMULTISIG"; case OP_CHECKMULTISIGVERIFY : return "OP_CHECKMULTISIGVERIFY"; // expanson case OP_NOP1 : return "OP_NOP1"; case OP_NOP2 : return "OP_NOP2"; case OP_NOP3 : return "OP_NOP3"; case OP_NOP4 : return "OP_NOP4"; case OP_NOP5 : return "OP_NOP5"; case OP_NOP6 : return "OP_NOP6"; case OP_NOP7 : return "OP_NOP7"; case OP_NOP8 : return "OP_NOP8"; case OP_NOP9 : return "OP_NOP9"; case OP_NOP10 : return "OP_NOP10"; // template matching params case OP_PUBKEYHASH : return "OP_PUBKEYHASH"; case OP_PUBKEY : return "OP_PUBKEY"; case OP_INVALIDOPCODE : return "OP_INVALIDOPCODE"; default: return "OP_UNKNOWN"; } } bool IsCanonicalPubKey(const valtype &vchPubKey) { if (vchPubKey.size() < 33) return error("Non-canonical public key: too short"); if (vchPubKey[0] == 0x04) { if (vchPubKey.size() != 65) return error("Non-canonical public key: invalid length for uncompressed key"); } else if (vchPubKey[0] == 0x02 || vchPubKey[0] == 0x03) { if (vchPubKey.size() != 33) return error("Non-canonical public key: invalid length for compressed key"); } else { return error("Non-canonical public key: compressed nor uncompressed"); } return true; } bool IsCanonicalSignature(const valtype &vchSig) { // See https://bitcointalk.org/index.php?topic=8392.msg127623#msg127623 // A canonical signature exists of: <30> <total len> <02> <len R> <R> <02> <len S> <S> <hashtype> // Where R and S are not negative (their first byte has its highest bit not set), and not // excessively padded (do not start with a 0 byte, unless an otherwise negative number follows, // in which case a single 0 byte is necessary and even required). 
if (vchSig.size() < 9) return error("Non-canonical signature: too short"); if (vchSig.size() > 73) return error("Non-canonical signature: too long"); unsigned char nHashType = vchSig[vchSig.size() - 1] & (~(SIGHASH_ANYONECANPAY)); if (nHashType < SIGHASH_ALL || nHashType > SIGHASH_SINGLE) return error("Non-canonical signature: unknown hashtype byte"); if (vchSig[0] != 0x30) return error("Non-canonical signature: wrong type"); if (vchSig[1] != vchSig.size()-3) return error("Non-canonical signature: wrong length marker"); unsigned int nLenR = vchSig[3]; if (5 + nLenR >= vchSig.size()) return error("Non-canonical signature: S length misplaced"); unsigned int nLenS = vchSig[5+nLenR]; if ((unsigned long)(nLenR+nLenS+7) != vchSig.size()) return error("Non-canonical signature: R+S length mismatch"); const unsigned char *R = &vchSig[4]; if (R[-2] != 0x02) return error("Non-canonical signature: R value type mismatch"); if (nLenR == 0) return error("Non-canonical signature: R length is zero"); if (R[0] & 0x80) return error("Non-canonical signature: R value negative"); if (nLenR > 1 && (R[0] == 0x00) && !(R[1] & 0x80)) return error("Non-canonical signature: R value excessively padded"); const unsigned char *S = &vchSig[6+nLenR]; if (S[-2] != 0x02) return error("Non-canonical signature: S value type mismatch"); if (nLenS == 0) return error("Non-canonical signature: S length is zero"); if (S[0] & 0x80) return error("Non-canonical signature: S value negative"); if (nLenS > 1 && (S[0] == 0x00) && !(S[1] & 0x80)) return error("Non-canonical signature: S value excessively padded"); return true; } bool EvalScript(vector<vector<unsigned char> >& stack, const CScript& script, const CTransaction& txTo, unsigned int nIn, unsigned int flags, int nHashType) { CAutoBN_CTX pctx; CScript::const_iterator pc = script.begin(); CScript::const_iterator pend = script.end(); CScript::const_iterator pbegincodehash = script.begin(); opcodetype opcode; valtype vchPushValue; vector<bool> vfExec; 
vector<valtype> altstack; if (script.size() > 10000) return false; int nOpCount = 0; bool fStrictEncodings = flags & SCRIPT_VERIFY_STRICTENC; try { while (pc < pend) { bool fExec = !count(vfExec.begin(), vfExec.end(), false); // // Read instruction // if (!script.GetOp(pc, opcode, vchPushValue)) return false; if (vchPushValue.size() > MAX_SCRIPT_ELEMENT_SIZE) return false; if (opcode > OP_16 && ++nOpCount > 201) return false; if (opcode == OP_CAT || opcode == OP_SUBSTR || opcode == OP_LEFT || opcode == OP_RIGHT || opcode == OP_INVERT || opcode == OP_AND || opcode == OP_OR || opcode == OP_XOR || opcode == OP_2MUL || opcode == OP_2DIV || opcode == OP_MUL || opcode == OP_DIV || opcode == OP_MOD || opcode == OP_LSHIFT || opcode == OP_RSHIFT) return false; // Disabled opcodes. if (fExec && 0 <= opcode && opcode <= OP_PUSHDATA4) stack.push_back(vchPushValue); else if (fExec || (OP_IF <= opcode && opcode <= OP_ENDIF)) switch (opcode) { // // Push value // case OP_1NEGATE: case OP_1: case OP_2: case OP_3: case OP_4: case OP_5: case OP_6: case OP_7: case OP_8: case OP_9: case OP_10: case OP_11: case OP_12: case OP_13: case OP_14: case OP_15: case OP_16: { // ( -- value) CBigNum bn((int)opcode - (int)(OP_1 - 1)); stack.push_back(bn.getvch()); } break; // // Control // case OP_NOP: case OP_NOP1: case OP_NOP2: case OP_NOP3: case OP_NOP4: case OP_NOP5: case OP_NOP6: case OP_NOP7: case OP_NOP8: case OP_NOP9: case OP_NOP10: break; case OP_IF: case OP_NOTIF: { // <expression> if [statements] [else [statements]] endif bool fValue = false; if (fExec) { if (stack.size() < 1) return false; valtype& vch = stacktop(-1); fValue = CastToBool(vch); if (opcode == OP_NOTIF) fValue = !fValue; popstack(stack); } vfExec.push_back(fValue); } break; case OP_ELSE: { if (vfExec.empty()) return false; vfExec.back() = !vfExec.back(); } break; case OP_ENDIF: { if (vfExec.empty()) return false; vfExec.pop_back(); } break; case OP_VERIFY: { // (true -- ) or // (false -- false) and return if 
(stack.size() < 1) return false; bool fValue = CastToBool(stacktop(-1)); if (fValue) popstack(stack); else return false; } break; case OP_RETURN: { return false; } break; // // Stack ops // case OP_TOALTSTACK: { if (stack.size() < 1) return false; altstack.push_back(stacktop(-1)); popstack(stack); } break; case OP_FROMALTSTACK: { if (altstack.size() < 1) return false; stack.push_back(altstacktop(-1)); popstack(altstack); } break; case OP_2DROP: { // (x1 x2 -- ) if (stack.size() < 2) return false; popstack(stack); popstack(stack); } break; case OP_2DUP: { // (x1 x2 -- x1 x2 x1 x2) if (stack.size() < 2) return false; valtype vch1 = stacktop(-2); valtype vch2 = stacktop(-1); stack.push_back(vch1); stack.push_back(vch2); } break; case OP_3DUP: { // (x1 x2 x3 -- x1 x2 x3 x1 x2 x3) if (stack.size() < 3) return false; valtype vch1 = stacktop(-3); valtype vch2 = stacktop(-2); valtype vch3 = stacktop(-1); stack.push_back(vch1); stack.push_back(vch2); stack.push_back(vch3); } break; case OP_2OVER: { // (x1 x2 x3 x4 -- x1 x2 x3 x4 x1 x2) if (stack.size() < 4) return false; valtype vch1 = stacktop(-4); valtype vch2 = stacktop(-3); stack.push_back(vch1); stack.push_back(vch2); } break; case OP_2ROT: { // (x1 x2 x3 x4 x5 x6 -- x3 x4 x5 x6 x1 x2) if (stack.size() < 6) return false; valtype vch1 = stacktop(-6); valtype vch2 = stacktop(-5); stack.erase(stack.end()-6, stack.end()-4); stack.push_back(vch1); stack.push_back(vch2); } break; case OP_2SWAP: { // (x1 x2 x3 x4 -- x3 x4 x1 x2) if (stack.size() < 4) return false; swap(stacktop(-4), stacktop(-2)); swap(stacktop(-3), stacktop(-1)); } break; case OP_IFDUP: { // (x - 0 | x x) if (stack.size() < 1) return false; valtype vch = stacktop(-1); if (CastToBool(vch)) stack.push_back(vch); } break; case OP_DEPTH: { // -- stacksize CBigNum bn(stack.size()); stack.push_back(bn.getvch()); } break; case OP_DROP: { // (x -- ) if (stack.size() < 1) return false; popstack(stack); } break; case OP_DUP: { // (x -- x x) if (stack.size() < 1) 
return false; valtype vch = stacktop(-1); stack.push_back(vch); } break; case OP_NIP: { // (x1 x2 -- x2) if (stack.size() < 2) return false; stack.erase(stack.end() - 2); } break; case OP_OVER: { // (x1 x2 -- x1 x2 x1) if (stack.size() < 2) return false; valtype vch = stacktop(-2); stack.push_back(vch); } break; case OP_PICK: case OP_ROLL: { // (xn ... x2 x1 x0 n - xn ... x2 x1 x0 xn) // (xn ... x2 x1 x0 n - ... x2 x1 x0 xn) if (stack.size() < 2) return false; int n = CastToBigNum(stacktop(-1)).getint(); popstack(stack); if (n < 0 || n >= (int)stack.size()) return false; valtype vch = stacktop(-n-1); if (opcode == OP_ROLL) stack.erase(stack.end()-n-1); stack.push_back(vch); } break; case OP_ROT: { // (x1 x2 x3 -- x2 x3 x1) // x2 x1 x3 after first swap // x2 x3 x1 after second swap if (stack.size() < 3) return false; swap(stacktop(-3), stacktop(-2)); swap(stacktop(-2), stacktop(-1)); } break; case OP_SWAP: { // (x1 x2 -- x2 x1) if (stack.size() < 2) return false; swap(stacktop(-2), stacktop(-1)); } break; case OP_TUCK: { // (x1 x2 -- x2 x1 x2) if (stack.size() < 2) return false; valtype vch = stacktop(-1); stack.insert(stack.end()-2, vch); } break; case OP_SIZE: { // (in -- in size) if (stack.size() < 1) return false; CBigNum bn(stacktop(-1).size()); stack.push_back(bn.getvch()); } break; // // Bitwise logic // case OP_EQUAL: case OP_EQUALVERIFY: //case OP_NOTEQUAL: // use OP_NUMNOTEQUAL { // (x1 x2 - bool) if (stack.size() < 2) return false; valtype& vch1 = stacktop(-2); valtype& vch2 = stacktop(-1); bool fEqual = (vch1 == vch2); // OP_NOTEQUAL is disabled because it would be too easy to say // something like n != 1 and have some wiseguy pass in 1 with extra // zero bytes after it (numerically, 0x01 == 0x0001 == 0x000001) //if (opcode == OP_NOTEQUAL) // fEqual = !fEqual; popstack(stack); popstack(stack); stack.push_back(fEqual ? 
vchTrue : vchFalse); if (opcode == OP_EQUALVERIFY) { if (fEqual) popstack(stack); else return false; } } break; // // Numeric // case OP_1ADD: case OP_1SUB: case OP_NEGATE: case OP_ABS: case OP_NOT: case OP_0NOTEQUAL: { // (in -- out) if (stack.size() < 1) return false; CBigNum bn = CastToBigNum(stacktop(-1)); switch (opcode) { case OP_1ADD: bn += bnOne; break; case OP_1SUB: bn -= bnOne; break; case OP_NEGATE: bn = -bn; break; case OP_ABS: if (bn < bnZero) bn = -bn; break; case OP_NOT: bn = (bn == bnZero); break; case OP_0NOTEQUAL: bn = (bn != bnZero); break; default: assert(!"invalid opcode"); break; } popstack(stack); stack.push_back(bn.getvch()); } break; case OP_ADD: case OP_SUB: case OP_BOOLAND: case OP_BOOLOR: case OP_NUMEQUAL: case OP_NUMEQUALVERIFY: case OP_NUMNOTEQUAL: case OP_LESSTHAN: case OP_GREATERTHAN: case OP_LESSTHANOREQUAL: case OP_GREATERTHANOREQUAL: case OP_MIN: case OP_MAX: { // (x1 x2 -- out) if (stack.size() < 2) return false; CBigNum bn1 = CastToBigNum(stacktop(-2)); CBigNum bn2 = CastToBigNum(stacktop(-1)); CBigNum bn; switch (opcode) { case OP_ADD: bn = bn1 + bn2; break; case OP_SUB: bn = bn1 - bn2; break; case OP_BOOLAND: bn = (bn1 != bnZero && bn2 != bnZero); break; case OP_BOOLOR: bn = (bn1 != bnZero || bn2 != bnZero); break; case OP_NUMEQUAL: bn = (bn1 == bn2); break; case OP_NUMEQUALVERIFY: bn = (bn1 == bn2); break; case OP_NUMNOTEQUAL: bn = (bn1 != bn2); break; case OP_LESSTHAN: bn = (bn1 < bn2); break; case OP_GREATERTHAN: bn = (bn1 > bn2); break; case OP_LESSTHANOREQUAL: bn = (bn1 <= bn2); break; case OP_GREATERTHANOREQUAL: bn = (bn1 >= bn2); break; case OP_MIN: bn = (bn1 < bn2 ? bn1 : bn2); break; case OP_MAX: bn = (bn1 > bn2 ? 
bn1 : bn2); break; default: assert(!"invalid opcode"); break; } popstack(stack); popstack(stack); stack.push_back(bn.getvch()); if (opcode == OP_NUMEQUALVERIFY) { if (CastToBool(stacktop(-1))) popstack(stack); else return false; } } break; case OP_WITHIN: { // (x min max -- out) if (stack.size() < 3) return false; CBigNum bn1 = CastToBigNum(stacktop(-3)); CBigNum bn2 = CastToBigNum(stacktop(-2)); CBigNum bn3 = CastToBigNum(stacktop(-1)); bool fValue = (bn2 <= bn1 && bn1 < bn3); popstack(stack); popstack(stack); popstack(stack); stack.push_back(fValue ? vchTrue : vchFalse); } break; // // Crypto // case OP_RIPEMD160: case OP_SHA1: case OP_SHA256: case OP_HASH160: case OP_HASH256: { // (in -- hash) if (stack.size() < 1) return false; valtype& vch = stacktop(-1); valtype vchHash((opcode == OP_RIPEMD160 || opcode == OP_SHA1 || opcode == OP_HASH160) ? 20 : 32); if (opcode == OP_RIPEMD160) RIPEMD160(&vch[0], vch.size(), &vchHash[0]); else if (opcode == OP_SHA1) SHA1(&vch[0], vch.size(), &vchHash[0]); else if (opcode == OP_SHA256) SHA256(&vch[0], vch.size(), &vchHash[0]); else if (opcode == OP_HASH160) { uint160 hash160 = Hash160(vch); memcpy(&vchHash[0], &hash160, sizeof(hash160)); } else if (opcode == OP_HASH256) { uint256 hash = Hash(vch.begin(), vch.end()); memcpy(&vchHash[0], &hash, sizeof(hash)); } popstack(stack); stack.push_back(vchHash); } break; case OP_CODESEPARATOR: { // Hash starts after the code separator pbegincodehash = pc; } break; case OP_CHECKSIG: case OP_CHECKSIGVERIFY: { // (sig pubkey -- bool) if (stack.size() < 2) return false; valtype& vchSig = stacktop(-2); valtype& vchPubKey = stacktop(-1); ////// debug print //PrintHex(vchSig.begin(), vchSig.end(), "sig: %s\n"); //PrintHex(vchPubKey.begin(), vchPubKey.end(), "pubkey: %s\n"); // Subset of script starting at the most recent codeseparator CScript scriptCode(pbegincodehash, pend); // Drop the signature, since there's no way for a signature to sign itself scriptCode.FindAndDelete(CScript(vchSig)); 
bool fSuccess = (!fStrictEncodings || (IsCanonicalSignature(vchSig) && IsCanonicalPubKey(vchPubKey))); if (fSuccess) fSuccess = CheckSig(vchSig, vchPubKey, scriptCode, txTo, nIn, nHashType, flags); popstack(stack); popstack(stack); stack.push_back(fSuccess ? vchTrue : vchFalse); if (opcode == OP_CHECKSIGVERIFY) { if (fSuccess) popstack(stack); else return false; } } break; case OP_CHECKMULTISIG: case OP_CHECKMULTISIGVERIFY: { // ([sig ...] num_of_signatures [pubkey ...] num_of_pubkeys -- bool) int i = 1; if ((int)stack.size() < i) return false; int nKeysCount = CastToBigNum(stacktop(-i)).getint(); if (nKeysCount < 0 || nKeysCount > 20) return false; nOpCount += nKeysCount; if (nOpCount > 201) return false; int ikey = ++i; i += nKeysCount; if ((int)stack.size() < i) return false; int nSigsCount = CastToBigNum(stacktop(-i)).getint(); if (nSigsCount < 0 || nSigsCount > nKeysCount) return false; int isig = ++i; i += nSigsCount; if ((int)stack.size() < i) return false; // Subset of script starting at the most recent codeseparator CScript scriptCode(pbegincodehash, pend); // Drop the signatures, since there's no way for a signature to sign itself for (int k = 0; k < nSigsCount; k++) { valtype& vchSig = stacktop(-isig-k); scriptCode.FindAndDelete(CScript(vchSig)); } bool fSuccess = true; while (fSuccess && nSigsCount > 0) { valtype& vchSig = stacktop(-isig); valtype& vchPubKey = stacktop(-ikey); // Check signature bool fOk = (!fStrictEncodings || (IsCanonicalSignature(vchSig) && IsCanonicalPubKey(vchPubKey))); if (fOk) fOk = CheckSig(vchSig, vchPubKey, scriptCode, txTo, nIn, nHashType, flags); if (fOk) { isig++; nSigsCount--; } ikey++; nKeysCount--; // If there are more signatures left than keys left, // then too many signatures have failed if (nSigsCount > nKeysCount) fSuccess = false; } while (i-- > 0) popstack(stack); stack.push_back(fSuccess ? 
vchTrue : vchFalse); if (opcode == OP_CHECKMULTISIGVERIFY) { if (fSuccess) popstack(stack); else return false; } } break; default: return false; } // Size limits if (stack.size() + altstack.size() > 1000) return false; } } catch (...) { return false; } if (!vfExec.empty()) return false; return true; } uint256 SignatureHash(CScript scriptCode, const CTransaction& txTo, unsigned int nIn, int nHashType) { if (nIn >= txTo.vin.size()) { printf("ERROR: SignatureHash() : nIn=%d out of range\n", nIn); return 1; } CTransaction txTmp(txTo); // In case concatenating two scripts ends up with two codeseparators, // or an extra one at the end, this prevents all those possible incompatibilities. scriptCode.FindAndDelete(CScript(OP_CODESEPARATOR)); // Blank out other inputs' signatures for (unsigned int i = 0; i < txTmp.vin.size(); i++) txTmp.vin[i].scriptSig = CScript(); txTmp.vin[nIn].scriptSig = scriptCode; // Blank out some of the outputs if ((nHashType & 0x1f) == SIGHASH_NONE) { // Wildcard payee txTmp.vout.clear(); // Let the others update at will for (unsigned int i = 0; i < txTmp.vin.size(); i++) if (i != nIn) txTmp.vin[i].nSequence = 0; } else if ((nHashType & 0x1f) == SIGHASH_SINGLE) { // Only lock-in the txout payee at same index as txin unsigned int nOut = nIn; if (nOut >= txTmp.vout.size()) { printf("ERROR: SignatureHash() : nOut=%d out of range\n", nOut); return 1; } txTmp.vout.resize(nOut+1); for (unsigned int i = 0; i < nOut; i++) txTmp.vout[i].SetNull(); // Let the others update at will for (unsigned int i = 0; i < txTmp.vin.size(); i++) if (i != nIn) txTmp.vin[i].nSequence = 0; } // Blank out other inputs completely, not recommended for open transactions if (nHashType & SIGHASH_ANYONECANPAY) { txTmp.vin[0] = txTmp.vin[nIn]; txTmp.vin.resize(1); } // Serialize and hash CHashWriter ss(SER_GETHASH, 0); ss << txTmp << nHashType; return ss.GetHash(); } // Valid signature cache, to avoid doing expensive ECDSA signature checking // twice for every transaction (once when 
accepted into memory pool, and // again when accepted into the block chain) class CSignatureCache { private: // sigdata_type is (signature hash, signature, public key): typedef boost::tuple<uint256, std::vector<unsigned char>, CPubKey> sigdata_type; std::set< sigdata_type> setValid; boost::shared_mutex cs_sigcache; public: bool Get(const uint256 &hash, const std::vector<unsigned char>& vchSig, const CPubKey& pubKey) { boost::shared_lock<boost::shared_mutex> lock(cs_sigcache); sigdata_type k(hash, vchSig, pubKey); std::set<sigdata_type>::iterator mi = setValid.find(k); if (mi != setValid.end()) return true; return false; } void Set(const uint256 &hash, const std::vector<unsigned char>& vchSig, const CPubKey& pubKey) { // DoS prevention: limit cache size to less than 10MB // (~200 bytes per cache entry times 50,000 entries) // Since there are a maximum of 20,000 signature operations per block // 50,000 is a reasonable default. int64 nMaxCacheSize = GetArg("-maxsigcachesize", 50000); if (nMaxCacheSize <= 0) return; boost::unique_lock<boost::shared_mutex> lock(cs_sigcache); while (static_cast<int64>(setValid.size()) > nMaxCacheSize) { // Evict a random entry. Random because that helps // foil would-be DoS attackers who might try to pre-generate // and re-use a set of valid signatures just-slightly-greater // than our cache size. 
uint256 randomHash = GetRandHash(); std::vector<unsigned char> unused; std::set<sigdata_type>::iterator it = setValid.lower_bound(sigdata_type(randomHash, unused, unused)); if (it == setValid.end()) it = setValid.begin(); setValid.erase(*it); } sigdata_type k(hash, vchSig, pubKey); setValid.insert(k); } }; bool CheckSig(vector<unsigned char> vchSig, const vector<unsigned char> &vchPubKey, const CScript &scriptCode, const CTransaction& txTo, unsigned int nIn, int nHashType, int flags) { static CSignatureCache signatureCache; CPubKey pubkey(vchPubKey); if (!pubkey.IsValid()) return false; // Hash type is one byte tacked on to the end of the signature if (vchSig.empty()) return false; if (nHashType == 0) nHashType = vchSig.back(); else if (nHashType != vchSig.back()) return false; vchSig.pop_back(); uint256 sighash = SignatureHash(scriptCode, txTo, nIn, nHashType); if (signatureCache.Get(sighash, vchSig, pubkey)) return true; if (!pubkey.Verify(sighash, vchSig)) return false; if (!(flags & SCRIPT_VERIFY_NOCACHE)) signatureCache.Set(sighash, vchSig, pubkey); return true; } typedef struct { txnouttype txType; CScript *tScript; } SScriptPairRec; bool Solver(const CScript& scriptPubKey, txnouttype& typeRet, vector<vector<unsigned char> >& vSolutionsRet) { static SScriptPairRec *sTemplates = NULL; SScriptPairRec *curTemplate; if (sTemplates == NULL) { CScript *tScript; sTemplates = (SScriptPairRec *)malloc(sizeof(SScriptPairRec) * 4); // order templates such that most common transaction types are checked first tScript = new CScript(); *tScript << OP_DUP << OP_HASH160 << OP_PUBKEYHASH << OP_EQUALVERIFY << OP_CHECKSIG; sTemplates[0].txType = TX_PUBKEYHASH; sTemplates[0].tScript = tScript; tScript = new CScript(); *tScript << OP_PUBKEY << OP_CHECKSIG; sTemplates[1].txType = TX_PUBKEY; sTemplates[1].tScript = tScript; tScript = new CScript(); *tScript << OP_SMALLINTEGER << OP_PUBKEYS << OP_SMALLINTEGER << OP_CHECKMULTISIG; sTemplates[2].txType = TX_MULTISIG; 
sTemplates[2].tScript = tScript; sTemplates[3].txType = (txnouttype)-1; sTemplates[3].tScript = NULL; } // Shortcut for pay-to-script-hash, which are more constrained than the other types: // it is always OP_HASH160 20 [20 byte hash] OP_EQUAL if (scriptPubKey.IsPayToScriptHash()) { typeRet = TX_SCRIPTHASH; vector<unsigned char> hashBytes(scriptPubKey.begin()+2, scriptPubKey.begin()+22); vSolutionsRet.push_back(hashBytes); return true; } // Scan templates const CScript& script1 = scriptPubKey; curTemplate = &sTemplates[0]; while (curTemplate->tScript != NULL) { const CScript *testScript = curTemplate->tScript; opcodetype opcode1, opcode2; vector<unsigned char> vch1; vSolutionsRet.clear(); // Compare CScript::const_iterator pc1 = script1.begin(); CScript::const_iterator pc2 = testScript->begin(); CScript::const_iterator end1 = script1.end(); CScript::const_iterator end2 = testScript->end(); loop { if (pc1 == end1 && pc2 == end2) { // Found a match typeRet = curTemplate->txType; if (typeRet == TX_MULTISIG) { // Additional checks for TX_MULTISIG: unsigned char m = vSolutionsRet.front()[0]; unsigned char n = vSolutionsRet.back()[0]; if (m < 1 || n < 1 || m > n || vSolutionsRet.size()-2 != n) return false; } return true; } if (!script1.GetOp2(pc1, opcode1, &vch1)) break; if (!testScript->GetOp2(pc2, opcode2, NULL)) // templates push no data, no need to get vch break; // Template matching opcodes: if (opcode2 == OP_PUBKEYS) { while (vch1.size() >= 33 && vch1.size() <= 120) { vSolutionsRet.push_back(vch1); if (!script1.GetOp2(pc1, opcode1, &vch1)) break; } if (!testScript->GetOp2(pc2, opcode2, NULL)) break; // Normal situation is to fall through // to other if/else statements } if (opcode2 == OP_PUBKEY) { if (vch1.size() < 33 || vch1.size() > 120) break; vSolutionsRet.push_back(vch1); } else if (opcode2 == OP_PUBKEYHASH) { if (vch1.size() != sizeof(uint160)) break; vSolutionsRet.push_back(vch1); } else if (opcode2 == OP_SMALLINTEGER) { // Single-byte small integer pushed 
onto vSolutions if (opcode1 == OP_0 || (opcode1 >= OP_1 && opcode1 <= OP_16)) { char n = (char)CScript::DecodeOP_N(opcode1); vSolutionsRet.push_back(valtype(1, n)); } else break; } else if (opcode1 != opcode2) { // Others must match exactly break; } } curTemplate++; } vSolutionsRet.clear(); typeRet = TX_NONSTANDARD; return false; } bool Sign1(const CKeyID& address, const CKeyStore& keystore, uint256 hash, int nHashType, CScript& scriptSigRet) { CKey key; if (!keystore.GetKey(address, key)) return false; vector<unsigned char> vchSig; if (!key.Sign(hash, vchSig)) return false; vchSig.push_back((unsigned char)nHashType); scriptSigRet << vchSig; return true; } bool SignN(const vector<valtype>& multisigdata, const CKeyStore& keystore, uint256 hash, int nHashType, CScript& scriptSigRet) { int nSigned = 0; int nRequired = multisigdata.front()[0]; for (unsigned int i = 1; i < multisigdata.size()-1 && nSigned < nRequired; i++) { const valtype& pubkey = multisigdata[i]; CKeyID keyID = CPubKey(pubkey).GetID(); if (Sign1(keyID, keystore, hash, nHashType, scriptSigRet)) ++nSigned; } return nSigned==nRequired; } // // Sign scriptPubKey with private keys stored in keystore, given transaction hash and hash type. // Signatures are returned in scriptSigRet (or returns false if scriptPubKey can't be signed), // unless whichTypeRet is TX_SCRIPTHASH, in which case scriptSigRet is the redemption script. // Returns false if scriptPubKey could not be completely satisfied. 
// bool Solver(const CKeyStore& keystore, const CScript& scriptPubKey, uint256 hash, int nHashType, CScript& scriptSigRet, txnouttype& whichTypeRet) { scriptSigRet.clear(); vector<valtype> vSolutions; if (!Solver(scriptPubKey, whichTypeRet, vSolutions)) return false; CKeyID keyID; switch (whichTypeRet) { case TX_NONSTANDARD: return false; case TX_PUBKEY: keyID = CPubKey(vSolutions[0]).GetID(); return Sign1(keyID, keystore, hash, nHashType, scriptSigRet); case TX_PUBKEYHASH: keyID = CKeyID(uint160(vSolutions[0])); if (!Sign1(keyID, keystore, hash, nHashType, scriptSigRet)) return false; else { CPubKey vch; keystore.GetPubKey(keyID, vch); scriptSigRet << vch; } return true; case TX_SCRIPTHASH: return keystore.GetCScript(uint160(vSolutions[0]), scriptSigRet); case TX_MULTISIG: scriptSigRet << OP_0; // workaround CHECKMULTISIG bug return (SignN(vSolutions, keystore, hash, nHashType, scriptSigRet)); } return false; } int ScriptSigArgsExpected(txnouttype t, const std::vector<std::vector<unsigned char> >& vSolutions) { switch (t) { case TX_NONSTANDARD: return -1; case TX_PUBKEY: return 1; case TX_PUBKEYHASH: return 2; case TX_MULTISIG: if (vSolutions.size() < 1 || vSolutions[0].size() < 1) return -1; return vSolutions[0][0] + 1; case TX_SCRIPTHASH: return 1; // doesn't include args needed by the script } return -1; } bool IsStandard(const CScript& scriptPubKey) { vector<valtype> vSolutions; txnouttype whichType; if (!Solver(scriptPubKey, whichType, vSolutions)) return false; if (whichType == TX_MULTISIG) { unsigned char m = vSolutions.front()[0]; unsigned char n = vSolutions.back()[0]; // Support up to x-of-3 multisig txns as standard if (n < 1 || n > 3) return false; if (m < 1 || m > n) return false; } return whichType != TX_NONSTANDARD; } unsigned int HaveKeys(const vector<valtype>& pubkeys, const CKeyStore& keystore) { unsigned int nResult = 0; BOOST_FOREACH(const valtype& pubkey, pubkeys) { CKeyID keyID = CPubKey(pubkey).GetID(); if (keystore.HaveKey(keyID)) 
++nResult; } return nResult; } class CKeyStoreIsMineVisitor : public boost::static_visitor<bool> { private: const CKeyStore *keystore; public: CKeyStoreIsMineVisitor(const CKeyStore *keystoreIn) : keystore(keystoreIn) { } bool operator()(const CNoDestination &dest) const { return false; } bool operator()(const CKeyID &keyID) const { return keystore->HaveKey(keyID); } bool operator()(const CScriptID &scriptID) const { return keystore->HaveCScript(scriptID); } };<|fim▁hole|> return boost::apply_visitor(CKeyStoreIsMineVisitor(&keystore), dest); } bool IsMine(const CKeyStore &keystore, const CScript& scriptPubKey) { vector<valtype> vSolutions; txnouttype whichType; if (!Solver(scriptPubKey, whichType, vSolutions)) return false; CKeyID keyID; switch (whichType) { case TX_NONSTANDARD: return false; case TX_PUBKEY: keyID = CPubKey(vSolutions[0]).GetID(); return keystore.HaveKey(keyID); case TX_PUBKEYHASH: keyID = CKeyID(uint160(vSolutions[0])); return keystore.HaveKey(keyID); case TX_SCRIPTHASH: { CScript subscript; if (!keystore.GetCScript(CScriptID(uint160(vSolutions[0])), subscript)) return false; return IsMine(keystore, subscript); } case TX_MULTISIG: { // Only consider transactions "mine" if we own ALL the // keys involved. multi-signature transactions that are // partially owned (somebody else has a key that can spend // them) enable spend-out-from-under-you attacks, especially // in shared-wallet situations. 
vector<valtype> keys(vSolutions.begin()+1, vSolutions.begin()+vSolutions.size()-1); return HaveKeys(keys, keystore) == keys.size(); } } return false; } bool ExtractDestination(const CScript& scriptPubKey, CTxDestination& addressRet) { vector<valtype> vSolutions; txnouttype whichType; if (!Solver(scriptPubKey, whichType, vSolutions)) return false; if (whichType == TX_PUBKEY) { addressRet = CPubKey(vSolutions[0]).GetID(); return true; } else if (whichType == TX_PUBKEYHASH) { addressRet = CKeyID(uint160(vSolutions[0])); return true; } else if (whichType == TX_SCRIPTHASH) { addressRet = CScriptID(uint160(vSolutions[0])); return true; } // Multisig txns have more than one address... return false; } // ExtractDestinationAndMine is an amalgam of ExtractDestination and IsMine. Since they do very // similar work and are both called from CWalletTx::GetAmounts we can reduce kill two birds with // one stone by combining them and speed CWalletTx::GetAmounts considerably. bool ExtractDestinationAndMine(const CKeyStore &keystore, const CScript& scriptPubKey, CTxDestination& addressRet, bool *outIsMine) { vector<valtype> vSolutions; txnouttype whichType; bool hasDestination = false; *outIsMine = false; if (Solver(scriptPubKey, whichType, vSolutions)) { CKeyID keyID; switch (whichType) { case TX_NONSTANDARD: break; case TX_PUBKEY: keyID = CPubKey(vSolutions[0]).GetID(); addressRet = keyID; *outIsMine = keystore.HaveKey(keyID); hasDestination = true; break; case TX_PUBKEYHASH: keyID = CKeyID(uint160(vSolutions[0])); addressRet = keyID; *outIsMine = keystore.HaveKey(keyID); hasDestination = true; break; case TX_SCRIPTHASH: { CScript subscript; CScriptID scriptID = CScriptID(uint160(vSolutions[0])); addressRet = scriptID; hasDestination = true; if (keystore.GetCScript(scriptID, subscript)) *outIsMine = IsMine(keystore, subscript); } break; case TX_MULTISIG: { // Only consider transactions "mine" if we own ALL the // keys involved. 
multi-signature transactions that are // partially owned (somebody else has a key that can spend // them) enable spend-out-from-under-you attacks, especially // in shared-wallet situations. vector<valtype> keys(vSolutions.begin()+1, vSolutions.begin()+vSolutions.size()-1); *outIsMine = HaveKeys(keys, keystore) == keys.size(); } } } return hasDestination; } bool ExtractDestinations(const CScript& scriptPubKey, txnouttype& typeRet, vector<CTxDestination>& addressRet, int& nRequiredRet) { addressRet.clear(); typeRet = TX_NONSTANDARD; vector<valtype> vSolutions; if (!Solver(scriptPubKey, typeRet, vSolutions)) return false; if (typeRet == TX_MULTISIG) { nRequiredRet = vSolutions.front()[0]; for (unsigned int i = 1; i < vSolutions.size()-1; i++) { CTxDestination address = CPubKey(vSolutions[i]).GetID(); addressRet.push_back(address); } } else { nRequiredRet = 1; CTxDestination address; if (!ExtractDestination(scriptPubKey, address)) return false; addressRet.push_back(address); } return true; } bool VerifyScript(const CScript& scriptSig, const CScript& scriptPubKey, const CTransaction& txTo, unsigned int nIn, unsigned int flags, int nHashType) { vector<vector<unsigned char> > stack, stackCopy; if (!EvalScript(stack, scriptSig, txTo, nIn, flags, nHashType)) return false; if (flags & SCRIPT_VERIFY_P2SH) stackCopy = stack; if (!EvalScript(stack, scriptPubKey, txTo, nIn, flags, nHashType)) return false; if (stack.empty()) return false; if (CastToBool(stack.back()) == false) return false; // Additional validation for spend-to-script-hash transactions: if ((flags & SCRIPT_VERIFY_P2SH) && scriptPubKey.IsPayToScriptHash()) { if (!scriptSig.IsPushOnly()) // scriptSig must be literals-only return false; // or validation fails // stackCopy cannot be empty here, because if it was the // P2SH HASH <> EQUAL scriptPubKey would be evaluated with // an empty stack and the EvalScript above would return false. 
assert(!stackCopy.empty()); const valtype& pubKeySerialized = stackCopy.back(); CScript pubKey2(pubKeySerialized.begin(), pubKeySerialized.end()); popstack(stackCopy); if (!EvalScript(stackCopy, pubKey2, txTo, nIn, flags, nHashType)) return false; if (stackCopy.empty()) return false; return CastToBool(stackCopy.back()); } return true; } bool SignSignature(const CKeyStore &keystore, const CScript& fromPubKey, CTransaction& txTo, unsigned int nIn, int nHashType) { assert(nIn < txTo.vin.size()); CTxIn& txin = txTo.vin[nIn]; // Leave out the signature from the hash, since a signature can't sign itself. // The checksig op will also drop the signatures from its hash. uint256 hash = SignatureHash(fromPubKey, txTo, nIn, nHashType); txnouttype whichType; if (!Solver(keystore, fromPubKey, hash, nHashType, txin.scriptSig, whichType)) return false; if (whichType == TX_SCRIPTHASH) { // Solver returns the subscript that need to be evaluated; // the final scriptSig is the signatures from that // and then the serialized subscript: CScript subscript = txin.scriptSig; // Recompute txn hash using subscript in place of scriptPubKey: uint256 hash2 = SignatureHash(subscript, txTo, nIn, nHashType); txnouttype subType; bool fSolved = Solver(keystore, subscript, hash2, nHashType, txin.scriptSig, subType) && subType != TX_SCRIPTHASH; // Append serialized subscript whether or not it is completely signed: txin.scriptSig << static_cast<valtype>(subscript); if (!fSolved) return false; } // Test solution return VerifyScript(txin.scriptSig, fromPubKey, txTo, nIn, SCRIPT_VERIFY_P2SH | SCRIPT_VERIFY_STRICTENC, 0); } bool SignSignature(const CKeyStore &keystore, const CTransaction& txFrom, CTransaction& txTo, unsigned int nIn, int nHashType) { assert(nIn < txTo.vin.size()); CTxIn& txin = txTo.vin[nIn]; assert(txin.prevout.n < txFrom.vout.size()); const CTxOut& txout = txFrom.vout[txin.prevout.n]; return SignSignature(keystore, txout.scriptPubKey, txTo, nIn, nHashType); } static CScript PushAll(const 
vector<valtype>& values) { CScript result; BOOST_FOREACH(const valtype& v, values) result << v; return result; } static CScript CombineMultisig(CScript scriptPubKey, const CTransaction& txTo, unsigned int nIn, const vector<valtype>& vSolutions, vector<valtype>& sigs1, vector<valtype>& sigs2) { // Combine all the signatures we've got: set<valtype> allsigs; BOOST_FOREACH(const valtype& v, sigs1) { if (!v.empty()) allsigs.insert(v); } BOOST_FOREACH(const valtype& v, sigs2) { if (!v.empty()) allsigs.insert(v); } // Build a map of pubkey -> signature by matching sigs to pubkeys: assert(vSolutions.size() > 1); unsigned int nSigsRequired = vSolutions.front()[0]; unsigned int nPubKeys = vSolutions.size()-2; map<valtype, valtype> sigs; BOOST_FOREACH(const valtype& sig, allsigs) { for (unsigned int i = 0; i < nPubKeys; i++) { const valtype& pubkey = vSolutions[i+1]; if (sigs.count(pubkey)) continue; // Already got a sig for this pubkey if (CheckSig(sig, pubkey, scriptPubKey, txTo, nIn, 0, 0)) { sigs[pubkey] = sig; break; } } } // Now build a merged CScript: unsigned int nSigsHave = 0; CScript result; result << OP_0; // pop-one-too-many workaround for (unsigned int i = 0; i < nPubKeys && nSigsHave < nSigsRequired; i++) { if (sigs.count(vSolutions[i+1])) { result << sigs[vSolutions[i+1]]; ++nSigsHave; } } // Fill any missing with OP_0: for (unsigned int i = nSigsHave; i < nSigsRequired; i++) result << OP_0; return result; } static CScript CombineSignatures(CScript scriptPubKey, const CTransaction& txTo, unsigned int nIn, const txnouttype txType, const vector<valtype>& vSolutions, vector<valtype>& sigs1, vector<valtype>& sigs2) { switch (txType) { case TX_NONSTANDARD: // Don't know anything about this, assume bigger one is correct: if (sigs1.size() >= sigs2.size()) return PushAll(sigs1); return PushAll(sigs2); case TX_PUBKEY: case TX_PUBKEYHASH: // Signatures are bigger than placeholders or empty scripts: if (sigs1.empty() || sigs1[0].empty()) return PushAll(sigs2); return 
PushAll(sigs1); case TX_SCRIPTHASH: if (sigs1.empty() || sigs1.back().empty()) return PushAll(sigs2); else if (sigs2.empty() || sigs2.back().empty()) return PushAll(sigs1); else { // Recur to combine: valtype spk = sigs1.back(); CScript pubKey2(spk.begin(), spk.end()); txnouttype txType2; vector<vector<unsigned char> > vSolutions2; Solver(pubKey2, txType2, vSolutions2); sigs1.pop_back(); sigs2.pop_back(); CScript result = CombineSignatures(pubKey2, txTo, nIn, txType2, vSolutions2, sigs1, sigs2); result << spk; return result; } case TX_MULTISIG: return CombineMultisig(scriptPubKey, txTo, nIn, vSolutions, sigs1, sigs2); } return CScript(); } CScript CombineSignatures(CScript scriptPubKey, const CTransaction& txTo, unsigned int nIn, const CScript& scriptSig1, const CScript& scriptSig2) { txnouttype txType; vector<vector<unsigned char> > vSolutions; Solver(scriptPubKey, txType, vSolutions); vector<valtype> stack1; EvalScript(stack1, scriptSig1, CTransaction(), 0, SCRIPT_VERIFY_STRICTENC, 0); vector<valtype> stack2; EvalScript(stack2, scriptSig2, CTransaction(), 0, SCRIPT_VERIFY_STRICTENC, 0); return CombineSignatures(scriptPubKey, txTo, nIn, txType, vSolutions, stack1, stack2); } unsigned int CScript::GetSigOpCount(bool fAccurate) const { unsigned int n = 0; const_iterator pc = begin(); opcodetype lastOpcode = OP_INVALIDOPCODE; while (pc < end()) { opcodetype opcode; if (!GetOp(pc, opcode)) break; if (opcode == OP_CHECKSIG || opcode == OP_CHECKSIGVERIFY) n++; else if (opcode == OP_CHECKMULTISIG || opcode == OP_CHECKMULTISIGVERIFY) { if (fAccurate && lastOpcode >= OP_1 && lastOpcode <= OP_16) n += DecodeOP_N(lastOpcode); else n += 20; } lastOpcode = opcode; } return n; } unsigned int CScript::GetSigOpCount(const CScript& scriptSig) const { if (!IsPayToScriptHash()) return GetSigOpCount(true); // This is a pay-to-script-hash scriptPubKey; // get the last item that the scriptSig // pushes onto the stack: const_iterator pc = scriptSig.begin(); vector<unsigned char> data; 
while (pc < scriptSig.end()) { opcodetype opcode; if (!scriptSig.GetOp(pc, opcode, data)) return 0; if (opcode > OP_16) return 0; } /// ... and return its opcount: CScript subscript(data.begin(), data.end()); return subscript.GetSigOpCount(true); } bool CScript::IsPayToScriptHash() const { // Extra-fast test for pay-to-script-hash CScripts: return (this->size() == 23 && this->at(0) == OP_HASH160 && this->at(1) == 0x14 && this->at(22) == OP_EQUAL); } class CScriptVisitor : public boost::static_visitor<bool> { private: CScript *script; public: CScriptVisitor(CScript *scriptin) { script = scriptin; } bool operator()(const CNoDestination &dest) const { script->clear(); return false; } bool operator()(const CKeyID &keyID) const { script->clear(); *script << OP_DUP << OP_HASH160 << keyID << OP_EQUALVERIFY << OP_CHECKSIG; return true; } bool operator()(const CScriptID &scriptID) const { script->clear(); *script << OP_HASH160 << scriptID << OP_EQUAL; return true; } }; void CScript::SetDestination(const CTxDestination& dest) { boost::apply_visitor(CScriptVisitor(this), dest); } void CScript::SetMultisig(int nRequired, const std::vector<CPubKey>& keys) { this->clear(); *this << EncodeOP_N(nRequired); BOOST_FOREACH(const CPubKey& key, keys) *this << key; *this << EncodeOP_N(keys.size()) << OP_CHECKMULTISIG; } bool CScriptCompressor::IsToKeyID(CKeyID &hash) const { if (script.size() == 25 && script[0] == OP_DUP && script[1] == OP_HASH160 && script[2] == 20 && script[23] == OP_EQUALVERIFY && script[24] == OP_CHECKSIG) { memcpy(&hash, &script[3], 20); return true; } return false; } bool CScriptCompressor::IsToScriptID(CScriptID &hash) const { if (script.size() == 23 && script[0] == OP_HASH160 && script[1] == 20 && script[22] == OP_EQUAL) { memcpy(&hash, &script[2], 20); return true; } return false; } bool CScriptCompressor::IsToPubKey(CPubKey &pubkey) const { if (script.size() == 35 && script[0] == 33 && script[34] == OP_CHECKSIG && (script[1] == 0x02 || script[1] == 0x03)) { 
pubkey.Set(&script[1], &script[34]); return true; } if (script.size() == 67 && script[0] == 65 && script[66] == OP_CHECKSIG && script[1] == 0x04) { pubkey.Set(&script[1], &script[66]); return pubkey.IsFullyValid(); // if not fully valid, a case that would not be compressible } return false; } bool CScriptCompressor::Compress(std::vector<unsigned char> &out) const { CKeyID keyID; if (IsToKeyID(keyID)) { out.resize(21); out[0] = 0x00; memcpy(&out[1], &keyID, 20); return true; } CScriptID scriptID; if (IsToScriptID(scriptID)) { out.resize(21); out[0] = 0x01; memcpy(&out[1], &scriptID, 20); return true; } CPubKey pubkey; if (IsToPubKey(pubkey)) { out.resize(33); memcpy(&out[1], &pubkey[1], 32); if (pubkey[0] == 0x02 || pubkey[0] == 0x03) { out[0] = pubkey[0]; return true; } else if (pubkey[0] == 0x04) { out[0] = 0x04 | (pubkey[64] & 0x01); return true; } } return false; } unsigned int CScriptCompressor::GetSpecialSize(unsigned int nSize) const { if (nSize == 0 || nSize == 1) return 20; if (nSize == 2 || nSize == 3 || nSize == 4 || nSize == 5) return 32; return 0; } bool CScriptCompressor::Decompress(unsigned int nSize, const std::vector<unsigned char> &in) { switch(nSize) { case 0x00: script.resize(25); script[0] = OP_DUP; script[1] = OP_HASH160; script[2] = 20; memcpy(&script[3], &in[0], 20); script[23] = OP_EQUALVERIFY; script[24] = OP_CHECKSIG; return true; case 0x01: script.resize(23); script[0] = OP_HASH160; script[1] = 20; memcpy(&script[2], &in[0], 20); script[22] = OP_EQUAL; return true; case 0x02: case 0x03: script.resize(35); script[0] = 33; script[1] = nSize; memcpy(&script[2], &in[0], 32); script[34] = OP_CHECKSIG; return true; case 0x04: case 0x05: unsigned char vch[33] = {}; vch[0] = nSize - 2; memcpy(&vch[1], &in[0], 32); CPubKey pubkey(&vch[0], &vch[33]); if (!pubkey.Decompress()) return false; assert(pubkey.size() == 65); script.resize(67); script[0] = 65; memcpy(&script[1], pubkey.begin(), 65); script[66] = OP_CHECKSIG; return true; } return false; 
}<|fim▁end|>
bool IsMine(const CKeyStore &keystore, const CTxDestination &dest) {
<|file_name|>vgg.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015-2016 Anish Athalye. Released under GPLv3. import tensorflow as tf import numpy as np import scipy.io import pdb MEAN_PIXEL = np.array([ 123.68 , 116.779, 103.939]) def net(data_path, input_image): layers = ( 'conv1_1', 'relu1_1', 'conv1_2', 'relu1_2', 'pool1', 'conv2_1', 'relu2_1', 'conv2_2', 'relu2_2', 'pool2',<|fim▁hole|> 'conv4_1', 'relu4_1', 'conv4_2', 'relu4_2', 'conv4_3', 'relu4_3', 'conv4_4', 'relu4_4', 'pool4', 'conv5_1', 'relu5_1', 'conv5_2', 'relu5_2', 'conv5_3', 'relu5_3', 'conv5_4', 'relu5_4' ) data = scipy.io.loadmat(data_path) mean = data['normalization'][0][0][0] mean_pixel = np.mean(mean, axis=(0, 1)) weights = data['layers'][0] net = {} current = input_image for i, name in enumerate(layers): kind = name[:4] if kind == 'conv': kernels, bias = weights[i][0][0][0][0] # matconvnet: weights are [width, height, in_channels, out_channels] # tensorflow: weights are [height, width, in_channels, out_channels] kernels = np.transpose(kernels, (1, 0, 2, 3)) bias = bias.reshape(-1) current = _conv_layer(current, kernels, bias) elif kind == 'relu': current = tf.nn.relu(current) elif kind == 'pool': current = _pool_layer(current) net[name] = current assert len(net) == len(layers) return net def _conv_layer(input, weights, bias): conv = tf.nn.conv2d(input, tf.constant(weights), strides=(1, 1, 1, 1), padding='SAME') return tf.nn.bias_add(conv, bias) def _pool_layer(input): return tf.nn.max_pool(input, ksize=(1, 2, 2, 1), strides=(1, 2, 2, 1), padding='SAME') def preprocess(image): return image - MEAN_PIXEL def unprocess(image): return image + MEAN_PIXEL<|fim▁end|>
'conv3_1', 'relu3_1', 'conv3_2', 'relu3_2', 'conv3_3', 'relu3_3', 'conv3_4', 'relu3_4', 'pool3',
<|file_name|>BSPTree.py<|end_file_name|><|fim▁begin|>from random import randint, seed, choice, random from numpy import zeros, uint8, cumsum, floor, ceil from math import sqrt, log from collections import namedtuple from PIL import Image from logging import info, getLogger class Tree: def __init__(self, leaf): self.leaf = leaf self.lchild = None self.rchild = None def get_leafs(self): if self.lchild == None and self.rchild == None: return [self.leaf] else: return self.lchild.get_leafs()+self.rchild.get_leafs() def get_level(self, level, queue): if queue == None: queue = [] if level == 1: queue.push(self) else: if self.lchild != None: self.lchild.get_level(level-1, queue) if self.rchild != None: self.rchild.get_level(level-1, queue) return queue def paint(self, c): self.leaf.paint(c) if self.lchild != None: self.lchild.paint(c) if self.rchild != None: self.rchild.paint(c) class Container(): def __init__(self, x, y, w, h): self.x = x self.y = y self.w = w self.h = h self.center = (self.x+int(self.w/2),self.y+int(self.h/2)) self.distance_from_center = sqrt((self.center[0]-MAP_WIDTH/2)**2 + (self.center[1]-MAP_HEIGHT/2)**2) def paint(self, c): c.stroke_rectangle(self.x, self.y, self.w, self.h) def draw_path(self,c,container): c.path(self.center[0],self.center[1],container.center[0],container.center[1]) class Canvas: brushes = {"empty":0, "hallway":1, "room":2} def __init__(self, w, h, color = "empty"): self.board = zeros((h,w), dtype=uint8) self.w = w self.h = h self.set_brush(color) def set_brush(self, code): self.color = self.brushes[code] def stroke_rectangle(self, x, y, w, h): self.line(x,y,w,True) self.line(x,y+h-1,w,True) self.line(x,y,h,False) self.line(x+w-1,y,h,False) def filled_rectangle(self, x, y, w, h): self.board[y:y+h,x:x+w] = self.color def line(self, x, y, length, horizontal): if horizontal: self.board[y,x:x+length] = self.color else: self.board[y:y+length,x] = self.color def path(self,x1,y1,x2,y2): self.board[y1:y2+1,x1:x2+1] = self.color def 
circle(self,x,y,r): for x_offset in range(-r,r+1): for y_offset in range(-r,r+1): if sqrt(x_offset**2+y_offset**2)<r: self.board[x+x_offset,y+y_offset] = self.color def draw(self): im = Image.fromarray(self.board) im.save(MAP_NAME) def __str__(self): return str(self.board) class Room: environments = ["serene", "calm", "wild", "dangerous", "evil"] biomes = ["rock", "rugged", "sand", "mossy", "muddy", "flooded", "gelid", "gloomy", "magma"] biomes_CDF = cumsum([0.22,0.14,0.12,0.10,0.10,0.07,0.06,0.06,0.04,0.03,0.03,0.03]) def __init__(self, container): self.x = container.x+randint(1, floor(container.w/3)) self.y = container.y+randint(1, floor(container.h/3)) self.w = container.w-(self.x-container.x) self.h = container.h-(self.y-container.y) self.w -= randint(0,floor(self.w/3)) self.h -= randint(0,floor(self.w/3)) self.environment = int(min(4,10*(container.distance_from_center/MAP_WIDTH)+random()*2-1)) roll = random()*0.9+(2*container.distance_from_center/MAP_WIDTH)*0.1 self.biome = next(n for n,b in enumerate(self.biomes_CDF) if roll<b) def paint(self,c): c.filled_rectangle(self.x, self.y,self.w, self.h) def random_split(container): if container.w<MIN_ROOM_SIDE and container.h<MIN_ROOM_SIDE: return None def _split_vertical(container): r1 = None r2 = None min_w = int(W_RATIO*container.h)+1 if container.w < 2*min_w: return None r1 = Container(container.x,container.y,randint(min_w, container.w-min_w),container.h) r2 = Container(container.x+r1.w,container.y,container.w-r1.w,container.h) return [r1, r2] def _split_horizontal(container): r1 = None r2 = None min_h = int(H_RATIO*container.w)+1 if container.h < 2*min_h: return None r1 = Container(container.x,container.y,container.w,randint(min_h, container.h-min_h)) r2 = Container(container.x,container.y+r1.h,container.w,container.h-r1.h) return [r1, r2] if randint(0,1) == 0: res = _split_vertical(container) if res == None: return _split_horizontal(container) return res else: res = _split_horizontal(container) if res == None: 
return _split_vertical(container) return res def split_container(container, iter): root = Tree(container) if iter != 0: sr = random_split(container) if sr!=None: root.lchild = split_container(sr[0], iter-1) root.rchild = split_container(sr[1], iter-1) return root def draw_paths(c, tree): if tree.lchild == None or tree.rchild == None: return tree.lchild.leaf.draw_path(c, tree.rchild.leaf) draw_paths(c, tree.lchild) draw_paths(c, tree.rchild) MAP_WIDTH = 0 MAP_HEIGHT = 0 N_ITERATIONS = 0 H_RATIO = 0 W_RATIO = 0 MIN_ROOM_SIDE = 0 CENTER_HUB_HOLE = 0 CENTER_HUB_RADIO = 0 MAP_NAME = 0 def init(num_players): global MAP_WIDTH,MAP_HEIGHT,N_ITERATIONS,H_RATIO,W_RATIO,MIN_ROOM_SIDE,CENTER_HUB_HOLE,CENTER_HUB_RADIO,MAP_NAME MAP_WIDTH=int(500*sqrt(num_players)) MAP_HEIGHT=MAP_WIDTH N_ITERATIONS=log(MAP_WIDTH*100,2) H_RATIO=0.49 W_RATIO=H_RATIO MIN_ROOM_SIDE = 32 CENTER_HUB_HOLE = 32 CENTER_HUB_RADIO = CENTER_HUB_HOLE-MIN_ROOM_SIDE/2 MAP_NAME="result%s.png"%MAP_WIDTH def main(num_players, seed_number): logger = getLogger('BSPTree') logger.info("Initialising") init(num_players) seed(seed_number)<|fim▁hole|> canvas.filled_rectangle(0,0,MAP_WIDTH,MAP_HEIGHT) logger.info("Generating container tree") # -1 on the main container to remove borders to avoid opened border rooms main_container = Container(0, 0, MAP_WIDTH-1, MAP_HEIGHT-1) container_tree = split_container(main_container, N_ITERATIONS) logger.info("Generating hallways") canvas.set_brush("hallway") draw_paths(canvas, container_tree) logger.info("Generating rooms") canvas.set_brush("room") leafs = container_tree.get_leafs() rooms = [] for i in range(0, len(leafs)): if CENTER_HUB_HOLE < leafs[i].distance_from_center < MAP_WIDTH/2: rooms.append(Room(leafs[i])) rooms[-1].paint(canvas) logger.info("Generating hub") canvas.circle(int(MAP_WIDTH/2),int(MAP_HEIGHT/2),int(CENTER_HUB_RADIO)) #canvas.draw() return (rooms, canvas.board)<|fim▁end|>
canvas = Canvas(MAP_WIDTH, MAP_HEIGHT) canvas.set_brush("empty")
<|file_name|>temporal_analysis_widget.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 # -*- coding: utf-8 -*- from PyQt5 import QtWidgets from view.analysis_widget import AnalysisWidget # noinspection PyPep8Naming class TemporalAnalysisWidget(AnalysisWidget): # noinspection PyArgumentList def __init__(self, mplCanvas): """ Construct the Temporal Analysis page in the main window. |br| A ``ScatterPlot.mplCanvas`` will be shown on this page. :param mplCanvas: The ``ScatterPlot.mplCanvas`` widget. """ super().__init__() upperLabel = QtWidgets.QLabel("Temporal Distribution &Graph:") upperLabel.setMargin(1) upperLabel.setBuddy(mplCanvas) lowerLabel = QtWidgets.QLabel("Temporal Correlation &Quotient:") lowerLabel.setMargin(1) lowerLabel.setBuddy(self.tableWidget)<|fim▁hole|> mainLayout = QtWidgets.QVBoxLayout() mainLayout.addWidget(upperLabel) mainLayout.addWidget(mplCanvas) mainLayout.addWidget(lowerLabel) mainLayout.addWidget(self.tableWidget) self.setLayout(mainLayout)<|fim▁end|>
<|file_name|>CreateProfileForm.py<|end_file_name|><|fim▁begin|><|fim▁hole|>.. moduleauthor:: Dan Schlosser <[email protected]> """ from flask.ext.wtf import Form from wtforms import StringField, HiddenField from wtforms.validators import URL, Email, Required EMAIL_ERROR = 'Please provide a valid email address.' class CreateProfileForm(Form): """A form for completing a :class:`~app.models.User` profile after they login to Eventum for the first time. :ivar email: :class:`wtforms.fields.StringField` - The user's email address. :ivar name: :class:`wtforms.fields.StringField` - The user's name. :ivar next: :class:`wtforms.fields.HiddenField` - The URL that they should be redirected to after completing their profile. """ name = StringField('Full Name') email = StringField('Email Address', [Email(message=EMAIL_ERROR), Required(message=EMAIL_ERROR)]) next = HiddenField('hidden', [URL(require_tld=False)])<|fim▁end|>
""" .. module:: CreateProfileForm :synopsis: A form for completing a user's profile.
<|file_name|>linkmap.py<|end_file_name|><|fim▁begin|># Copyright (C) 2002-2006 Stephen Kennedy <[email protected]> # Copyright (C) 2009-2013 Kai Willadsen <[email protected]> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 2 of the License, or (at # your option) any later version. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import math <|fim▁hole|> # Rounded rectangle corner radius for culled changes display RADIUS = 3 class LinkMap(Gtk.DrawingArea): __gtype_name__ = "LinkMap" def __init__(self): self.filediff = None meldsettings.connect('changed', self.on_setting_changed) def associate(self, filediff, left_view, right_view): self.filediff = filediff self.views = [left_view, right_view] if self.get_direction() == Gtk.TextDirection.RTL: self.views.reverse() self.view_indices = [filediff.textview.index(t) for t in self.views] self.on_setting_changed(meldsettings, 'style-scheme') def on_setting_changed(self, settings, key): if key == 'style-scheme': self.fill_colors, self.line_colors = get_common_theme() def do_draw(self, context): if not self.filediff: return context.set_line_width(1.0) allocation = self.get_allocation() pix_start = [t.get_visible_rect().y for t in self.views] y_offset = [ t.translate_coordinates(self, 0, 0)[1] + 1 for t in self.views] clip_y = min(y_offset) - 1 clip_height = max(t.get_visible_rect().height for t in self.views) + 2 context.rectangle(0, clip_y, allocation.width, clip_height) context.clip() height = allocation.height visible = 
[self.views[0].get_line_num_for_y(pix_start[0]), self.views[0].get_line_num_for_y(pix_start[0] + height), self.views[1].get_line_num_for_y(pix_start[1]), self.views[1].get_line_num_for_y(pix_start[1] + height)] wtotal = allocation.width # For bezier control points x_steps = [-0.5, (1. / 3) * wtotal, (2. / 3) * wtotal, wtotal + 0.5] q_rad = math.pi / 2 left, right = self.view_indices view_offset_line = lambda v, l: (self.views[v].get_y_for_line_num(l) - pix_start[v] + y_offset[v]) for c in self.filediff.linediffer.pair_changes(left, right, visible): # f and t are short for "from" and "to" f0, f1 = [view_offset_line(0, l) for l in c[1:3]] t0, t1 = [view_offset_line(1, l) for l in c[3:5]] # We want the last pixel of the previous line f1 = f1 if f1 == f0 else f1 - 1 t1 = t1 if t1 == t0 else t1 - 1 # If either endpoint is completely off-screen, we cull for clarity if (t0 < 0 and t1 < 0) or (t0 > height and t1 > height): if f0 == f1: continue context.arc(x_steps[0], f0 - 0.5 + RADIUS, RADIUS, -q_rad, 0) context.arc(x_steps[0], f1 - 0.5 - RADIUS, RADIUS, 0, q_rad) context.close_path() elif (f0 < 0 and f1 < 0) or (f0 > height and f1 > height): if t0 == t1: continue context.arc_negative(x_steps[3], t0 - 0.5 + RADIUS, RADIUS, -q_rad, q_rad * 2) context.arc_negative(x_steps[3], t1 - 0.5 - RADIUS, RADIUS, q_rad * 2, q_rad) context.close_path() else: context.move_to(x_steps[0], f0 - 0.5) context.curve_to(x_steps[1], f0 - 0.5, x_steps[2], t0 - 0.5, x_steps[3], t0 - 0.5) context.line_to(x_steps[3], t1 - 0.5) context.curve_to(x_steps[2], t1 - 0.5, x_steps[1], f1 - 0.5, x_steps[0], f1 - 0.5) context.close_path() context.set_source_rgba(*self.fill_colors[c[0]]) context.fill_preserve() chunk_idx = self.filediff.linediffer.locate_chunk(left, c[1])[0] if chunk_idx == self.filediff.cursor.chunk: highlight = self.fill_colors['current-chunk-highlight'] context.set_source_rgba(*highlight) context.fill_preserve() context.set_source_rgba(*self.line_colors[c[0]]) context.stroke() def 
do_scroll_event(self, event): self.filediff.next_diff(event.direction) class ScrollLinkMap(Gtk.DrawingArea): __gtype_name__ = "ScrollLinkMap" def __init__(self): self.melddoc = None def associate(self, melddoc): self.melddoc = melddoc def do_scroll_event(self, event): if not self.melddoc: return self.melddoc.next_diff(event.direction)<|fim▁end|>
from gi.repository import Gtk from meld.misc import get_common_theme from meld.settings import meldsettings
<|file_name|>simplify.py<|end_file_name|><|fim▁begin|># Natural Language Toolkit: POS Tag Simplification # # Copyright (C) 2001-2013 NLTK Project # Author: Steven Bird <[email protected]> # URL: <http://www.nltk.org/> # For license information, see LICENSE.TXT # Brown Corpus # http://khnt.hit.uib.no/icame/manuals/brown/INDEX.HTM brown_mapping1 = { 'j': 'ADJ', 'p': 'PRO', 'm': 'MOD', 'q': 'DET', 'w': 'WH', 'r': 'ADV', 'i': 'P', 'u': 'UH', 'e': 'EX', 'o': 'NUM', 'b': 'V', 'h': 'V', 'f': 'FW', 'a': 'DET', 't': 'TO', 'cc': 'CNJ', 'cs': 'CNJ', 'cd': 'NUM', 'do': 'V', 'dt': 'DET', 'nn': 'N', 'nr': 'N', 'np': 'NP', 'nc': 'N', '--': '--' } brown_mapping2 = { 'vb': 'V', 'vbd': 'VD', 'vbg': 'VG', 'vbn': 'VN' } def simplify_brown_tag(tag): tag = tag.lower() if tag[0] in brown_mapping1: return brown_mapping1[tag[0]] elif tag[:2] in brown_mapping1: # still doesn't handle DOD tag correctly return brown_mapping1[tag[:2]] try: if '-' in tag: tag = tag.split('-')[0] return brown_mapping2[tag] except KeyError: return tag.upper() # Wall Street Journal tags (Penn Treebank) wsj_mapping = { '-lrb-': '(', '-rrb-': ')', '-lsb-': '(', '-rsb-': ')', '-lcb-': '(', '-rcb-': ')', '-none-': '', 'cc': 'CNJ', 'cd': 'NUM', 'dt': 'DET', 'ex': 'EX', 'fw': 'FW', # existential "there", foreign word 'in': 'P', 'jj': 'ADJ', 'jjr': 'ADJ', 'jjs': 'ADJ', 'ls': 'L', 'md': 'MOD', # list item marker 'nn': 'N', 'nnp': 'NP', 'nnps': 'NP', 'nns': 'N', 'pdt': 'DET', 'pos': '', 'prp': 'PRO', 'prp$': 'PRO', 'rb': 'ADV', 'rbr': 'ADV', 'rbs': 'ADV', 'rp': 'PRO', 'sym': 'S', 'to': 'TO', 'uh': 'UH', 'vb': 'V', 'vbd': 'VD', 'vbg': 'VG', 'vbn': 'VN', 'vbp': 'V', 'vbz': 'V', 'wdt': 'WH', 'wp': 'WH', 'wp$': 'WH', 'wrb': 'WH', 'bes': 'V', 'hvs': 'V', 'prp^vbp': 'PRO' # additions for NPS Chat corpus } def simplify_wsj_tag(tag): if tag and tag[0] == '^': tag = tag[1:] try:<|fim▁hole|> except KeyError: pass return tag.upper() indian_mapping = { 'nn': 'N', 'vm': 'MOD', 'jj': 'ADJ', 'nnp': 'NP', 'prp': 'PRO', 'prep': 'PRE', 
'vaux': 'V', 'vfm': 'V', 'cc': 'CNJ', 'nnpc': 'NP', 'nnc': 'N', 'qc': 'QC', 'dem': 'DET', 'vrb': 'V', 'qfnum': 'NUM', 'rb': 'ADV', 'qf': 'DET', 'punc': '.', 'rp': 'PRT', 'psp': 'PSP', 'nst': 'N', 'nvb': 'N', 'vjj': 'V', 'neg': 'NEG', 'vnn': 'V', 'xc': 'XC', 'intf': 'INTF', 'nloc': 'N', 'jvb': 'ADJ', 'wq': 'WH', 'qw': 'WH', 'jj:?': 'ADJ', '"cc': 'CNJ', 'nnp,': 'NP', 'sym\xc0\xa7\xb7': 'SYM', 'symc': 'SYM'} def simplify_indian_tag(tag): if ':' in tag: tag = tag.split(':')[0] try: tag = indian_mapping[tag.lower()] except KeyError: pass return tag.upper() # Alpino tags alpino_mapping = { 'noun':'N', 'name': 'NP', 'vg': 'VG', 'punct':'.', 'verb':'V', 'pron': 'PRO', 'prep':'P' } def simplify_alpino_tag(tag): try: tag = alpino_mapping[tag] except KeyError: pass return tag.upper() # Default tag simplification def simplify_tag(tag): return tag[0].upper() if __name__ == "__main__": import doctest doctest.testmod(optionflags=doctest.NORMALIZE_WHITESPACE)<|fim▁end|>
tag = wsj_mapping[tag.lower()]
<|file_name|>tests.rs<|end_file_name|><|fim▁begin|>/* * Copyright (c) Facebook, Inc. and its affiliates. * * This software may be used and distributed according to the terms of the * GNU General Public License version 2. */ use super::*; use once_cell::sync::Lazy; use parking_lot::Mutex; use regex::Regex; use std::fmt; use std::sync::atomic::AtomicU64; use std::sync::atomic::Ordering::AcqRel; use std::sync::Arc; use tracing::span::Attributes; use tracing::span::Record; use tracing::Callsite; use tracing::Event; use tracing::Id; use tracing::Level; use tracing::Metadata; use tracing::Subscriber; #[test]<|fim▁hole|>fn test_callsite_span() { let callsite = create_callsite::<SpanKindType, _>((11, 0), || CallsiteInfo { name: "foo".to_string(), target: "bar".to_string(), level: Level::ERROR, file: Some("a.rs".to_string()), line: Some(10), module_path: Some("z::a".to_string()), field_names: vec!["x".to_string(), "y".to_string(), "z".to_string()], }); assert_eq!( d(callsite.metadata()), "Metadata { name: foo, target: bar, level: Level(Error), module_path: z::a, location: a.rs:10, fields: {x, y, z}, callsite: _, kind: Kind(Span) }" ); assert_eq!(callsite.identifier(), callsite.metadata().callsite()); let log = capture(|| { let span = callsite.create_span(&[None, None, None]); span.record("y", &"yyy2"); span.in_scope(|| {}); let span = callsite.create_span(&[Some(Box::new("foo")), None, Some(Box::new(123))]); span.record("x", &123); span.in_scope(|| {}); }); assert_eq!( log, [ "new_span(Attributes { metadata: Metadata { name: foo, target: bar, level: Level(Error), module_path: z::a, location: a.rs:10, fields: {x, y, z}, callsite: _, kind: Kind(Span) }, values: ValueSet { callsite: _ }, parent: Current } = 1", "record(Id(1), Record { values: ValueSet { y: yyy2, callsite: _ } })", "enter(Id(1))", "exit(Id(1))", "new_span(Attributes { metadata: Metadata { name: foo, target: bar, level: Level(Error), module_path: z::a, location: a.rs:10, fields: {x, y, z}, callsite: _, kind: 
Kind(Span) }, values: ValueSet { x: foo, z: 123, callsite: _ }, parent: Current } = 2", "record(Id(2), Record { values: ValueSet { x: 123, callsite: _ } })", "enter(Id(2))", "exit(Id(2))" ] ); } #[test] fn test_callsite_event() { let callsite = create_callsite::<EventKindType, _>((22, 0), || CallsiteInfo { name: "foo".to_string(), level: Level::ERROR, field_names: vec!["x".to_string(), "y".to_string(), "z".to_string()], ..Default::default() }); assert_eq!( d(callsite.metadata()), "Metadata { name: foo, target: , level: Level(Error), fields: {x, y, z}, callsite: _, kind: Kind(Event) }" ); assert_eq!(callsite.identifier(), callsite.metadata().callsite()); let log = capture(|| { callsite.create_event(&[None, None, None]); callsite.create_event(&[Some(Box::new(12)), None, Some(Box::new("zz"))]); callsite.create_event(&[Some(Box::new("15"))]); }); assert_eq!( log, [ "event(Event { fields: ValueSet { callsite: _ }, metadata: Metadata { name: foo, target: , level: Level(Error), fields: {x, y, z}, callsite: _, kind: Kind(Event) }, parent: Current })", "event(Event { fields: ValueSet { x: 12, z: zz, callsite: _ }, metadata: Metadata { name: foo, target: , level: Level(Error), fields: {x, y, z}, callsite: _, kind: Kind(Event) }, parent: Current })", "event(Event { fields: ValueSet { x: 15, callsite: _ }, metadata: Metadata { name: foo, target: , level: Level(Error), fields: {x, y, z}, callsite: _, kind: Kind(Event) }, parent: Current })" ] ); } #[test] fn test_callsite_reuse() { let callsite1 = create_callsite::<EventKindType, _>((33, 1), CallsiteInfo::default); let callsite2 = create_callsite::<EventKindType, _>((33, 1), CallsiteInfo::default); assert_eq!(callsite1 as *const _, callsite2 as *const _); } #[test] fn test_intern() { use crate::Intern; let s1 = "abc".intern(); let s2 = "abc".to_string().intern(); assert_eq!(s1.as_ptr(), s2.as_ptr()); } /// Capture logs about tracing. 
fn capture(f: impl FnOnce()) -> Vec<String> { // Prevent races since tests run in multiple threads. let _locked = THREAD_LOCK.lock(); let sub = TestSubscriber::default(); let out = sub.out.clone(); tracing::subscriber::with_default(sub, f); let out = out.lock(); out.clone() } /// Subscriber that captures calls to a string. #[derive(Default)] struct TestSubscriber { id: AtomicU64, out: Arc<Mutex<Vec<String>>>, } impl TestSubscriber { fn log(&self, s: String) { self.out.lock().push(normalize(&s)); } } impl Subscriber for TestSubscriber { fn enabled(&self, _metadata: &Metadata) -> bool { true } fn new_span(&self, span: &Attributes) -> Id { let id = self.id.fetch_add(1, AcqRel) + 1; self.log(format!("new_span({:?} = {}", span, id)); Id::from_u64(id) } fn record(&self, span: &Id, values: &Record) { self.log(format!("record({:?}, {:?})", span, values)); } fn event(&self, event: &Event) { self.log(format!("event({:?})", event)); } fn enter(&self, span: &Id) { self.log(format!("enter({:?})", span)); } fn exit(&self, span: &Id) { self.log(format!("exit({:?})", span)); } fn record_follows_from(&self, span: &Id, follows: &Id) { self.log(format!("record_follows_from({:?}, {:?})", span, follows)); } } /// Debug format with some normalization. fn d<T: fmt::Debug>(t: T) -> String { let s = format!("{:?}", t); normalize(&s) } fn normalize(s: &str) -> String { // Change "Identifier(...)" to "_". It has dynamic pointer. IDENTIFIER_RE.replace_all(&s, "_").replace('"', "") } static THREAD_LOCK: Lazy<Mutex<()>> = Lazy::new(Default::default); static IDENTIFIER_RE: Lazy<Regex> = Lazy::new(|| Regex::new("Identifier\\([^)]*\\)").unwrap());<|fim▁end|>
<|file_name|>html.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ #![allow(unrooted_must_root)] use dom::bindings::codegen::Bindings::HTMLTemplateElementBinding::HTMLTemplateElementMethods; use dom::bindings::inheritance::{Castable, CharacterDataTypeId, NodeTypeId}; use dom::bindings::js::{JS, Root}; use dom::bindings::trace::JSTraceable; use dom::characterdata::CharacterData; use dom::document::Document; use dom::documenttype::DocumentType; use dom::element::Element; use dom::htmlscriptelement::HTMLScriptElement; use dom::htmltemplateelement::HTMLTemplateElement; use dom::node::Node; use dom::processinginstruction::ProcessingInstruction; use dom::servoparser::Sink; use html5ever::QualName; use html5ever::buffer_queue::BufferQueue; use html5ever::serialize::{AttrRef, Serialize, Serializer}; use html5ever::serialize::TraversalScope; use html5ever::serialize::TraversalScope::{ChildrenOnly, IncludeNode}; use html5ever::tokenizer::{Tokenizer as HtmlTokenizer, TokenizerOpts, TokenizerResult}; use html5ever::tree_builder::{Tracer as HtmlTracer, TreeBuilder, TreeBuilderOpts}; use js::jsapi::JSTracer; use servo_url::ServoUrl; use std::io; #[derive(HeapSizeOf, JSTraceable)] #[must_root] pub struct Tokenizer { #[ignore_heap_size_of = "Defined in html5ever"] inner: HtmlTokenizer<TreeBuilder<JS<Node>, Sink>>, } impl Tokenizer { pub fn new( document: &Document, url: ServoUrl, fragment_context: Option<super::FragmentContext>) -> Self { let sink = Sink { base_url: url, document: JS::from_ref(document), current_line: 1, script: Default::default(),<|fim▁hole|> let options = TreeBuilderOpts { ignore_missing_rules: true, .. 
Default::default() }; let inner = if let Some(fc) = fragment_context { let tb = TreeBuilder::new_for_fragment( sink, JS::from_ref(fc.context_elem), fc.form_elem.map(|n| JS::from_ref(n)), options); let tok_options = TokenizerOpts { initial_state: Some(tb.tokenizer_state_for_context_elem()), .. Default::default() }; HtmlTokenizer::new(tb, tok_options) } else { HtmlTokenizer::new(TreeBuilder::new(sink, options), Default::default()) }; Tokenizer { inner: inner, } } pub fn feed(&mut self, input: &mut BufferQueue) -> Result<(), Root<HTMLScriptElement>> { match self.inner.feed(input) { TokenizerResult::Done => Ok(()), TokenizerResult::Script(script) => Err(Root::from_ref(script.downcast().unwrap())), } } pub fn end(&mut self) { self.inner.end(); } pub fn url(&self) -> &ServoUrl { &self.inner.sink().sink().base_url } pub fn set_plaintext_state(&mut self) { self.inner.set_plaintext_state(); } } #[allow(unsafe_code)] unsafe impl JSTraceable for HtmlTokenizer<TreeBuilder<JS<Node>, Sink>> { unsafe fn trace(&self, trc: *mut JSTracer) { struct Tracer(*mut JSTracer); let tracer = Tracer(trc); impl HtmlTracer for Tracer { type Handle = JS<Node>; #[allow(unrooted_must_root)] fn trace_handle(&self, node: &JS<Node>) { unsafe { node.trace(self.0); } } } let tree_builder = self.sink(); tree_builder.trace_handles(&tracer); tree_builder.sink().trace(trc); } } impl<'a> Serialize for &'a Node { fn serialize<S: Serializer>(&self, serializer: &mut S, traversal_scope: TraversalScope) -> io::Result<()> { let node = *self; match (traversal_scope, node.type_id()) { (_, NodeTypeId::Element(..)) => { let elem = node.downcast::<Element>().unwrap(); let name = QualName::new(None, elem.namespace().clone(), elem.local_name().clone()); if traversal_scope == IncludeNode { let attrs = elem.attrs().iter().map(|attr| { let qname = QualName::new(None, attr.namespace().clone(), attr.local_name().clone()); let value = attr.value().clone(); (qname, value) }).collect::<Vec<_>>(); let attr_refs = 
attrs.iter().map(|&(ref qname, ref value)| { let ar: AttrRef = (&qname, &**value); ar }); try!(serializer.start_elem(name.clone(), attr_refs)); } let children = if let Some(tpl) = node.downcast::<HTMLTemplateElement>() { // https://github.com/w3c/DOM-Parsing/issues/1 tpl.Content().upcast::<Node>().children() } else { node.children() }; for handle in children { try!((&*handle).serialize(serializer, IncludeNode)); } if traversal_scope == IncludeNode { try!(serializer.end_elem(name.clone())); } Ok(()) }, (ChildrenOnly, NodeTypeId::Document(_)) => { for handle in node.children() { try!((&*handle).serialize(serializer, IncludeNode)); } Ok(()) }, (ChildrenOnly, _) => Ok(()), (IncludeNode, NodeTypeId::DocumentType) => { let doctype = node.downcast::<DocumentType>().unwrap(); serializer.write_doctype(&doctype.name()) }, (IncludeNode, NodeTypeId::CharacterData(CharacterDataTypeId::Text)) => { let cdata = node.downcast::<CharacterData>().unwrap(); serializer.write_text(&cdata.data()) }, (IncludeNode, NodeTypeId::CharacterData(CharacterDataTypeId::Comment)) => { let cdata = node.downcast::<CharacterData>().unwrap(); serializer.write_comment(&cdata.data()) }, (IncludeNode, NodeTypeId::CharacterData(CharacterDataTypeId::ProcessingInstruction)) => { let pi = node.downcast::<ProcessingInstruction>().unwrap(); let data = pi.upcast::<CharacterData>().data(); serializer.write_processing_instruction(&pi.target(), &data) }, (IncludeNode, NodeTypeId::DocumentFragment) => Ok(()), (IncludeNode, NodeTypeId::Document(_)) => panic!("Can't serialize Document node itself"), } } }<|fim▁end|>
};
<|file_name|>htmlformcontrolscollection.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ use crate::dom::bindings::codegen::Bindings::HTMLCollectionBinding::HTMLCollectionMethods; use crate::dom::bindings::codegen::Bindings::HTMLFormControlsCollectionBinding::HTMLFormControlsCollectionMethods; use crate::dom::bindings::codegen::Bindings::NodeBinding::{GetRootNodeOptions, NodeMethods}; use crate::dom::bindings::codegen::UnionTypes::RadioNodeListOrElement; use crate::dom::bindings::inheritance::Castable; use crate::dom::bindings::reflector::{reflect_dom_object, DomObject}; use crate::dom::bindings::root::{Dom, DomRoot}; use crate::dom::bindings::str::DOMString; use crate::dom::element::Element; use crate::dom::htmlcollection::{CollectionFilter, HTMLCollection}; use crate::dom::htmlformelement::HTMLFormElement; use crate::dom::node::Node; use crate::dom::radionodelist::RadioNodeList; use crate::dom::window::Window; use dom_struct::dom_struct; use servo_atoms::Atom; #[dom_struct] pub struct HTMLFormControlsCollection { collection: HTMLCollection, form: Dom<HTMLFormElement>, } impl HTMLFormControlsCollection { fn new_inherited( form: &HTMLFormElement, filter: Box<dyn CollectionFilter + 'static>, ) -> HTMLFormControlsCollection { let root_of_form = form .upcast::<Node>() .GetRootNode(&GetRootNodeOptions::empty()); HTMLFormControlsCollection { collection: HTMLCollection::new_inherited(&*root_of_form, filter), form: Dom::from_ref(form), } } pub fn new( window: &Window, form: &HTMLFormElement, filter: Box<dyn CollectionFilter + 'static>, ) -> DomRoot<HTMLFormControlsCollection> { reflect_dom_object( Box::new(HTMLFormControlsCollection::new_inherited(form, filter)), window, ) } // FIXME: This shouldn't need to be implemented here since HTMLCollection (the parent of // 
HTMLFormControlsCollection) implements Length #[allow(non_snake_case)] pub fn Length(&self) -> u32 { self.collection.Length() } } impl HTMLFormControlsCollectionMethods for HTMLFormControlsCollection { // https://html.spec.whatwg.org/multipage/#dom-htmlformcontrolscollection-nameditem fn NamedItem(&self, name: DOMString) -> Option<RadioNodeListOrElement> { // Step 1 if name.is_empty() { return None; } let name = Atom::from(name); let mut filter_map = self.collection.elements_iter().filter_map(|elem| { if elem.get_name().map_or(false, |n| n == name) || elem.get_id().map_or(false, |i| i == name) { Some(elem) } else { None } }); if let Some(elem) = filter_map.next() { let mut peekable = filter_map.peekable(); // Step 2 if peekable.peek().is_none() {<|fim▁hole|> let global = self.global(); let window = global.as_window(); // There is only one way to get an HTMLCollection, // specifically HTMLFormElement::Elements(), // and the collection filter excludes image inputs. Some(RadioNodeListOrElement::RadioNodeList( RadioNodeList::new_controls_except_image_inputs(window, &*self.form, &name), )) } // Step 3 } else { None } } // https://html.spec.whatwg.org/multipage/#dom-htmlformcontrolscollection-nameditem fn NamedGetter(&self, name: DOMString) -> Option<RadioNodeListOrElement> { self.NamedItem(name) } // https://html.spec.whatwg.org/multipage/#the-htmlformcontrolscollection-interface:supported-property-names fn SupportedPropertyNames(&self) -> Vec<DOMString> { self.collection.SupportedPropertyNames() } // FIXME: This shouldn't need to be implemented here since HTMLCollection (the parent of // HTMLFormControlsCollection) implements IndexedGetter. // https://github.com/servo/servo/issues/5875 // // https://dom.spec.whatwg.org/#dom-htmlcollection-item fn IndexedGetter(&self, index: u32) -> Option<DomRoot<Element>> { self.collection.IndexedGetter(index) } }<|fim▁end|>
Some(RadioNodeListOrElement::Element(elem)) } else { // Step 4-5
<|file_name|>druid-query.tsx<|end_file_name|><|fim▁begin|>/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import axios from 'axios'; import { AxiosResponse } from 'axios'; export function getDruidErrorMessage(e: any) { const data: any = ((e.response || {}).data || {}); return [data.error, data.errorMessage, data.errorClass].filter(Boolean).join(' / ') || e.message; } export async function queryDruidRune(runeQuery: Record<string, any>): Promise<any> { let runeResultResp: AxiosResponse<any>; try { runeResultResp = await axios.post("/druid/v2", runeQuery); } catch (e) { throw new Error(getDruidErrorMessage(e)); } return runeResultResp.data; } export async function queryDruidSql(sqlQuery: Record<string, any>): Promise<any[]> { let sqlResultResp: AxiosResponse<any>; try { sqlResultResp = await axios.post("/druid/v2/sql", sqlQuery); } catch (e) { throw new Error(getDruidErrorMessage(e)); } return sqlResultResp.data;<|fim▁hole|><|fim▁end|>
}
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>""" Django settings for cbs project. For more information on this file, see https://docs.djangoproject.com/en/1.7/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.7/ref/settings/ """ # Build paths inside the project like this: os.path.join(BASE_DIR, ...) import os BASE_DIR = os.path.dirname(os.path.dirname(__file__)) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = '51ff&6zumcwpo8+60&5+dg5nqh6-ehdo@uk-xi$*paicy7b4e%' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True TEMPLATE_DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = ( 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'p311', 'p365', ) MIDDLEWARE_CLASSES = ( 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ) ROOT_URLCONF = 'cbs.urls' WSGI_APPLICATION = 'cbs.wsgi.application' # Database # https://docs.djangoproject.com/en/1.7/ref/settings/#databases <|fim▁hole|> 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Internationalization # https://docs.djangoproject.com/en/1.7/topics/i18n/ LANGUAGE_CODE = 'ru' TIME_ZONE = 'Europe/Moscow' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.7/howto/static-files/ STATIC_URL = '/static/' STATICFILES_DIRS = ( os.path.join(BASE_DIR, 
"static"), ) TEMPLATE_DIRS = ( os.path.join(BASE_DIR, "templates"), )<|fim▁end|>
DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3',
<|file_name|>eclipse.py<|end_file_name|><|fim▁begin|># Copyright 2014-2015 0xc0170 # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import copy import logging from collections import OrderedDict # eclipse works with linux paths from posixpath import normpath, join, basename from .tool import Tool, Builder, Exporter from .gccarm import MakefileGccArm from ..util import SOURCE_KEYS logger = logging.getLogger('progen.tools.eclipse') class EclipseGnuARM(Tool, Exporter, Builder): file_types = {'cpp': 1, 'c': 1, 's': 1, 'obj': 1, 'lib': 1, 'h': 1} generated_project = { 'path': '', 'files': { 'proj_file': '', 'cproj': '', 'makefile': '', } } def __init__(self, workspace, env_settings): self.definitions = 0 self.exporter = MakefileGccArm(workspace, env_settings)<|fim▁hole|> self.workspace = workspace self.env_settings = env_settings @staticmethod def get_toolnames(): return ['eclipse_make_gcc_arm', 'make_gcc_arm'] @staticmethod def get_toolchain(): return 'gcc_arm' def _expand_one_file(self, source, new_data, extension): return {"path": join('PARENT-%s-PROJECT_LOC' % new_data['output_dir']['rel_path'], normpath(source)), "name": basename( source), "type": self.file_types[extension.lower()]} def _expand_sort_key(self, file) : return file['name'].lower() def export_workspace(self): logger.debug("Current version of CoIDE does not support workspaces") def export_project(self): """ Processes groups and misc options specific for eclipse, and run generator """ output = 
copy.deepcopy(self.generated_project) data_for_make = self.workspace.copy() self.exporter.process_data_for_makefile(data_for_make) output['path'], output['files']['makefile'] = self.gen_file_jinja('makefile_gcc.tmpl', data_for_make, 'Makefile', data_for_make['output_dir']['path']) expanded_dic = self.workspace.copy() expanded_dic['rel_path'] = data_for_make['output_dir']['rel_path'] groups = self._get_groups(expanded_dic) expanded_dic['groups'] = {} for group in groups: expanded_dic['groups'][group] = [] self._iterate(self.workspace, expanded_dic) # Project file project_path, output['files']['cproj'] = self.gen_file_jinja( 'eclipse_makefile.cproject.tmpl', expanded_dic, '.cproject', data_for_make['output_dir']['path']) project_path, output['files']['proj_file'] = self.gen_file_jinja( 'eclipse.project.tmpl', expanded_dic, '.project', data_for_make['output_dir']['path']) return output def get_generated_project_files(self): return {'path': self.workspace['path'], 'files': [self.workspace['files']['proj_file'], self.workspace['files']['cproj'], self.workspace['files']['makefile']]}<|fim▁end|>
<|file_name|>p0017_test.py<|end_file_name|><|fim▁begin|>import unittest class Test0017(unittest.TestCase): def test_problem(self): one_to_nine = [3, 3, 5, 4, 4, 3, 5, 5, 4] ten_to_nineteen = [3, 6, 6, 8, 8, 7, 7, 9, 8, 8] twenty_to_ninety = [6, 6, 5, 5, 5, 7, 6, 6] words_len = 0 sum_1_to_9 = sum(one_to_nine)<|fim▁hole|> #1~9,10~19 sum_1_to_99 = sum_1_to_9 + sum_10_to_19 #20~99 sum_1_to_99 += len(twenty_to_ninety) * sum_1_to_9 + (len(one_to_nine) + 1) * sum_20_to_90 #1~99 words_len += sum_1_to_99 #100~999, 'hundred and' => 10 words_len += len(one_to_nine) * sum_1_to_99 + 100 * ( sum_1_to_9 + 10 * len(one_to_nine)) - 3 * len(one_to_nine) #1000 words_len += 11 self.assertEqual(words_len, 21124)<|fim▁end|>
sum_10_to_19 = sum(ten_to_nineteen) sum_20_to_90 = sum(twenty_to_ninety)
<|file_name|>bitcoin_it.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="it" version="2.0"> <defaultcodec>UTF-8</defaultcodec> <context> <name>AboutDialog</name> <message> <location filename="../forms/aboutdialog.ui" line="+14"/> <source>About Litecoin</source> <translation>Info su Mana</translation> </message> <message> <location line="+39"/> <source>&lt;b&gt;Litecoin&lt;/b&gt; version</source> <translation>Versione di &lt;b&gt;Mana&lt;/b&gt;</translation> </message> <message> <location line="+57"/> <source> This is experimental software. Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php. This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source> <translation> Questo è un software sperimentale. Distribuito sotto la licenza software MIT/X11, vedi il file COPYING incluso oppure su http://www.opensource.org/licenses/mit-license.php. 
Questo prodotto include software sviluppato dal progetto OpenSSL per l&apos;uso del Toolkit OpenSSL (http://www.openssl.org/), software crittografico scritto da Eric Young ([email protected]) e software UPnP scritto da Thomas Bernard.</translation> </message> <message> <location filename="../aboutdialog.cpp" line="+14"/> <source>Copyright</source> <translation>Copyright</translation> </message> <message> <location line="+0"/> <source>The Litecoin developers</source> <translation>Sviluppatori di Mana</translation> </message> </context> <context> <name>AddressBookPage</name> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>Address Book</source> <translation>Rubrica</translation> </message> <message> <location line="+19"/> <source>Double-click to edit address or label</source> <translation>Fai doppio click per modificare o cancellare l&apos;etichetta</translation> </message> <message> <location line="+27"/> <source>Create a new address</source> <translation>Crea un nuovo indirizzo</translation> </message> <message> <location line="+14"/> <source>Copy the currently selected address to the system clipboard</source> <translation>Copia l&apos;indirizzo attualmente selezionato nella clipboard</translation> </message> <message> <location line="-11"/> <source>&amp;New Address</source> <translation>&amp;Nuovo indirizzo</translation> </message> <message> <location filename="../addressbookpage.cpp" line="+63"/> <source>These are your Litecoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source> <translation>Questi sono i tuoi indirizzi Mana per ricevere pagamenti. 
Potrai darne uno diverso ad ognuno per tenere così traccia di chi ti sta pagando.</translation> </message> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>&amp;Copy Address</source> <translation>&amp;Copia l&apos;indirizzo</translation> </message> <message> <location line="+11"/> <source>Show &amp;QR Code</source> <translation>Mostra il codice &amp;QR</translation> </message> <message> <location line="+11"/> <source>Sign a message to prove you own a Litecoin address</source> <translation>Firma un messaggio per dimostrare di possedere questo indirizzo</translation> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation>Firma il &amp;messaggio</translation> </message> <message> <location line="+25"/> <source>Delete the currently selected address from the list</source> <translation>Cancella l&apos;indirizzo attualmente selezionato dalla lista</translation> </message> <message> <location line="+27"/> <source>Export the data in the current tab to a file</source> <translation>Esporta i dati nella tabella corrente su un file</translation> </message> <message> <location line="+3"/> <source>&amp;Export</source> <translation>&amp;Esporta...</translation> </message> <message> <location line="-44"/> <source>Verify a message to ensure it was signed with a specified Litecoin address</source> <translation>Verifica un messaggio per accertarsi che sia firmato con un indirizzo Mana specifico</translation> </message> <message> <location line="+3"/> <source>&amp;Verify Message</source> <translation>&amp;Verifica Messaggio</translation> </message> <message> <location line="+14"/> <source>&amp;Delete</source> <translation>&amp;Cancella</translation> </message> <message> <location filename="../addressbookpage.cpp" line="-5"/> <source>These are your Litecoin addresses for sending payments. 
Always check the amount and the receiving address before sending coins.</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>Copy &amp;Label</source> <translation>Copia &amp;l&apos;etichetta</translation> </message> <message> <location line="+1"/> <source>&amp;Edit</source> <translation>&amp;Modifica</translation> </message> <message> <location line="+1"/> <source>Send &amp;Coins</source> <translation>Invia &amp;Mana</translation> </message> <message> <location line="+260"/> <source>Export Address Book Data</source> <translation>Esporta gli indirizzi della rubrica</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Testo CSV (*.csv)</translation> </message> <message> <location line="+13"/> <source>Error exporting</source> <translation>Errore nell&apos;esportazione</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>Impossibile scrivere sul file %1.</translation> </message> </context> <context> <name>AddressTableModel</name> <message> <location filename="../addresstablemodel.cpp" line="+144"/> <source>Label</source> <translation>Etichetta</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Indirizzo</translation> </message> <message> <location line="+36"/> <source>(no label)</source> <translation>(nessuna etichetta)</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <location filename="../forms/askpassphrasedialog.ui" line="+26"/> <source>Passphrase Dialog</source> <translation>Finestra passphrase</translation> </message> <message> <location line="+21"/> <source>Enter passphrase</source> <translation>Inserisci la passphrase</translation> </message> <message> <location line="+14"/> <source>New passphrase</source> <translation>Nuova passphrase</translation> </message> <message> <location line="+14"/> <source>Repeat new 
passphrase</source> <translation>Ripeti la passphrase</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="+33"/> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;10 or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation>Inserisci la passphrase per il portamonete.&lt;br/&gt;Per piacere usare unapassphrase di &lt;b&gt;10 o più caratteri casuali&lt;/b&gt;, o &lt;b&gt;otto o più parole&lt;/b&gt;.</translation> </message> <message> <location line="+1"/> <source>Encrypt wallet</source> <translation>Cifra il portamonete</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation>Quest&apos;operazione necessita della passphrase per sbloccare il portamonete.</translation> </message> <message> <location line="+5"/> <source>Unlock wallet</source> <translation>Sblocca il portamonete</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation>Quest&apos;operazione necessita della passphrase per decifrare il portamonete,</translation> </message> <message> <location line="+5"/> <source>Decrypt wallet</source> <translation>Decifra il portamonete</translation> </message> <message> <location line="+3"/> <source>Change passphrase</source> <translation>Cambia la passphrase</translation> </message> <message> <location line="+1"/> <source>Enter the old and new passphrase to the wallet.</source> <translation>Inserisci la vecchia e la nuova passphrase per il portamonete.</translation> </message> <message> <location line="+46"/> <source>Confirm wallet encryption</source> <translation>Conferma la cifratura del portamonete</translation> </message> <message> <location line="+1"/> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR 
LITECOINS&lt;/b&gt;!</source> <translation>Attenzione: se si cifra il portamonete e si perde la frase d&apos;ordine, &lt;b&gt;SI PERDERANNO TUTTI I PROPRI Mana&lt;/b&gt;!</translation> </message> <message> <location line="+0"/> <source>Are you sure you wish to encrypt your wallet?</source> <translation>Si è sicuri di voler cifrare il portamonete?</translation> </message> <message> <location line="+15"/> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation>IMPORTANTE: qualsiasi backup del portafoglio effettuato precedentemente dovrebbe essere sostituito con il file del portafoglio criptato appena generato. Per ragioni di sicurezza, i backup precedenti del file del portafoglio non criptato diventeranno inservibili non appena si inizi ad usare il nuovo portafoglio criptato.</translation> </message> <message> <location line="+100"/> <location line="+24"/> <source>Warning: The Caps Lock key is on!</source> <translation>Attenzione: tasto Blocco maiuscole attivo.</translation> </message> <message> <location line="-130"/> <location line="+58"/> <source>Wallet encrypted</source> <translation>Portamonete cifrato</translation> </message> <message> <location line="-56"/> <source>Litecoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your litecoins from being stolen by malware infecting your computer.</source> <translation>Mana verrà ora chiuso per finire il processo di crittazione. 
Ricorda che criptare il tuo portamonete non può fornire una protezione totale contro furti causati da malware che dovessero infettare il tuo computer.</translation> </message> <message> <location line="+13"/> <location line="+7"/> <location line="+42"/> <location line="+6"/> <source>Wallet encryption failed</source> <translation>Cifratura del portamonete fallita</translation> </message> <message> <location line="-54"/> <source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source> <translation>Cifratura del portamonete fallita a causa di un errore interno. Il portamonete non è stato cifrato.</translation> </message> <message> <location line="+7"/> <location line="+48"/> <source>The supplied passphrases do not match.</source> <translation>Le passphrase inserite non corrispondono.</translation> </message> <message> <location line="-37"/> <source>Wallet unlock failed</source> <translation>Sblocco del portamonete fallito</translation> </message> <message> <location line="+1"/> <location line="+11"/> <location line="+19"/> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation>La passphrase inserita per la decifrazione del portamonete è errata.</translation> </message> <message> <location line="-20"/> <source>Wallet decryption failed</source> <translation>Decifrazione del portamonete fallita</translation> </message> <message> <location line="+14"/> <source>Wallet passphrase was successfully changed.</source> <translation>Passphrase del portamonete modificata con successo.</translation> </message> </context> <context> <name>BitcoinGUI</name> <message> <location filename="../bitcoingui.cpp" line="+233"/> <source>Sign &amp;message...</source> <translation>Firma il &amp;messaggio...</translation> </message> <message> <location line="+280"/> <source>Synchronizing with network...</source> <translation>Sto sincronizzando con la rete...</translation> </message> <message> <location line="-349"/> 
<source>&amp;Overview</source> <translation>&amp;Sintesi</translation> </message> <message> <location line="+1"/> <source>Show general overview of wallet</source> <translation>Mostra lo stato generale del portamonete</translation> </message> <message> <location line="+20"/> <source>&amp;Transactions</source> <translation>&amp;Transazioni</translation> </message> <message> <location line="+1"/> <source>Browse transaction history</source> <translation>Cerca nelle transazioni</translation> </message> <message> <location line="+7"/> <source>Edit the list of stored addresses and labels</source> <translation>Modifica la lista degli indirizzi salvati e delle etichette</translation> </message> <message> <location line="-14"/> <source>Show the list of addresses for receiving payments</source> <translation>Mostra la lista di indirizzi su cui ricevere pagamenti</translation> </message> <message> <location line="+31"/> <source>E&amp;xit</source> <translation>&amp;Esci</translation> </message> <message> <location line="+1"/> <source>Quit application</source> <translation>Chiudi applicazione</translation> </message> <message> <location line="+4"/> <source>Show information about Litecoin</source> <translation>Mostra informazioni su Mana</translation> </message> <message> <location line="+2"/> <source>About &amp;Qt</source> <translation>Informazioni su &amp;Qt</translation> </message> <message> <location line="+1"/> <source>Show information about Qt</source> <translation>Mostra informazioni su Qt</translation> </message> <message> <location line="+2"/> <source>&amp;Options...</source> <translation>&amp;Opzioni...</translation> </message> <message> <location line="+6"/> <source>&amp;Encrypt Wallet...</source> <translation>&amp;Cifra il portamonete...</translation> </message> <message> <location line="+3"/> <source>&amp;Backup Wallet...</source> <translation>&amp;Backup Portamonete...</translation> </message> <message> <location line="+2"/> <source>&amp;Change Passphrase...</source> 
<translation>&amp;Cambia la passphrase...</translation> </message> <message> <location line="+285"/> <source>Importing blocks from disk...</source> <translation>Importa blocchi dal disco...</translation> </message> <message> <location line="+3"/> <source>Reindexing blocks on disk...</source> <translation>Re-indicizzazione blocchi su disco...</translation> </message> <message> <location line="-347"/> <source>Send coins to a Litecoin address</source> <translation>Invia monete ad un indirizzo Mana</translation> </message> <message> <location line="+49"/> <source>Modify configuration options for Litecoin</source> <translation>Modifica configurazione opzioni per Mana</translation> </message> <message> <location line="+9"/> <source>Backup wallet to another location</source> <translation>Backup portamonete in un&apos;altra locazione</translation> </message> <message> <location line="+2"/> <source>Change the passphrase used for wallet encryption</source> <translation>Cambia la passphrase per la cifratura del portamonete</translation> </message> <message> <location line="+6"/> <source>&amp;Debug window</source> <translation>Finestra &amp;Debug</translation> </message> <message> <location line="+1"/> <source>Open debugging and diagnostic console</source> <translation>Apri la console di degugging e diagnostica</translation> </message> <message> <location line="-4"/> <source>&amp;Verify message...</source> <translation>&amp;Verifica messaggio...</translation> </message> <message> <location line="-165"/> <location line="+530"/> <source>Litecoin</source> <translation>Mana</translation> </message> <message> <location line="-530"/> <source>Wallet</source> <translation>Portamonete</translation> </message> <message> <location line="+101"/> <source>&amp;Send</source> <translation>&amp;Spedisci</translation> </message> <message> <location line="+7"/> <source>&amp;Receive</source> <translation>&amp;Ricevi</translation> </message> <message> <location line="+14"/> 
<source>&amp;Addresses</source> <translation>&amp;Indirizzi</translation> </message> <message> <location line="+22"/> <source>&amp;About Litecoin</source> <translation>&amp;Info su Mana</translation> </message> <message> <location line="+9"/> <source>&amp;Show / Hide</source> <translation>&amp;Mostra/Nascondi</translation> </message> <message> <location line="+1"/> <source>Show or hide the main Window</source> <translation>Mostra o nascondi la Finestra principale</translation> </message> <message> <location line="+3"/> <source>Encrypt the private keys that belong to your wallet</source> <translation>Crittografa le chiavi private che appartengono al tuo portafoglio</translation> </message> <message> <location line="+7"/> <source>Sign messages with your Litecoin addresses to prove you own them</source> <translation>Firma i messaggi con il tuo indirizzo Mana per dimostrare di possederli</translation> </message> <message> <location line="+2"/> <source>Verify messages to ensure they were signed with specified Litecoin addresses</source> <translation>Verifica i messaggi per accertarsi che siano stati firmati con gli indirizzi Mana specificati</translation> </message> <message> <location line="+28"/> <source>&amp;File</source> <translation>&amp;File</translation> </message> <message> <location line="+7"/> <source>&amp;Settings</source> <translation>&amp;Impostazioni</translation> </message> <message> <location line="+6"/> <source>&amp;Help</source> <translation>&amp;Aiuto</translation> </message> <message> <location line="+9"/> <source>Tabs toolbar</source> <translation>Barra degli strumenti &quot;Tabs&quot;</translation> </message> <message> <location line="+17"/> <location line="+10"/> <source>[testnet]</source> <translation>[testnet]</translation> </message> <message> <location line="+47"/> <source>Litecoin client</source> <translation>Mana client</translation> </message> <message numerus="yes"> <location line="+141"/> <source>%n active connection(s) to Litecoin 
network</source> <translation><numerusform>%n connessione attiva alla rete Mana</numerusform><numerusform>%n connessioni attive alla rete Mana</numerusform></translation> </message> <message> <location line="+22"/> <source>No block source available...</source> <translation type="unfinished"/> </message> <message> <location line="+12"/> <source>Processed %1 of %2 (estimated) blocks of transaction history.</source> <translation>Processati %1 di %2 (circa) blocchi della cronologia transazioni.</translation> </message> <message> <location line="+4"/> <source>Processed %1 blocks of transaction history.</source> <translation>Processati %1 blocchi della cronologia transazioni.</translation> </message> <message numerus="yes"> <location line="+20"/> <source>%n hour(s)</source> <translation><numerusform>%n ora</numerusform><numerusform>%n ore</numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n day(s)</source> <translation><numerusform>%n giorno</numerusform><numerusform>%n giorni</numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n week(s)</source> <translation><numerusform>%n settimana</numerusform><numerusform>%n settimane</numerusform></translation> </message> <message> <location line="+4"/> <source>%1 behind</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Last received block was generated %1 ago.</source> <translation>L&apos;ultimo blocco ricevuto è stato generato %1 fa.</translation> </message> <message> <location line="+2"/> <source>Transactions after this will not yet be visible.</source> <translation type="unfinished"/> </message> <message> <location line="+22"/> <source>Error</source> <translation>Errore</translation> </message> <message> <location line="+3"/> <source>Warning</source> <translation>Attenzione</translation> </message> <message> <location line="+3"/> <source>Information</source> <translation>Informazione</translation> 
</message> <message> <location line="+70"/> <source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source> <translation>Questa transazione è superiore al limite di dimensione. È comunque possibile inviarla con una commissione di %1, che va ai nodi che processano la tua transazione e contribuisce a sostenere la rete. Vuoi pagare la commissione?</translation> </message> <message> <location line="-140"/> <source>Up to date</source> <translation>Aggiornato</translation> </message> <message> <location line="+31"/> <source>Catching up...</source> <translation>In aggiornamento...</translation> </message> <message> <location line="+113"/> <source>Confirm transaction fee</source> <translation>Conferma compenso transazione</translation> </message> <message> <location line="+8"/> <source>Sent transaction</source> <translation>Transazione inviata</translation> </message> <message> <location line="+0"/> <source>Incoming transaction</source> <translation>Transazione ricevuta</translation> </message> <message> <location line="+1"/> <source>Date: %1 Amount: %2 Type: %3 Address: %4 </source> <translation>Data: %1 Quantità: %2 Tipo: %3 Indirizzo: %4 </translation> </message> <message> <location line="+33"/> <location line="+23"/> <source>URI handling</source> <translation>Gestione URI</translation> </message> <message> <location line="-23"/> <location line="+23"/> <source>URI can not be parsed! This can be caused by an invalid Litecoin address or malformed URI parameters.</source> <translation>Impossibile interpretare l&apos;URI! 
Ciò può essere causato da un indirizzo Mana invalido o da parametri URI non corretti.</translation> </message> <message> <location line="+17"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation>Il portamonete è &lt;b&gt;cifrato&lt;/b&gt; e attualmente &lt;b&gt;sbloccato&lt;/b&gt;</translation> </message> <message> <location line="+8"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation>Il portamonete è &lt;b&gt;cifrato&lt;/b&gt; e attualmente &lt;b&gt;bloccato&lt;/b&gt;</translation> </message> <message> <location filename="../bitcoin.cpp" line="+111"/> <source>A fatal error occurred. Litecoin can no longer continue safely and will quit.</source> <translation>Riscontrato un errore irreversibile. Mana non può più continuare in sicurezza e verrà terminato.</translation> </message> </context> <context> <name>ClientModel</name> <message> <location filename="../clientmodel.cpp" line="+104"/> <source>Network Alert</source> <translation>Avviso di rete</translation> </message> </context> <context> <name>EditAddressDialog</name> <message> <location filename="../forms/editaddressdialog.ui" line="+14"/> <source>Edit Address</source> <translation>Modifica l&apos;indirizzo</translation> </message> <message> <location line="+11"/> <source>&amp;Label</source> <translation>&amp;Etichetta</translation> </message> <message> <location line="+10"/> <source>The label associated with this address book entry</source> <translation>L&apos;etichetta associata a questo indirizzo nella rubrica</translation> </message> <message> <location line="+7"/> <source>&amp;Address</source> <translation>&amp;Indirizzo</translation> </message> <message> <location line="+10"/> <source>The address associated with this address book entry. This can only be modified for sending addresses.</source> <translation>L&apos;indirizzo associato a questa voce della rubrica. 
Si può modificare solo negli indirizzi di spedizione.</translation> </message> <message> <location filename="../editaddressdialog.cpp" line="+21"/> <source>New receiving address</source> <translation>Nuovo indirizzo di ricezione</translation> </message> <message> <location line="+4"/> <source>New sending address</source> <translation>Nuovo indirizzo d&apos;invio</translation> </message> <message> <location line="+3"/> <source>Edit receiving address</source> <translation>Modifica indirizzo di ricezione</translation> </message> <message> <location line="+4"/> <source>Edit sending address</source> <translation>Modifica indirizzo d&apos;invio</translation> </message> <message> <location line="+76"/> <source>The entered address &quot;%1&quot; is already in the address book.</source> <translation>L&apos;indirizzo inserito &quot;%1&quot; è già in rubrica.</translation> </message> <message> <location line="-5"/> <source>The entered address &quot;%1&quot; is not a valid Litecoin address.</source> <translation>L&apos;indirizzo inserito &quot;%1&quot; non è un indirizzo Mana valido.</translation> </message> <message> <location line="+10"/> <source>Could not unlock wallet.</source> <translation>Impossibile sbloccare il portamonete.</translation> </message> <message> <location line="+5"/> <source>New key generation failed.</source> <translation>Generazione della nuova chiave non riuscita.</translation> </message> </context> <context> <name>GUIUtil::HelpMessageBox</name> <message> <location filename="../guiutil.cpp" line="+424"/> <location line="+12"/> <source>Litecoin-Qt</source> <translation>Mana-Qt</translation> </message> <message> <location line="-12"/> <source>version</source> <translation>versione</translation> </message> <message> <location line="+2"/> <source>Usage:</source> <translation>Utilizzo:</translation> </message> <message> <location line="+1"/> <source>command-line options</source> <translation>opzioni riga di comando</translation> </message> <message> 
<location line="+4"/> <source>UI options</source> <translation>UI opzioni</translation> </message> <message> <location line="+1"/> <source>Set language, for example &quot;de_DE&quot; (default: system locale)</source> <translation>Imposta lingua, ad esempio &quot;it_IT&quot; (predefinita: lingua di sistema)</translation> </message> <message> <location line="+1"/> <source>Start minimized</source> <translation>Parti in icona </translation> </message> <message> <location line="+1"/> <source>Show splash screen on startup (default: 1)</source> <translation>Mostra finestra di presentazione all&apos;avvio (default: 1)</translation> </message> </context> <context> <name>OptionsDialog</name> <message> <location filename="../forms/optionsdialog.ui" line="+14"/> <source>Options</source> <translation>Opzioni</translation> </message> <message> <location line="+16"/> <source>&amp;Main</source> <translation>&amp;Principale</translation> </message> <message> <location line="+6"/> <source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. 
Most transactions are 1 kB.</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>Pay transaction &amp;fee</source> <translation>Paga la &amp;commissione</translation> </message> <message> <location line="+31"/> <source>Automatically start Litecoin after logging in to the system.</source> <translation>Avvia automaticamente Mana all&apos;accensione del computer</translation> </message> <message> <location line="+3"/> <source>&amp;Start Litecoin on system login</source> <translation>&amp;Fai partire Mana all&apos;avvio del sistema</translation> </message> <message> <location line="+35"/> <source>Reset all client options to default.</source> <translation>Ripristina tutte le opzioni del client alle predefinite.</translation> </message> <message> <location line="+3"/> <source>&amp;Reset Options</source> <translation>&amp;Ripristina Opzioni</translation> </message> <message> <location line="+13"/> <source>&amp;Network</source> <translation>Rete</translation> </message> <message> <location line="+6"/> <source>Automatically open the Litecoin client port on the router. This only works when your router supports UPnP and it is enabled.</source> <translation>Apri automaticamente la porta del client Mana sul router. Questo funziona solo se il router supporta UPnP ed è abilitato.</translation> </message> <message> <location line="+3"/> <source>Map port using &amp;UPnP</source> <translation>Mappa le porte tramite l&apos;&amp;UPnP</translation> </message> <message> <location line="+7"/> <source>Connect to the Litecoin network through a SOCKS proxy (e.g. 
when connecting through Tor).</source> <translation>Connettiti alla rete Mana attraverso un proxy SOCKS (ad esempio quando ci si collega via Tor)</translation> </message> <message> <location line="+3"/> <source>&amp;Connect through SOCKS proxy:</source> <translation>&amp;Collegati tramite SOCKS proxy:</translation> </message> <message> <location line="+9"/> <source>Proxy &amp;IP:</source> <translation>&amp;IP del proxy:</translation> </message> <message> <location line="+19"/> <source>IP address of the proxy (e.g. 127.0.0.1)</source> <translation>Indirizzo IP del proxy (ad esempio 127.0.0.1)</translation> </message> <message> <location line="+7"/> <source>&amp;Port:</source> <translation>&amp;Porta:</translation> </message> <message> <location line="+19"/> <source>Port of the proxy (e.g. 9050)</source> <translation>Porta del proxy (es. 9050)</translation> </message> <message> <location line="+7"/> <source>SOCKS &amp;Version:</source> <translation>SOCKS &amp;Version:</translation> </message> <message> <location line="+13"/> <source>SOCKS version of the proxy (e.g. 5)</source> <translation>Versione SOCKS del proxy (es. 5)</translation> </message> <message> <location line="+36"/> <source>&amp;Window</source> <translation>&amp;Finestra</translation> </message> <message> <location line="+6"/> <source>Show only a tray icon after minimizing the window.</source> <translation>Mostra solo un&apos;icona nel tray quando si minimizza la finestra</translation> </message> <message> <location line="+3"/> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation>&amp;Minimizza sul tray invece che sulla barra delle applicazioni</translation> </message> <message> <location line="+7"/> <source>Minimize instead of exit the application when the window is closed. 
When this option is enabled, the application will be closed only after selecting Quit in the menu.</source> <translation>Riduci ad icona, invece di uscire dall&apos;applicazione quando la finestra viene chiusa. Quando questa opzione è attivata, l&apos;applicazione verrà chiusa solo dopo aver selezionato Esci nel menu.</translation> </message> <message> <location line="+3"/> <source>M&amp;inimize on close</source> <translation>M&amp;inimizza alla chiusura</translation> </message> <message> <location line="+21"/> <source>&amp;Display</source> <translation>&amp;Mostra</translation> </message> <message> <location line="+8"/> <source>User Interface &amp;language:</source> <translation>&amp;Lingua Interfaccia Utente:</translation> </message> <message> <location line="+13"/> <source>The user interface language can be set here. This setting will take effect after restarting Litecoin.</source> <translation>La lingua dell&apos;interfaccia utente può essere impostata qui. L&apos;impostazione avrà effetto dopo il riavvio di Mana.</translation> </message> <message> <location line="+11"/> <source>&amp;Unit to show amounts in:</source> <translation>&amp;Unità di misura degli importi in:</translation> </message> <message> <location line="+13"/> <source>Choose the default subdivision unit to show in the interface and when sending coins.</source> <translation>Scegli l&apos;unità di suddivisione di default per l&apos;interfaccia e per l&apos;invio di monete</translation> </message> <message> <location line="+9"/> <source>Whether to show Litecoin addresses in the transaction list or not.</source> <translation>Se mostrare l&apos;indirizzo Mana nella transazione o meno.</translation> </message> <message> <location line="+3"/> <source>&amp;Display addresses in transaction list</source> <translation>&amp;Mostra gli indirizzi nella lista delle transazioni</translation> </message> <message> <location line="+71"/> <source>&amp;OK</source> <translation>&amp;OK</translation> </message> 
<message> <location line="+7"/> <source>&amp;Cancel</source> <translation>&amp;Cancella</translation> </message> <message> <location line="+10"/> <source>&amp;Apply</source> <translation>&amp;Applica</translation> </message> <message> <location filename="../optionsdialog.cpp" line="+53"/> <source>default</source> <translation>default</translation> </message> <message> <location line="+130"/> <source>Confirm options reset</source> <translation>Conferma ripristino opzioni</translation> </message> <message> <location line="+1"/> <source>Some settings may require a client restart to take effect.</source> <translation>Alcune modifiche necessitano del riavvio del programma per essere salvate.</translation> </message> <message> <location line="+0"/> <source>Do you want to proceed?</source> <translation>Vuoi procedere?</translation> </message> <message> <location line="+42"/> <location line="+9"/> <source>Warning</source> <translation>Attenzione</translation> </message> <message> <location line="-9"/> <location line="+9"/> <source>This setting will take effect after restarting Litecoin.</source> <translation>L&apos;impostazione avrà effetto dopo il riavvio di Mana.</translation> </message> <message> <location line="+29"/> <source>The supplied proxy address is invalid.</source> <translation>L&apos;indirizzo proxy che hai fornito è invalido.</translation> </message> </context> <context> <name>OverviewPage</name> <message> <location filename="../forms/overviewpage.ui" line="+14"/> <source>Form</source> <translation>Modulo</translation> </message> <message> <location line="+50"/> <location line="+166"/> <source>The displayed information may be out of date. Your wallet automatically synchronizes with the Litecoin network after a connection is established, but this process has not completed yet.</source> <translation>Le informazioni visualizzate sono datate. 
Il tuo portafogli verrà sincronizzato automaticamente con il network Mana dopo che la connessione è stabilita, ma questo processo non può essere completato ora.</translation> </message> <message> <location line="-124"/> <source>Balance:</source> <translation>Saldo:</translation> </message> <message> <location line="+29"/> <source>Unconfirmed:</source> <translation>Non confermato:</translation> </message> <message> <location line="-78"/> <source>Wallet</source> <translation>Portamonete</translation> </message> <message> <location line="+107"/> <source>Immature:</source> <translation>Immaturo:</translation> </message> <message> <location line="+13"/> <source>Mined balance that has not yet matured</source> <translation>Importo scavato che non è ancora maturato</translation> </message> <message> <location line="+46"/> <source>&lt;b&gt;Recent transactions&lt;/b&gt;</source> <translation>&lt;b&gt;Transazioni recenti&lt;/b&gt;</translation> </message> <message> <location line="-101"/> <source>Your current balance</source> <translation>Saldo attuale</translation> </message> <message> <location line="+29"/> <source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source> <translation>Totale delle transazioni in corso di conferma, che non sono ancora incluse nel saldo attuale</translation> </message> <message> <location filename="../overviewpage.cpp" line="+116"/> <location line="+1"/> <source>out of sync</source> <translation>fuori sincrono</translation> </message> </context> <context> <name>PaymentServer</name> <message> <location filename="../paymentserver.cpp" line="+107"/> <source>Cannot start litecoin: click-to-pay handler</source> <translation type="unfinished"/> </message> </context> <context> <name>QRCodeDialog</name> <message> <location filename="../forms/qrcodedialog.ui" line="+14"/> <source>QR Code Dialog</source> <translation>Codice QR di dialogo</translation> </message> <message> <location line="+59"/> 
<source>Request Payment</source> <translation>Richiedi pagamento</translation> </message> <message> <location line="+56"/> <source>Amount:</source> <translation>Importo:</translation> </message> <message> <location line="-44"/> <source>Label:</source> <translation>Etichetta:</translation> </message> <message> <location line="+19"/> <source>Message:</source> <translation>Messaggio:</translation> </message> <message> <location line="+71"/> <source>&amp;Save As...</source> <translation>&amp;Salva come...</translation> </message> <message> <location filename="../qrcodedialog.cpp" line="+62"/> <source>Error encoding URI into QR Code.</source> <translation>Errore nella codifica URI nel codice QR</translation> </message> <message> <location line="+40"/> <source>The entered amount is invalid, please check.</source> <translation>L&apos;importo specificato non è valido, prego verificare.</translation> </message> <message> <location line="+23"/> <source>Resulting URI too long, try to reduce the text for label / message.</source> <translation>L&apos;URI risulta troppo lungo, prova a ridurre il testo nell&apos;etichetta / messaggio.</translation> </message> <message> <location line="+25"/> <source>Save QR Code</source> <translation>Salva codice QR</translation> </message> <message> <location line="+0"/> <source>PNG Images (*.png)</source> <translation>Immagini PNG (*.png)</translation> </message> </context> <context> <name>RPCConsole</name> <message> <location filename="../forms/rpcconsole.ui" line="+46"/> <source>Client name</source> <translation>Nome del client</translation> </message> <message> <location line="+10"/> <location line="+23"/> <location line="+26"/> <location line="+23"/><|fim▁hole|> <location line="+23"/> <location filename="../rpcconsole.cpp" line="+339"/> <source>N/A</source> <translation>N/D</translation> </message> <message> <location line="-217"/> <source>Client version</source> <translation>Versione client</translation> </message> <message> <location 
line="-45"/> <source>&amp;Information</source> <translation>&amp;Informazione</translation> </message> <message> <location line="+68"/> <source>Using OpenSSL version</source> <translation>Versione OpenSSL in uso</translation> </message> <message> <location line="+49"/> <source>Startup time</source> <translation>Tempo di avvio</translation> </message> <message> <location line="+29"/> <source>Network</source> <translation>Rete</translation> </message> <message> <location line="+7"/> <source>Number of connections</source> <translation>Numero connessioni</translation> </message> <message> <location line="+23"/> <source>On testnet</source> <translation>Nel testnet</translation> </message> <message> <location line="+23"/> <source>Block chain</source> <translation>Block chain</translation> </message> <message> <location line="+7"/> <source>Current number of blocks</source> <translation>Numero attuale di blocchi</translation> </message> <message> <location line="+23"/> <source>Estimated total blocks</source> <translation>Numero totale stimato di blocchi</translation> </message> <message> <location line="+23"/> <source>Last block time</source> <translation>Ora dell blocco piu recente</translation> </message> <message> <location line="+52"/> <source>&amp;Open</source> <translation>&amp;Apri</translation> </message> <message> <location line="+16"/> <source>Command-line options</source> <translation>opzioni riga di comando</translation> </message> <message> <location line="+7"/> <source>Show the Litecoin-Qt help message to get a list with possible Litecoin command-line options.</source> <translation>Mostra il messaggio di aiuto di Mana-QT per avere la lista di tutte le opzioni della riga di comando di Mana.</translation> </message> <message> <location line="+3"/> <source>&amp;Show</source> <translation>&amp;Mostra</translation> </message> <message> <location line="+24"/> <source>&amp;Console</source> <translation>&amp;Console</translation> </message> <message> <location 
line="-260"/> <source>Build date</source> <translation>Data di creazione</translation> </message> <message> <location line="-104"/> <source>Litecoin - Debug window</source> <translation>Mana - Finestra debug</translation> </message> <message> <location line="+25"/> <source>Litecoin Core</source> <translation type="unfinished"/> </message> <message> <location line="+279"/> <source>Debug log file</source> <translation>File log del Debug</translation> </message> <message> <location line="+7"/> <source>Open the Litecoin debug log file from the current data directory. This can take a few seconds for large log files.</source> <translation>Apri il file di log del debug di Mana dalla cartella attuale. Può richiedere alcuni secondi per file di log grandi.</translation> </message> <message> <location line="+102"/> <source>Clear console</source> <translation>Svuota console</translation> </message> <message> <location filename="../rpcconsole.cpp" line="-30"/> <source>Welcome to the Litecoin RPC console.</source> <translation>Benvenuto nella console RPC di Mana</translation> </message> <message> <location line="+1"/> <source>Use up and down arrows to navigate history, and &lt;b&gt;Ctrl-L&lt;/b&gt; to clear screen.</source> <translation>Usa le frecce direzionali per navigare la cronologia, and &lt;b&gt;Ctrl-L&lt;/b&gt; per cancellarla.</translation> </message> <message> <location line="+1"/> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation>Scrivi &lt;b&gt;help&lt;/b&gt; per un riassunto dei comandi disponibili</translation> </message> </context> <context> <name>SendCoinsDialog</name> <message> <location filename="../forms/sendcoinsdialog.ui" line="+14"/> <location filename="../sendcoinsdialog.cpp" line="+124"/> <location line="+5"/> <location line="+5"/> <location line="+5"/> <location line="+6"/> <location line="+5"/> <location line="+5"/> <source>Send Coins</source> <translation>Spedisci Mana</translation> </message> <message> 
<location line="+50"/> <source>Send to multiple recipients at once</source> <translation>Spedisci a diversi beneficiari in una volta sola</translation> </message> <message> <location line="+3"/> <source>Add &amp;Recipient</source> <translation>&amp;Aggiungi beneficiario</translation> </message> <message> <location line="+20"/> <source>Remove all transaction fields</source> <translation>Rimuovi tutti i campi della transazione</translation> </message> <message> <location line="+3"/> <source>Clear &amp;All</source> <translation>Cancella &amp;tutto</translation> </message> <message> <location line="+22"/> <source>Balance:</source> <translation>Saldo:</translation> </message> <message> <location line="+10"/> <source>123.456 BTC</source> <translation>123,456 BTC</translation> </message> <message> <location line="+31"/> <source>Confirm the send action</source> <translation>Conferma la spedizione</translation> </message> <message> <location line="+3"/> <source>S&amp;end</source> <translation>&amp;Spedisci</translation> </message> <message> <location filename="../sendcoinsdialog.cpp" line="-59"/> <source>&lt;b&gt;%1&lt;/b&gt; to %2 (%3)</source> <translation>&lt;b&gt;%1&lt;/b&gt; to %2 (%3)</translation> </message> <message> <location line="+5"/> <source>Confirm send coins</source> <translation>Conferma la spedizione di Mana</translation> </message> <message> <location line="+1"/> <source>Are you sure you want to send %1?</source> <translation>Si è sicuri di voler spedire %1?</translation> </message> <message> <location line="+0"/> <source> and </source> <translation> e </translation> </message> <message> <location line="+23"/> <source>The recipient address is not valid, please recheck.</source> <translation>L&apos;indirizzo del beneficiario non è valido, per cortesia controlla.</translation> </message> <message> <location line="+5"/> <source>The amount to pay must be larger than 0.</source> <translation>L&apos;importo da pagare dev&apos;essere maggiore di 0.</translation> 
</message> <message> <location line="+5"/> <source>The amount exceeds your balance.</source> <translation>L&apos;importo è superiore al saldo attuale</translation> </message> <message> <location line="+5"/> <source>The total exceeds your balance when the %1 transaction fee is included.</source> <translation>Il totale è superiore al saldo attuale includendo la commissione %1.</translation> </message> <message> <location line="+6"/> <source>Duplicate address found, can only send to each address once per send operation.</source> <translation>Trovato un indirizzo doppio, si può spedire solo una volta a ciascun indirizzo in una singola operazione.</translation> </message> <message> <location line="+5"/> <source>Error: Transaction creation failed!</source> <translation>Errore: Creazione transazione fallita!</translation> </message> <message> <location line="+5"/> <source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Errore: la transazione è stata rifiutata. Ciò accade se alcuni Mana nel portamonete sono stati già spesi, ad esempio se è stata usata una copia del file wallet.dat e i Mana sono stati spesi dalla copia ma non segnati come spesi qui.</translation> </message> </context> <context> <name>SendCoinsEntry</name> <message> <location filename="../forms/sendcoinsentry.ui" line="+14"/> <source>Form</source> <translation>Modulo</translation> </message> <message> <location line="+15"/> <source>A&amp;mount:</source> <translation>&amp;Importo:</translation> </message> <message> <location line="+13"/> <source>Pay &amp;To:</source> <translation>Paga &amp;a:</translation> </message> <message> <location line="+34"/> <source>The address to send the payment to (e.g. 
Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>L&apos;indirizzo del beneficiario a cui inviare il pagamento (ad esempio Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="+60"/> <location filename="../sendcoinsentry.cpp" line="+26"/> <source>Enter a label for this address to add it to your address book</source> <translation>Inserisci un&apos;etichetta per questo indirizzo, per aggiungerlo nella rubrica</translation> </message> <message> <location line="-78"/> <source>&amp;Label:</source> <translation>&amp;Etichetta</translation> </message> <message> <location line="+28"/> <source>Choose address from address book</source> <translation>Scegli l&apos;indirizzo dalla rubrica</translation> </message> <message> <location line="+10"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="+7"/> <source>Paste address from clipboard</source> <translation>Incollare l&apos;indirizzo dagli appunti</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+7"/> <source>Remove this recipient</source> <translation>Rimuovere questo beneficiario</translation> </message> <message> <location filename="../sendcoinsentry.cpp" line="+1"/> <source>Enter a Litecoin address (e.g. 
Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Inserisci un indirizzo Mana (ad esempio Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> </context> <context> <name>SignVerifyMessageDialog</name> <message> <location filename="../forms/signverifymessagedialog.ui" line="+14"/> <source>Signatures - Sign / Verify a Message</source> <translation>Firme - Firma / Verifica un messaggio</translation> </message> <message> <location line="+13"/> <source>&amp;Sign Message</source> <translation>&amp;Firma il messaggio</translation> </message> <message> <location line="+6"/> <source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source> <translation>Puoi firmare messeggi con i tuoi indirizzi per dimostrare che sono tuoi. Fai attenzione a non firmare niente di vago, visto che gli attacchi di phishing potrebbero cercare di spingerti a mettere la tua firma su di loro. Firma solo dichiarazioni completamente dettagliate con cui sei d&apos;accordo.</translation> </message> <message> <location line="+18"/> <source>The address to sign the message with (e.g. 
Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Inserisci un indirizzo Mana (ad esempio Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="+10"/> <location line="+213"/> <source>Choose an address from the address book</source> <translation>Scegli l&apos;indirizzo dalla rubrica</translation> </message> <message> <location line="-203"/> <location line="+213"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="-203"/> <source>Paste address from clipboard</source> <translation>Incollare l&apos;indirizzo dagli appunti</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+12"/> <source>Enter the message you want to sign here</source> <translation>Inserisci qui il messaggio che vuoi firmare</translation> </message> <message> <location line="+7"/> <source>Signature</source> <translation>Firma</translation> </message> <message> <location line="+27"/> <source>Copy the current signature to the system clipboard</source> <translation>Copia la firma corrente nella clipboard</translation> </message> <message> <location line="+21"/> <source>Sign the message to prove you own this Litecoin address</source> <translation>Firma un messaggio per dimostrare di possedere questo indirizzo</translation> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation>Firma &amp;messaggio</translation> </message> <message> <location line="+14"/> <source>Reset all sign message fields</source> <translation>Reimposta tutti i campi della firma</translation> </message> <message> <location line="+3"/> <location line="+146"/> <source>Clear &amp;All</source> <translation>Cancella &amp;tutto</translation> </message> <message> <location line="-87"/> <source>&amp;Verify Message</source> <translation>&amp;Verifica Messaggio</translation> </message> <message> <location line="+6"/> <source>Enter 
the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source> <translation type="unfinished"/> </message> <message> <location line="+21"/> <source>The address the message was signed with (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Inserisci un indirizzo Mana (ad esempio Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="+40"/> <source>Verify the message to ensure it was signed with the specified Litecoin address</source> <translation>Verifica il messaggio per assicurarsi che sia stato firmato con l&apos;indirizzo Mana specificato</translation> </message> <message> <location line="+3"/> <source>Verify &amp;Message</source> <translation>&amp;Verifica Messaggio</translation> </message> <message> <location line="+14"/> <source>Reset all verify message fields</source> <translation>Reimposta tutti i campi della verifica messaggio</translation> </message> <message> <location filename="../signverifymessagedialog.cpp" line="+27"/> <location line="+3"/> <source>Enter a Litecoin address (e.g. 
Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Inserisci un indirizzo Mana (ad esempio Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="-2"/> <source>Click &quot;Sign Message&quot; to generate signature</source> <translation>Clicca &quot;Firma il messaggio&quot; per ottenere la firma</translation> </message> <message> <location line="+3"/> <source>Enter Litecoin signature</source> <translation>Inserisci firma Mana</translation> </message> <message> <location line="+82"/> <location line="+81"/> <source>The entered address is invalid.</source> <translation>L&apos;indirizzo inserito non è valido.</translation> </message> <message> <location line="-81"/> <location line="+8"/> <location line="+73"/> <location line="+8"/> <source>Please check the address and try again.</source> <translation>Per favore controlla l&apos;indirizzo e prova ancora</translation> </message> <message> <location line="-81"/> <location line="+81"/> <source>The entered address does not refer to a key.</source> <translation>L&apos;indirizzo Mana inserito non è associato a nessuna chiave.</translation> </message> <message> <location line="-73"/> <source>Wallet unlock was cancelled.</source> <translation>Sblocco del portafoglio annullato.</translation> </message> <message> <location line="+8"/> <source>Private key for the entered address is not available.</source> <translation>La chiave privata per l&apos;indirizzo inserito non è disponibile.</translation> </message> <message> <location line="+12"/> <source>Message signing failed.</source> <translation>Firma messaggio fallita.</translation> </message> <message> <location line="+5"/> <source>Message signed.</source> <translation>Messaggio firmato.</translation> </message> <message> <location line="+59"/> <source>The signature could not be decoded.</source> <translation>Non è stato possibile decodificare la firma.</translation> </message> <message> <location line="+0"/> <location line="+13"/> 
<source>Please check the signature and try again.</source> <translation>Per favore controlla la firma e prova ancora.</translation> </message> <message> <location line="+0"/> <source>The signature did not match the message digest.</source> <translation>La firma non corrisponde al sunto del messaggio.</translation> </message> <message> <location line="+7"/> <source>Message verification failed.</source> <translation>Verifica messaggio fallita.</translation> </message> <message> <location line="+5"/> <source>Message verified.</source> <translation>Messaggio verificato.</translation> </message> </context> <context> <name>SplashScreen</name> <message> <location filename="../splashscreen.cpp" line="+22"/> <source>The Litecoin developers</source> <translation>Sviluppatori di Mana</translation> </message> <message> <location line="+1"/> <source>[testnet]</source> <translation>[testnet]</translation> </message> </context> <context> <name>TransactionDesc</name> <message> <location filename="../transactiondesc.cpp" line="+20"/> <source>Open until %1</source> <translation>Aperto fino a %1</translation> </message> <message> <location line="+6"/> <source>%1/offline</source> <translation>%1/offline</translation> </message> <message> <location line="+2"/> <source>%1/unconfirmed</source> <translation>%1/non confermato</translation> </message> <message> <location line="+2"/> <source>%1 confirmations</source> <translation>%1 conferme</translation> </message> <message> <location line="+18"/> <source>Status</source> <translation>Stato</translation> </message> <message numerus="yes"> <location line="+7"/> <source>, broadcast through %n node(s)</source> <translation><numerusform>, trasmesso attraverso %n nodo</numerusform><numerusform>, trasmesso attraverso %n nodi</numerusform></translation> </message> <message> <location line="+4"/> <source>Date</source> <translation>Data</translation> </message> <message> <location line="+7"/> <source>Source</source> 
<translation>Sorgente</translation> </message> <message> <location line="+0"/> <source>Generated</source> <translation>Generato</translation> </message> <message> <location line="+5"/> <location line="+17"/> <source>From</source> <translation>Da</translation> </message> <message> <location line="+1"/> <location line="+22"/> <location line="+58"/> <source>To</source> <translation>A</translation> </message> <message> <location line="-77"/> <location line="+2"/> <source>own address</source> <translation>proprio indirizzo</translation> </message> <message> <location line="-2"/> <source>label</source> <translation>etichetta</translation> </message> <message> <location line="+37"/> <location line="+12"/> <location line="+45"/> <location line="+17"/> <location line="+30"/> <source>Credit</source> <translation>Credito</translation> </message> <message numerus="yes"> <location line="-102"/> <source>matures in %n more block(s)</source> <translation><numerusform>matura in %n ulteriore blocco</numerusform><numerusform>matura in altri %n blocchi</numerusform></translation> </message> <message> <location line="+2"/> <source>not accepted</source> <translation>non accettate</translation> </message> <message> <location line="+44"/> <location line="+8"/> <location line="+15"/> <location line="+30"/> <source>Debit</source> <translation>Debito</translation> </message> <message> <location line="-39"/> <source>Transaction fee</source> <translation>Commissione di transazione</translation> </message> <message> <location line="+16"/> <source>Net amount</source> <translation>Importo netto</translation> </message> <message> <location line="+6"/> <source>Message</source> <translation>Messaggio</translation> </message> <message> <location line="+2"/> <source>Comment</source> <translation>Commento</translation> </message> <message> <location line="+2"/> <source>Transaction ID</source> <translation>ID della transazione</translation> </message> <message> <location line="+3"/> <source>Generated coins must 
mature 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to &quot;not accepted&quot; and it won&apos;t be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source> <translation>Bisogna attendere 120 blocchi prima di spendere I Mana generati. Quando è stato generato questo blocco, è stato trasmesso alla rete per aggiungerlo alla catena di blocchi. Se non riesce a entrare nella catena, verrà modificato in &quot;non accettato&quot; e non sarà spendibile. Questo può accadere a volte, se un altro nodo genera un blocco entro pochi secondi del tuo.</translation> </message> <message> <location line="+7"/> <source>Debug information</source> <translation>Informazione di debug</translation> </message> <message> <location line="+8"/> <source>Transaction</source> <translation>Transazione</translation> </message> <message> <location line="+3"/> <source>Inputs</source> <translation>Input</translation> </message> <message> <location line="+23"/> <source>Amount</source> <translation>Importo</translation> </message> <message> <location line="+1"/> <source>true</source> <translation>vero</translation> </message> <message> <location line="+0"/> <source>false</source> <translation>falso</translation> </message> <message> <location line="-209"/> <source>, has not been successfully broadcast yet</source> <translation>, non è stato ancora trasmesso con successo</translation> </message> <message numerus="yes"> <location line="-35"/> <source>Open for %n more block(s)</source> <translation><numerusform>Aperto per %n altro blocco</numerusform><numerusform>Aperto per altri %n blocchi</numerusform></translation> </message> <message> <location line="+70"/> <source>unknown</source> <translation>sconosciuto</translation> </message> </context> <context> <name>TransactionDescDialog</name> <message> <location 
filename="../forms/transactiondescdialog.ui" line="+14"/> <source>Transaction details</source> <translation>Dettagli sulla transazione</translation> </message> <message> <location line="+6"/> <source>This pane shows a detailed description of the transaction</source> <translation>Questo pannello mostra una descrizione dettagliata della transazione</translation> </message> </context> <context> <name>TransactionTableModel</name> <message> <location filename="../transactiontablemodel.cpp" line="+225"/> <source>Date</source> <translation>Data</translation> </message> <message> <location line="+0"/> <source>Type</source> <translation>Tipo</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Indirizzo</translation> </message> <message> <location line="+0"/> <source>Amount</source> <translation>Importo</translation> </message> <message numerus="yes"> <location line="+57"/> <source>Open for %n more block(s)</source> <translation><numerusform>Aperto per %n altro blocco</numerusform><numerusform>Aperto per altri %n blocchi</numerusform></translation> </message> <message> <location line="+3"/> <source>Open until %1</source> <translation>Aperto fino a %1</translation> </message> <message> <location line="+3"/> <source>Offline (%1 confirmations)</source> <translation>Offline (%1 conferme)</translation> </message> <message> <location line="+3"/> <source>Unconfirmed (%1 of %2 confirmations)</source> <translation>Non confermati (%1 su %2 conferme)</translation> </message> <message> <location line="+3"/> <source>Confirmed (%1 confirmations)</source> <translation>Confermato (%1 conferme)</translation> </message> <message numerus="yes"> <location line="+8"/> <source>Mined balance will be available when it matures in %n more block(s)</source> <translation><numerusform>Il saldo generato sarà disponibile quando maturerà in %n altro blocco</numerusform><numerusform>Il saldo generato sarà disponibile quando maturerà in altri %n 
blocchi</numerusform></translation> </message> <message> <location line="+5"/> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation>Questo blocco non è stato ricevuto da altri nodi e probabilmente non sarà accettato!</translation> </message> <message> <location line="+3"/> <source>Generated but not accepted</source> <translation>Generati, ma non accettati</translation> </message> <message> <location line="+43"/> <source>Received with</source> <translation>Ricevuto tramite</translation> </message> <message> <location line="+2"/> <source>Received from</source> <translation>Ricevuto da</translation> </message> <message> <location line="+3"/> <source>Sent to</source> <translation>Spedito a</translation> </message> <message> <location line="+2"/> <source>Payment to yourself</source> <translation>Pagamento a te stesso</translation> </message> <message> <location line="+2"/> <source>Mined</source> <translation>Ottenuto dal mining</translation> </message> <message> <location line="+38"/> <source>(n/a)</source> <translation>(N / a)</translation> </message> <message> <location line="+199"/> <source>Transaction status. Hover over this field to show number of confirmations.</source> <translation>Stato della transazione. 
Passare con il mouse su questo campo per vedere il numero di conferme.</translation> </message> <message> <location line="+2"/> <source>Date and time that the transaction was received.</source> <translation>Data e ora in cui la transazione è stata ricevuta.</translation> </message> <message> <location line="+2"/> <source>Type of transaction.</source> <translation>Tipo di transazione.</translation> </message> <message> <location line="+2"/> <source>Destination address of transaction.</source> <translation>Indirizzo di destinazione della transazione.</translation> </message> <message> <location line="+2"/> <source>Amount removed from or added to balance.</source> <translation>Importo rimosso o aggiunto al saldo.</translation> </message> </context> <context> <name>TransactionView</name> <message> <location filename="../transactionview.cpp" line="+52"/> <location line="+16"/> <source>All</source> <translation>Tutti</translation> </message> <message> <location line="-15"/> <source>Today</source> <translation>Oggi</translation> </message> <message> <location line="+1"/> <source>This week</source> <translation>Questa settimana</translation> </message> <message> <location line="+1"/> <source>This month</source> <translation>Questo mese</translation> </message> <message> <location line="+1"/> <source>Last month</source> <translation>Il mese scorso</translation> </message> <message> <location line="+1"/> <source>This year</source> <translation>Quest&apos;anno</translation> </message> <message> <location line="+1"/> <source>Range...</source> <translation>Intervallo...</translation> </message> <message> <location line="+11"/> <source>Received with</source> <translation>Ricevuto tramite</translation> </message> <message> <location line="+2"/> <source>Sent to</source> <translation>Spedito a</translation> </message> <message> <location line="+2"/> <source>To yourself</source> <translation>A te</translation> </message> <message> <location line="+1"/> <source>Mined</source> 
<translation>Ottenuto dal mining</translation> </message> <message> <location line="+1"/> <source>Other</source> <translation>Altro</translation> </message> <message> <location line="+7"/> <source>Enter address or label to search</source> <translation>Inserisci un indirizzo o un&apos;etichetta da cercare</translation> </message> <message> <location line="+7"/> <source>Min amount</source> <translation>Importo minimo</translation> </message> <message> <location line="+34"/> <source>Copy address</source> <translation>Copia l&apos;indirizzo</translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation>Copia l&apos;etichetta</translation> </message> <message> <location line="+1"/> <source>Copy amount</source> <translation>Copia l&apos;importo</translation> </message> <message> <location line="+1"/> <source>Copy transaction ID</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Edit label</source> <translation>Modifica l&apos;etichetta</translation> </message> <message> <location line="+1"/> <source>Show transaction details</source> <translation>Mostra i dettagli della transazione</translation> </message> <message> <location line="+139"/> <source>Export Transaction Data</source> <translation>Esporta i dati della transazione</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Testo CSV (*.csv)</translation> </message> <message> <location line="+8"/> <source>Confirmed</source> <translation>Confermato</translation> </message> <message> <location line="+1"/> <source>Date</source> <translation>Data</translation> </message> <message> <location line="+1"/> <source>Type</source> <translation>Tipo</translation> </message> <message> <location line="+1"/> <source>Label</source> <translation>Etichetta</translation> </message> <message> <location line="+1"/> <source>Address</source> <translation>Indirizzo</translation> </message> <message> 
<location line="+1"/> <source>Amount</source> <translation>Importo</translation> </message> <message> <location line="+1"/> <source>ID</source> <translation>ID</translation> </message> <message> <location line="+4"/> <source>Error exporting</source> <translation>Errore nell&apos;esportazione</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>Impossibile scrivere sul file %1.</translation> </message> <message> <location line="+100"/> <source>Range:</source> <translation>Intervallo:</translation> </message> <message> <location line="+8"/> <source>to</source> <translation>a</translation> </message> </context> <context> <name>WalletModel</name> <message> <location filename="../walletmodel.cpp" line="+193"/> <source>Send Coins</source> <translation>Spedisci Mana</translation> </message> </context> <context> <name>WalletView</name> <message> <location filename="../walletview.cpp" line="+42"/> <source>&amp;Export</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Export the data in the current tab to a file</source> <translation>Esporta i dati nella tabella corrente su un file</translation> </message> <message> <location line="+193"/> <source>Backup Wallet</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Wallet Data (*.dat)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Backup Failed</source> <translation>Backup fallito</translation> </message> <message> <location line="+0"/> <source>There was an error trying to save the wallet data to the new location.</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Backup Successful</source> <translation>Backup eseguito con successo</translation> </message> <message> <location line="+0"/> <source>The wallet data was successfully saved to the new location.</source> <translation>Il portafoglio è stato 
correttamente salvato nella nuova cartella.</translation> </message> </context> <context> <name>bitcoin-core</name> <message> <location filename="../bitcoinstrings.cpp" line="+94"/> <source>Litecoin version</source> <translation>Versione di Mana</translation> </message> <message> <location line="+102"/> <source>Usage:</source> <translation>Utilizzo:</translation> </message> <message> <location line="-29"/> <source>Send command to -server or litecoind</source> <translation>Manda il comando a -server o litecoind </translation> </message> <message> <location line="-23"/> <source>List commands</source> <translation>Lista comandi </translation> </message> <message> <location line="-12"/> <source>Get help for a command</source> <translation>Aiuto su un comando </translation> </message> <message> <location line="+24"/> <source>Options:</source> <translation>Opzioni: </translation> </message> <message> <location line="+24"/> <source>Specify configuration file (default: litecoin.conf)</source> <translation>Specifica il file di configurazione (di default: litecoin.conf) </translation> </message> <message> <location line="+3"/> <source>Specify pid file (default: litecoind.pid)</source> <translation>Specifica il file pid (default: litecoind.pid) </translation> </message> <message> <location line="-1"/> <source>Specify data directory</source> <translation>Specifica la cartella dati </translation> </message> <message> <location line="-9"/> <source>Set database cache size in megabytes (default: 25)</source> <translation>Imposta la dimensione cache del database in megabyte (default: 25)</translation> </message> <message> <location line="-28"/> <source>Listen for connections on &lt;port&gt; (default: 9333 or testnet: 19333)</source> <translation>Ascolta le connessioni JSON-RPC su &lt;porta&gt; (default: 9333 o testnet: 19333)</translation> </message> <message> <location line="+5"/> <source>Maintain at most &lt;n&gt; connections to peers (default: 125)</source> <translation>Mantieni 
al massimo &lt;n&gt; connessioni ai peer (default: 125)</translation> </message> <message> <location line="-48"/> <source>Connect to a node to retrieve peer addresses, and disconnect</source> <translation>Connessione ad un nodo per ricevere l&apos;indirizzo del peer, e disconnessione</translation> </message> <message> <location line="+82"/> <source>Specify your own public address</source> <translation>Specifica il tuo indirizzo pubblico</translation> </message> <message> <location line="+3"/> <source>Threshold for disconnecting misbehaving peers (default: 100)</source> <translation>Soglia di disconnessione dei peer di cattiva qualità (default: 100)</translation> </message> <message> <location line="-134"/> <source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source> <translation>Numero di secondi di sospensione che i peer di cattiva qualità devono trascorrere prima di riconnettersi (default: 86400)</translation> </message> <message> <location line="-29"/> <source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source> <translation>Errore riscontrato durante l&apos;impostazione della porta RPC %u per l&apos;ascolto su IPv4: %s</translation> </message> <message> <location line="+27"/> <source>Listen for JSON-RPC connections on &lt;port&gt; (default: 9332 or testnet: 19332)</source> <translation>Attendi le connessioni JSON-RPC su &lt;porta&gt; (default: 9332 or testnet: 19332)</translation> </message> <message> <location line="+37"/> <source>Accept command line and JSON-RPC commands</source> <translation>Accetta da linea di comando e da comandi JSON-RPC </translation> </message> <message> <location line="+76"/> <source>Run in the background as a daemon and accept commands</source> <translation>Esegui in background come demone e accetta i comandi </translation> </message> <message> <location line="+37"/> <source>Use the test network</source> <translation>Utilizza la rete di prova </translation> </message> 
<message> <location line="-112"/> <source>Accept connections from outside (default: 1 if no -proxy or -connect)</source> <translation>Accetta connessioni dall&apos;esterno (default: 1 se no -proxy o -connect)</translation> </message> <message> <location line="-80"/> <source>%s, you must set a rpcpassword in the configuration file: %s It is recommended you use the following random password: rpcuser=litecoinrpc rpcpassword=%s (you do not need to remember this password) The username and password MUST NOT be the same. If the file does not exist, create it with owner-readable-only file permissions. It is also recommended to set alertnotify so you are notified of problems; for example: alertnotify=echo %%s | mail -s &quot;Litecoin Alert&quot; [email protected] </source> <translation type="unfinished"/> </message> <message> <location line="+17"/> <source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source> <translation>Errore riscontrato durante l&apos;impostazione della porta RPC %u per l&apos;ascolto su IPv6, tornando su IPv4: %s</translation> </message> <message> <location line="+3"/> <source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source> <translation>Collega all&apos;indirizzo indicato e resta sempre in ascolto su questo. Usa la notazione [host]:porta per l&apos;IPv6</translation> </message> <message> <location line="+3"/> <source>Cannot obtain a lock on data directory %s. Litecoin is probably already running.</source> <translation>Non è possibile ottenere i dati sulla cartella %s. Probabilmente Mana è già in esecuzione.</translation> </message> <message> <location line="+3"/> <source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Errore: la transazione è stata rifiutata. 
Ciò accade se alcuni Mana nel portamonete sono stati già spesi, ad esempio se è stata usata una copia del file wallet.dat e i Mana sono stati spesi dalla copia ma non segnati come spesi qui.</translation> </message> <message> <location line="+4"/> <source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source> <translation>Errore: questa transazione necessita di una commissione di almeno %s a causa del suo ammontare, della sua complessità, o dell&apos;uso di fondi recentemente ricevuti!</translation> </message> <message> <location line="+3"/> <source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source> <translation>Esegui comando quando una transazione del portafoglio cambia (%s in cmd è sostituito da TxID)</translation> </message> <message> <location line="+11"/> <source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source> <translation>Imposta dimensione massima delle transazioni ad alta priorità/bassa-tassa in bytes (predefinito: 27000)</translation> </message> <message> <location line="+6"/> <source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source> <translation>Questa versione è una compilazione pre-rilascio - usala a tuo rischio - non utilizzarla per la generazione o per applicazioni di commercio</translation> </message> <message> <location line="+5"/> <source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source> <translation>Attenzione: -paytxfee è molto alta. 
Questa è la commissione che si paga quando si invia una transazione.</translation> </message> <message> <location line="+3"/> <source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source> <translation>Attenzione: le transazioni mostrate potrebbero essere sbagliate! Potresti aver bisogno di aggiornare, o altri nodi ne hanno bisogno.</translation> </message> <message> <location line="+3"/> <source>Warning: Please check that your computer&apos;s date and time are correct! If your clock is wrong Litecoin will not work properly.</source> <translation>Attenzione: si prega di controllare che la data del computer e l&apos;ora siano corrette. Se il vostro orologio è sbagliato Mana non funziona correttamente.</translation> </message> <message> <location line="+3"/> <source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source> <translation>Attenzione: errore di lettura di wallet.dat! Tutte le chiave lette correttamente, ma i dati delle transazioni o le voci in rubrica potrebbero mancare o non essere corretti.</translation> </message> <message> <location line="+3"/> <source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source> <translation>Attenzione: wallet.dat corrotto, dati salvati! 
Il wallet.dat originale salvato come wallet.{timestamp}.bak in %s; se il tuo bilancio o le transazioni non sono corrette dovresti ripristinare da un backup.</translation> </message> <message> <location line="+14"/> <source>Attempt to recover private keys from a corrupt wallet.dat</source> <translation>Tenta di recuperare le chiavi private da un wallet.dat corrotto</translation> </message> <message> <location line="+2"/> <source>Block creation options:</source> <translation>Opzioni creazione blocco:</translation> </message> <message> <location line="+5"/> <source>Connect only to the specified node(s)</source> <translation>Connetti solo al nodo specificato</translation> </message> <message> <location line="+3"/> <source>Corrupted block database detected</source> <translation>Rilevato database blocchi corrotto</translation> </message> <message> <location line="+1"/> <source>Discover own IP address (default: 1 when listening and no -externalip)</source> <translation>Scopri proprio indirizzo IP (default: 1 se in ascolto e no -externalip)</translation> </message> <message> <location line="+1"/> <source>Do you want to rebuild the block database now?</source> <translation>Vuoi ricostruire ora il database dei blocchi?</translation> </message> <message> <location line="+2"/> <source>Error initializing block database</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error initializing wallet database environment %s!</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error loading block database</source> <translation>Errore caricamento database blocchi</translation> </message> <message> <location line="+4"/> <source>Error opening block database</source> <translation>Errore apertura database blocchi</translation> </message> <message> <location line="+2"/> <source>Error: Disk space is low!</source> <translation>Errore: lo spazio libero sul disco è poco!</translation> </message> <message> 
<location line="+1"/> <source>Error: Wallet locked, unable to create transaction!</source> <translation>Errore: portafoglio bloccato, impossibile creare la transazione!</translation> </message> <message> <location line="+1"/> <source>Error: system error: </source> <translation>Errore: errore di sistema:</translation> </message> <message> <location line="+1"/> <source>Failed to listen on any port. Use -listen=0 if you want this.</source> <translation>Impossibile mettersi in ascolto su una porta. Usa -listen=0 se vuoi usare questa opzione.</translation> </message> <message> <location line="+1"/> <source>Failed to read block info</source> <translation>Lettura informazioni blocco fallita</translation> </message> <message> <location line="+1"/> <source>Failed to read block</source> <translation>Lettura blocco fallita</translation> </message> <message> <location line="+1"/> <source>Failed to sync block index</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write block index</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write block info</source> <translation>Scrittura informazioni blocco fallita</translation> </message> <message> <location line="+1"/> <source>Failed to write block</source> <translation>Scrittura blocco fallita</translation> </message> <message> <location line="+1"/> <source>Failed to write file info</source> <translation>Scrittura informazioni file fallita</translation> </message> <message> <location line="+1"/> <source>Failed to write to coin database</source> <translation>Scrittura nel database dei Mana fallita</translation> </message> <message> <location line="+1"/> <source>Failed to write transaction index</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write undo data</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Find peers using DNS lookup 
(default: 1 unless -connect)</source> <translation>Trova peer utilizzando la ricerca DNS (predefinito: 1 finché utilizzato -connect)</translation> </message> <message> <location line="+1"/> <source>Generate coins (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>How many blocks to check at startup (default: 288, 0 = all)</source> <translation>Quanti blocchi da controllare all&apos;avvio (predefinito: 288, 0 = tutti)</translation> </message> <message> <location line="+1"/> <source>How thorough the block verification is (0-4, default: 3)</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>Not enough file descriptors available.</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Rebuild block chain index from current blk000??.dat files</source> <translation type="unfinished"/> </message> <message> <location line="+16"/> <source>Set the number of threads to service RPC calls (default: 4)</source> <translation type="unfinished"/> </message> <message> <location line="+26"/> <source>Verifying blocks...</source> <translation>Verifica blocchi...</translation> </message> <message> <location line="+1"/> <source>Verifying wallet...</source> <translation>Verifica portafoglio...</translation> </message> <message> <location line="-69"/> <source>Imports blocks from external blk000??.dat file</source> <translation>Importa blocchi da un file blk000??.dat esterno</translation> </message> <message> <location line="-76"/> <source>Set the number of script verification threads (up to 16, 0 = auto, &lt;0 = leave that many cores free, default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+77"/> <source>Information</source> <translation>Informazione</translation> </message> <message> <location line="+3"/> <source>Invalid -tor address: &apos;%s&apos;</source> <translation>Indirizzo -tor non valido: 
&apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Invalid amount for -minrelaytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Invalid amount for -mintxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Maintain a full transaction index (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: 5000)</source> <translation>Buffer di ricezione massimo per connessione, &lt;n&gt;*1000 byte (default: 5000)</translation> </message> <message> <location line="+1"/> <source>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: 1000)</source> <translation>Buffer di invio massimo per connessione, &lt;n&gt;*1000 byte (default: 1000)</translation> </message> <message> <location line="+2"/> <source>Only accept block chain matching built-in checkpoints (default: 1)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Only connect to nodes in network &lt;net&gt; (IPv4, IPv6 or Tor)</source> <translation>Connetti solo a nodi nella rete &lt;net&gt; (IPv4, IPv6 o Tor)</translation> </message> <message> <location line="+2"/> <source>Output extra debugging information. Implies all other -debug* options</source> <translation>Produci informazioni extra utili al debug. 
Implies all other -debug* options</translation> </message> <message> <location line="+1"/> <source>Output extra network debugging information</source> <translation>Genera informazioni extra utili al debug della rete</translation> </message> <message> <location line="+2"/> <source>Prepend debug output with timestamp</source> <translation>Anteponi all&apos;output di debug una marca temporale</translation> </message> <message> <location line="+5"/> <source>SSL options: (see the Litecoin Wiki for SSL setup instructions)</source> <translation>Opzioni SSL: (vedi il wiki di Mana per le istruzioni di configurazione SSL)</translation> </message> <message> <location line="+1"/> <source>Select the version of socks proxy to use (4-5, default: 5)</source> <translation>Selezionare la versione del proxy socks da usare (4-5, default: 5)</translation> </message> <message> <location line="+3"/> <source>Send trace/debug info to console instead of debug.log file</source> <translation>Invia le informazioni di trace/debug alla console invece che al file debug.log</translation> </message> <message> <location line="+1"/> <source>Send trace/debug info to debugger</source> <translation>Invia le informazioni di trace/debug al debugger</translation> </message> <message> <location line="+5"/> <source>Set maximum block size in bytes (default: 250000)</source> <translation>Imposta dimensione massima del blocco in bytes (predefinito: 250000)</translation> </message> <message> <location line="+1"/> <source>Set minimum block size in bytes (default: 0)</source> <translation>Imposta dimensione minima del blocco in bytes (predefinito: 0)</translation> </message> <message> <location line="+2"/> <source>Shrink debug.log file on client startup (default: 1 when no -debug)</source> <translation>Riduci il file debug.log all&apos;avvio del client (predefinito: 1 se non impostato -debug)</translation> </message> <message> <location line="+1"/> <source>Signing transaction failed</source> <translation 
type="unfinished"/> </message> <message> <location line="+2"/> <source>Specify connection timeout in milliseconds (default: 5000)</source> <translation>Specifica il timeout di connessione in millisecondi (default: 5000)</translation> </message> <message> <location line="+4"/> <source>System error: </source> <translation>Errore di sistema:</translation> </message> <message> <location line="+4"/> <source>Transaction amount too small</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Transaction amounts must be positive</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Transaction too large</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Use UPnP to map the listening port (default: 0)</source> <translation>Usa UPnP per mappare la porta di ascolto (default: 0)</translation> </message> <message> <location line="+1"/> <source>Use UPnP to map the listening port (default: 1 when listening)</source> <translation>Usa UPnP per mappare la porta di ascolto (default: 1 quando in ascolto)</translation> </message> <message> <location line="+1"/> <source>Use proxy to reach tor hidden services (default: same as -proxy)</source> <translation>Usa un proxy per raggiungere servizi nascosti di tor (predefinito: uguale a -proxy)</translation> </message> <message> <location line="+2"/> <source>Username for JSON-RPC connections</source> <translation>Nome utente per connessioni JSON-RPC </translation> </message> <message> <location line="+4"/> <source>Warning</source> <translation>Attenzione</translation> </message> <message> <location line="+1"/> <source>Warning: This version is obsolete, upgrade required!</source> <translation>Attenzione: questa versione è obsoleta, aggiornamento necessario!</translation> </message> <message> <location line="+1"/> <source>You need to rebuild the databases using -reindex to change
-txindex</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>wallet.dat corrupt, salvage failed</source> <translation>wallet.dat corrotto, salvataggio fallito</translation> </message> <message> <location line="-50"/> <source>Password for JSON-RPC connections</source> <translation>Password per connessioni JSON-RPC </translation> </message> <message> <location line="-67"/> <source>Allow JSON-RPC connections from specified IP address</source> <translation>Consenti connessioni JSON-RPC dall&apos;indirizzo IP specificato </translation> </message> <message> <location line="+76"/> <source>Send commands to node running on &lt;ip&gt; (default: 127.0.0.1)</source> <translation>Inviare comandi al nodo in esecuzione su &lt;ip&gt; (default: 127.0.0.1) </translation> </message> <message> <location line="-120"/> <source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source> <translation>Esegui il comando quando il miglior block cambia(%s nel cmd è sostituito dall&apos;hash del blocco)</translation> </message> <message> <location line="+147"/> <source>Upgrade wallet to latest format</source> <translation>Aggiorna il wallet all&apos;ultimo formato</translation> </message> <message> <location line="-21"/> <source>Set key pool size to &lt;n&gt; (default: 100)</source> <translation>Impostare la quantità di chiavi di riserva a &lt;n&gt; (default: 100) </translation> </message> <message> <location line="-12"/> <source>Rescan the block chain for missing wallet transactions</source> <translation>Ripeti analisi della catena dei blocchi per cercare le transazioni mancanti dal portamonete </translation> </message> <message> <location line="+35"/> <source>Use OpenSSL (https) for JSON-RPC connections</source> <translation>Utilizzare OpenSSL (https) per le connessioni JSON-RPC </translation> </message> <message> <location line="-26"/> <source>Server certificate file (default: server.cert)</source> <translation>File 
certificato del server (default: server.cert) </translation> </message> <message> <location line="+1"/> <source>Server private key (default: server.pem)</source> <translation>Chiave privata del server (default: server.pem) </translation> </message> <message> <location line="-151"/> <source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source> <translation>Cifrari accettabili (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH) </translation> </message> <message> <location line="+165"/> <source>This help message</source> <translation>Questo messaggio di aiuto </translation> </message> <message> <location line="+6"/> <source>Unable to bind to %s on this computer (bind returned error %d, %s)</source> <translation>Impossibile collegarsi alla %s su questo computer (bind returned error %d, %s)</translation> </message> <message> <location line="-91"/> <source>Connect through socks proxy</source> <translation>Connessione tramite socks proxy</translation> </message> <message> <location line="-10"/> <source>Allow DNS lookups for -addnode, -seednode and -connect</source> <translation>Consenti ricerche DNS per aggiungere nodi e collegare </translation> </message> <message> <location line="+55"/> <source>Loading addresses...</source> <translation>Caricamento indirizzi...</translation> </message> <message> <location line="-35"/> <source>Error loading wallet.dat: Wallet corrupted</source> <translation>Errore caricamento wallet.dat: Wallet corrotto</translation> </message> <message> <location line="+1"/> <source>Error loading wallet.dat: Wallet requires newer version of Litecoin</source> <translation>Errore caricamento wallet.dat: il wallet richiede una versione nuova di Mana</translation> </message> <message> <location line="+93"/> <source>Wallet needed to be rewritten: restart Litecoin to complete</source> <translation>Il portamonete deve essere riscritto: riavviare Mana per completare</translation> </message> <message> <location 
line="-95"/> <source>Error loading wallet.dat</source> <translation>Errore caricamento wallet.dat</translation> </message> <message> <location line="+28"/> <source>Invalid -proxy address: &apos;%s&apos;</source> <translation>Indirizzo -proxy non valido: &apos;%s&apos;</translation> </message> <message> <location line="+56"/> <source>Unknown network specified in -onlynet: &apos;%s&apos;</source> <translation>Rete sconosciuta specificata in -onlynet: &apos;%s&apos;</translation> </message> <message> <location line="-1"/> <source>Unknown -socks proxy version requested: %i</source> <translation>Versione -socks proxy sconosciuta richiesta: %i</translation> </message> <message> <location line="-96"/> <source>Cannot resolve -bind address: &apos;%s&apos;</source> <translation>Impossibile risolvere -bind address: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Cannot resolve -externalip address: &apos;%s&apos;</source> <translation>Impossibile risolvere indirizzo -externalip: &apos;%s&apos;</translation> </message> <message> <location line="+44"/> <source>Invalid amount for -paytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Importo non valido per -paytxfee=&lt;amount&gt;: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Invalid amount</source> <translation>Importo non valido</translation> </message> <message> <location line="-6"/> <source>Insufficient funds</source> <translation>Fondi insufficienti</translation> </message> <message> <location line="+10"/> <source>Loading block index...</source> <translation>Caricamento dell&apos;indice del blocco...</translation> </message> <message> <location line="-57"/> <source>Add a node to connect to and attempt to keep the connection open</source> <translation>Elérendő csomópont megadása and attempt to keep the connection open</translation> </message> <message> <location line="-25"/> <source>Unable to bind to %s on this computer. 
Litecoin is probably already running.</source> <translation>Impossibile collegarsi alla %s su questo computer. Probabilmente Mana è già in esecuzione.</translation> </message> <message> <location line="+64"/> <source>Fee per KB to add to transactions you send</source> <translation>Commissione per KB da aggiungere alle transazioni in uscita</translation> </message> <message> <location line="+19"/> <source>Loading wallet...</source> <translation>Caricamento portamonete...</translation> </message> <message> <location line="-52"/> <source>Cannot downgrade wallet</source> <translation>Non è possibile retrocedere il wallet</translation> </message> <message> <location line="+3"/> <source>Cannot write default address</source> <translation>Non è possibile scrivere l&apos;indirizzo di default</translation> </message> <message> <location line="+64"/> <source>Rescanning...</source> <translation>Ripetere la scansione...</translation> </message> <message> <location line="-57"/> <source>Done loading</source> <translation>Caricamento completato</translation> </message> <message> <location line="+82"/> <source>To use the %s option</source> <translation>Per usare la opzione %s</translation> </message> <message> <location line="-74"/> <source>Error</source> <translation>Errore</translation> </message> <message> <location line="-31"/> <source>You must set rpcpassword=&lt;password&gt; in the configuration file: %s If the file does not exist, create it with owner-readable-only file permissions.</source> <translation>Devi settare rpcpassword=&lt;password&gt; nel file di configurazione: %s Se il file non esiste, crealo con i permessi di amministratore</translation> </message> </context> </TS><|fim▁end|>
<location line="+23"/> <location line="+36"/> <location line="+53"/> <location line="+23"/>
<|file_name|>Alternative.ts<|end_file_name|><|fim▁begin|>/* * @Author: aaronpmishkin * @Date: 2016-05-25 16:41:41 * @Last Modified by: aaronpmishkin * @Last Modified time: 2016-09-22 20:35:14 */ // Import Utility Classes: import * as Formatter from '../modules/utilities/classes/Formatter'; /* This class is the data representation of a decision option in a ValueChart. It uses an internal map object to associate decision consequences with PrimitiveObjectives in the ValueChart. Each instance of the Alternative class must be a complete mapping of a consequence to each PrimitiveObjective in the ValueChart to be valid, and each consequence must be within the domain of the corresponding PrimitiveObjective. It is best to think about Alternatives as points in the consequence space defined by the ValueChart's set of PrimitiveObjectives. */ export class Alternative { // ======================================================================================== // Fields // ======================================================================================== private name: string; // The name of the Alternative private id: string; // The name of the Alternative formatted for use as a HTML id. private description: string; // The description of the alternative. private objectiveValues: Map<string, string | number>; // The internal Map objective used to match consequences to the name of the associated PrimitiveObjective. // ======================================================================================== // Constructor // ======================================================================================== /* @param name - The name of the Alternative. @param description - The description of the Alternative. @returns {void} <|fim▁hole|> Alternative must he added manually using the setObjectiveValue method. 
*/ constructor(name: string, description: string) { this.name = name; this.description = description; this.objectiveValues = new Map<string, string | number>(); this.id = Formatter.nameToID(this.name); } // ======================================================================================== // Methods // ======================================================================================== getId(): string { return this.id; } getName(): string { return this.name; } setName(name: string): void { this.name = name; this.id = Formatter.nameToID(this.name); } getDescription(): string { return this.description; } setDescription(description: string): void { this.description = description; } getObjectiveValue(objectiveName: string): string | number { return this.objectiveValues.get(objectiveName); } /* @returns {{ objectiveName: string, value: string | number }[]} - The collection of the Alternative's consequence paired with the associated objective's name. @description Iterates over the objectiveValues to return an array of objective names paired with the Alternative's consequence for that objective. */ getAllObjectiveValuePairs(): { objectiveName: string, value: string | number }[] { var objectiveValuePairs: { objectiveName: string, value: string | number }[] = []; var mapIterator: Iterator<string> = this.objectiveValues.keys(); var iteratorElement: IteratorResult<string> = mapIterator.next(); while (iteratorElement.done === false) { objectiveValuePairs.push({ objectiveName: iteratorElement.value, value: this.objectiveValues.get(iteratorElement.value) }); iteratorElement = mapIterator.next(); } return objectiveValuePairs; } setObjectiveValue(objectiveName: string, value: string | number): void { this.objectiveValues.set(objectiveName, value); } removeObjective(objectiveName: string): void { this.objectiveValues.delete(objectiveName); } }<|fim▁end|>
@description Constructs a new Alternative with no consequences. Objective consequences for the new
<|file_name|>BaseControl.ts<|end_file_name|><|fim▁begin|>// NG2 import { EventEmitter } from '@angular/core'; import { Validators } from '@angular/forms'; // APP import { Helpers } from '../../../utils/Helpers'; import { notify } from '../../../utils/notifier/notifier.util'; import { IMaskOptions } from '../Control'; import { NovoControlGroupAddConfig } from '../ControlGroup'; export interface NovoGroupedControlConfig { label?: string; icon?: string; add?: NovoControlGroupAddConfig; remove?: boolean; key: string; initialValue?: {}[]; } class ControlConfig { alwaysActive?: Boolean; allowInvalidDate?: boolean; appendToBody: boolean; // Deprecated; associatedEntity: string; asyncValidators?: Array<any>; checkboxLabel: string; closeOnSelect: boolean; config: any; controlType: string; currencyFormat: string; customControl?: any; customControlConfig?: any; dataSpecialization: string; dataType: string; dateFormat?: string; description?: string; dirty: boolean; disabled: boolean; enabled: boolean; encrypted: boolean; endDate?: Date | Number; fileBrowserImageUploadUrl?: string; forceClear: EventEmitter<any>; headerConfig: any; hidden: boolean; interactions: Array<{ event?: 'change' | 'focus' | string; invokeOnInit?: boolean; script? 
}>; isEmpty?: Function; key: string; label: string; maskOptions?: IMaskOptions; maxlength: number;<|fim▁hole|> military?: boolean; minimal?: boolean; minlength: number; multiple: boolean; name: string; options: Array<any>; optionsType: string; parentScrollSelector: string; placeholder: string; readOnly: boolean; // "disabled", so it appears in the model still; removeTooltipArrow?: boolean; required: boolean; restrictFieldInteractions?: boolean; sortOrder: number; startDate?: Date | Number; startupFocus?: boolean; subType?: string; template?: any; textMaskEnabled?: boolean; tooltip?: string; tooltipAutoPosition?: boolean; tooltipPosition?: string; tooltipPreline?: boolean; tooltipSize?: string; type: string; validators: Array<any>; value: any; warning?: string; width: number; layoutOptions?: { customActions?: boolean; download?: boolean; draggable?: boolean; edit?: boolean; iconStyle?: string; labelStyle?: string; order?: string; removable?: boolean; customValidation?: { action: string; fn: Function }[]; removableWhenNew?: boolean; }; tipWell?: { button?: boolean; icon?: string; tip: string; }; isEmbedded = false; isInlineEmbedded = false; weekStart?: number; highlighted = false; disabledDateMessage?: string; } export type NovoControlConfig = Partial<ControlConfig>; export class BaseControl extends ControlConfig { __type: string = 'BaseControl'; __config: NovoControlConfig; constructor(type: string = 'BaseControl', config: NovoControlConfig = {}) { super(); this.__type = type; this.__config = config; this.alwaysActive = config.alwaysActive; this.validators = config.validators || []; this.asyncValidators = config.asyncValidators || []; this.value = config.value; this.key = config.key || ''; this.label = config.label || ''; this.checkboxLabel = config.checkboxLabel; this.name = config.name || ''; this.required = !!config.required; this.hidden = !!config.hidden; this.encrypted = !!config.encrypted; this.sortOrder = config.sortOrder === undefined ? 
1 : config.sortOrder; this.controlType = config.controlType || ''; this.type = config.type; this.subType = config.subType; this.metaType = config.metaType; this.placeholder = config.placeholder || ''; this.config = config.config || null; this.dirty = !!(config.value !== undefined && config.value !== null); this.multiple = !!config.multiple; this.headerConfig = config.headerConfig || null; this.currencyFormat = config.currencyFormat || null; this.associatedEntity = config.associatedEntity || null; this.optionsType = config.optionsType || null; this.options = config.options || []; this.forceClear = new EventEmitter(); this.readOnly = !!config.readOnly || !!config.disabled; this.disabled = !!config.disabled; this.enabled = true; this.layoutOptions = config.layoutOptions || {}; this.military = !!config.military; this.dateFormat = config.dateFormat; this.textMaskEnabled = config.textMaskEnabled; this.maskOptions = config.maskOptions; this.allowInvalidDate = config.allowInvalidDate; this.startDate = config.startDate; this.endDate = config.endDate; this.restrictFieldInteractions = !!config.restrictFieldInteractions; this.highlighted = !!config.highlighted; if (!Helpers.isEmpty(config.warning)) { this.warning = config.warning; } if (this.required) { this.validators.push(Validators.required); } if (!Helpers.isBlank(config.maxlength)) { this.maxlength = config.maxlength; this.validators.push(Validators.maxLength(this.maxlength)); } if (!Helpers.isBlank(config.minlength)) { this.minlength = config.minlength; this.validators.push(Validators.minLength(this.minlength)); } this.closeOnSelect = !!config.closeOnSelect; this.interactions = config.interactions; this.dataSpecialization = config.dataSpecialization; this.dataType = config.dataType; this.appendToBody = !!config.appendToBody; if (this.appendToBody) { notify(`'appendToBody' has been deprecated. 
Please remove this attribute.`); } this.parentScrollSelector = config.parentScrollSelector; this.description = config.description; if (config.tooltip) { this.tooltip = config.tooltip; this.tooltipPosition = config.tooltipPosition; this.tooltipSize = config.tooltipSize; this.tooltipPreline = config.tooltipPreline; this.removeTooltipArrow = config.removeTooltipArrow; this.tooltipAutoPosition = config.tooltipAutoPosition; } this.template = config.template; this.customControlConfig = config.customControlConfig; this.tipWell = config.tipWell; this.width = config.width; this.startupFocus = !!config.startupFocus; if (config.fileBrowserImageUploadUrl) { this.fileBrowserImageUploadUrl = config.fileBrowserImageUploadUrl; } if (config.isEmpty) { this.isEmpty = config.isEmpty; } this.weekStart = config.weekStart || 0; this.disabledDateMessage = config.disabledDateMessage; } }<|fim▁end|>
metaType: string;
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># This work was created by participants in the DataONE project, and is # jointly copyrighted by participating institutions in DataONE. For # more information on DataONE, see our web site at http://dataone.org. # # Copyright 2009-2019 DataONE # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at #<|fim▁hole|># # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ONEDrive resolvers."""<|fim▁end|>
# http://www.apache.org/licenses/LICENSE-2.0
<|file_name|>response.rs<|end_file_name|><|fim▁begin|>// src/network/rpc/response.rs // Copyright (C) 2017 authors and contributors (see AUTHORS file) // // This file is released under the MIT License. //! This module defines the Response RPC message type. //! //! A Response RPC message is used by a server to send a response to a client's request. //! server. Based on the generic [`Message`] type, the Response message type is //! essentially an array containing 4 items. These 4 items are: //! //! 1. Message Type - This will always be the Request message type. While //! represented as the enum variant `MessageType::Request`, the value stored //! in the array is actually a u8 integer. //! //! 2. Message ID - This is a u32 integer that is unique for the //! session/connection. If the message id is re-used, then the server is //! expected to respond with an error. //! //! 3. Error code - This is a u8 integer representing any errors due to the //! client's request. If there is no error, then this value is `0`. //! //! 4. Message result - this is an arbitrary value. //! //! # Example //! //! To create a new Response object, you can create one from an existing //! [`Message`] instance. This is used, for example, when the message is //! deserialized by the server into a general [`Message`] object, identified as //! a Response message, and it is required to perform Response specific //! operations on the message. //! //! Alternatively, a `ResponseMessage` object can be created manually via the //! `ResponseMessage::new()` method //! //! ```rust //! //! extern crate rmpv; //! extern crate safesec; //! //! #[macro_use] //! extern crate safesec_derive; //! //! use rmpv::Value; //! use safesec::error::{Error, GeneralError, Result}; //! use safesec::network::rpc::message::{CodeConvert, Message, MessageType, //! RpcMessage, RpcMessageType}; //! use safesec::network::rpc::response::{ResponseMessage, RpcResponse}; //! //! // Define Error codes //! 
#[derive(Debug, Clone, PartialEq, CodeConvert)] //! enum RequestError { //! Nope, //! NuhUh, //! } //! //! # fn main() { //! // Create an alias for ResponseMessage //! type Response = ResponseMessage<RequestError>; //! //! // Build Message //! let msgtype = Value::from(MessageType::Response.to_number()); //! let msgid = Value::from(42); //! let msgcode = Value::from(RequestError::Nope.to_number()); //! let msgresult = Value::from(9001); //! let msgval = Value::Array(vec![msgtype, msgid, msgcode, msgresult]); //! let msg = Message::from(msgval).unwrap(); //! //! // Turn the message into a Response type //! let res = Response::from(msg).unwrap(); //! assert_eq!(res.message_type().unwrap(), MessageType::Response); //! assert_eq!(res.message_id(), 42); //! assert_eq!(res.error_code(), RequestError::Nope); //! assert_eq!(res.result(), &Value::from(9001)); //! //! // Create a brand new response from scratch //! let new_res = Response::new(42, RequestError::NuhUh, Value::from(9001)); //! assert_eq!(new_res.message_type().unwrap(), MessageType::Response); //! assert_eq!(new_res.message_id(), 42); //! assert_eq!(new_res.error_code(), RequestError::NuhUh); //! assert_eq!(new_res.result(), &Value::from(9001)); //! # } //! //! ``` //! // =========================================================================== // Imports // =========================================================================== // Stdlib imports use std::marker::PhantomData; // Third-party imports use rmpv::Value; // Local imports use error::Error; use error::network::rpc::{RpcError, RpcResult}; use network::rpc::message::{CodeConvert, Message, MessageType, RpcMessage, RpcMessageType}; // =========================================================================== // ResponseMessage // =========================================================================== /// Trait providing Response message specific getter methods. 
/// /// # Example /// /// ```rust /// extern crate rmpv; /// extern crate safesec; /// /// use rmpv::Value; /// use safesec::network::rpc::message::{MessageType, RpcMessage, /// RpcMessageType}; /// use safesec::network::rpc::response::{ResponseMessage, RpcResponse}; /// /// # fn main() { /// // Create Response alias /// type Response = ResponseMessage<MessageType>; /// /// // Re-use MessageType as error code /// let req = Response::new(42, MessageType::Notification, /// Value::from(9001)); /// /// // Check all getter methods /// assert_eq!(req.message_type().unwrap(), MessageType::Response); /// assert_eq!(req.message_id(), 42); /// assert_eq!(req.error_code(), MessageType::Notification); /// assert_eq!(req.result(), &Value::from(9001)); /// # } /// ``` pub trait RpcResponse<C>: RpcMessage where C: CodeConvert<C>, { fn message_id(&self) -> u32 { let msgid = &self.as_vec()[1]; msgid.as_u64().unwrap() as u32 } fn error_code(&self) -> C { let errcode = &self.as_vec()[2]; let errcode = errcode.as_u64().unwrap() as u8; C::from_number(errcode).unwrap() } fn result(&self) -> &Value { let msgresult = &self.as_vec()[3]; msgresult } } /// A representation of the Response RPC message type. pub struct ResponseMessage<C> { msg: Message, msgtype: PhantomData<C>, } impl<C> RpcMessage for ResponseMessage<C> where C: CodeConvert<C>, {<|fim▁hole|> } fn as_value(&self) -> &Value { self.msg.as_value() } } impl<C> RpcMessageType for ResponseMessage<C> where C: CodeConvert<C>, { fn as_message(&self) -> &Message { &self.msg } } impl<C> RpcResponse<C> for ResponseMessage<C> where C: CodeConvert<C>, { } impl<C> ResponseMessage<C> where C: CodeConvert<C>, { /// Create a brand new ResponseMessage object. 
/// /// # Example /// /// ```rust /// extern crate rmpv; /// extern crate safesec; /// /// use rmpv::Value; /// use safesec::network::rpc::message::{MessageType, RpcMessage}; /// use safesec::network::rpc::response::{ResponseMessage, RpcResponse}; /// /// # fn main() { /// // Create Response alias /// type Response = ResponseMessage<MessageType>; /// /// // Re-use MessageType as error code /// let res = Response::new(42, MessageType::Notification, /// Value::from(42)); /// # } /// ``` pub fn new(msgid: u32, errcode: C, result: Value) -> Self { let msgtype = Value::from(MessageType::Response as u8); let msgid = Value::from(msgid); let errcode = Value::from(errcode.to_number()); let msgval = Value::from(vec![msgtype, msgid, errcode, result]); match Message::from(msgval) { Ok(msg) => Self { msg: msg, msgtype: PhantomData, }, Err(_) => unreachable!(), } } /// Create a RequestMessage from a Message /// /// # Example /// /// ```rust /// extern crate rmpv; /// extern crate safesec; /// /// use rmpv::Value; /// use safesec::network::rpc::message::{CodeConvert, Message, MessageType, RpcMessage}; /// use safesec::network::rpc::response::{ResponseMessage, RpcResponse}; /// /// # fn main() { /// // Create an alias for ResponseMessage, re-using `MessageType` as the /// // message code. 
/// type Response = ResponseMessage<MessageType>; /// /// // Build Message /// let msgtype = Value::from(MessageType::Response.to_number()); /// let msgid = Value::from(42); /// let msgcode = Value::from(MessageType::Notification.to_number()); /// let msgresult = Value::from(9001); /// let msgval = Value::Array(vec![msgtype, msgid, msgcode, msgresult]); /// let msg = Message::from(msgval).unwrap(); /// /// // Turn the message into a Response type /// let res = Response::from(msg).unwrap(); /// # } /// ``` pub fn from(msg: Message) -> RpcResult<Self> { // Response is always represented as an array of 4 values { // Response is always represented as an array of 4 values let array = msg.as_vec(); let arraylen = array.len(); if arraylen != 4 { let errmsg = format!("expected array length of 4, got {}", arraylen); let err = Error::new(RpcError::InvalidArrayLength, errmsg); return Err(err); } // Run all check functions and return the first error generated let funcvec: Vec<fn(&Value) -> RpcResult<()>>; funcvec = vec![ Self::check_message_type, Self::check_message_id, Self::check_error_code, ]; for (i, func) in funcvec.iter().enumerate() { func(&array[i])?; } } Ok(Self { msg: msg, msgtype: PhantomData, }) } // Checks that the message type parameter of a Response message is valid // // This is a private method used by the public from() method fn check_message_type(msgtype: &Value) -> RpcResult<()> { let msgtype = msgtype.as_u64().unwrap() as u8; let expected_msgtype = MessageType::Response.to_number(); if msgtype != expected_msgtype { let errmsg = format!( "expected {} for message type (ie \ MessageType::Response), got {}", expected_msgtype, msgtype ); let err = Error::new(RpcError::InvalidMessageType, errmsg); return Err(err); } Ok(()) } // Checks that the message id parameter of a Response message is valid // // This is a private method used by the public from() method fn check_message_id(msgid: &Value) -> RpcResult<()> { let msgid = Self::check_int( msgid.as_u64(), 
u32::max_value() as u64, "u32".to_string(), ); if let Err(e) = msgid { let err = Error::new(RpcError::InvalidIDType, e); return Err(err); } Ok(()) } // Checks that the error code parameter of a Response message is valid // // This is a private method used by the public from() method fn check_error_code(msgcode: &Value) -> RpcResult<()> { let msgcode = Self::check_int( msgcode.as_u64(), u8::max_value() as u64, "u8".to_string(), ); match msgcode { Err(e) => { let err = Error::new(RpcError::InvalidResponse, e); return Err(err); } Ok(v) => { let u8val = v as u8; if let Err(e) = C::from_number(u8val) { let err = Error::new(RpcError::InvalidResponse, e); return Err(err); } } } Ok(()) } } impl<C> Into<Message> for ResponseMessage<C> where C: CodeConvert<C>, { fn into(self) -> Message { self.msg } } impl<C> Into<Value> for ResponseMessage<C> where C: CodeConvert<C>, { fn into(self) -> Value { let msg: Message = self.msg.into(); msg.into() } } // =========================================================================== // Tests // =========================================================================== #[cfg(test)] mod tests { // -------------------- // Imports // -------------------- // Stdlib imports use std::error::Error as StdError; // Third-party imports use quickcheck::TestResult; use rmpv::{Utf8String, Value}; // Local imports use error::{Error, GeneralError, Result}; use error::network::rpc::RpcError; use network::rpc::message::{CodeConvert, Message, MessageType, RpcMessage}; use network::rpc::response::{ResponseMessage, RpcResponse}; // -------------------- // Helpers // -------------------- #[derive(Debug, PartialEq, Clone, CodeConvert)] enum TestError { One, Two, Three, } type Response = ResponseMessage<TestError>; // -------------------- // ResponseMessage::new // -------------------- quickcheck! 
{ fn new_messagetype_always_response(msgid: u32, err: u8) -> TestResult { if err > 2 { return TestResult::discard() } let msgtype = Value::from(MessageType::Response.to_number()); // Build expected let a = vec![msgtype, Value::from(msgid), Value::from(err), Value::from(42)]; let expected = Value::Array(a); // Compare ResponseMessage object to expected let req = Response::new(msgid, TestError::from_number(err).unwrap(), Value::from(42)); TestResult::from_bool(req.as_value() == &expected) } } // -------------------- // ResponseMessage::from // -------------------- #[test] fn from_invalid_arraylen() { // -------------------- // GIVEN // -------------------- // Message with only 3 arguments // Create message let msgtype = Value::from(MessageType::Response.to_number()); let msgid = Value::from(42); let msgcode = Value::from(TestError::One.to_number()); let array: Vec<Value> = vec![msgtype, msgid, msgcode]; let val = Value::Array(array); let msg = Message::from(val).unwrap(); // -------------------- // WHEN // -------------------- // ResponseMessage::from is called with the message let result = Response::from(msg); // -------------------- // THEN // -------------------- // Error is returned match result { Err(e) => { let expected = "expected array length of 4, got 3"; assert_eq!(e.kind(), RpcError::InvalidArrayLength); assert_eq!(e.description(), expected); } _ => assert!(false), } } #[test] fn from_invalid_messagetype() { // -------------------- // GIVEN // -------------------- // Message with MessageType::Notification // Create message let expected = MessageType::Notification.to_number(); let msgtype = Value::from(expected); let msgid = Value::from(42); let msgcode = Value::from(TestError::One.to_number()); let msgval = Value::from(42); let val = Value::Array(vec![msgtype, msgid, msgcode, msgval]); let msg = Message::from(val).unwrap(); // -------------------- // WHEN // -------------------- // ResponseMessage::from is called with the message let result = 
Response::from(msg); // -------------------- // THEN // -------------------- // Error is returned match result { Err(e) => { let expected = format!( "expected {} for message type (ie \ MessageType::Response), got {}", MessageType::Response.to_number(), expected ); assert_eq!(e.kind(), RpcError::InvalidMessageType); assert_eq!(e.description(), expected); } _ => assert!(false), } } #[test] fn from_message_id_invalid_type() { // -------------------- // GIVEN // -------------------- // Message with a string for message id // Create message let msgtype = Value::from(MessageType::Response.to_number()); let msgid = Value::String(Utf8String::from("hello")); let msgcode = Value::from(TestError::One.to_number()); let msgval = Value::from(42); let val = Value::Array(vec![msgtype, msgid, msgcode, msgval]); let msg = Message::from(val).unwrap(); // -------------------- // WHEN // -------------------- // ResponseMessage::from is called with the message let result = Response::from(msg); // -------------------- // THEN // -------------------- // Error is returned for the invalid message id match result { Err(e) => { let errmsg = "expected u32 but got None"; assert_eq!(e.kind(), RpcError::InvalidIDType); assert_eq!(e.description(), errmsg); } _ => assert!(false), } } quickcheck! 
{ fn from_message_id_invalid_value(msgid: u64) -> TestResult { if msgid <= u32::max_value() as u64 { return TestResult::discard() } // -------------------- // GIVEN // -------------------- // Message with a val > u32::max_value() for message id // Create message let msgtype = Value::from(MessageType::Response.to_number()); let msgid = Value::from(msgid); let msgcode = Value::from(TestError::One.to_number()); let msgval = Value::from(42); let val = Value::Array(vec![msgtype, msgid.clone(), msgcode, msgval]); let msg = Message::from(val).unwrap(); // -------------------- // WHEN // -------------------- // ResponseMessage::from is called with the message let result = Response::from(msg); // -------------------- // THEN // -------------------- // Error is returned for the invalid message id value match result { Err(e) => { let errmsg = format!("expected value <= {} but got value {}", u32::max_value().to_string(), msgid.to_string()); TestResult::from_bool(e.kind() == RpcError::InvalidIDType && e.description() == errmsg) }, _ => TestResult::from_bool(false) } } } #[test] fn from_error_code_invalid_type() { // -------------------- // GIVEN // -------------------- // Message with a string for message code // Create message let msgtype = Value::from(MessageType::Response.to_number()); let msgid = Value::from(42); let msgcode = Value::String(Utf8String::from("hello")); let msgval = Value::from(42); let val = Value::Array(vec![msgtype, msgid, msgcode, msgval]); let msg = Message::from(val).unwrap(); // -------------------- // WHEN // -------------------- // ResponseMessage::from is called with the message let result = Response::from(msg); // -------------------- // THEN // -------------------- // Error is returned for the invalid message id match result { Err(e) => { let errmsg = "expected u8 but got None"; assert_eq!(e.kind(), RpcError::InvalidResponse); assert_eq!(e.description(), errmsg); } _ => assert!(false), } } quickcheck! 
{ fn from_error_code_invalid_value(msgcode: u64) -> TestResult { if msgcode <= u8::max_value() as u64 { return TestResult::discard() } // -------------------- // GIVEN // -------------------- // Message with a msgcode > u8::max_value() for message code // Create message let msgtype = Value::from(MessageType::Response.to_number()); let msgid = Value::from(42); let msgcode = Value::from(msgcode); let msgval = Value::from(42); let val = Value::Array(vec![msgtype, msgid, msgcode.clone(), msgval]); let msg = Message::from(val).unwrap(); // -------------------- // WHEN // -------------------- // ResponseMessage::from is called with the message let result = Response::from(msg); // -------------------- // THEN // -------------------- // Error is returned for the invalid message id value match result { Err(e) => { let errmsg = format!("expected value <= {} but got value {}", u8::max_value().to_string(), msgcode.to_string()); TestResult::from_bool(e.kind() == RpcError::InvalidResponse && e.description() == errmsg) }, _ => TestResult::from_bool(false) } } fn from_error_code_invalid_code(code: u8) -> TestResult { // -------------------- // GIVEN // -------------------- // Message with a msgcode > 2 for message code if code <= 2 { return TestResult::discard() } // Create message let msgtype = Value::from(MessageType::Response.to_number()); let msgid = Value::from(42); let msgcode = Value::from(code); let msgval = Value::from(42); let val = Value::Array(vec![msgtype, msgid, msgcode, msgval]); let msg = Message::from(val).unwrap(); // -------------------- // WHEN // -------------------- // ResponseMessage::from is called with the message let result = Response::from(msg); // -------------------- // THEN // -------------------- match result { Err(e) => { let errmsg = code.to_string(); TestResult::from_bool(e.kind() == RpcError::InvalidResponse && e.description() == errmsg) }, _ => TestResult::from_bool(false) } } } // -------------------- // RpcMessage methods // 
-------------------- #[test] fn rpcmessage_as_vec() { // -------------------- // GIVEN // -------------------- // A response message // Create message let msgtype = Value::from(MessageType::Response.to_number()); let msgid = Value::from(42); let msgcode = Value::from(TestError::One.to_number()); let msgval = Value::Array(vec![Value::from(42)]); let val = Value::Array(vec![msgtype, msgid, msgcode, msgval]); let msg = Message::from(val).unwrap(); let expected = msg.clone(); let res = Response::from(msg).unwrap(); // -------------------- // WHEN // -------------------- // ResponseMessage::as_vec() method is called let result = res.as_vec(); // -------------------- // THEN // -------------------- // The contained value is as expected let expected = expected.as_vec(); assert_eq!(result, expected) } #[test] fn rpcmessage_as_value() { // -------------------- // GIVEN // -------------------- // A response message // Create message let msgtype = Value::from(MessageType::Response.to_number()); let msgid = Value::from(42); let msgcode = Value::from(TestError::One.to_number()); let msgval = Value::Array(vec![Value::from(42)]); let val = Value::Array(vec![msgtype, msgid, msgcode, msgval]); let msg = Message::from(val).unwrap(); let expected = msg.clone(); let res = Response::from(msg).unwrap(); // -------------------- // WHEN // -------------------- // ResponseMessage::as_value() method is called let result = res.as_value(); // -------------------- // THEN // -------------------- // The contained value is as expected let expected = expected.as_value(); assert_eq!(result, expected) } // -------------------- // RpcResponse methods // -------------------- #[test] fn rpcresponse_message_id() { // -------------------- // GIVEN // -------------------- // A response message // Create message let msgtype = Value::from(MessageType::Response.to_number()); let msgid = Value::from(42); let msgcode = Value::from(TestError::One.to_number()); let msgval = Value::Array(vec![Value::from(42)]); 
let val = Value::Array(vec![msgtype, msgid, msgcode, msgval]); let msg = Message::from(val).unwrap(); let expected = msg.clone(); let res = Response::from(msg).unwrap(); // -------------------- // WHEN // -------------------- // ResponseMessage::message_id() method is called let result = res.message_id(); // -------------------- // THEN // -------------------- // The contained value is as expected let expected = expected.as_vec()[1].as_u64().unwrap() as u32; assert_eq!(result, expected) } #[test] fn rpcresponse_error_code() { // -------------------- // GIVEN // -------------------- // A response message // Create message let msgtype = Value::from(MessageType::Response.to_number()); let msgid = Value::from(42); let msgcode = Value::from(TestError::One.to_number()); let msgval = Value::Array(vec![Value::from(42)]); let val = Value::Array(vec![msgtype, msgid, msgcode, msgval]); let msg = Message::from(val).unwrap(); let expected = msg.clone(); let res = Response::from(msg).unwrap(); // -------------------- // WHEN // -------------------- // ResponseMessage::error_code() method is called let result = res.error_code(); // -------------------- // THEN // -------------------- // The contained value is as expected let code = expected.as_vec()[2].as_u64().unwrap() as u8; let expected = TestError::from_number(code).unwrap(); assert_eq!(result, expected) } #[test] fn rpcresponse_result() { // -------------------- // GIVEN // -------------------- // A response message // Create message let msgtype = Value::from(MessageType::Response.to_number()); let msgid = Value::from(42); let errcode = Value::from(TestError::One.to_number()); let msgresult = Value::from(42); let val = Value::Array(vec![msgtype, msgid, errcode, msgresult]); let msg = Message::from(val).unwrap(); let expected = msg.clone(); let res = Response::from(msg).unwrap(); // -------------------- // WHEN // -------------------- // ResponseMessage::result() method is called let result = res.result(); // 
-------------------- // THEN // -------------------- // The contained value is as expected let expected = &expected.as_vec()[3]; assert_eq!(result, expected) } } // =========================================================================== // // ===========================================================================<|fim▁end|>
fn as_vec(&self) -> &Vec<Value> { self.msg.as_vec()
<|file_name|>rpath.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use driver::session; use metadata::cstore; use metadata::filesearch; use std::hashmap::HashSet; use std::{os, vec}; use syntax::abi; fn not_win32(os: abi::Os) -> bool { os != abi::OsWin32 } pub fn get_rpath_flags(sess: session::Session, out_filename: &Path) -> ~[~str] { let os = sess.targ_cfg.os; // No rpath on windows if os == abi::OsWin32 { return ~[]; } let mut flags = ~[]; if sess.targ_cfg.os == abi::OsFreebsd { flags.push_all([~"-Wl,-rpath,/usr/local/lib/gcc46", ~"-Wl,-rpath,/usr/local/lib/gcc44", ~"-Wl,-z,origin"]); } debug!("preparing the RPATH!"); let sysroot = sess.filesearch.sysroot; let output = out_filename; let libs = sess.cstore.get_used_crates(cstore::RequireDynamic); let libs = libs.move_iter().filter_map(|(_, l)| l.map(|p| p.clone())).collect(); // We don't currently rpath extern libraries, but we know // where rustrt is and we know every rust program needs it let libs = vec::append_one(libs, get_sysroot_absolute_rt_lib(sess)); let rpaths = get_rpaths(os, sysroot, output, libs, sess.opts.target_triple); flags.push_all(rpaths_to_flags(rpaths)); flags } fn get_sysroot_absolute_rt_lib(sess: session::Session) -> Path { let r = filesearch::relative_target_lib_path(sess.opts.target_triple); let mut p = sess.filesearch.sysroot.join(&r); p.push(os::dll_filename("rustrt")); p } pub fn rpaths_to_flags(rpaths: &[~str]) -> ~[~str] { let mut ret = ~[]; for rpath in rpaths.iter() { ret.push("-Wl,-rpath," + *rpath); } return ret; } fn get_rpaths(os: 
abi::Os, sysroot: &Path, output: &Path, libs: &[Path], target_triple: &str) -> ~[~str] { debug!("sysroot: {}", sysroot.display()); debug!("output: {}", output.display()); debug!("libs:"); for libpath in libs.iter() { debug!(" {}", libpath.display()); } debug!("target_triple: {}", target_triple); // Use relative paths to the libraries. Binaries can be moved // as long as they maintain the relative relationship to the // crates they depend on. let rel_rpaths = get_rpaths_relative_to_output(os, output, libs); // Make backup absolute paths to the libraries. Binaries can // be moved as long as the crates they link against don't move. let abs_rpaths = get_absolute_rpaths(libs); // And a final backup rpath to the global library location. let fallback_rpaths = ~[get_install_prefix_rpath(target_triple)]; fn log_rpaths(desc: &str, rpaths: &[~str]) { debug!("{} rpaths:", desc); for rpath in rpaths.iter() { debug!(" {}", *rpath); } } log_rpaths("relative", rel_rpaths); log_rpaths("absolute", abs_rpaths); log_rpaths("fallback", fallback_rpaths); let mut rpaths = rel_rpaths; rpaths.push_all(abs_rpaths); rpaths.push_all(fallback_rpaths); // Remove duplicates let rpaths = minimize_rpaths(rpaths); return rpaths; } fn get_rpaths_relative_to_output(os: abi::Os, output: &Path, libs: &[Path]) -> ~[~str] { libs.iter().map(|a| get_rpath_relative_to_output(os, output, a)).collect() } pub fn get_rpath_relative_to_output(os: abi::Os, output: &Path, lib: &Path) -> ~str { use std::os; assert!(not_win32(os)); // Mac doesn't appear to support $ORIGIN let prefix = match os { abi::OsAndroid | abi::OsLinux | abi::OsFreebsd => "$ORIGIN", abi::OsMacos => "@loader_path", abi::OsWin32 => unreachable!() };<|fim▁hole|> output.pop(); let relative = lib.path_relative_from(&output); let relative = relative.expect("could not create rpath relative to output"); // FIXME (#9639): This needs to handle non-utf8 paths prefix+"/"+relative.as_str().expect("non-utf8 component in path") } fn get_absolute_rpaths(libs: 
&[Path]) -> ~[~str] { libs.iter().map(|a| get_absolute_rpath(a)).collect() } pub fn get_absolute_rpath(lib: &Path) -> ~str { let mut p = os::make_absolute(lib); p.pop(); // FIXME (#9639): This needs to handle non-utf8 paths p.as_str().expect("non-utf8 component in rpath").to_owned() } pub fn get_install_prefix_rpath(target_triple: &str) -> ~str { let install_prefix = env!("CFG_PREFIX"); let tlib = filesearch::relative_target_lib_path(target_triple); let mut path = Path::new(install_prefix); path.push(&tlib); let path = os::make_absolute(&path); // FIXME (#9639): This needs to handle non-utf8 paths path.as_str().expect("non-utf8 component in rpath").to_owned() } pub fn minimize_rpaths(rpaths: &[~str]) -> ~[~str] { let mut set = HashSet::new(); let mut minimized = ~[]; for rpath in rpaths.iter() { if set.insert(rpath.as_slice()) { minimized.push(rpath.clone()); } } minimized } #[cfg(unix, test)] mod test { use std::os; use back::rpath::{get_absolute_rpath, get_install_prefix_rpath}; use back::rpath::{minimize_rpaths, rpaths_to_flags, get_rpath_relative_to_output}; use syntax::abi; use metadata::filesearch; #[test] fn test_rpaths_to_flags() { let flags = rpaths_to_flags([~"path1", ~"path2"]); assert_eq!(flags, ~[~"-Wl,-rpath,path1", ~"-Wl,-rpath,path2"]); } #[test] fn test_prefix_rpath() { let res = get_install_prefix_rpath("triple"); let mut d = Path::new(env!("CFG_PREFIX")); d.push("lib"); d.push(filesearch::rustlibdir()); d.push("triple/lib"); debug!("test_prefix_path: {} vs. 
{}", res, d.display()); assert!(res.as_bytes().ends_with(d.as_vec())); } #[test] fn test_prefix_rpath_abs() { let res = get_install_prefix_rpath("triple"); assert!(Path::new(res).is_absolute()); } #[test] fn test_minimize1() { let res = minimize_rpaths([~"rpath1", ~"rpath2", ~"rpath1"]); assert_eq!(res.as_slice(), [~"rpath1", ~"rpath2"]); } #[test] fn test_minimize2() { let res = minimize_rpaths([~"1a", ~"2", ~"2", ~"1a", ~"4a", ~"1a", ~"2", ~"3", ~"4a", ~"3"]); assert_eq!(res.as_slice(), [~"1a", ~"2", ~"4a", ~"3"]); } #[test] #[cfg(target_os = "linux")] #[cfg(target_os = "android")] fn test_rpath_relative() { let o = abi::OsLinux; let res = get_rpath_relative_to_output(o, &Path::new("bin/rustc"), &Path::new("lib/libstd.so")); assert_eq!(res.as_slice(), "$ORIGIN/../lib"); } #[test] #[cfg(target_os = "freebsd")] fn test_rpath_relative() { let o = abi::OsFreebsd; let res = get_rpath_relative_to_output(o, &Path::new("bin/rustc"), &Path::new("lib/libstd.so")); assert_eq!(res.as_slice(), "$ORIGIN/../lib"); } #[test] #[cfg(target_os = "macos")] fn test_rpath_relative() { let o = abi::OsMacos; let res = get_rpath_relative_to_output(o, &Path::new("bin/rustc"), &Path::new("lib/libstd.so")); assert_eq!(res.as_slice(), "@loader_path/../lib"); } #[test] fn test_get_absolute_rpath() { let res = get_absolute_rpath(&Path::new("lib/libstd.so")); let lib = os::make_absolute(&Path::new("lib")); debug!("test_get_absolute_rpath: {} vs. {}", res.to_str(), lib.display()); // FIXME (#9639): This needs to handle non-utf8 paths assert_eq!(res.as_slice(), lib.as_str().expect("non-utf8 component in path")); } }<|fim▁end|>
let mut lib = os::make_absolute(lib); lib.pop(); let mut output = os::make_absolute(output);
<|file_name|>about_packages.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- # # This is very different to AboutModules in Ruby Koans # Our AboutMultipleInheritance class is a little more comparable # from runner.koan import * # # Package hierarchy of Python Koans project: # # contemplate_koans.py # koans/ # __init__.py # about_asserts.py # about_attribute_access.py # about_class_attributes.py # about_classes.py # ... # a_package_folder/ # __init__.py # a_module.py class AboutPackages(Koan): def test_subfolders_can_form_part_of_a_module_package(self): # Import ./a_package_folder/a_module.py from .a_package_folder.a_module import Duck duck = Duck() self.assertEqual(__, duck.name)<|fim▁hole|> self.assertEqual(__, an_attribute) # ------------------------------------------------------------------ def test_use_absolute_imports_to_import_upper_level_modules(self): # Import /contemplate_koans.py import contemplate_koans self.assertEqual(__, contemplate_koans.__name__) # contemplate_koans.py is the root module in this package because its # the first python module called in koans. # # If contemplate_koan.py was based in a_package_folder that would be # the root folder, which would make reaching the koans folder # almost impossible. So always leave the starting python script in # a folder which can reach everything else. def test_import_a_module_in_a_subfolder_folder_using_an_absolute_path(self): # Import contemplate_koans.py/koans/a_package_folder/a_module.py from koans.a_package_folder.a_module import Duck self.assertEqual(__, Duck.__module__)<|fim▁end|>
def test_subfolders_become_modules_if_they_have_an_init_module(self): # Import ./a_package_folder/__init__.py from .a_package_folder import an_attribute
<|file_name|>backend_test.go<|end_file_name|><|fim▁begin|>package trace import ( "context" "fmt" "io" "io/ioutil" "net" "os" "path/filepath" "testing" "time" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/stripe/veneur/ssf" ) func benchmarkPlainCombination(backend ClientBackend, span *ssf.SSFSpan) func(*testing.B) { ctx := context.TODO() return func(b *testing.B) { for i := 0; i < b.N; i++ { assert.NoError(b, backend.SendSync(ctx, span)) } } } func benchmarkFlushingCombination(backend FlushableClientBackend, span *ssf.SSFSpan, every time.Duration) func(*testing.B) { return func(b *testing.B) { ctx := context.TODO() tick := time.NewTicker(every) defer tick.Stop() for i := 0; i < b.N; i++ { select { case <-tick.C: assert.NoError(b, backend.FlushSync(ctx)) default: assert.NoError(b, backend.SendSync(ctx, span)) } } assert.NoError(b, backend.FlushSync(ctx)) } } func serveUNIX(t testing.TB, laddr *net.UnixAddr, onconnect func(conn net.Conn)) (cleanup func() error) { srv, err := net.ListenUnix(laddr.Network(), laddr) require.NoError(t, err) cleanup = srv.Close go func() { for { in, err := srv.Accept() if err != nil { return } go onconnect(in) } }() return } // BenchmarkSerialization tests how long the serialization of a span // over each kind of network link can take: UNIX with no buffer, UNIX // with a buffer, UDP (only unbuffered); combined with the kinds of // spans we send: spans with metrics attached, spans with no metrics // attached, and empty spans with only metrics. // // The counterpart is either a fresh UDP port with nothing reading // packets, or a network reader that discards every byte read. 
func BenchmarkSerialization(b *testing.B) { dir, err := ioutil.TempDir("", "test_unix") require.NoError(b, err) defer os.RemoveAll(dir) sockName := filepath.Join(dir, "sock") laddr, err := net.ResolveUnixAddr("unix", sockName) require.NoError(b, err) cleanup := serveUNIX(b, laddr, func(conn net.Conn) { io.Copy(ioutil.Discard, conn) }) defer cleanup() udpConn, err := net.ListenUDP("udp", &net.UDPAddr{IP: net.ParseIP("127.0.0.1"), Port: 0}) require.NoError(b, err) defer udpConn.Close() unixBackend := &streamBackend{ backendParams: backendParams{ addr: laddr, }, } flushyUnixBackend := &streamBackend{ backendParams: backendParams{ addr: laddr, bufferSize: uint(BufferSize), }, } udpBackend := &packetBackend{ backendParams: backendParams{ addr: udpConn.LocalAddr(), }, } spanWithMetrics := &ssf.SSFSpan{ Name: "realistic_span", Service: "hi-there-srv", Id: 1, ParentId: 2, TraceId: 3, Error: false, Tags: map[string]string{ "span_purpose": "testing", }, Metrics: []*ssf.SSFSample{ ssf.Count("oh.hai", 1, map[string]string{"purpose": "testing"}), ssf.Histogram("hello.there", 1, map[string]string{"purpose": "testing"}, ssf.Unit("absolute")), }, } spanNoMetrics := &ssf.SSFSpan{ Name: "realistic_span", Service: "hi-there-srv", Id: 1, ParentId: 2, TraceId: 3, Error: false,<|fim▁hole|> Tags: map[string]string{ "span_purpose": "testing", }, } emptySpanWithMetrics := &ssf.SSFSpan{ Metrics: []*ssf.SSFSample{ ssf.Count("oh.hai", 1, map[string]string{"purpose": "testing"}), ssf.Histogram("hello.there", 1, map[string]string{"purpose": "testing"}, ssf.Unit("lad")), }, } // Warm up things: connect(context.TODO(), unixBackend) _ = flushyUnixBackend.FlushSync(context.TODO()) connect(context.TODO(), udpBackend) b.ResetTimer() // Start benchmarking: for _, every := range []int{10, 50, 100} { b.Run(fmt.Sprintf("UNIX_flush_span_with_metrics_%dms", every), benchmarkFlushingCombination(flushyUnixBackend, spanWithMetrics, time.Duration(every)*time.Millisecond)) 
b.Run(fmt.Sprintf("UNIX_flush_span_no_metrics_%dms", every), benchmarkFlushingCombination(flushyUnixBackend, spanNoMetrics, time.Duration(every)*time.Millisecond)) b.Run(fmt.Sprintf("UNIX_flush_empty_span_with_metrics_%dms", every), benchmarkFlushingCombination(flushyUnixBackend, emptySpanWithMetrics, time.Duration(every)*time.Millisecond)) } b.Run("UNIX_plain_span_with_metrics", benchmarkPlainCombination(unixBackend, spanWithMetrics)) b.Run("UNIX_plain_span_no_metrics", benchmarkPlainCombination(unixBackend, spanNoMetrics)) b.Run("UNIX_plain_empty_span_with_metrics", benchmarkPlainCombination(unixBackend, emptySpanWithMetrics)) b.Run("UDP_plain_span_with_metrics", benchmarkPlainCombination(udpBackend, spanWithMetrics)) b.Run("UDP_plain_span_no_metrics", benchmarkPlainCombination(udpBackend, spanNoMetrics)) b.Run("UDP_plain_empty_span_with_metrics", benchmarkPlainCombination(udpBackend, emptySpanWithMetrics)) }<|fim▁end|>
<|file_name|>cgroup_service_test.py<|end_file_name|><|fim▁begin|>"""Unit test for cgroup_service - Treadmill cgroup service. """ from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import os import tempfile import unittest import select import shutil import mock import treadmill from treadmill.services import cgroup_service class CGroupServiceTest(unittest.TestCase): """Unit tests for the cgroup service implementation. """ def setUp(self): self.root = tempfile.mkdtemp() self.cgroup_svc = os.path.join(self.root, 'cgroup_svc') self.running = os.path.join(self.root, 'running') def tearDown(self): if self.root and os.path.isdir(self.root): shutil.rmtree(self.root) def test_initialize(self): """Test service initialization. """ svc = cgroup_service.CgroupResourceService(self.running) svc.initialize(self.cgroup_svc) def test_report_status(self): """Test processing of status request. """ svc = cgroup_service.CgroupResourceService(self.running) status = svc.report_status() self.assertEqual( status, {'ready': True} ) def test_event_handlers(self): """Test event_handlers request. """ svc = cgroup_service.CgroupResourceService(self.running) handlers = svc.event_handlers() self.assertEqual( handlers, [] ) @mock.patch('treadmill.cgroups.create', mock.Mock()) @mock.patch('treadmill.cgroups.get_value', mock.Mock(return_value=10000)) @mock.patch('treadmill.cgroups.join', mock.Mock()) @mock.patch('treadmill.cgroups.set_value', mock.Mock()) @mock.patch('treadmill.services.cgroup_service.CgroupResourceService.' '_register_oom_handler', mock.Mock()) def test_on_create_request(self): """Test processing of a cgroups create request. 
""" # Access to a protected member _register_oom_handler of a client class # pylint: disable=W0212 svc = cgroup_service.CgroupResourceService(self.running) request = { 'memory': '100M', 'cpu': '100%', } request_id = 'myproid.test-0-ID1234' svc.on_create_request(request_id, request) cgrp = os.path.join('treadmill/apps', request_id) svc._register_oom_handler.assert_called_with(cgrp, request_id) treadmill.cgroups.create.assert_has_calls( [ mock.call(ss, cgrp) for ss in ['cpu', 'cpuacct', 'memory', 'blkio'] ] ) # Memory calculation: # # (demand * virtual cpu bmips / total bmips) * treadmill.cpu.shares # (100% * 5000 / (24000 * 0.9 ) * 10000) = 2314 treadmill.cgroups.set_value.assert_has_calls([ mock.call('blkio', cgrp, 'blkio.weight', 100), mock.call('memory', cgrp, 'memory.soft_limit_in_bytes', '100M'), mock.call('memory', cgrp, 'memory.limit_in_bytes', '100M'), mock.call('memory', cgrp, 'memory.memsw.limit_in_bytes', '100M'), mock.call('cpu', cgrp, 'cpu.shares', treadmill.sysinfo.BMIPS_PER_CPU)<|fim▁hole|> @mock.patch('treadmill.cgutils.delete', mock.Mock()) @mock.patch('treadmill.services.cgroup_service.CgroupResourceService.' '_unregister_oom_handler', mock.Mock()) def test_on_delete_request(self): """Test processing of a cgroups delete request. """ # Access to a protected member _unregister_oom_handler of a client # class # pylint: disable=W0212 svc = cgroup_service.CgroupResourceService(self.running) request_id = 'myproid.test-0-ID1234' svc.on_delete_request(request_id) cgrp = os.path.join('treadmill/apps', request_id) treadmill.cgutils.delete.assert_has_calls( [ mock.call(ss, cgrp) for ss in ['cpu', 'cpuacct', 'memory', 'blkio'] ] ) svc._unregister_oom_handler.assert_called_with(cgrp) @mock.patch('treadmill.cgutils.get_memory_oom_eventfd', mock.Mock(return_value='fake_efd')) def test__register_oom_handler(self): """Test registration of OOM handler. 
""" # Access to a protected member _register_oom_handler of a client class # pylint: disable=W0212 svc = cgroup_service.CgroupResourceService(self.running) registered_handlers = svc.event_handlers() self.assertNotIn( ('fake_efd', select.POLLIN, mock.ANY), registered_handlers ) cgrp = 'treadmill/apps/myproid.test-42-ID1234' svc._register_oom_handler(cgrp, 'myproid.test-42-ID1234') treadmill.cgutils.get_memory_oom_eventfd.assert_called_with(cgrp) registered_handlers = svc.event_handlers() self.assertIn( ('fake_efd', select.POLLIN, mock.ANY), registered_handlers ) @mock.patch('os.close', mock.Mock()) @mock.patch('treadmill.cgutils.get_memory_oom_eventfd', mock.Mock(return_value='fake_efd')) def test__unregister_oom_handler(self): """Test unregistration of OOM handler. """ # Access to a protected member _unregister_oom_handler of a client # class # pylint: disable=W0212 svc = cgroup_service.CgroupResourceService(self.running) cgrp = 'treadmill/apps/myproid.test-42-ID1234' svc._register_oom_handler(cgrp, 'myproid.test-42-ID1234') registered_handlers = svc.event_handlers() self.assertIn( ('fake_efd', select.POLLIN, mock.ANY), registered_handlers ) svc._unregister_oom_handler(cgrp) registered_handlers = svc.event_handlers() self.assertNotIn( ('fake_efd', select.POLLIN, mock.ANY), registered_handlers ) os.close.assert_called_with('fake_efd') if __name__ == '__main__': unittest.main()<|fim▁end|>
])
<|file_name|>net.go<|end_file_name|><|fim▁begin|>package daemon import ( "fmt" "os" "strconv" "strings" "time" ) var ( netdir = "/sys/class/net" rxfile = "/sys/class/net/%s/statistics/rx_bytes" txfile = "/sys/class/net/%s/statistics/tx_bytes" ) type Net struct { Rxrate float64 Txrate float64 } func NetRate(done <-chan struct{}, delay time.Duration) (<-chan Net, <-chan error) { resultc := make(chan Net, 1)<|fim▁hole|> var net Net cleanup := func() { errc <- err close(errc) close(resultc) } go func() { defer cleanup() var prevrx, currx, prevtx, curtx float64 var prevtime, curtime int64 if err = checkNetFiles(); err != nil { return } for { prevrx = currx currx, _ = numbytes(rxfile) prevtx = curtx curtx, _ = numbytes(txfile) prevtime = curtime curtime = time.Now().UnixNano() net.Rxrate = getrate(prevrx, currx, prevtime, curtime) net.Txrate = getrate(prevtx, curtx, prevtime, curtime) select { case resultc <- net: case <-done: return } time.Sleep(delay) } }() return resultc, errc } func checkNetFiles() error { if _, err := numbytes(rxfile); err != nil { return err } if _, err := numbytes(txfile); err != nil { return err } return nil } func connNames() ([]string, error) { file, err := os.Open(netdir) if err != nil { return make([]string, 0), err } fi, _ := file.Readdir(-1) conns := make([]string, len(fi)) for index, i := range fi { conns[index] = i.Name() } return conns, nil } func numbytes(path string) (float64, error) { conns, err := connNames() if err != nil { return 0, err } var total int for _, conn := range conns { p := fmt.Sprintf(path, conn) val, err := readFile(p) if err != nil { return 0, err } val = strings.Trim(val, " \n") // Skip the lo interface. Its the loopback used by local processes if val == "lo" { continue } num, _ := strconv.Atoi(val) total = total + num } return float64(total), nil }<|fim▁end|>
errc := make(chan error) var err error
<|file_name|>Form1Impl.go<|end_file_name|><|fim▁begin|>// 在这里写你的事件 package main import ( "github.com/ying32/govcl/vcl" "github.com/ying32/govcl/vcl/rtl" "github.com/ying32/govcl/vcl/types" "github.com/ying32/govcl/vcl/types/keys" "github.com/ying32/govcl/vcl/types/messages" "github.com/ying32/govcl/vcl/win" ) //::private:: type TForm1Fields struct { hotKeyId types.ATOM } func (f *TForm1) OnFormCreate(sender vcl.IObject) { f.SetCaption("Press Ctrl+F1") f.ScreenCenter() f.hotKeyId = win.GlobalAddAtom("HotKeyId") - 0xC000 //rtl.ShortCutToText() shift := types.NewSet(types.SsCtrl) // 注册Ctrl+F1 // rtl.ShiftStateToWord(shift) 这个只是更容易理解,也可以使用 MOD_CONTROL | MOD_ALT 方法 if !win.RegisterHotKey(f.Handle(), int32(f.hotKeyId), rtl.ShiftStateToWord(shift), keys.VkF1) { vcl.ShowMessage("注册热键失败。") } } func (f *TForm1) OnFormDestroy(sender vcl.IObject) { if f.hotKeyId > 0 { win.UnregisterHotKey(f.Handle(), int32(f.hotKeyId)) win.GlobalDeleteAtom(f.hotKeyId) } } func (f *TForm1) OnFormWndProc(msg *types.TMessage) { f.InheritedWndProc(msg) /* TWMHotKey = record Msg: Cardinal; MsgFiller: TDWordFiller; HotKey: WPARAM; Unused: LPARAM; Result: LRESULT; end;<|fim▁hole|> } } }<|fim▁end|>
*/ if msg.Msg == messages.WM_HOTKEY { if msg.WParam == types.WPARAM(f.hotKeyId) { vcl.ShowMessage("按下了Ctrl+F1")
<|file_name|>test.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from test_support import * prove_all(no_fail=True, steps = 400)<|fim▁end|>
<|file_name|>type_converter.py<|end_file_name|><|fim▁begin|>#------------------------------------------------------------------------------ # type_converter.py (Section 6.2) #------------------------------------------------------------------------------ #------------------------------------------------------------------------------ # Copyright 2017, 2018, Oracle and/or its affiliates. All rights reserved. #------------------------------------------------------------------------------ from __future__ import print_function import cx_Oracle import decimal import db_config<|fim▁hole|> con = cx_Oracle.connect(db_config.user, db_config.pw, db_config.dsn) cur = con.cursor() def ReturnNumbersAsDecimal(cursor, name, defaultType, size, precision, scale): if defaultType == cx_Oracle.NUMBER: return cursor.var(str, 9, cursor.arraysize, outconverter = decimal.Decimal) cur.outputtypehandler = ReturnNumbersAsDecimal for value, in cur.execute("select 0.1 from dual"): print("Value:", value, "* 3 =", value * 3)<|fim▁end|>
<|file_name|>learn.go<|end_file_name|><|fim▁begin|>package main import ( "fmt" "io" "io/ioutil" "log" "net" "net/http" ) func main() { http.HandleFunc("/host", hostFinder) http.HandleFunc("/ip", sourceIp) http.HandleFunc("/date", GetJosnTime) fmt.Println("Listening on 0.0.0.0:8000") err := http.ListenAndServe("0.0.0.0:8000", nil) if err != nil { log.Fatal(err) } } func hostFinder(rw http.ResponseWriter, req *http.Request) { hostPort := req.Host host, port, _ := net.SplitHostPort(hostPort) io.WriteString(rw, "<html>") io.WriteString(rw, "Host Name : "+host+" <br>") io.WriteString(rw, "Port Number : "+port) io.WriteString(rw, "</br>") io.WriteString(rw, req.RemoteAddr) io.WriteString(rw, "</html>") // io.WriteString(w, s) }<|fim▁hole|>func sourceIp(rw http.ResponseWriter, req *http.Request) { host, _, _ := net.SplitHostPort(req.RemoteAddr) io.WriteString(rw, "<h1> Your Ip address : "+host) } func GetJosnTime(rw http.ResponseWriter, req *http.Request) { rw.Header().Set("Content-type", "application/json") rw.Header().Add("Content-type", "charset=utf-8") response, _ := http.Get("http://date.jsontest.com/") defer response.Body.Close() data, _ := ioutil.ReadAll(response.Body) io.WriteString(rw, string(data)) }<|fim▁end|>
<|file_name|>sc_main.cpp<|end_file_name|><|fim▁begin|>// -*- SystemC -*- // DESCRIPTION: Verilator Example: Top level main for invoking SystemC model // // This file ONLY is placed under the Creative Commons Public Domain, for // any use, without warranty, 2017 by Wilson Snyder. // SPDX-License-Identifier: CC0-1.0 //====================================================================== // For std::unique_ptr #include <memory> // SystemC global header #include <systemc.h> // Include common routines #include <verilated.h> #if VM_TRACE #include <verilated_vcd_sc.h> #endif #include <sys/stat.h> // mkdir // Include model header, generated from Verilating "top.v" #include "Vtop.h" int sc_main(int argc, char* argv[]) { // This is a more complicated example, please also see the simpler examples/make_hello_c. // Prevent unused variable warnings if (false && argc && argv) {} // Create logs/ directory in case we have traces to put under it Verilated::mkdir("logs"); // Set debug level, 0 is off, 9 is highest presently used // May be overridden by commandArgs argument parsing Verilated::debug(0); // Randomization reset policy // May be overridden by commandArgs argument parsing Verilated::randReset(2); #if VM_TRACE // Before any evaluation, need to know to calculate those signals only used for tracing Verilated::traceEverOn(true); #endif // Pass arguments so Verilated code can see them, e.g. 
$value$plusargs // This needs to be called before you create any model Verilated::commandArgs(argc, argv); // General logfile ios::sync_with_stdio(); // Define clocks sc_clock clk{"clk", 10, SC_NS, 0.5, 3, SC_NS, true}; sc_clock fastclk{"fastclk", 2, SC_NS, 0.5, 2, SC_NS, true}; // Define interconnect sc_signal<bool> reset_l; sc_signal<vluint32_t> in_small; sc_signal<vluint64_t> in_quad; sc_signal<sc_bv<70>> in_wide; sc_signal<vluint32_t> out_small; sc_signal<vluint64_t> out_quad; sc_signal<sc_bv<70>> out_wide; // Construct the Verilated model, from inside Vtop.h // Using unique_ptr is similar to "Vtop* top = new Vtop" then deleting at end const std::unique_ptr<Vtop> top{new Vtop{"top"}}; // Attach Vtop's signals to this upper model top->clk(clk); top->fastclk(fastclk); top->reset_l(reset_l); top->in_small(in_small); top->in_quad(in_quad); top->in_wide(in_wide); top->out_small(out_small); top->out_quad(out_quad); top->out_wide(out_wide); // You must do one evaluation before enabling waves, in order to allow // SystemC to interconnect everything for testing. 
sc_start(1, SC_NS); #if VM_TRACE // If verilator was invoked with --trace argument, // and if at run time passed the +trace argument, turn on tracing VerilatedVcdSc* tfp = nullptr; const char* flag = Verilated::commandArgsPlusMatch("trace"); if (flag && 0 == strcmp(flag, "+trace")) { cout << "Enabling waves into logs/vlt_dump.vcd...\n"; tfp = new VerilatedVcdSc; top->trace(tfp, 99); // Trace 99 levels of hierarchy Verilated::mkdir("logs"); tfp->open("logs/vlt_dump.vcd"); } #endif // Simulate until $finish while (!Verilated::gotFinish()) {<|fim▁hole|> if (tfp) tfp->flush(); #endif // Apply inputs if (sc_time_stamp() > sc_time(1, SC_NS) && sc_time_stamp() < sc_time(10, SC_NS)) { reset_l = !1; // Assert reset } else { reset_l = !0; // Deassert reset } // Simulate 1ns sc_start(1, SC_NS); } // Final model cleanup top->final(); // Close trace if opened #if VM_TRACE if (tfp) { tfp->close(); tfp = nullptr; } #endif // Coverage analysis (calling write only after the test is known to pass) #if VM_COVERAGE Verilated::mkdir("logs"); VerilatedCov::write("logs/coverage.dat"); #endif // Return good completion status return 0; }<|fim▁end|>
#if VM_TRACE // Flush the wave files each cycle so we can immediately see the output // Don't do this in "real" programs, do it in an abort() handler instead
<|file_name|>issue-34947-pow-i32.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // compile-flags: -O #![crate_type = "lib"] // CHECK-LABEL: @issue_34947<|fim▁hole|> // CHECK: mul // CHECK-NEXT: mul // CHECK-NEXT: mul // CHECK-NEXT: ret x.pow(5) }<|fim▁end|>
#[no_mangle] pub fn issue_34947(x: i32) -> i32 {
<|file_name|>run.py<|end_file_name|><|fim▁begin|><|fim▁hole|> app.run()<|fim▁end|>
from organise import app
<|file_name|>MonsterManager.cpp<|end_file_name|><|fim▁begin|>// // MonsterManager.cpp // LuoBoGuard // // Created by jwill on 16/2/26. // // #include "MonsterManager.hpp" #include "PointConvertCtrl.hpp" MonsterManager* MonsterManager::s_sharedMonsterManager = nullptr; MonsterManager* MonsterManager::getInstance() { if (s_sharedMonsterManager == nullptr) { s_sharedMonsterManager = new (std::nothrow) MonsterManager(); if(!s_sharedMonsterManager->init()) { delete s_sharedMonsterManager; s_sharedMonsterManager = nullptr; CCLOG("ERROR: Could not init MonsterManager"); } } return s_sharedMonsterManager; } void MonsterManager::destroyInstance() { CC_SAFE_DELETE(s_sharedMonsterManager); } MonsterManager::MonsterManager(){<|fim▁hole|>MonsterManager::~MonsterManager(){ SpriteFrameCache::getInstance()->removeUnusedSpriteFrames(); } bool MonsterManager::init() { return true; } void MonsterManager::loadRoute(__Array* mapArr){ _mapArr=mapArr; _mapArr->retain(); } void MonsterManager::loadMonster(){ SpriteFrameCache::getInstance()->addSpriteFramesWithFile("Resource/Monsters01.plist", "Resource/Monsters01.png"); char missonName[20]; int randomI=1+floor(rand_0_1()*2); sprintf(missonName,"land_nima0%d.png",randomI); SpriteFrame *spf=SpriteFrameCache::getInstance()->getSpriteFrameByName(missonName); _monster=MonsterSprite::createWithSF(spf); _monster->setActionName("land_nima0"); _monster->setCurPointId(0); } Point MonsterManager::getStartPoint(){ __String *firstPS=(__String*)_mapArr->getObjectAtIndex(0); Point firstP=PointFromString(firstPS->getCString()); return firstP; } void MonsterManager::startGo(){ long mapCount=_mapArr->count()-1; if (_monster->getCurPointId()<mapCount) { _monster->move(); int nextId=_monster->getCurPointId()+1; __String *pointStr=(__String*)_mapArr->getObjectAtIndex(nextId); Point nextP_fake=PointFromString(pointStr->getCString()); Point nextp_real=PointConvertCtrl::getInstance()->convertFakePoint(nextP_fake); log("->(%.2f,%.2f)",nextp_real.x,nextp_real.y); 
// auto callFunc = CallFunc::create(this,callfunc_selector(MonsterManager::startGo)); auto callFunc=CallFunc::create(CC_CALLBACK_0(MonsterManager::startGo, this)); auto moveTo = MoveTo::create(2, nextp_real); auto action = Sequence::create(moveTo,callFunc,NULL); _monster->setCurPointId(nextId); _monster->runAction(action); }else{ _monster->stop(); log("arrived"); } } //void MonsterManager::endGo(){ // log("next"); // long mapCount=_mapArr->count(); // if (_monster->getCurPointId()<mapCount) { // int nextId=_monster->getCurPointId()+1; // __String *pointStr=(__String*)_mapArr->getObjectAtIndex(nextId); // Point nextP_fake=PointFromString(pointStr->getCString()); // Point nextp_real=PointConvertCtrl::getInstance()->convertFakePoint(nextP_fake); // // auto callFunc = CallFunc::create(this,callfunc_selector(MonsterManager::startGo)); // auto callFunc=CallFunc::create(CC_CALLBACK_0(MonsterManager::endGo, this)); // auto moveTo = MoveTo::create(2, nextp_real); // auto action = Sequence::create(moveTo,callFunc,NULL); // _monster->runAction(action); // }else{ // log("arrived"); // } //}<|fim▁end|>
loadMonster(); }
<|file_name|>webpack.config.js<|end_file_name|><|fim▁begin|>var path = require('path'); var webpack = require('webpack'); module.exports = { devtool: 'source-map', entry: [ 'webpack-hot-middleware/client', './index' ], output: { path: path.join(__dirname, 'dist'), filename: 'bundle.js', publicPath: '/static/' },<|fim▁hole|> new webpack.NoErrorsPlugin() ], module: { loaders: [{ test: /\.js$/, loaders: ['babel'], exclude: /node_modules/, include: __dirname }] } }; var src = path.join(__dirname, '..', '..', 'src'); var nodeModules = path.join(__dirname, '..', '..', 'node_modules'); var fs = require('fs'); if (fs.existsSync(src) && fs.existsSync(nodeModules)) { // Resolve to source module.exports.resolve = { alias: { 'remote-redux-devtools': src } }; // Compile from source module.exports.module.loaders.push({ test: /\.js$/, loaders: ['babel'], include: src }); }<|fim▁end|>
plugins: [ new webpack.optimize.OccurenceOrderPlugin(), new webpack.HotModuleReplacementPlugin(),
<|file_name|>DrillDownPathBean.java<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.das.analytics.rest.beans; import javax.xml.bind.annotation.XmlAccessType;<|fim▁hole|>import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; /** * This class represents a facet object bean. facet object defines the hierarchical fieldName, * which can be drilled down. This can be used as a value in a record. * Example : * Assume a record represents a book. * Then the record field : value pairs will be, e.g. * Price : $50.00 * Author : firstName LastName * ISBN : 234325435445435436 * Published Date : "1987" , "March", "21" * * Here Publish Date will be a facet/categoryPath, since it can be drilled down to Year, then month and date * and categorizes by each level. 
* */ @XmlRootElement(name = "categoryPath") @XmlAccessorType(XmlAccessType.FIELD) public class DrillDownPathBean { @XmlElement(name = "path") private String[] path; @XmlElement(name = "fieldName") private String fieldName; /** * This constructor is for jax-rs json serialization/deserialization */ public DrillDownPathBean() { } public String[] getPath() { return path; } public String getFieldName() { return fieldName; } public void setPath(String[] path) { this.path = path; } public void setFieldName(String fieldName) { this.fieldName = fieldName; } }<|fim▁end|>
import javax.xml.bind.annotation.XmlAccessorType;
<|file_name|>topsort.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # −*− coding: UTF−8 −*− # Topological Sorting from collections import defaultdict def topsort(graph): if not graph: return [] # 1. Count every node's dependencies count = defaultdict(int) for node in graph: for dependency in graph[node]: count[dependency] += 1 # 2. Find initial nodes - The ones with no incoming edges, so the ones that # no dependency points at initial_nodes = [node for node in graph if count[node] == 0] if graph and not initial_nodes: raise Exception("Circular depenency detected") # 3. Process each node in the order found in initial_nodes. Populate # initial_nodes with processed node's dependencies if these aren't referred # in any other node. result = [] while initial_nodes: node = initial_nodes.pop() result.append(node) for dependency in graph[node]: count[dependency] -= 1 if count[dependency] == 0: initial_nodes.append(dependency) if len(result) != len(graph): raise Exception("Circular depenency detected") return result[::-1] def test(): from simpletest import _assert, _assert_raises a, b, c, d, e, f = 'abcdef' graph = {} _assert(topsort(graph), [])<|fim▁hole|> a: set([]), } _assert(topsort(graph), [a]) graph = { a: set([d, b]), b: set([d, c]), c: set([d]), d: set([]), e: set([d]), f: set([d, e]), } _assert(topsort(graph), [d, c, b, a, e, f]) graph = { a: set([b]), b: set([a]), } _assert_raises(Exception, topsort, graph) if __name__ == '__main__': test()<|fim▁end|>
graph = {
<|file_name|>search.py<|end_file_name|><|fim▁begin|>""" search.py """ from flask import Flask, request, redirect, abort, make_response from flask import render_template, flash import bibserver.dao from bibserver import auth import json, httplib from bibserver.config import config import bibserver.util as util import logging from logging.handlers import RotatingFileHandler LOG_FILENAME="./app.log" log = logging.getLogger('werkzeug') log.setLevel(logging.DEBUG) formatter = logging.Formatter( "[%(asctime)s] {%(pathname)s:%(lineno)d} %(levelname)s - %(message)s") handler = RotatingFileHandler(LOG_FILENAME, maxBytes=10000000, backupCount=5) handler.setFormatter(formatter) log.addHandler(handler) class Search(object): def __init__(self,path,current_user): self.path = path.replace(".json","") self.current_user = current_user # facets -> convert to aggs self.search_options = { 'search_url': '/query?', 'search_index': 'elasticsearch', 'paging': { 'from': 0, 'size': 10 }, #'predefined_filters': {}, #'facets': config['search_facet_fields'], 'result_display': config['search_result_display'], 'search_sortby': [{'display':'year', 'field':'year.exact'}, {'display':'author','field':'author.name'}, {'display':'journal','field':'journal.name'}], 'searchbox_fieldselect': [ {'display':'author','field':'author.name'}, {'display':'journal','field':'journal.name'}]#, #'addremovefacets': config['add_remove_facets'] # (full list could also be pulled from DAO) } self.parts = self.path.strip('/').split('/') def find(self): log.debug(self.parts[0]) log.debug(self.parts) log.debug(len(self.parts)) if bibserver.dao.Account.get(self.parts[0]): if len(self.parts) == 1: return self.account() # user account elif len(self.parts) == 2: if self.parts[1] == "collections": return self.collections() else: return self.collection() # get a collection elif len(self.parts) == 3: return self.record() # get a record in collection elif self.parts[0] == 'collections': return self.collections() # get search list 
of all collections elif len(self.parts) == 1: if self.parts[0] != 'search': self.search_options['q'] = self.parts[0] return self.default() # get search result of implicit search term elif len(self.parts) == 2: return self.implicit_facet() # get search result of implicit facet filter else: abort(404) def default(self): # default search page if util.request_wants_json(): res = bibserver.dao.Record.query() resp = make_response( json.dumps([i['_source'] for i in res._hits], sort_keys=True, indent=4) ) resp.mimetype = "application/json" return resp else: return render_template('search/index.html', current_user=self.current_user, search_options=json.dumps(self.search_options), collection=None ) # TODO: convert facet => aggs def implicit_facet(self): self.search_options['predefined_filters'][self.parts[0]+config['facet_field']] = self.parts[1] # remove the implicit facet from facets for count,facet in enumerate(self.search_options['facets']): if facet['field'] == self.parts[0]+config['facet_field']: del self.search_options['facets'][count] if util.request_wants_json(): res = bibserver.dao.Record.query(terms=self.search_options['predefined_filters']) resp = make_response( json.dumps([i['_source'] for i in res._hits], sort_keys=True, indent=4) ) resp.mimetype = "application/json" return resp else: return render_template('search/index.html', current_user=self.current_user, search_options=json.dumps(self.search_options), collection=None, implicit=self.parts[0]+': ' + self.parts[1] ) def collections(self): if len(self.parts) == 1: if util.request_wants_json(): res = bibserver.dao.Collection.query(size=1000000) colls = [i['_source'] for i in res._hits] resp = make_response( json.dumps(colls, sort_keys=True, indent=4) ) resp.mimetype = "application/json" return resp else: # search collection records self.search_options['search_url'] = '/query/collection?' 
self.search_options['facets'] = [{'field':'owner','size':100},{'field':'_created','size':100}] self.search_options['result_display'] = [[{'pre':'<h3>','field':'label','post':'</h3>'}],[{'field':'description'}],[{'pre':'created by ','field':'owner'}]] self.search_options['result_display'] = config['collections_result_display'] return render_template('collection/index.html', current_user=self.current_user, search_options=json.dumps(self.search_options), collection=None) elif len(self.parts) == 2: if self.parts[0] == "collections": acc = bibserver.dao.Account.get(self.parts[1]) else: acc = bibserver.dao.Account.get(self.parts[0]) if acc: resp = make_response( json.dumps([coll.data for coll in acc.collections], sort_keys=True, indent=4) ) resp.mimetype = "application/json" return resp else: abort(404) elif len(self.parts) == 3: coll = bibserver.dao.Collection.get_by_owner_coll(self.parts[1],self.parts[2]) if coll: coll.data['records'] = len(coll) resp = make_response( json.dumps(coll.data, sort_keys=True, indent=4) ) resp.mimetype = "application/json" return resp else: abort(404) else: abort(404) def record(self): found = None res = bibserver.dao.Record.query(terms = { 'owner'+config['facet_field']:self.parts[0], 'collection'+config['facet_field']:self.parts[1], 'id'+config['facet_field']:self.parts[2] }) if res.total == 0: rec = bibserver.dao.Record.get(self.parts[2]) if rec: found = 1 elif res.total == 1: rec = bibserver.dao.Record.get(res._hits[0]['_id']) found = 1 else: found = 2 if not found: abort(404) elif found == 1: collection = bibserver.dao.Collection.get_by_owner_coll(rec.data['owner'],rec.data['collection']) if request.method == 'DELETE': if rec: if not auth.collection.update(self.current_user, collection): abort(401) rec.delete() abort(404) else: abort(404) elif request.method == 'POST': if rec: if not auth.collection.update(self.current_user, collection): abort(401) rec.data = request.json rec.save() resp = make_response( json.dumps(rec.data, 
sort_keys=True, indent=4) ) resp.mimetype = "application/json" return resp else: if util.request_wants_json(): resp = make_response( json.dumps(rec.data, sort_keys=True, indent=4) ) resp.mimetype = "application/json" return resp else: admin = True if auth.collection.update(self.current_user, collection) else False # make a list of all the values in the record, for autocomplete on the search field searchvals = [] def valloop(obj): if isinstance(obj,dict): for item in obj: valloop(obj[item]) elif isinstance(obj,list): for thing in obj: valloop(thing) else: searchvals.append(obj) valloop(rec.data) # get fuzzy like this host = str(config['ELASTIC_SEARCH_HOST']).rstrip('/') db_path = config['ELASTIC_SEARCH_DB'] fullpath = '/' + db_path + '/record/' + rec.id + '/_mlt?mlt_fields=title&min_term_freq=1&percent_terms_to_match=1&min_word_len=3' c = httplib.HTTPConnection(host) c.request('GET', fullpath) resp = c.getresponse() res = json.loads(resp.read()) mlt = [i['_source'] for i in res['hits']['hits']] # get any notes notes = bibserver.dao.Note.about(rec.id) # check service core for more data about the record # TODO: should maybe move this into the record dao or something # TODO: also, add in any other calls to external APIs servicecore = "" apis = config['external_apis'] if apis['servicecore']['key']: try: servicecore = "not found in any UK repository" addr = apis['servicecore']['url'] + rec.data['title'].replace(' ','%20') + "?format=json&api_key=" + apis['servicecore']['key'] import urllib2 response = urllib2.urlopen( addr ) data = json.loads(response.read()) if 'ListRecords' in data and len(data['ListRecords']) != 0: record = data['ListRecords'][0]['record']['metadata']['oai_dc:dc'] servicecore = "<h3>Availability</h3><p>This article is openly available in an institutional repository:</p>" servicecore += '<p><a target="_blank" href="' + record["dc:source"] + '">' + record["dc:title"] + '</a><br />' if "dc:description" in record: servicecore += record["dc:description"] + 
'<br /><br />' servicecore += '</p>' except: pass # render the record with all extras return render_template('record.html', record=json.dumps(rec.data), prettyrecord=self.prettify(rec.data), objectrecord = rec.data, searchvals=json.dumps(searchvals), admin=admin, notes=notes, servicecore=servicecore, mlt=mlt, searchables=json.dumps(config["searchables"], sort_keys=True) ) else: if util.request_wants_json(): resp = make_response( json.dumps([i['_source'] for i in res._hits], sort_keys=True, indent=4) ) resp.mimetype = "application/json" return resp else: return render_template('record.html', multiple=[i['_source'] for i in res._hits]) def account(self): self.search_options['predefined_filters']['owner'+config['facet_field']] = self.parts[0] acc = bibserver.dao.Account.get(self.parts[0]) if request.method == 'DELETE': if not auth.user.update(self.current_user,acc): abort(401) if acc: acc.delete() return '' elif request.method == 'POST': if not auth.user.update(self.current_user,acc): abort(401) info = request.json if info.get('_id',False): if info['_id'] != self.parts[0]: acc = bibserver.dao.Account.get(info['_id']) else: info['api_key'] = acc.data['api_key'] info['_created'] = acc.data['_created'] info['collection'] = acc.data['collection'] info['owner'] = acc.data['collection'] acc.data = info if 'password' in info and not info['password'].startswith('sha1'): acc.set_password(info['password']) acc.save() resp = make_response( json.dumps(acc.data, sort_keys=True, indent=4) ) resp.mimetype = "application/json" return resp else: if util.request_wants_json(): if not auth.user.update(self.current_user,acc): abort(401) resp = make_response( json.dumps(acc.data, sort_keys=True, indent=4) ) resp.mimetype = "application/json" return resp else: admin = True if auth.user.update(self.current_user,acc) else False recordcount = bibserver.dao.Record.query(terms={'owner':acc.id}).total collcount = bibserver.dao.Collection.query(terms={'owner':acc.id}).total return 
render_template('account/view.html', current_user=self.current_user, search_options=json.dumps(self.search_options), record=json.dumps(acc.data), recordcount=recordcount, collcount=collcount, admin=admin, account=acc, superuser=auth.user.is_super(self.current_user) ) def collection(self): # show the collection that matches parts[1] self.search_options['predefined_filters']['owner'] = self.parts[0] self.search_options['predefined_filters']['collection'] = self.parts[1] # remove the collection facet for count,facet in enumerate(self.search_options['facets']): if facet['field'] == 'collection'+config['facet_field']: del self.search_options['facets'][count]<|fim▁hole|> if request.method == 'DELETE': if metadata != None: if not auth.collection.update(self.current_user, metadata): abort(401) else: metadata.delete() return '' else: if not auth.collection.create(self.current_user, None): abort(401) else: size = bibserver.dao.Record.query(terms={'owner':self.parts[0],'collection':self.parts[1]}).total for rid in bibserver.dao.Record.query(terms={'owner':self.parts[0],'collection':self.parts[1]},size=size)._hits: record = bibserver.dao.Record.get(rid['_id']) if record: record.delete() return '' elif request.method == 'POST': if metadata != None: metadata.data = request.json metadata.save() return '' else: abort(404) else: if util.request_wants_json(): out = {"metadata":metadata.data,"records":[]} out['metadata']['records'] = len(metadata) out['metadata']['query'] = request.url for rec in metadata.records: out['records'].append(rec.data) resp = make_response( json.dumps(out, sort_keys=True, indent=4) ) resp.mimetype = "application/json" return resp else: admin = True if metadata != None and auth.collection.update(self.current_user, metadata) else False if metadata and '_display_settings' in metadata: self.search_options.update(metadata['_display_settings']) users = bibserver.dao.Account.query(size=1000000) # pass the userlist for autocomplete admin addition (could be ajax'd) 
userlist = [i['_source']['_id'] for i in users['hits']['hits']] return render_template('search/index.html', current_user=self.current_user, search_options=json.dumps(self.search_options), collection=metadata.data, record = json.dumps(metadata.data), userlist=json.dumps(userlist), request=request, admin=admin ) def prettify(self,record): result = '<p>' # given a result record, build how it should look on the page img = False if img: result += '<img class="thumbnail" style="float:left; width:100px; margin:0 5px 10px 0; max-height:150px;" src="' + img[0] + '" />' # add the record based on display template if available display = config['search_result_display'] lines = '' for lineitem in display: line = '' for obj in lineitem: thekey = obj['field'] parts = thekey.split('.') if len(parts) == 1: res = record elif len(parts) == 2: res = record.get(parts[0],'') elif len(parts) == 3: res = record[parts[0]][parts[1]] counter = len(parts) - 1 if res and isinstance(res, dict): thevalue = res.get(parts[counter],'') # this is a dict else: thevalue = [] for row in res: thevalue.append(row[parts[counter]]) if thevalue and len(thevalue): line += obj.get('pre','') if isinstance(thevalue, list): for index,val in enumerate(thevalue): if index != 0 and index != len(thevalue)-1: line += ', ' line += val else: line += thevalue line += obj.get('post','') if line: lines += line + "<br />" if lines: result += lines else: result += json.dumps(record,sort_keys=True,indent=4) result += '</p>' return result<|fim▁end|>
# look for collection metadata metadata = bibserver.dao.Collection.get_by_owner_coll(self.parts[0],self.parts[1])
<|file_name|>BrowserFactory.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- import sys<|fim▁hole|> class BrowserFactory(object): def create(self, type, *args, **kwargs): return getattr(__import__(type), type)(*args, **kwargs)<|fim▁end|>
sys.path.append('../browser_interface/browser')
<|file_name|>Startup.cpp<|end_file_name|><|fim▁begin|>/* UrchinTSS Copyright (c) Microsoft Corporation All rights reserved. MIT License Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the ""Software""), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ // Note: This code was derived from the TCG TPM 2.0 Library Specification at // http://www.trustedcomputinggroup.org/resources/tpm_library_specification #include "stdafx.h" UINT16 TPM2_Startup_Marshal( SESSION *sessionTable, UINT32 sessionCnt, Marshal_Parms *parms, BYTE **buffer, INT32 *size ) { if((parms == NULL) || // (parms->objectCntIn < TPM2_Startup_HdlCntIn) || (parms->objectCntOut < TPM2_Startup_HdlCntOut) || (parms->parmIn == NULL) || (parms->parmOut == NULL)) { return TPM_RC_FAILURE; } return Command_Marshal( TPM_CC_Startup, sessionTable, sessionCnt, TPM2_Startup_Parameter_Marshal, parms, buffer, size); } TPM_RC TPM2_Startup_Unmarshal( SESSION *sessionTable, UINT32 sessionCnt, Marshal_Parms *parms, BYTE **buffer, INT32 *size ) { TPM_RC result = TPM_RC_SUCCESS; // Startup_In *in = (Startup_In *)parms->parmIn; // Startup_Out *out = (Startup_Out *)parms->parmOut; if((parms == NULL) || // (parms->objectCntIn < TPM2_Startup_HdlCntIn) || (parms->objectCntOut < TPM2_Startup_HdlCntOut) || (parms->parmIn == NULL) || (parms->parmOut == NULL)) { return TPM_RC_FAILURE;<|fim▁hole|> TPM_CC_Startup, sessionTable, sessionCnt, TPM2_Startup_Parameter_Unmarshal, parms, buffer, size)) == TPM_RC_SUCCESS) { } return result; } UINT16 TPM2_Startup_Parameter_Marshal( Marshal_Parms *parms, BYTE **buffer, INT32 *size ) { Startup_In *in = (Startup_In *)parms->parmIn; // Startup_Out *out = (Startup_Out *)parms->parmOut; UINT16 parameterSize = 0; // Create the parameter buffer parameterSize += TPM_SU_Marshal(&in->startupType, buffer, size); if(size < 0) return TPM_RC_SIZE; return parameterSize; } TPM_RC TPM2_Startup_Parameter_Unmarshal( Marshal_Parms *parms, BYTE **buffer, INT32 *size ) { TPM_RC result = TPM_RC_SUCCESS; // Startup_In *in = (Startup_In *)parms->parmIn; // Startup_Out *out = (Startup_Out *)parms->parmOut; // Unmarshal the parameters UNREFERENCED_PARAMETER(parms); UNREFERENCED_PARAMETER(buffer); UNREFERENCED_PARAMETER(size); return result; }<|fim▁end|>
} if((result = Command_Unmarshal(
<|file_name|>DropDownPager.js<|end_file_name|><|fim▁begin|>define([ "dojo/_base/declare", "dojo/_base/lang", "dojo/store/Memory", "dojo/query", "dojo/dom-attr", "dijit/_WidgetBase", "dijit/_FocusMixin", "dijit/_TemplatedMixin", "dijit/form/FilteringSelect" ], function(declare, lang, Store, query, domAttr, _WidgetBase, _FocusMixin, _TemplatedMixin, FilteringSelect){ /*===== return declare([_WidgetBase, _FocusMixin, _TemplatedMixin], { // summary: // This grid bar plugin is to switch pages using select widget. // grid: [const] gridx.Grid // The grid widget this plugin works for. grid: null, // stepperClass: Function // The constructor of the select widget stepperClass: FilteringSelect, // stepperProps: Object // The properties passed to select widget when creating it. stepperProps: null, refresh: function(){} }); =====*/ return declare([_WidgetBase, _FocusMixin, _TemplatedMixin], { templateString: '<div class="gridxDropDownPager"><label class="gridxPagerLabel">${pageLabel}</label></div>', constructor: function(args){ lang.mixin(this, args.grid.nls); }, postCreate: function(){ var t = this, g = t.grid, c = 'connect', p = g.pagination; t[c](p, 'onSwitchPage', '_onSwitchPage'); t[c](p, 'onChangePageSize', 'refresh'); t[c](g.model, 'onSizeChange', 'refresh'); g.pagination.loaded.then(function(){ t.refresh(); //Set initial page after pagination module is ready. 
t._onSwitchPage(g.pagination.currentPage());<|fim▁hole|> grid: null, stepperClass: FilteringSelect, stepperProps: null, refresh: function(){ var t = this, mod = t.module, items = [], selectedItem, p = t.grid.pagination, pageCount = p.pageCount(), currentPage = p.currentPage(), stepper = t._pageStepperSelect, i, v, item; for(i = 0; i < pageCount; ++i){ v = i + 1; item = { id: v, label: v, value: v }; items.push(item); if(currentPage == i){ selectedItem = item; } } var store = new Store({data: items}); if(!stepper){ var cls = t.stepperClass, props = lang.mixin({ store: store, searchAttr: 'label', item: selectedItem, 'class': 'gridxPagerStepperWidget', onChange: function(page){ p.gotoPage(page - 1); } }, t.stepperProps || {}); stepper = t._pageStepperSelect = new cls(props); stepper.placeAt(t.domNode, "last"); stepper.startup(); domAttr.set(query('.gridxPagerLabel', t.domNode)[0], 'for', stepper.id); }else{ stepper.set('store', store); stepper.set('value', currentPage + 1); } stepper.set('disabled', pageCount <= 1); }, //Private---------------------------------------------------------------------------- _onSwitchPage: function(page){ this._pageStepperSelect.set('value', page + 1); } }); });<|fim▁end|>
}); }, //Public-----------------------------------------------------------------------------
<|file_name|>xy_pad.rs<|end_file_name|><|fim▁begin|>use { Backend, Color, Colorable, Frameable, FramedRectangle, FontSize, IndexSlot, Labelable, Line, Mouse, Positionable, Scalar, Sizeable, Text, Widget, }; use num::Float; use widget; use utils::{map_range, val_to_string}; /// Used for displaying and controlling a 2D point on a cartesian plane within a given range. /// /// Its reaction is triggered when the value is updated or if the mouse button is released while /// the cursor is above the rectangle. pub struct XYPad<'a, X, Y, F> { common: widget::CommonBuilder, x: X, min_x: X, max_x: X, y: Y, min_y: Y, max_y: Y, maybe_label: Option<&'a str>, /// The reaction function for the XYPad. /// /// It will be triggered when the value is updated or if the mouse button is released while the /// cursor is above the rectangle. pub maybe_react: Option<F>, style: Style, /// Indicates whether the XYPad will respond to user input. pub enabled: bool, } /// Unique kind for the widget type. pub const KIND: widget::Kind = "XYPad"; widget_style!{ KIND; /// Unique graphical styling for the XYPad. style Style { /// The color of the XYPad's rectangle. - color: Color { theme.shape_color } /// The width of the frame surrounding the rectangle. - frame: Scalar { theme.frame_width } /// The color of the surrounding rectangle frame. - frame_color: Color { theme.frame_color } /// The color of the XYPad's label and value label text. - label_color: Color { theme.label_color } /// The font size for the XYPad's label. - label_font_size: FontSize { theme.font_size_medium } /// The font size for the XYPad's *value* label. - value_font_size: FontSize { 14 } /// The thickness of the XYPad's crosshair lines. - line_thickness: Scalar { 2.0 } } } /// The state of the XYPad. 
#[derive(Clone, Debug, PartialEq)] pub struct State<X, Y> { x: X, min_x: X, max_x: X, y: Y, min_y: Y, max_y: Y, interaction: Interaction, rectangle_idx: IndexSlot, label_idx: IndexSlot, h_line_idx: IndexSlot, v_line_idx: IndexSlot, value_label_idx: IndexSlot, } /// The interaction state of the XYPad. #[derive(Debug, PartialEq, Clone, Copy)] pub enum Interaction { Normal, Highlighted, Clicked, } impl Interaction { /// The color associated with the current state. fn color(&self, color: Color) -> Color { match *self { Interaction::Normal => color, Interaction::Highlighted => color.highlighted(), Interaction::Clicked => color.clicked(), } } } /// Check the current state of the button. fn get_new_interaction(is_over: bool, prev: Interaction, mouse: Mouse) -> Interaction { use mouse::ButtonPosition::{Down, Up}; use self::Interaction::{Normal, Highlighted, Clicked}; match (is_over, prev, mouse.left.position) { (true, Normal, Down) => Normal, (true, _, Down) => Clicked, (true, _, Up) => Highlighted, (false, Clicked, Down) => Clicked, _ => Normal, } } impl<'a, X, Y, F> XYPad<'a, X, Y, F> { /// Build a new XYPad widget. 
pub fn new(x_val: X, min_x: X, max_x: X, y_val: Y, min_y: Y, max_y: Y) -> Self { XYPad { common: widget::CommonBuilder::new(), x: x_val, min_x: min_x, max_x: max_x, y: y_val, min_y: min_y, max_y: max_y, maybe_react: None, maybe_label: None, style: Style::new(), enabled: true, } } builder_methods!{ pub line_thickness { style.line_thickness = Some(Scalar) } pub value_font_size { style.value_font_size = Some(FontSize) } pub react { maybe_react = Some(F) } pub enabled { enabled = bool } } } impl<'a, X, Y, F> Widget for XYPad<'a, X, Y, F> where X: Float + ToString + ::std::fmt::Debug + ::std::any::Any, Y: Float + ToString + ::std::fmt::Debug + ::std::any::Any, F: FnOnce(X, Y), { type State = State<X, Y>; type Style = Style; fn common(&self) -> &widget::CommonBuilder { &self.common } fn common_mut(&mut self) -> &mut widget::CommonBuilder { &mut self.common } <|fim▁hole|> } fn init_state(&self) -> Self::State { State { interaction: Interaction::Normal, x: self.x, min_x: self.min_x, max_x: self.max_x, y: self.y, min_y: self.min_y, max_y: self.max_y, rectangle_idx: IndexSlot::new(), label_idx: IndexSlot::new(), h_line_idx: IndexSlot::new(), v_line_idx: IndexSlot::new(), value_label_idx: IndexSlot::new(), } } fn style(&self) -> Style { self.style.clone() } /// Update the XYPad's cached state. fn update<B: Backend>(self, args: widget::UpdateArgs<Self, B>) { use position::{Direction, Edge}; use self::Interaction::{Clicked, Highlighted, Normal}; let widget::UpdateArgs { idx, state, rect, style, mut ui, .. } = args; let XYPad { enabled, x, min_x, max_x, y, min_y, max_y, maybe_label, maybe_react, .. 
} = self; let maybe_mouse = ui.input(idx).maybe_mouse; let frame = style.frame(ui.theme()); let inner_rect = rect.pad(frame); let interaction = state.view().interaction; let new_interaction = match (enabled, maybe_mouse) { (false, _) | (true, None) => Normal, (true, Some(mouse)) => { let is_over_inner = inner_rect.is_over(mouse.xy); get_new_interaction(is_over_inner, interaction, mouse) }, }; // Capture the mouse if clicked, uncapture if released. match (interaction, new_interaction) { (Highlighted, Clicked) => { ui.capture_mouse(idx); }, (Clicked, Highlighted) | (Clicked, Normal) => { ui.uncapture_mouse(idx); }, _ => (), } // Determine new values from the mouse position over the pad. let (new_x, new_y) = match (maybe_mouse, new_interaction) { (None, _) | (_, Normal) | (_, Highlighted) => (x, y), (Some(mouse), Clicked) => { let clamped_x = inner_rect.x.clamp_value(mouse.xy[0]); let clamped_y = inner_rect.y.clamp_value(mouse.xy[1]); let (l, r, b, t) = inner_rect.l_r_b_t(); let new_x = map_range(clamped_x, l, r, min_x, max_x); let new_y = map_range(clamped_y, b, t, min_y, max_y); (new_x, new_y) }, }; // React if value is changed or the pad is clicked/released. if let Some(react) = maybe_react { let should_react = x != new_x || y != new_y || (interaction == Highlighted && new_interaction == Clicked) || (interaction == Clicked && new_interaction == Highlighted); if should_react { react(new_x, new_y); } } if interaction != new_interaction { state.update(|state| state.interaction = new_interaction); } let value_or_bounds_have_changed = { let v = state.view(); v.x != x || v.y != y || v.min_x != min_x || v.max_x != max_x || v.min_y != min_y || v.max_y != max_y }; if value_or_bounds_have_changed { state.update(|state| { state.x = x; state.y = y; state.min_x = min_x; state.max_x = max_x; state.min_y = min_y; state.max_y = max_y; }) } // The backdrop **FramedRectangle** widget. 
let dim = rect.dim(); let color = new_interaction.color(style.color(ui.theme())); let frame = style.frame(ui.theme()); let frame_color = style.frame_color(ui.theme()); let rectangle_idx = state.view().rectangle_idx.get(&mut ui); FramedRectangle::new(dim) .middle_of(idx) .graphics_for(idx) .color(color) .frame(frame) .frame_color(frame_color) .set(rectangle_idx, &mut ui); // Label **Text** widget. let label_color = style.label_color(ui.theme()); if let Some(label) = maybe_label { let label_idx = state.view().label_idx.get(&mut ui); let label_font_size = style.label_font_size(ui.theme()); Text::new(label) .middle_of(rectangle_idx) .graphics_for(idx) .color(label_color) .font_size(label_font_size) .set(label_idx, &mut ui); } // Crosshair **Line** widgets. let (w, h) = inner_rect.w_h(); let half_w = w / 2.0; let half_h = h / 2.0; let v_line_x = map_range(new_x, min_x, max_x, -half_w, half_w); let h_line_y = map_range(new_y, min_y, max_y, -half_h, half_h); let thickness = style.line_thickness(ui.theme()); let line_color = label_color.with_alpha(1.0); let v_line_start = [0.0, -half_h]; let v_line_end = [0.0, half_h]; let v_line_idx = state.view().v_line_idx.get(&mut ui); Line::centred(v_line_start, v_line_end) .color(line_color) .thickness(thickness) .x_y_relative_to(idx, v_line_x, 0.0) .graphics_for(idx) .parent(idx) .set(v_line_idx, &mut ui); let h_line_start = [-half_w, 0.0]; let h_line_end = [half_w, 0.0]; let h_line_idx = state.view().h_line_idx.get(&mut ui); Line::centred(h_line_start, h_line_end) .color(line_color) .thickness(thickness) .x_y_relative_to(idx, 0.0, h_line_y) .graphics_for(idx) .parent(idx) .set(h_line_idx, &mut ui); // Crosshair value label **Text** widget. 
let x_string = val_to_string(new_x, max_x, max_x - min_x, rect.w() as usize); let y_string = val_to_string(new_y, max_y, max_y - min_y, rect.h() as usize); let value_string = format!("{}, {}", x_string, y_string); let cross_hair_xy = [inner_rect.x() + v_line_x, inner_rect.y() + h_line_y]; const VALUE_TEXT_PAD: f64 = 5.0; let x_direction = match inner_rect.x.closest_edge(cross_hair_xy[0]) { Edge::End => Direction::Backwards, Edge::Start => Direction::Forwards, }; let y_direction = match inner_rect.y.closest_edge(cross_hair_xy[1]) { Edge::End => Direction::Backwards, Edge::Start => Direction::Forwards, }; let value_font_size = style.value_font_size(ui.theme()); let value_label_idx = state.view().value_label_idx.get(&mut ui); Text::new(&value_string) .x_direction_from(v_line_idx, x_direction, VALUE_TEXT_PAD) .y_direction_from(h_line_idx, y_direction, VALUE_TEXT_PAD) .color(line_color) .graphics_for(idx) .parent(idx) .font_size(value_font_size) .set(value_label_idx, &mut ui); } } impl<'a, X, Y, F> Colorable for XYPad<'a, X, Y, F> { builder_method!(color { style.color = Some(Color) }); } impl<'a, X, Y, F> Frameable for XYPad<'a, X, Y, F> { builder_methods!{ frame { style.frame = Some(Scalar) } frame_color { style.frame_color = Some(Color) } } } impl<'a, X, Y, F> Labelable<'a> for XYPad<'a, X, Y, F> { builder_methods!{ label { maybe_label = Some(&'a str) } label_color { style.label_color = Some(Color) } label_font_size { style.label_font_size = Some(FontSize) } } }<|fim▁end|>
fn unique_kind(&self) -> &'static str { KIND
<|file_name|>Test_objdump_addr.py<|end_file_name|><|fim▁begin|>''' Created on Oct 29, 2015 @author: yangke ''' from model.TaintVar import TaintVar from TraceTrackTest import TraceTrackTest class Test_objdump_addr: def test(self):<|fim▁hole|> not_pass_message="ERRORS FOUND IN BINUTILS-2.23 'addr[1]' TEST!" answer_path='answers/binutils/binutils-2.23/objdump/' name='binutils-2.23_objdump_addr' logfile_path="gdb_logs/binutils-2.23/binutils-2.23_objdump_gdb.txt" c_proj_path="gdb_logs/binutils-2.23/binutils-2.23" taintVars=[TaintVar("addr",['*'])] test=TraceTrackTest(answer_path,name,logfile_path,taintVars,passed_message,not_pass_message) test.set_c_proj_path(c_proj_path) passed=test.test() return passed if __name__ == '__main__': test=Test_objdump_addr() test.test()<|fim▁end|>
passed_message="BINUTILS-2.23 'addr[1]' TEST PASSED!"
<|file_name|>caprobe.py<|end_file_name|><|fim▁begin|># # Copyright 2009 Eigenlabs Ltd. http://www.eigenlabs.com # # This file is part of EigenD. # # EigenD is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # EigenD is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with EigenD. If not, see <http://www.gnu.org/licenses/>. # import macosx_native<|fim▁hole|><|fim▁end|>
def main(): macosx_native.probe_coreaudio(True,True)
<|file_name|>tables.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import, unicode_literals from django.core.paginator import Paginator from django.core import urlresolvers from django.utils.html import mark_safe, escape import django_tables2 as tables from django_tables2.tables import Table from django_tables2.utils import Accessor as A, AttributeDict class ActionsColumn(tables.Column):<|fim▁hole|> """ empty_values = () links = None delimiter = None def __init__(self, links=None, delimiter=' | ', **kwargs): super(ActionsColumn, self).__init__(**kwargs) self.orderable = False self.delimiter = delimiter if links is not None: self.links = links def render(self, value, record, bound_column): if not self.links: raise NotImplementedError('Links not assigned.') if not isinstance(self.links, (list, tuple,dict)): raise NotImplementedError('Links must be an iterable.') links = [] for link in self.links: title = link['title'] url = link['url'] attrs = link['attrs'] if 'attrs' in link else None if 'args' in link: args = [a.resolve(record) if isinstance(a, A) else a for a in link['args']] else: args = None attrs = AttributeDict(attrs if attrs is not None else self.attrs.get('a', {})) try: attrs['href'] = urlresolvers.reverse(url, args=args) except urlresolvers.NoReverseMatch: attrs['href'] = url links.append('<a {attrs}>{text}</a>'.format( attrs=attrs.as_html(), text=mark_safe(title) )) return mark_safe(self.delimiter.join(links)) class PaginateTable(Table): """Generic table class that makes use of Django's built in paginate functionality""" def __init__(self, *args, **kwargs): super(PaginateTable, self).__init__(*args, **kwargs) self.template = kwargs.get('template', 'fancy_paged_tables/table.html') def paginate(self, klass=Paginator, per_page=None, page=1, *args, **kwargs): """ Paginates the table using a paginator and creates a ``page`` property containing information for the current page. 
:type klass: Paginator class :param klass: a paginator class to paginate the results :type per_page: `int` :param per_page: how many records are displayed on each page :type page: `int` :param page: which page should be displayed. Extra arguments are passed to the paginator. Pagination exceptions (`~django.core.paginator.EmptyPage` and `~django.core.paginator.PageNotAnInteger`) may be raised from this method and should be handled by the caller. """ self.per_page_options = [20, 50, 100, 200] # This should probably be a passed in option self.per_page = per_page = per_page or self._meta.per_page self.paginator = klass(self.rows, per_page, *args, **kwargs) self.page = self.paginator.page(page) # Calc variables for use in displaying first, adjacent, and last page links adjacent_pages = 1 # This should probably be a passed in option # Starting page (first page between the ellipsis) start_page = max(self.page.number - adjacent_pages, 1) if start_page <= 3: start_page = 1 # Ending page (last page between the ellipsis) end_page = self.page.number + adjacent_pages + 1 if end_page >= self.paginator.num_pages - 1: end_page = self.paginator.num_pages + 1 # Paging vars used in template self.page_numbers = [n for n in range(start_page, end_page) if 0 < n <= self.paginator.num_pages] self.show_first = 1 not in self.page_numbers self.show_last = self.paginator.num_pages not in self.page_numbers<|fim▁end|>
""" This column allows you to pass in a list of links that will form an Action Column
<|file_name|>sat.py<|end_file_name|><|fim▁begin|>"""Class definitions for Speaker Adapted Triphone trainer""" from __future__ import annotations import multiprocessing as mp import os import re import shutil import subprocess import time from queue import Empty from typing import Dict, List, NamedTuple import tqdm from montreal_forced_aligner.acoustic_modeling.triphone import TriphoneTrainer from montreal_forced_aligner.exceptions import KaldiProcessingError from montreal_forced_aligner.utils import ( KaldiFunction, KaldiProcessWorker, Stopped, log_kaldi_errors, parse_logs, thirdparty_binary, ) __all__ = ["SatTrainer", "AccStatsTwoFeatsFunction", "AccStatsTwoFeatsArguments"] class AccStatsTwoFeatsArguments(NamedTuple): """Arguments for :func:`~montreal_forced_aligner.acoustic_modeling.sat.AccStatsTwoFeatsFunction`""" log_path: str dictionaries: List[str] ali_paths: Dict[str, str] acc_paths: Dict[str, str] model_path: str feature_strings: Dict[str, str] si_feature_strings: Dict[str, str] class AccStatsTwoFeatsFunction(KaldiFunction): """ Multiprocessing function for accumulating stats across speaker-independent and speaker-adapted features See Also -------- :meth:`.SatTrainer.create_align_model` Main function that calls this function in parallel :meth:`.SatTrainer.acc_stats_two_feats_arguments` Job method for generating arguments for this function :kaldi_src:`ali-to-post` Relevant Kaldi binary :kaldi_src:`gmm-acc-stats-twofeats` Relevant Kaldi binary Parameters ---------- args: :class:`~montreal_forced_aligner.acoustic_modeling.sat.AccStatsTwoFeatsArguments` Arguments for the function """ progress_pattern = re.compile(r"^LOG \(gmm-acc-stats-twofeats.* Average like for this file.*") done_pattern = re.compile( r"^LOG \(gmm-acc-stats-twofeats.*Done (?P<utterances>\d+) files, (?P<no_posteriors>\d+) with no posteriors, (?P<no_second_features>\d+) with no second features, (?P<errors>\d+) with other errors.$" ) def __init__(self, args: AccStatsTwoFeatsArguments): 
self.log_path = args.log_path self.dictionaries = args.dictionaries self.ali_paths = args.ali_paths self.acc_paths = args.acc_paths self.model_path = args.model_path self.feature_strings = args.feature_strings self.si_feature_strings = args.si_feature_strings def run(self): """Run the function""" with open(self.log_path, "w", encoding="utf8") as log_file: for dict_name in self.dictionaries: ali_path = self.ali_paths[dict_name] acc_path = self.acc_paths[dict_name] feature_string = self.feature_strings[dict_name] si_feature_string = self.si_feature_strings[dict_name] ali_to_post_proc = subprocess.Popen( [thirdparty_binary("ali-to-post"), f"ark:{ali_path}", "ark:-"], stderr=log_file, stdout=subprocess.PIPE, env=os.environ, ) acc_proc = subprocess.Popen( [ thirdparty_binary("gmm-acc-stats-twofeats"), self.model_path, feature_string, si_feature_string, "ark,s,cs:-", acc_path, ], stderr=subprocess.PIPE, encoding="utf8", stdin=ali_to_post_proc.stdout, env=os.environ, ) for line in acc_proc.stderr: log_file.write(line) m = self.progress_pattern.match(line.strip()) if m: yield 1, 0, 0, 0 else: m = self.done_pattern.match(line.strip()) if m: yield int(m.group("utterances")), int(m.group("no_posteriors")), int( m.group("no_second_features") ), int(m.group("errors")) class SatTrainer(TriphoneTrainer): """ Speaker adapted trainer (SAT), inherits from TriphoneTrainer Parameters ---------- subset : int Number of utterances to use, defaults to 10000 num_leaves : int Number of states in the decision tree, defaults to 2500 max_gaussians : int Number of gaussians in the decision tree, defaults to 15000 power : float Exponent for number of gaussians according to occurrence counts, defaults to 0.2 See Also -------- :class:`~montreal_forced_aligner.acoustic_modeling.triphone.TriphoneTrainer` For acoustic model training parsing parameters Attributes ---------- fmllr_iterations : list List of iterations to perform fMLLR calculation """ def __init__( self, subset: int = 10000, num_leaves: 
int = 2500, max_gaussians: int = 15000, power: float = 0.2, **kwargs, ): super().__init__(**kwargs) self.subset = subset self.num_leaves = num_leaves self.max_gaussians = max_gaussians self.power = power self.fmllr_iterations = [] def acc_stats_two_feats_arguments(self) -> List[AccStatsTwoFeatsArguments]: """ Generate Job arguments for :func:`~montreal_forced_aligner.acoustic_modeling.sat.AccStatsTwoFeatsFunction` Returns ------- list[:class:`~montreal_forced_aligner.acoustic_modeling.sat.AccStatsTwoFeatsArguments`] Arguments for processing """ feat_strings = self.worker.construct_feature_proc_strings() si_feat_strings = self.worker.construct_feature_proc_strings(speaker_independent=True) return [ AccStatsTwoFeatsArguments( os.path.join(self.working_log_directory, f"acc_stats_two_feats.{j.name}.log"), j.current_dictionary_names, j.construct_path_dictionary(self.working_directory, "ali", "ark"), j.construct_path_dictionary(self.working_directory, "two_feat_acc", "ark"), self.model_path, feat_strings[j.name], si_feat_strings[j.name], ) for j in self.jobs ] def calc_fmllr(self) -> None: self.worker.calc_fmllr() def compute_calculated_properties(self) -> None: """Generate realignment iterations, initial gaussians, and fMLLR iterations based on configuration""" super().compute_calculated_properties() self.fmllr_iterations = [] max_fmllr_iter = int(self.num_iterations / 2) - 1 for i in range(1, max_fmllr_iter): if i < max_fmllr_iter / 2 and i % 2 == 0: self.fmllr_iterations.append(i) self.fmllr_iterations.append(max_fmllr_iter) def _trainer_initialization(self) -> None: """Speaker adapted training initialization""" self.speaker_independent = False if os.path.exists(os.path.join(self.working_directory, "1.mdl")): return if os.path.exists(os.path.join(self.previous_aligner.working_directory, "lda.mat")): shutil.copyfile( os.path.join(self.previous_aligner.working_directory, "lda.mat"), os.path.join(self.working_directory, "lda.mat"), ) self.tree_stats() self._setup_tree() 
self.compile_train_graphs() self.convert_alignments() os.rename(self.model_path, self.next_model_path) self.iteration = 1 if os.path.exists(os.path.join(self.previous_aligner.working_directory, "trans.0.ark")): for j in self.jobs: for path in j.construct_path_dictionary( self.previous_aligner.working_directory, "trans", "ark" ).values(): shutil.copy( path, path.replace( self.previous_aligner.working_directory, self.working_directory ), ) else: self.worker.current_trainer = self self.calc_fmllr() parse_logs(self.working_log_directory) def finalize_training(self) -> None: """ Finalize training and create a speaker independent model for initial alignment Raises ------ :class:`~montreal_forced_aligner.exceptions.KaldiProcessingError` If there were any errors in running Kaldi binaries """ try: self.create_align_model() super().finalize_training() shutil.copy( os.path.join(self.working_directory, f"{self.num_iterations+1}.alimdl"), os.path.join(self.working_directory, "final.alimdl"), ) except Exception as e: if isinstance(e, KaldiProcessingError): log_kaldi_errors(e.error_logs, self.logger) e.update_log_file(self.logger) raise def train_iteration(self) -> None: """ Run a single training iteration """ if os.path.exists(self.next_model_path): self.iteration += 1 return if self.iteration in self.realignment_iterations: self.align_utterances() if self.debug: self.compute_alignment_improvement() if self.iteration in self.fmllr_iterations: self.calc_fmllr() self.acc_stats() parse_logs(self.working_log_directory) if self.iteration < self.final_gaussian_iteration: self.increment_gaussians() self.iteration += 1 @property def alignment_model_path(self) -> str: """Alignment model path""" path = self.model_path.replace(".mdl", ".alimdl") if os.path.exists(path): return path return self.model_path def create_align_model(self) -> None: """ Create alignment model for speaker-adapted training that will use speaker-independent features in later aligning. 
See Also -------- :func:`~montreal_forced_aligner.acoustic_modeling.sat.AccStatsTwoFeatsFunction` Multiprocessing helper function for each job :meth:`.SatTrainer.acc_stats_two_feats_arguments` Job method for generating arguments for the helper function :kaldi_src:`gmm-est` Relevant Kaldi binary :kaldi_src:`gmm-sum-accs` Relevant Kaldi binary :kaldi_steps:`train_sat` Reference Kaldi script """ self.logger.info("Creating alignment model for speaker-independent features...") begin = time.time() arguments = self.acc_stats_two_feats_arguments() with tqdm.tqdm(total=self.num_utterances) as pbar: if self.use_mp: manager = mp.Manager() error_dict = manager.dict() return_queue = manager.Queue() stopped = Stopped() procs = [] for i, args in enumerate(arguments): function = AccStatsTwoFeatsFunction(args) p = KaldiProcessWorker(i, return_queue, function, error_dict, stopped) procs.append(p) p.start() while True: try: ( num_utterances, no_posteriors, no_second_features, errors, ) = return_queue.get(timeout=1) if stopped.stop_check(): continue except Empty: for proc in procs: if not proc.finished.stop_check(): break else: break continue pbar.update(num_utterances + no_posteriors + no_second_features + errors) for p in procs: p.join() if error_dict: for v in error_dict.values(): raise v<|fim▁hole|> function = AccStatsTwoFeatsFunction(args) for ( num_utterances, no_posteriors, no_second_features, errors, ) in function.run(): pbar.update(num_utterances + no_posteriors + no_second_features + errors) log_path = os.path.join(self.working_log_directory, "align_model_est.log") with open(log_path, "w", encoding="utf8") as log_file: acc_files = [] for x in arguments: acc_files.extend(x.acc_paths.values()) sum_proc = subprocess.Popen( [thirdparty_binary("gmm-sum-accs"), "-"] + acc_files, stderr=log_file, stdout=subprocess.PIPE, env=os.environ, ) est_proc = subprocess.Popen( [ thirdparty_binary("gmm-est"), "--remove-low-count-gaussians=false", f"--power={self.power}", self.model_path, "-", 
self.model_path.replace(".mdl", ".alimdl"), ], stdin=sum_proc.stdout, stderr=log_file, env=os.environ, ) est_proc.communicate() parse_logs(self.working_log_directory) if not self.debug: for f in acc_files: os.remove(f) self.logger.debug(f"Alignment model creation took {time.time() - begin}")<|fim▁end|>
else: for args in arguments:
<|file_name|>exceptions.py<|end_file_name|><|fim▁begin|>""" PynamoDB exceptions """ from typing import Any, Optional import botocore.exceptions class PynamoDBException(Exception): """ A common exception class """ def __init__(self, msg: Optional[str] = None, cause: Optional[Exception] = None) -> None: self.msg = msg self.cause = cause super(PynamoDBException, self).__init__(self.msg) @property def cause_response_code(self) -> Optional[str]: return getattr(self.cause, 'response', {}).get('Error', {}).get('Code') @property def cause_response_message(self) -> Optional[str]: return getattr(self.cause, 'response', {}).get('Error', {}).get('Message') class PynamoDBConnectionError(PynamoDBException): """ A base class for connection errors """ msg = "Connection Error" class DeleteError(PynamoDBConnectionError): """ Raised when an error occurs deleting an item """ msg = "Error deleting item" class QueryError(PynamoDBConnectionError): """ Raised when queries fail """ msg = "Error performing query" class ScanError(PynamoDBConnectionError): """ Raised when a scan operation fails """ msg = "Error performing scan" class PutError(PynamoDBConnectionError): """ Raised when an item fails to be created """ msg = "Error putting item" class UpdateError(PynamoDBConnectionError): """ Raised when an item fails to be updated """ msg = "Error updating item" class GetError(PynamoDBConnectionError): """ Raised when an item fails to be retrieved """ msg = "Error getting item" class TableError(PynamoDBConnectionError): """ An error involving a dynamodb table operation """ msg = "Error performing a table operation" class DoesNotExist(PynamoDBException): """ Raised when an item queried does not exist """ msg = "Item does not exist" class TableDoesNotExist(PynamoDBException): """ Raised when an operation is attempted on a table that doesn't exist """ def __init__(self, table_name: str) -> None: msg = "Table does not exist: `{}`".format(table_name) super(TableDoesNotExist, self).__init__(msg) class 
TransactWriteError(PynamoDBException): """ Raised when a TransactWrite operation fails """ pass class TransactGetError(PynamoDBException): """ Raised when a TransactGet operation fails """ pass class InvalidStateError(PynamoDBException): """ Raises when the internal state of an operation context is invalid """ msg = "Operation in invalid state" class AttributeDeserializationError(TypeError): """ Raised when attribute type is invalid """ def __init__(self, attr_name: str, attr_type: str): msg = "Cannot deserialize '{}' attribute from type: {}".format(attr_name, attr_type) super(AttributeDeserializationError, self).__init__(msg) class AttributeNullError(ValueError): def __init__(self, attr_name: str) -> None: self.attr_path = attr_name def __str__(self): return f"Attribute '{self.attr_path}' cannot be None" <|fim▁hole|> class VerboseClientError(botocore.exceptions.ClientError): def __init__(self, error_response: Any, operation_name: str, verbose_properties: Optional[Any] = None): """ Modify the message template to include the desired verbose properties """ if not verbose_properties: verbose_properties = {} self.MSG_TEMPLATE = ( 'An error occurred ({{error_code}}) on request ({request_id}) ' 'on table ({table_name}) when calling the {{operation_name}} ' 'operation: {{error_message}}' ).format(request_id=verbose_properties.get('request_id'), table_name=verbose_properties.get('table_name')) super(VerboseClientError, self).__init__(error_response, operation_name)<|fim▁end|>
def prepend_path(self, attr_name: str) -> None: self.attr_path = attr_name + '.' + self.attr_path
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>from django.conf import settings BACKENDS = getattr(settings, 'FAIREPART_BACKENDS', ( 'fairepart.backends.facebook.FacebookBackend', 'fairepart.backends.google.GoogleOAuth2Backend', )) RELATION_LIST_PAGINATE_BY = getattr(settings, 'FAIREPART_RELATION_LIST_PAGINATE_BY', 5)<|fim▁hole|><|fim▁end|>
GOOGLE_APP_NAME = getattr(settings, 'FAIREPART_GOOGLE_APP_NAME', '')
<|file_name|>web_modform_space_computing.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- #***************************************************************************** # Copyright (C) 2010 Fredrik Strömberg <[email protected]>, # Stephan Ehlen <> # Distributed under the terms of the GNU General Public License (GPL) # # This code is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # The full text of the GPL is available at: # # http://www.gnu.org/licenses/ #***************************************************************************** r""" Class for newforms in format which can be presented on the web easily AUTHORS: - Fredrik Stroemberg - Stephan Ehlen """ from sage.all import ZZ, QQ, DirichletGroup, CuspForms, Gamma0, ModularSymbols, Newforms, trivial_character, is_squarefree, divisors, RealField, ComplexField, prime_range, I, join, gcd, Cusp, Infinity, ceil, CyclotomicField, exp, pi, primes_first_n, euler_phi, RR, prime_divisors, Integer, matrix,NumberField,PowerSeriesRing,cached_function,AlphabeticStrings from sage.rings.power_series_poly import PowerSeries_poly from sage.all import Parent, SageObject, dimension_new_cusp_forms, vector, dimension_modular_forms, dimension_cusp_forms, EisensteinForms, Matrix, floor, denominator, latex, is_prime, prime_pi, next_prime, previous_prime,primes_first_n, previous_prime, factor, loads,save,dumps,deepcopy import re import yaml from flask import url_for from wmf import wmf_logger from lmfdb.modular_forms.elliptic_modular_forms import emf_version from sage.rings.number_field.number_field_base import NumberField as NumberField_class from lmfdb.modular_forms.elliptic_modular_forms.backend import connect_to_modularforms_db,get_files_from_gridfs from lmfdb.modular_forms.elliptic_modular_forms.backend.web_modform_space import WebModFormSpace_class def 
WebModFormSpace_computing(N=1, k=2, chi=1, cuspidal=1, prec=10, bitprec=53, data=None, verbose=0,**kwds): r""" Constructor for WebNewForms with added 'nicer' error message. """ if data is None: data = {} if cuspidal <> 1: raise IndexError,"We are very sorry. There are only cuspidal spaces currently in the database!" #try: F = WebModFormSpace_class(N=N, k=k, chi=chi, cuspidal=cuspidal, prec=prec, bitprec=bitprec, data=data, verbose=verbose,**kwds) #except Exception as e: # wmf_logger.critical("Could not construct WebModFormSpace with N,k,chi = {0}. Error: {1}".format( (N,k,chi),e.message)) # #raise e # #raise IndexError,"We are very sorry. The sought space could not be found in the database." return F from lmfdb.modular_forms.elliptic_modular_forms.backend import WebModFormSpace class WebModFormSpace_computing_class(WebModFormSpace_class): r""" Space of cuspforms to be presented on the web. G = NS. EXAMPLES:: sage: WS=WebModFormSpace(2,39) """ def __init__(self, N=1, k=2, chi=1, cuspidal=1, prec=10, bitprec=53, data=None, verbose=0,get_from_db=True): r""" Init self. INPUT: - 'k' -- weight - 'N' -- level - 'chi' -- character - 'cuspidal' -- 1 if space of cuspforms, 0 if all modforms """ wmf_logger.debug("WebModFormSpace with k,N,chi={0}".format( (k,N,chi))) super(WebModFormSpace_computing_class,self).__init__(N,k,chi,cuspidal,prec,bitprec,data, verbose,get_from_db=False) ## In this subclass we add properties which are not ## supposed to be used on the web or stored in the database self._dimension = None self._dimension_oldspace = None self._newforms = None self._modular_symbols = None self.compute_additional_properties() self.insert_into_db() def compute_additional_properties(self): r""" Compute additional properties. 
""" ### Set / Compute / fetch everything we need if self._group is None: self._group = Gamma0(self._N) self.get_modular_symbols() self._newspace = self._modular_symbols.cuspidal_submodule().new_submodule() self.get_newform_factors() if self._newforms == {} and self._newspace.dimension()>0: for i in self.labels(): self._newforms[i]=None if len(self._ap) == 0: self._ap = self._get_aps(prec=self._prec) self.set_dimensions() if self.dimension() == self.dimension_newspace(): self._is_new = True else: self._is_new = False self.set_sturm_bound() self.set_oldspace_decomposition() self.insert_into_db() def newform_factors(self): r""" Return newform factors of self. """ if self._newform_factors is None: self._newform_factors = self._get_newform_factors() return self._newform_factors def character_orbit_rep(self,k=None): r""" Returns canonical representative of the Galois orbit nr. k acting on the ambient space of self. """ if self._character_orbit_rep is None: x = self.character().character().galois_orbit()[0] self._character_orbit_rep = WebChar(x.modulus(),x.number()) return self._character_orbit_rep ## Database fetching functions. def insert_into_db(self): r""" Insert a dictionary of data for self into the collection WebModularforms.files """ wmf_logger.debug("inserting self into db! 
name={0}".format(self._name)) db = connect_to_modularforms_db('WebModformspace.files') fs = get_files_from_gridfs('WebModformspace') s = {'name':self._name,'version':emf_version} rec = db.find_one(s) if rec: id = rec.get('_id') else: id = None if id<>None: wmf_logger.debug("Removing self from db with id={0}".format(id)) fs.delete(id) fname = "webmodformspace-{0:0>5}-{1:0>3}-{2:0>3}".format(self._N,self._k,self._chi) d = self.to_dict() d.pop('_ap',None) # Since the ap's are already in the database we don't need them here id = fs.put(dumps(d),filename=fname,N=int(self._N),k=int(self._k),chi=int(self._chi),name=self._name,version=emf_version) wmf_logger.debug("inserted :{0}".format(id)) def get_from_db(self): r""" Fetch dictionary data from the database. """ db = connect_to_modularforms_db('WebModformspace.files') s = {'name':self._name,'version':emf_version} wmf_logger.debug("Looking in DB for rec={0}".format(s)) f = db.find_one(s) wmf_logger.debug("Found rec={0}".format(f)) if f<>None: id = f.get('_id') fs = get_files_from_gridfs('WebModformspace') f = fs.get(id) wmf_logger.debug("Getting rec={0}".format(f)) d = loads(f.read()) return d return {} def _get_aps(self, prec=-1): r""" Get aps from database if they exist. 
""" ap_files = connect_to_modularforms_db('ap.files') key = {'k': int(self._k), 'N': int(self._N), 'cchi': int(self._chi)} key['prec'] = {"$gt": int(prec - 1)} ap_from_db = ap_files.find(key).sort("prec") wmf_logger.debug("finds={0}".format(ap_from_db)) wmf_logger.debug("finds.count()={0}".format(ap_from_db.count())) fs = get_files_from_gridfs('ap') aplist = {} for i in range(len(self.labels())): aplist[self.labels()[i]]={} for rec in ap_from_db: wmf_logger.debug("rec={0}".format(rec)) ni = rec.get('newform') if ni is None: for a in self.labels(): aplist[a][prec]=None return aplist a = self.labels()[ni] cur_prec = rec['prec'] if aplist.get(a,{}).get(cur_prec,None) is None: aplist[a][prec]=loads(fs.get(rec['_id']).read()) if cur_prec > prec and prec>0: # We are happy with these coefficients. return aplist return aplist def get_modular_symbols(self):<|fim▁hole|> Get Modular Symbols from database they exist. """ if not self._modular_symbols is None: return modular_symbols = connect_to_modularforms_db('Modular_symbols.files') key = {'k': int(self._k), 'N': int(self._N), 'cchi': int(self._chi)} modular_symbols_from_db = modular_symbols.find_one(key) wmf_logger.debug("found ms={0}".format(modular_symbols_from_db)) if modular_symbols_from_db is None: ms = None else: id = modular_symbols_from_db['_id'] fs = get_files_from_gridfs('Modular_symbols') ms = loads(fs.get(id).read()) self._id = id self._modular_symbols = ms def get_newform_factors(self): r""" Get New form factors from database they exist. """ if not self._newforms is None and self._newforms == []: return factors = connect_to_modularforms_db('Newform_factors.files') key = {'k': int(self._k), 'N': int(self._N), 'cchi': int(self._chi),} factors_from_db = factors.find(key).sort('newform',int(1)) wmf_logger.debug("found factors={0}".format(factors_from_db)) self._newforms = {} if factors_from_db.count()==0: raise ValueError,"Space is not in database!" 
else: facts = [] self._labels = [] fs = get_files_from_gridfs('Newform_factors') for rec in factors_from_db: factor = loads(fs.get(rec['_id']).read()) label = orbit_label(rec['newform']) self._galois_orbits_labels.append(label) self._newforms[label] = factor def __reduce__(self): r""" Used for pickling. """ data = self.to_dict() return(unpickle_wmfs_v1, (self._k, self._N, self._chi, self._cuspidal, self._prec, self._bitprec, data)) def _repr_(self): r""" Return string representation of self. """ s = 'Space of Cusp forms on ' + str(self.group()) + ' of weight ' + str(self._k) s += ' and dimension ' + str(self.dimension()) return s def _computation_too_hard(self,comp='decomp'): r""" See if the supplied parameters make computation too hard or if we should try to do it on the fly. TODO: Actually check times. """ if comp=='decomp': if self._N > 50: return True if self._chi > 1 and self._N > 100: return True if self._k+self._N > 100: return True return False # internal methods to generate properties of self def galois_decomposition(self): r""" We compose the new subspace into galois orbits of new cusp forms. """ from sage.monoids.all import AlphabeticStrings if(len(self._galois_decomposition) != 0): return self._galois_decomposition if '_HeckeModule_free_module__decomposition' in self._newspace.__dict__: L = self._newspace.decomposition() else: decomp = self.newform_factors() if len(decomp)>0: L = filter(lambda x: x.is_new() and x.is_cuspidal(), decomp) wmf_logger.debug("found L:{0}".format(L)) elif self._computation_too_hard(): L = [] raise IndexError,"No decomposition was found in the database!" 
wmf_logger.debug("no decomp in database!") else: # compute L = self._newspace.decomposition() wmf_logger.debug("newspace :".format(self._newspace)) wmf_logger.debug("computed L:".format(L)) self._galois_decomposition = L # we also label the compnents x = AlphabeticStrings().gens() for j in range(len(L)): if(j < 26): label = str(x[j]).lower() else: j1 = j % 26 j2 = floor(QQ(j) / QQ(26)) label = str(x[j1]).lower() label = label + str(j2) if label not in self._galois_orbits_labels: self._galois_orbits_labels.append(label) return L def galois_orbit_label(self, j): r""" Return the label of the Galois orbit nr. j """ if(len(self._galois_orbits_labels) == 0): self.galois_decomposition() return self._galois_orbits_labels[j] ### Dimension formulas, calculates dimensions of subspaces of self. def set_dimensions(self): r""" The dimension of the subspace of newforms in self. """ if self._chi != 1: x = self.character().sage_character() else: x = self.level() k = self.weight() # Ambient modular formsspace if self._dimension_modular_forms is None: self._dimension_modular_forms = int(dimension_modular_forms(x,k)) # Cuspidal subspace if self._dimension_cusp_forms is None: self._dimension_cusp_forms = int(dimension_cusp_forms(x,k)) # New cuspidal subspace if self._dimension_new_cusp_forms is None: self._dimension_new_cusp_forms = int(dimension_new_cusp_forms(x,k)) # New subspace of ambient space if self._dimension_newspace is None: if self._cuspidal == 1: self._dimension_newspace = self.dimension_new_cusp_forms() else: self._dimension_newspace = self._newspace.dimension() # Old subspace of self. 
if self._dimension_oldspace is None: if self._cuspidal == 1: self._dimension_oldspace = self.dimension_cusp_forms() - self.dimension_new_cusp_forms() else: self._dimension_oldspace = self.dimension_modular_forms() - self.dimension_newforms() if self._dimension is None: if self._cuspidal == 1: self._dimension = self.dimension_cusp_forms() elif self._cuspidal == 0: self._dimension = self.dimension_modular_forms() def set_sturm_bound(self): r""" Return the Sturm bound of S_k(N,xi), i.e. the number of coefficients necessary to determine a form uniquely in the space. """ if self._sturm_bound is None: self._sturm_bound = self._modular_symbols.sturm_bound() def set_oldspace_decomposition(self): r""" Get decomposition of the oldspace in self into submodules. """ if not (self._oldspace_decomposition is None or self._oldspace_decomposition == []): return N = self._N k = self._k M = self._modular_symbols.cuspidal_submodule() L = list() L = [] check_dim = self.dimension_newspace() if(check_dim == self.dimension()): return L if(self._verbose > 1): wmf_logger.debug("check_dim:={0}".format(check_dim)) for d in divisors(N): if(d == 1): continue q = N.divide_knowing_divisible_by(d) if(self._verbose > 1): wmf_logger.debug("d={0}".format(d)) # since there is a bug in the current version of sage # we have to try this... 
try: O = M.old_submodule(d) except AttributeError: O = M.zero_submodule() Od = O.dimension() if(self._verbose > 1): wmf_logger.debug("O={0}".format(O)) wmf_logger.debug("Od={0}".format(Od)) if(d == N and k == 2 or Od == 0): continue if self.character().is_trivial(): # S=ModularSymbols(ZZ(N/d),k,sign=1).cuspidal_submodule().new_submodule(); Sd=S.dimension() wmf_logger.debug("q={0},{1}".format(q, type(q))) wmf_logger.debug("k={0},{1}".format(k, type(k))) Sd = dimension_new_cusp_forms(q, k) if(self._verbose > 1): wmf_logger.debug("Sd={0}".format(Sd)) if Sd > 0: mult = len(divisors(ZZ(d))) check_dim = check_dim + mult * Sd L.append((q, 0, mult, Sd)) else: xd = self.character().decomposition() for xx in xd: if xx.modulus() == q: Sd = dimension_new_cusp_forms(xx, k) if Sd > 0: # identify this character for internal storage... should be optimized x_k = self.conrey_character(xx).number() mult = len(divisors(ZZ(d))) check_dim = check_dim + mult * Sd L.append((q, x_k, mult, Sd)) if(self._verbose > 1): wmf_logger.debug("mult={0},N/d={1},Sd={2}".format(mult, ZZ(N / d), Sd)) wmf_logger.debug("check_dim={0}".format(check_dim)) check_dim = check_dim - M.dimension() if(check_dim != 0): raise ArithmeticError("Something wrong! check_dim=%s" % check_dim) self._oldspace_decomposition = L @cached_function def orbit_label(j): x = AlphabeticStrings().gens() if(j < 26): label = str(x[j]).lower() else: j1 = j % 26 j2 = floor(QQ(j) / QQ(26)) label = str(x[j1]).lower() label = label + str(j2) return label<|fim▁end|>
r"""
<|file_name|>placeholder.go<|end_file_name|><|fim▁begin|>/* Copyright 2020 The Knative Authors <|fim▁hole|>Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // Package deployments is a placeholder that allows us to pull in config files // via go mod vendor. package deployments<|fim▁end|>
<|file_name|>lxqt-notificationd_cy.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?> <!DOCTYPE TS> <TS version="2.1" language="cy"> <context> <name>NotificationActionsComboWidget</name> <message> <location filename="../notificationwidgets.cpp" line="123"/> <source>Actions:</source> <translation></translation> </message> <message> <location filename="../notificationwidgets.cpp" line="142"/> <source>OK</source> <translation></translation> </message> </context> <context> <name>Notifyd</name> <message> <location filename="../notifyd.cpp" line="266"/> <source>Clear All</source> <translation type="unfinished"></translation> </message> <message><|fim▁hole|> </message> <message numerus="yes"> <location filename="../notifyd.cpp" line="281"/> <location filename="../notifyd.cpp" line="338"/> <location filename="../notifyd.cpp" line="358"/> <source>%n Unattended Notification(s)</source> <translation type="unfinished"> <numerusform></numerusform> <numerusform></numerusform> <numerusform></numerusform> <numerusform></numerusform> <numerusform></numerusform> </translation> </message> </context> </TS><|fim▁end|>
<location filename="../notifyd.cpp" line="274"/> <source>Options</source> <translation type="unfinished"></translation>
<|file_name|>stdio.py<|end_file_name|><|fim▁begin|># Twisted, the Framework of Your Internet # Copyright (C) 2001 Matthew W. Lefkowitz # # This library is free software; you can redistribute it and/or # modify it under the terms of version 2.1 of the GNU Lesser General Public # License as published by the Free Software Foundation. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA """Standard input/out/err support. API Stability: semi-stable Future Plans: support for stderr, perhaps Maintainer: U{Itamar Shtull-Trauring<mailto:[email protected]>} """ # system imports import sys, os, select, errno # Sibling Imports import abstract, fdesc, protocol from main import CONNECTION_LOST _stdio_in_use = 0 class StandardIOWriter(abstract.FileDescriptor): connected = 1 ic = 0 def __init__(self): abstract.FileDescriptor.__init__(self) self.fileno = sys.__stdout__.fileno fdesc.setNonBlocking(self.fileno()) def writeSomeData(self, data): try: return os.write(self.fileno(), data) return rv except IOError, io: if io.args[0] == errno.EAGAIN: return 0 elif io.args[0] == errno.EPERM: return 0 return CONNECTION_LOST except OSError, ose: if ose.errno == errno.EPIPE: return CONNECTION_LOST if ose.errno == errno.EAGAIN:<|fim▁hole|> return 0 raise def connectionLost(self, reason): abstract.FileDescriptor.connectionLost(self, reason) os.close(self.fileno()) class StandardIO(abstract.FileDescriptor): """I can connect Standard IO to a twisted.protocol I act as a selectable for sys.stdin, and provide a write method that writes to stdout. 
""" def __init__(self, protocol): """Create me with a protocol. This will fail if a StandardIO has already been instantiated. """ abstract.FileDescriptor.__init__(self) global _stdio_in_use if _stdio_in_use: raise RuntimeError, "Standard IO already in use." _stdio_in_use = 1 self.fileno = sys.__stdin__.fileno fdesc.setNonBlocking(self.fileno()) self.protocol = protocol self.startReading() self.writer = StandardIOWriter() self.protocol.makeConnection(self) def write(self, data): """Write some data to standard output. """ self.writer.write(data) def doRead(self): """Some data's readable from standard input. """ return fdesc.readFromFD(self.fileno(), self.protocol.dataReceived) def closeStdin(self): """Close standard input. """ self.writer.loseConnection() def connectionLost(self, reason): """The connection was lost. """ self.protocol.connectionLost()<|fim▁end|>
<|file_name|>ICacheReplaceEntryProcessor.java<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at *<|fim▁hole|> * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.internal.adapter; import javax.cache.processor.EntryProcessor; import javax.cache.processor.EntryProcessorException; import javax.cache.processor.MutableEntry; import java.io.Serializable; public class ICacheReplaceEntryProcessor implements EntryProcessor<Integer, String, String>, Serializable { private static final long serialVersionUID = -396575576353368113L; @Override public String process(MutableEntry<Integer, String> entry, Object... arguments) throws EntryProcessorException { String value = entry.getValue(); if (value == null) { return null; } String oldString = (String) arguments[0]; String newString = (String) arguments[1]; String result = value.replace(oldString, newString); entry.setValue(result); return result; } }<|fim▁end|>
<|file_name|>foobar.py<|end_file_name|><|fim▁begin|># Copyright 2021, Kay Hayen, mailto:[email protected] # # Python tests originally created or extracted from other peoples work. The # parts were too small to be protected. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0<|fim▁hole|># Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """ Using absolute import, do from module imports. """ from __future__ import absolute_import, print_function from foobar import util from . import local # pylint: disable=unused-import class Foobar(object): def __init__(self): print(util.someFunction())<|fim▁end|>
#
<|file_name|>ServiceMapParserTest.java<|end_file_name|><|fim▁begin|>package fr.adrienbrault.idea.symfony2plugin.tests; import fr.adrienbrault.idea.symfony2plugin.ServiceMap; import fr.adrienbrault.idea.symfony2plugin.ServiceMapParser; import org.junit.Test; import org.junit.Assert; import java.io.ByteArrayInputStream; import java.util.Map; /** * @author Adrien Brault <[email protected]> */ public class ServiceMapParserTest extends Assert { @Test public void testParse() throws Exception { ServiceMapParser serviceMapParser = new ServiceMapParser();<|fim▁hole|> "<service id=\"secret\" class=\"AdrienBrault\\Secret\" public=\"false\"/>" + "</container>"; ServiceMap serviceMap = serviceMapParser.parse(new ByteArrayInputStream(xmlString.getBytes())); assertTrue(serviceMap instanceof ServiceMap); assertEquals("\\AdrienBrault\\Awesome", serviceMap.getMap().get("adrienbrault")); assertEquals("\\AdrienBrault\\Awesome", serviceMap.getPublicMap().get("adrienbrault")); assertEquals("\\AdrienBrault\\Secret", serviceMap.getMap().get("secret")); assertNull(serviceMap.getPublicMap().get("secret")); assertEquals("\\Symfony\\Component\\HttpFoundation\\Request", serviceMap.getMap().get("request")); assertEquals("\\Symfony\\Component\\HttpFoundation\\Request", serviceMap.getPublicMap().get("request")); assertEquals("\\Symfony\\Component\\DependencyInjection\\ContainerInterface", serviceMap.getMap().get("service_container")); assertEquals("\\Symfony\\Component\\DependencyInjection\\ContainerInterface", serviceMap.getPublicMap().get("service_container")); assertEquals("\\Symfony\\Component\\HttpKernel\\KernelInterface", serviceMap.getMap().get("kernel")); assertEquals("\\Symfony\\Component\\HttpKernel\\KernelInterface", serviceMap.getPublicMap().get("kernel")); } }<|fim▁end|>
String xmlString = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" + "<container>" + "<service id=\"adrienbrault\" class=\"AdrienBrault\\Awesome\"/>" +
<|file_name|>pull_data.py<|end_file_name|><|fim▁begin|>from path import Path import zipfile import urllib2 Path('tmp').mkdir_p() for model_name in ('seq2seq','seq2tree'): for data_name in ('jobqueries','geoqueries','atis'): fn = '%s_%s.zip' % (model_name, data_name) link = 'http://dong.li/lang2logic/' + fn with open('tmp/' + fn, 'wb') as f_out: f_out.write(urllib2.urlopen(link).read()) with zipfile.ZipFile('tmp/' + fn) as zf: zf.extractall('./%s/%s/data/' % (model_name, data_name))<|fim▁hole|>Path('tmp').rmtree()<|fim▁end|>
<|file_name|>cropsl.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python '''You can easily read off two sample,line coordinates from qview, but ISIS crop wants one sample,line and then offsets. This just takes two coordinates,<|fim▁hole|># Copyright 2016, 2019, Ross A. Beyer ([email protected]) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # The arguments to ISIS crop require a sample/line pair and then a set of offsets. # I typically have two sample/line pairs read from qview, and got tired of always # bringing up the calculator to compute the offsets. 
import argparse import subprocess import sys from pathlib import Path def crop(fr, to, samp, line, nsamp, nline): cmd = ('crop', f'from= {fr}', f'to= {to}', f'samp= {samp}', f'line= {line}', f'nsamp= {nsamp}', f'nline= {nline}') return subprocess.run(cmd, check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True) def calcoffset(first, second): (f_samp, f_line) = first.split(':') (s_samp, s_line) = second.split(':') nsamp = int(s_samp) - int(f_samp) nline = int(s_line) - int(f_line) return(f_samp, f_line, str(nsamp), str(nline)) def main(): parser = argparse.ArgumentParser(description=__doc__) parser.add_argument('-o', '--output', help="The output filename.") parser.add_argument('-f', '--first', help='The sample and line of the first point, ' 'separated by a colon, like -f 3:10') parser.add_argument('-s', '--second', help='The sample and line of the second point, ' 'separated by a colon.') parser.add_argument('cube', help='Cube file(s) to crop.', nargs='+') args = parser.parse_args() for cub in args.cube: in_p = Path(cub) if(args.output): out_p = Path(args.output) else: out_p = in_p.with_suffix('.crop.cub') (samp, line, nsamp, nline) = calcoffset(args.first, args.second) print(crop(in_p, out_p, samp, line, nsamp, nline).args) if(args.output): # If there's a specific output filename, only do one. break if __name__ == "__main__": sys.exit(main())<|fim▁end|>
does the math, and then calls crop.'''
<|file_name|>fg-joomla-to-wordpress-admin.js<|end_file_name|><|fim▁begin|>(function( $ ) { 'use strict'; var that; var fgj2wp = { plugin_id: 'fgj2wp', fatal_error: '', /** * Manage the behaviour of the Skip Media checkbox */ hide_unhide_media: function() { $("#media_import_box").toggle(!$("#skip_media").is(':checked')); }, /** * Security question before deleting WordPress content */ check_empty_content_option: function () { var confirm_message; var action = $('input:radio[name=empty_action]:checked').val(); switch ( action ) { case 'newposts': confirm_message = objectL10n.delete_new_posts_confirmation_message; break; case 'all': confirm_message = objectL10n.delete_all_confirmation_message; break; default: alert(objectL10n.delete_no_answer_message); return false; break; } return confirm(confirm_message); }, /** * Start the logger */ start_logger: function() { that.stop_logger_triggered = false; clearTimeout(that.timeout); that.timeout = setTimeout(that.update_display, 1000); }, /** * Stop the logger */ stop_logger: function() { that.stop_logger_triggered = true; }, /** * Update the display */ update_display: function() { that.timeout = setTimeout(that.update_display, 1000); // Actions if ( $("#logger_autorefresh").is(":checked") ) { that.display_logs(); } that.update_progressbar(); that.update_wordpress_info(); if ( that.stop_logger_triggered ) { clearTimeout(that.timeout); } }, /** * Display the logs */ display_logs: function() { $.ajax({ url: objectPlugin.log_file_url, cache: false }).done(function(result) { $("#logger").html(''); result.split("\n").forEach(function(row) { if ( row.substr(0, 7) === '[ERROR]' || row === 'IMPORT STOPPED BY USER') { row = '<span class="error_msg">' + row + '</span>'; // Mark the errors in red } // Test if the import is complete else if ( row === 'IMPORT COMPLETE' ) { row = '<span class="complete_msg">' + row + '</span>'; // Mark the complete message in green $('#action_message').html(objectL10n.import_complete) 
.removeClass('failure').addClass('success'); } $("#logger").append(row + "<br />\n"); }); $("#logger").append('<span class="error_msg">' + that.fatal_error + '</span>' + "<br />\n"); }); }, /** * Update the progressbar */ update_progressbar: function() { $.ajax({ url: objectPlugin.progress_url, cache: false, dataType: 'json' }).done(function(result) { // Move the progress bar var progress = Number(result.current) / Number(result.total) * 100; $('#progressbar').progressbar('option', 'value', progress);<|fim▁hole|> /** * Update WordPress database info */ update_wordpress_info: function() { var data = 'action=' + that.plugin_id + '_import&plugin_action=update_wordpress_info'; $.ajax({ method: "POST", url: ajaxurl, data: data }).done(function(result) { $('#fgj2wp_database_info_content').html(result); }); }, /** * Empty WordPress content * * @returns {Boolean} */ empty_wp_content: function() { if (that.check_empty_content_option()) { // Start displaying the logs that.start_logger(); $('#empty').attr('disabled', 'disabled'); // Disable the button var data = $('#form_empty_wordpress_content').serialize() + '&action=' + that.plugin_id + '_import&plugin_action=empty'; $.ajax({ method: "POST", url: ajaxurl, data: data }).done(function() { that.stop_logger(); $('#empty').removeAttr('disabled'); // Enable the button alert(objectL10n.content_removed_from_wordpress); }); } return false; }, /** * Test the database connection * * @returns {Boolean} */ test_database: function() { // Start displaying the logs that.start_logger(); $('#test_database').attr('disabled', 'disabled'); // Disable the button var data = $('#form_import').serialize() + '&action=' + that.plugin_id + '_import&plugin_action=test_database'; $.ajax({ method: 'POST', url: ajaxurl, data: data, dataType: 'json' }).done(function(result) { that.stop_logger(); $('#test_database').removeAttr('disabled'); // Enable the button if ( typeof result.message !== 'undefined' ) { $('#database_test_message').toggleClass('success', 
result.status === 'OK') .toggleClass('failure', result.status !== 'OK') .html(result.message); } }).fail(function(result) { that.stop_logger(); $('#test_database').removeAttr('disabled'); // Enable the button that.fatal_error = result.responseText; }); return false; }, /** * Test the FTP connection * * @returns {Boolean} */ test_ftp: function() { // Start displaying the logs that.start_logger(); $('#test_ftp').attr('disabled', 'disabled'); // Disable the button var data = $('#form_import').serialize() + '&action=' + that.plugin_id + '_import&plugin_action=test_ftp'; $.ajax({ method: 'POST', url: ajaxurl, data: data, dataType: 'json' }).done(function(result) { that.stop_logger(); $('#test_ftp').removeAttr('disabled'); // Enable the button if ( typeof result.message !== 'undefined' ) { $('#ftp_test_message').toggleClass('success', result.status === 'OK') .toggleClass('failure', result.status !== 'OK') .html(result.message); } }).fail(function(result) { that.stop_logger(); $('#test_ftp').removeAttr('disabled'); // Enable the button that.fatal_error = result.responseText; }); return false; }, /** * Save the settings * * @returns {Boolean} */ save: function() { // Start displaying the logs that.start_logger(); $('#save').attr('disabled', 'disabled'); // Disable the button var data = $('#form_import').serialize() + '&action=' + that.plugin_id + '_import&plugin_action=save'; $.ajax({ method: "POST", url: ajaxurl, data: data }).done(function() { that.stop_logger(); $('#save').removeAttr('disabled'); // Enable the button alert(objectL10n.settings_saved); }); return false; }, /** * Start the import * * @returns {Boolean} */ start_import: function() { that.fatal_error = ''; // Start displaying the logs that.start_logger(); // Disable the import button that.import_button_label = $('#import').val(); $('#import').val(objectL10n.importing).attr('disabled', 'disabled'); // Show the stop button $('#stop-import').show(); // Clear the action message $('#action_message').html(''); // 
Run the import var data = $('#form_import').serialize() + '&action=' + that.plugin_id + '_import&plugin_action=import'; $.ajax({ method: "POST", url: ajaxurl, data: data }).done(function(result) { if (result) { that.fatal_error = result; } that.stop_logger(); that.reactivate_import_button(); }); return false; }, /** * Reactivate the import button * */ reactivate_import_button: function() { $('#import').val(that.import_button_label).removeAttr('disabled'); $('#stop-import').hide(); }, /** * Stop import * * @returns {Boolean} */ stop_import: function() { $('#stop-import').attr('disabled', 'disabled'); $('#action_message').html(objectL10n.import_stopped_by_user) .removeClass('success').addClass('failure'); // Stop the import var data = $('#form_import').serialize() + '&action=' + that.plugin_id + '_import&plugin_action=stop_import'; $.ajax({ method: "POST", url: ajaxurl, data: data }).done(function() { $('#stop-import').removeAttr('disabled'); // Enable the button that.reactivate_import_button(); }); return false; }, /** * Modify the internal links * * @returns {Boolean} */ modify_links: function() { // Start displaying the logs that.start_logger(); $('#modify_links').attr('disabled', 'disabled'); // Disable the button var data = $('#form_modify_links').serialize() + '&action=' + that.plugin_id + '_import&plugin_action=modify_links'; $.ajax({ method: "POST", url: ajaxurl, data: data }).done(function(result) { if (result) { that.fatal_error = result; } that.stop_logger(); $('#modify_links').removeAttr('disabled'); // Enable the button alert(objectL10n.internal_links_modified); }); return false; } }; /** * Actions to run when the DOM is ready */ $(function() { that = fgj2wp; $('#progressbar').progressbar({value : 0}); // Skip media checkbox $("#skip_media").bind('click', that.hide_unhide_media); that.hide_unhide_media(); // Empty WordPress content confirmation $("#form_empty_wordpress_content").bind('submit', that.check_empty_content_option); // Partial import checkbox 
$("#partial_import").hide(); $("#partial_import_toggle").click(function() { $("#partial_import").slideToggle("slow"); }); // Empty button $('#empty').click(that.empty_wp_content); // Test database button $('#test_database').click(that.test_database); // Test FTP button $('#test_ftp').click(that.test_ftp); // Save settings button $('#save').click(that.save); // Import button $('#import').click(that.start_import); // Stop import button $('#stop-import').click(that.stop_import); // Modify links button $('#modify_links').click(that.modify_links); // Display the logs $('#logger_autorefresh').click(that.display_logs); }); /** * Actions to run when the window is loaded */ $( window ).load(function() { }); })( jQuery );<|fim▁end|>
}); },
<|file_name|>mod438.js<|end_file_name|><|fim▁begin|><|fim▁hole|>var value=mod437+1; export default value;<|fim▁end|>
import mod437 from './mod437';
<|file_name|>che-svn.spec.ts<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2015-2017 Red Hat, Inc. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Red Hat, Inc. - initial API and implementation */ 'use strict'; /** * Test of the CheSvn */ describe('CheSvn', function () { /** * User Factory for the test */ var factory; /** * API builder. */ var apiBuilder; var workspace; /** * Backend for handling http operations */ var httpBackend; /** * Che backend */ var cheBackend; /** * setup module */ beforeEach(angular.mock.module('userDashboard')); /** * Inject factory and http backend */ beforeEach(inject(function (cheWorkspace, cheAPIBuilder, cheHttpBackend) {<|fim▁hole|> apiBuilder = cheAPIBuilder; cheBackend = cheHttpBackend; httpBackend = cheHttpBackend.getHttpBackend(); })); /** * Check assertion after the test */ afterEach(function () { httpBackend.verifyNoOutstandingExpectation(); httpBackend.verifyNoOutstandingRequest(); }); /** * Check that we're able to fetch remote svn url */ it('Fetch remote svn url', function () { // setup tests objects var agentUrl = 'localhost:3232/wsagent/ext'; var workspaceId = 'workspace456test'; var projectPath = '/testSvnProject'; var remoteSvnUrl = 'https://svn.apache.org' + projectPath; var runtime = {'links': [{'href': agentUrl, 'rel': 'wsagent'}]}; var workspace1 = apiBuilder.getWorkspaceBuilder().withId(workspaceId).withRuntime(runtime).build(); cheBackend.addWorkspaces([workspace1]); // providing request // add test remote svn url on http backend cheBackend.addRemoteSvnUrl(workspaceId, encodeURIComponent(projectPath), remoteSvnUrl); // setup backend cheBackend.setup(); workspace.fetchWorkspaceDetails(workspaceId); httpBackend.expectGET('/api/workspace/' + workspaceId); // flush command httpBackend.flush(); var factory = 
workspace.getWorkspaceAgent(workspaceId).getSvn(); cheBackend.getRemoteSvnUrl(workspaceId, encodeURIComponent(projectPath)); // fetch remote url factory.fetchRemoteUrl(workspaceId, projectPath); // expecting POST httpBackend.expectPOST(agentUrl + '/svn/info?workspaceId='+workspaceId); // flush command httpBackend.flush(); // now, check var repo = factory.getRemoteUrlByKey(workspaceId, projectPath); // check local url expect(remoteSvnUrl).toEqual(repo.url); } ); });<|fim▁end|>
workspace = cheWorkspace;
<|file_name|>PopularMoviesFragment.java<|end_file_name|><|fim▁begin|>package com.gbaldera.yts.fragments; import android.content.Loader; import com.gbaldera.yts.loaders.PopularMoviesLoader; import com.jakewharton.trakt.entities.Movie; import java.util.List; public class PopularMoviesFragment extends BaseMovieFragment { @Override protected int getLoaderId() { return BaseMovieFragment.POPULAR_MOVIES_LOADER_ID; } @Override protected Loader<List<Movie>> getLoader() {<|fim▁hole|> return new PopularMoviesLoader(getActivity()); } }<|fim▁end|>
<|file_name|>queue.go<|end_file_name|><|fim▁begin|>package main import ( "bytes"<|fim▁hole|> "github.com/bitly/go-nsq" ) // BackendQueue represents the behavior for the secondary message // storage system type BackendQueue interface { Put([]byte) error ReadChan() chan []byte // this is expected to be an *unbuffered* channel Close() error Delete() error Depth() int64 Empty() error } type DummyBackendQueue struct { readChan chan []byte } func NewDummyBackendQueue() BackendQueue { return &DummyBackendQueue{readChan: make(chan []byte)} } func (d *DummyBackendQueue) Put([]byte) error { return nil } func (d *DummyBackendQueue) ReadChan() chan []byte { return d.readChan } func (d *DummyBackendQueue) Close() error { return nil } func (d *DummyBackendQueue) Delete() error { return nil } func (d *DummyBackendQueue) Depth() int64 { return int64(0) } func (d *DummyBackendQueue) Empty() error { return nil } func WriteMessageToBackend(buf *bytes.Buffer, msg *nsq.Message, bq BackendQueue) error { buf.Reset() err := msg.Write(buf) if err != nil { return err } err = bq.Put(buf.Bytes()) if err != nil { return err } return nil }<|fim▁end|>
<|file_name|>debugshell.py<|end_file_name|><|fim▁begin|># debugshell extension """a python shell with repo, changelog & manifest objects""" import mercurial import code def debugshell(ui, repo, **opts): objects = { 'mercurial': mercurial, 'repo': repo, 'cl': repo.changelog, 'mf': repo.manifest,<|fim▁hole|> mercurial.__path__[0]) code.interact(bannermsg, local=objects) cmdtable = { "debugshell|dbsh": (debugshell, []) }<|fim▁end|>
} bannermsg = "loaded repo : %s\n" \ "using source: %s" % (repo.root,
<|file_name|>token.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use ast; use ext::mtwt; use ptr::P; use util::interner::{RcStr, StrInterner}; use util::interner; use serialize::{Decodable, Decoder, Encodable, Encoder}; use std::fmt; use std::mem; use std::path::BytesContainer; use std::rc::Rc; #[allow(non_camel_case_types)] #[deriving(Clone, Encodable, Decodable, PartialEq, Eq, Hash, Show)] pub enum BinOpToken { Plus, Minus, Star, Slash, Percent, Caret, And, Or, Shl, Shr, } /// A delimeter token #[deriving(Clone, Encodable, Decodable, PartialEq, Eq, Hash, Show)] pub enum DelimToken { /// A round parenthesis: `(` or `)` Paren, /// A square bracket: `[` or `]` Bracket, /// A curly brace: `{` or `}` Brace, } #[deriving(Clone, Encodable, Decodable, PartialEq, Eq, Hash, Show)] pub enum IdentStyle { /// `::` follows the identifier with no whitespace in-between. ModName, Plain, } #[allow(non_camel_case_types)] #[deriving(Clone, Encodable, Decodable, PartialEq, Eq, Hash, Show)] pub enum Token { /* Expression-operator symbols. */ Eq, Lt, Le, EqEq, Ne, Ge, Gt, AndAnd, OrOr, Not, Tilde, BinOp(BinOpToken), BinOpEq(BinOpToken), /* Structural symbols */ At, Dot, DotDot, DotDotDot, Comma, Semi, Colon, ModSep, RArrow, LArrow, FatArrow, Pound, Dollar, Question, /// An opening delimeter, eg. `{` OpenDelim(DelimToken), /// A closing delimeter, eg. 
`}` CloseDelim(DelimToken), /* Literals */ LitByte(ast::Name), LitChar(ast::Name), LitInteger(ast::Name), LitFloat(ast::Name), LitStr(ast::Name), LitStrRaw(ast::Name, uint), /* raw str delimited by n hash symbols */ LitBinary(ast::Name), LitBinaryRaw(ast::Name, uint), /* raw binary str delimited by n hash symbols */ /* Name components */ Ident(ast::Ident, IdentStyle), Underscore, Lifetime(ast::Ident), /* For interpolation */ Interpolated(Nonterminal), // Can be expanded into several tokens. /// Doc comment DocComment(ast::Name), // In left-hand-sides of MBE macros: /// Parse a nonterminal (name to bind, name of NT, styles of their idents) MatchNt(ast::Ident, ast::Ident, IdentStyle, IdentStyle), // In right-hand-sides of MBE macros: /// A syntactic variable that will be filled in by macro expansion. SubstNt(ast::Ident, IdentStyle), // Junk. These carry no data because we don't really care about the data // they *would* carry, and don't really want to allocate a new ident for // them. Instead, users could extract that from the associated span. /// Whitespace Whitespace, /// Comment Comment, Shebang(ast::Name), Eof, } impl Token { /// Returns `true` if the token can appear at the start of an expression. 
pub fn can_begin_expr(&self) -> bool { match *self { OpenDelim(_) => true, Ident(_, _) => true, Underscore => true, Tilde => true, LitByte(_) => true, LitChar(_) => true, LitInteger(_) => true, LitFloat(_) => true, LitStr(_) => true, LitStrRaw(_, _) => true, LitBinary(_) => true, LitBinaryRaw(_, _) => true, Pound => true, At => true, Not => true, BinOp(Minus) => true, BinOp(Star) => true, BinOp(And) => true, BinOp(Or) => true, // in lambda syntax OrOr => true, // in lambda syntax ModSep => true, Interpolated(NtExpr(..)) => true, Interpolated(NtIdent(..)) => true, Interpolated(NtBlock(..)) => true, Interpolated(NtPath(..)) => true, _ => false, } } /// Returns `true` if the token is any literal pub fn is_lit(&self) -> bool { match *self { LitByte(_) => true, LitChar(_) => true, LitInteger(_) => true, LitFloat(_) => true, LitStr(_) => true, LitStrRaw(_, _) => true, LitBinary(_) => true, LitBinaryRaw(_, _) => true, _ => false, } } /// Returns `true` if the token is an identifier. pub fn is_ident(&self) -> bool { match *self { Ident(_, _) => true, _ => false, } } /// Returns `true` if the token is an interpolated path. pub fn is_path(&self) -> bool { match *self { Interpolated(NtPath(..)) => true, _ => false, } } /// Returns `true` if the token is a path that is not followed by a `::` /// token. #[allow(non_upper_case_globals)] pub fn is_plain_ident(&self) -> bool { match *self { Ident(_, Plain) => true, _ => false, } } /// Returns `true` if the token is a lifetime. pub fn is_lifetime(&self) -> bool { match *self { Lifetime(..) => true, _ => false, } } /// Returns `true` if the token is either the `mut` or `const` keyword. pub fn is_mutability(&self) -> bool { self.is_keyword(keywords::Mut) || self.is_keyword(keywords::Const) } /// Maps a token to its corresponding binary operator. 
pub fn to_binop(&self) -> Option<ast::BinOp> { match *self { BinOp(Star) => Some(ast::BiMul), BinOp(Slash) => Some(ast::BiDiv), BinOp(Percent) => Some(ast::BiRem), BinOp(Plus) => Some(ast::BiAdd), BinOp(Minus) => Some(ast::BiSub), BinOp(Shl) => Some(ast::BiShl), BinOp(Shr) => Some(ast::BiShr), BinOp(And) => Some(ast::BiBitAnd), BinOp(Caret) => Some(ast::BiBitXor), BinOp(Or) => Some(ast::BiBitOr), Lt => Some(ast::BiLt), Le => Some(ast::BiLe), Ge => Some(ast::BiGe), Gt => Some(ast::BiGt), EqEq => Some(ast::BiEq), Ne => Some(ast::BiNe), AndAnd => Some(ast::BiAnd), OrOr => Some(ast::BiOr), _ => None, } } /// Returns `true` if the token is a given keyword, `kw`. #[allow(non_upper_case_globals)] pub fn is_keyword(&self, kw: keywords::Keyword) -> bool { match *self { Ident(sid, Plain) => kw.to_name() == sid.name, _ => false, } } /// Returns `true` if the token is either a special identifier, or a strict /// or reserved keyword. #[allow(non_upper_case_globals)] pub fn is_any_keyword(&self) -> bool { match *self {<|fim▁hole|> let n = sid.name; n == SELF_KEYWORD_NAME || n == STATIC_KEYWORD_NAME || n == SUPER_KEYWORD_NAME || STRICT_KEYWORD_START <= n && n <= RESERVED_KEYWORD_FINAL }, _ => false } } /// Returns `true` if the token may not appear as an identifier. #[allow(non_upper_case_globals)] pub fn is_strict_keyword(&self) -> bool { match *self { Ident(sid, Plain) => { let n = sid.name; n == SELF_KEYWORD_NAME || n == STATIC_KEYWORD_NAME || n == SUPER_KEYWORD_NAME || STRICT_KEYWORD_START <= n && n <= STRICT_KEYWORD_FINAL }, Ident(sid, ModName) => { let n = sid.name; n != SELF_KEYWORD_NAME && n != SUPER_KEYWORD_NAME && STRICT_KEYWORD_START <= n && n <= STRICT_KEYWORD_FINAL } _ => false, } } /// Returns `true` if the token is a keyword that has been reserved for /// possible future use. 
#[allow(non_upper_case_globals)] pub fn is_reserved_keyword(&self) -> bool { match *self { Ident(sid, Plain) => { let n = sid.name; RESERVED_KEYWORD_START <= n && n <= RESERVED_KEYWORD_FINAL }, _ => false, } } /// Hygienic identifier equality comparison. /// /// See `styntax::ext::mtwt`. pub fn mtwt_eq(&self, other : &Token) -> bool { match (self, other) { (&Ident(id1,_), &Ident(id2,_)) | (&Lifetime(id1), &Lifetime(id2)) => mtwt::resolve(id1) == mtwt::resolve(id2), _ => *self == *other } } } #[deriving(Clone, Encodable, Decodable, PartialEq, Eq, Hash)] /// For interpolation during macro expansion. pub enum Nonterminal { NtItem(P<ast::Item>), NtBlock(P<ast::Block>), NtStmt(P<ast::Stmt>), NtPat(P<ast::Pat>), NtExpr(P<ast::Expr>), NtTy(P<ast::Ty>), NtIdent(Box<ast::Ident>, IdentStyle), /// Stuff inside brackets for attributes NtMeta(P<ast::MetaItem>), NtPath(Box<ast::Path>), NtTT(P<ast::TokenTree>), // needs P'ed to break a circularity } impl fmt::Show for Nonterminal { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { NtItem(..) => f.pad("NtItem(..)"), NtBlock(..) => f.pad("NtBlock(..)"), NtStmt(..) => f.pad("NtStmt(..)"), NtPat(..) => f.pad("NtPat(..)"), NtExpr(..) => f.pad("NtExpr(..)"), NtTy(..) => f.pad("NtTy(..)"), NtIdent(..) => f.pad("NtIdent(..)"), NtMeta(..) => f.pad("NtMeta(..)"), NtPath(..) => f.pad("NtPath(..)"), NtTT(..) => f.pad("NtTT(..)"), } } } // Get the first "argument" macro_rules! first { ( $first:expr, $( $remainder:expr, )* ) => ( $first ) } // Get the last "argument" (has to be done recursively to avoid phoney local ambiguity error) macro_rules! 
last { ( $first:expr, $( $remainder:expr, )+ ) => ( last!( $( $remainder, )+ ) ); ( $first:expr, ) => ( $first ) } // In this macro, there is the requirement that the name (the number) must be monotonically // increasing by one in the special identifiers, starting at 0; the same holds for the keywords, // except starting from the next number instead of zero, and with the additional exception that // special identifiers are *also* allowed (they are deduplicated in the important place, the // interner), an exception which is demonstrated by "static" and "self". macro_rules! declare_special_idents_and_keywords {( // So now, in these rules, why is each definition parenthesised? // Answer: otherwise we get a spurious local ambiguity bug on the "}" pub mod special_idents { $( ($si_name:expr, $si_static:ident, $si_str:expr); )* } pub mod keywords { 'strict: $( ($sk_name:expr, $sk_variant:ident, $sk_str:expr); )* 'reserved: $( ($rk_name:expr, $rk_variant:ident, $rk_str:expr); )* } ) => { static STRICT_KEYWORD_START: ast::Name = first!($( ast::Name($sk_name), )*); static STRICT_KEYWORD_FINAL: ast::Name = last!($( ast::Name($sk_name), )*); static RESERVED_KEYWORD_START: ast::Name = first!($( ast::Name($rk_name), )*); static RESERVED_KEYWORD_FINAL: ast::Name = last!($( ast::Name($rk_name), )*); pub mod special_idents { use ast; $( #[allow(non_upper_case_globals)] pub const $si_static: ast::Ident = ast::Ident { name: ast::Name($si_name), ctxt: 0, }; )* } pub mod special_names { use ast; $( #[allow(non_upper_case_globals)] pub const $si_static: ast::Name = ast::Name($si_name); )* } /** * All the valid words that have meaning in the Rust language. * * Rust keywords are either 'strict' or 'reserved'. Strict keywords may not * appear as identifiers at all. Reserved keywords are not used anywhere in * the language and may not appear as identifiers. 
*/ pub mod keywords { use ast; pub enum Keyword { $( $sk_variant, )* $( $rk_variant, )* } impl Keyword { pub fn to_name(&self) -> ast::Name { match *self { $( $sk_variant => ast::Name($sk_name), )* $( $rk_variant => ast::Name($rk_name), )* } } } } fn mk_fresh_ident_interner() -> IdentInterner { // The indices here must correspond to the numbers in // special_idents, in Keyword to_name(), and in static // constants below. let mut init_vec = Vec::new(); $(init_vec.push($si_str);)* $(init_vec.push($sk_str);)* $(init_vec.push($rk_str);)* interner::StrInterner::prefill(init_vec.as_slice()) } }} // If the special idents get renumbered, remember to modify these two as appropriate pub const SELF_KEYWORD_NAME: ast::Name = ast::Name(SELF_KEYWORD_NAME_NUM); const STATIC_KEYWORD_NAME: ast::Name = ast::Name(STATIC_KEYWORD_NAME_NUM); const SUPER_KEYWORD_NAME: ast::Name = ast::Name(SUPER_KEYWORD_NAME_NUM); pub const SELF_KEYWORD_NAME_NUM: u32 = 1; const STATIC_KEYWORD_NAME_NUM: u32 = 2; const SUPER_KEYWORD_NAME_NUM: u32 = 3; // NB: leaving holes in the ident table is bad! a different ident will get // interned with the id from the hole, but it will be between the min and max // of the reserved words, and thus tagged as "reserved". declare_special_idents_and_keywords! 
{ pub mod special_idents { // These ones are statics (0, invalid, ""); (super::SELF_KEYWORD_NAME_NUM, self_, "self"); (super::STATIC_KEYWORD_NAME_NUM, statik, "static"); (super::SUPER_KEYWORD_NAME_NUM, super_, "super"); (4, static_lifetime, "'static"); // for matcher NTs (5, tt, "tt"); (6, matchers, "matchers"); // outside of libsyntax (7, clownshoe_abi, "__rust_abi"); (8, opaque, "<opaque>"); (9, unnamed_field, "<unnamed_field>"); (10, type_self, "Self"); (11, prelude_import, "prelude_import"); } pub mod keywords { // These ones are variants of the Keyword enum 'strict: (12, As, "as"); (13, Break, "break"); (14, Crate, "crate"); (15, Else, "else"); (16, Enum, "enum"); (17, Extern, "extern"); (18, False, "false"); (19, Fn, "fn"); (20, For, "for"); (21, If, "if"); (22, Impl, "impl"); (23, In, "in"); (24, Let, "let"); (25, Loop, "loop"); (26, Match, "match"); (27, Mod, "mod"); (28, Move, "move"); (29, Mut, "mut"); (30, Pub, "pub"); (31, Ref, "ref"); (32, Return, "return"); // Static and Self are also special idents (prefill de-dupes) (super::STATIC_KEYWORD_NAME_NUM, Static, "static"); (super::SELF_KEYWORD_NAME_NUM, Self, "self"); (33, Struct, "struct"); (super::SUPER_KEYWORD_NAME_NUM, Super, "super"); (34, True, "true"); (35, Trait, "trait"); (36, Type, "type"); (37, Unsafe, "unsafe"); (38, Use, "use"); (39, Virtual, "virtual"); (40, While, "while"); (41, Continue, "continue"); (42, Proc, "proc"); (43, Box, "box"); (44, Const, "const"); (45, Where, "where"); 'reserved: (46, Alignof, "alignof"); (47, Be, "be"); (48, Offsetof, "offsetof"); (49, Priv, "priv"); (50, Pure, "pure"); (51, Sizeof, "sizeof"); (52, Typeof, "typeof"); (53, Unsized, "unsized"); (54, Yield, "yield"); (55, Do, "do"); (56, Abstract, "abstract"); (57, Final, "final"); (58, Override, "override"); } } // looks like we can get rid of this completely... pub type IdentInterner = StrInterner; // if an interner exists in TLS, return it. Otherwise, prepare a // fresh one. 
// FIXME(eddyb) #8726 This should probably use a task-local reference. pub fn get_ident_interner() -> Rc<IdentInterner> { local_data_key!(key: Rc<::parse::token::IdentInterner>) match key.get() { Some(interner) => interner.clone(), None => { let interner = Rc::new(mk_fresh_ident_interner()); key.replace(Some(interner.clone())); interner } } } /// Represents a string stored in the task-local interner. Because the /// interner lives for the life of the task, this can be safely treated as an /// immortal string, as long as it never crosses between tasks. /// /// FIXME(pcwalton): You must be careful about what you do in the destructors /// of objects stored in TLS, because they may run after the interner is /// destroyed. In particular, they must not access string contents. This can /// be fixed in the future by just leaking all strings until task death /// somehow. #[deriving(Clone, PartialEq, Hash, PartialOrd, Eq, Ord)] pub struct InternedString { string: RcStr, } impl InternedString { #[inline] pub fn new(string: &'static str) -> InternedString { InternedString { string: RcStr::new(string), } } #[inline] fn new_from_rc_str(string: RcStr) -> InternedString { InternedString { string: string, } } #[inline] pub fn get<'a>(&'a self) -> &'a str { self.string.as_slice() } } impl BytesContainer for InternedString { fn container_as_bytes<'a>(&'a self) -> &'a [u8] { // FIXME #12938: This is a workaround for the incorrect signature // of `BytesContainer`, which is itself a workaround for the lack of // DST. 
unsafe { let this = self.get(); mem::transmute::<&[u8],&[u8]>(this.container_as_bytes()) } } } impl fmt::Show for InternedString { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.string.as_slice()) } } impl<'a> Equiv<&'a str> for InternedString { fn equiv(&self, other: & &'a str) -> bool { (*other) == self.string.as_slice() } } impl<D:Decoder<E>, E> Decodable<D, E> for InternedString { fn decode(d: &mut D) -> Result<InternedString, E> { Ok(get_name(get_ident_interner().intern( try!(d.read_str()).as_slice()))) } } impl<S:Encoder<E>, E> Encodable<S, E> for InternedString { fn encode(&self, s: &mut S) -> Result<(), E> { s.emit_str(self.string.as_slice()) } } /// Returns the string contents of a name, using the task-local interner. #[inline] pub fn get_name(name: ast::Name) -> InternedString { let interner = get_ident_interner(); InternedString::new_from_rc_str(interner.get(name)) } /// Returns the string contents of an identifier, using the task-local /// interner. #[inline] pub fn get_ident(ident: ast::Ident) -> InternedString { get_name(ident.name) } /// Interns and returns the string contents of an identifier, using the /// task-local interner. #[inline] pub fn intern_and_get_ident(s: &str) -> InternedString { get_name(intern(s)) } /// Maps a string to its interned representation. #[inline] pub fn intern(s: &str) -> ast::Name { get_ident_interner().intern(s) } /// gensym's a new uint, using the current interner. #[inline] pub fn gensym(s: &str) -> ast::Name { get_ident_interner().gensym(s) } /// Maps a string to an identifier with an empty syntax context. #[inline] pub fn str_to_ident(s: &str) -> ast::Ident { ast::Ident::new(intern(s)) } /// Maps a string to a gensym'ed identifier. #[inline] pub fn gensym_ident(s: &str) -> ast::Ident { ast::Ident::new(gensym(s)) } // create a fresh name that maps to the same string as the old one. 
// note that this guarantees that str_ptr_eq(ident_to_string(src),interner_get(fresh_name(src))); // that is, that the new name and the old one are connected to ptr_eq strings. pub fn fresh_name(src: &ast::Ident) -> ast::Name { let interner = get_ident_interner(); interner.gensym_copy(src.name) // following: debug version. Could work in final except that it's incompatible with // good error messages and uses of struct names in ambiguous could-be-binding // locations. Also definitely destroys the guarantee given above about ptr_eq. /*let num = rand::task_rng().gen_uint_range(0,0xffff); gensym(format!("{}_{}",ident_to_string(src),num))*/ } // create a fresh mark. pub fn fresh_mark() -> ast::Mrk { gensym("mark").uint() as u32 } #[cfg(test)] mod test { use super::*; use ast; use ext::mtwt; fn mark_ident(id : ast::Ident, m : ast::Mrk) -> ast::Ident { ast::Ident { name: id.name, ctxt:mtwt::apply_mark(m, id.ctxt) } } #[test] fn mtwt_token_eq_test() { assert!(Gt.mtwt_eq(&Gt)); let a = str_to_ident("bac"); let a1 = mark_ident(a,92); assert!(Ident(a, ModName).mtwt_eq(&Ident(a1, Plain))); } }<|fim▁end|>
Ident(sid, Plain) => {
<|file_name|>graph_test.go<|end_file_name|><|fim▁begin|>package report import ( "bytes" "io" "os" "testing" "github.com/remyoudompheng/go-misc/pprof/parser" ) func TestCpuProfileGraph(t *testing.T) { syms := readSymbols("testdata/cpu.prof.symbols") resolve := func(u uint64) string { s, _ := lookup(u, syms); return s } f, err := os.Open("testdata/cpu.prof") if err != nil { t.Fatal(err) } defer f.Close() p, err := parser.NewCpuProfParser(f) if err != nil { t.Fatal(err) } reporter := &Reporter{Resolver: resolve} total := int64(0) for { trace, count, err := p.ReadTrace() if trace == nil && err == io.EOF { break } reporter.Add(trace, int64(count)) total += int64(count) }<|fim▁hole|> g := reporter.GraphByFunc(ColCPU) t.Logf("%#v", g) report := GraphReport{ Prog: "pprof.test", Total: total, Unit: "samples", Graph: g, } buf := new(bytes.Buffer) err = graphvizTpl.Execute(buf, report) if err != nil { t.Fatal(err) } t.Log(buf.String()) }<|fim▁end|>
<|file_name|>_hash.py<|end_file_name|><|fim▁begin|>from __future__ import division <|fim▁hole|># X.flags.writeable = False # h = hash(X.tobytes()) # X.flags.writeable = writeable # return h def filehash(filepath): r"""Compute sha256 from a given file.""" import hashlib BUF_SIZE = 65536 sha256 = hashlib.sha256() with open(filepath, "rb") as f: while True: data = f.read(BUF_SIZE) if not data: break sha256.update(data) return sha256.hexdigest()<|fim▁end|>
# # TODO: document those functions # def array_hash(X): # writeable = X.flags.writeable
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models, DEFAULT_DB_ALIAS, connection from django.contrib.auth.models import User from django.conf import settings class Animal(models.Model): name = models.CharField(max_length=150) latin_name = models.CharField(max_length=150) count = models.IntegerField() weight = models.FloatField() # use a non-default name for the default manager specimens = models.Manager() def __unicode__(self): return self.name class Plant(models.Model): name = models.CharField(max_length=150) class Meta: # For testing when upper case letter in app name; regression for #4057 db_table = "Fixtures_regress_plant" class Stuff(models.Model): name = models.CharField(max_length=20, null=True) owner = models.ForeignKey(User, null=True) def __unicode__(self): return unicode(self.name) + u' is owned by ' + unicode(self.owner) class Absolute(models.Model): name = models.CharField(max_length=40) load_count = 0 def __init__(self, *args, **kwargs): super(Absolute, self).__init__(*args, **kwargs) Absolute.load_count += 1 class Parent(models.Model): name = models.CharField(max_length=10) class Meta: ordering = ('id',) class Child(Parent): data = models.CharField(max_length=10) # Models to regression test #7572 class Channel(models.Model): name = models.CharField(max_length=255) class Article(models.Model): title = models.CharField(max_length=255) channels = models.ManyToManyField(Channel) class Meta: ordering = ('id',) # Models to regression test #11428 class Widget(models.Model): name = models.CharField(max_length=255) class Meta: ordering = ('name',) def __unicode__(self): return self.name class WidgetProxy(Widget): class Meta: proxy = True # Check for forward references in FKs and M2Ms with natural keys class TestManager(models.Manager): def get_by_natural_key(self, key): return self.get(name=key) class Store(models.Model): objects = TestManager() name = models.CharField(max_length=255) class Meta: ordering = ('name',) def 
__unicode__(self): return self.name def natural_key(self): return (self.name,) class Person(models.Model): objects = TestManager() name = models.CharField(max_length=255) class Meta: ordering = ('name',) def __unicode__(self): return self.name # Person doesn't actually have a dependency on store, but we need to define # one to test the behaviour of the dependency resolution algorithm. def natural_key(self): return (self.name,) natural_key.dependencies = ['fixtures_regress.store'] class Book(models.Model): name = models.CharField(max_length=255) author = models.ForeignKey(Person) stores = models.ManyToManyField(Store) class Meta: ordering = ('name',) def __unicode__(self): return u'%s by %s (available at %s)' % ( self.name, self.author.name, ', '.join(s.name for s in self.stores.all()) ) class NKManager(models.Manager): def get_by_natural_key(self, data): return self.get(data=data) class NKChild(Parent): data = models.CharField(max_length=10, unique=True) objects = NKManager() def natural_key(self): return self.data def __unicode__(self): return u'NKChild %s:%s' % (self.name, self.data) class RefToNKChild(models.Model): text = models.CharField(max_length=10) nk_fk = models.ForeignKey(NKChild, related_name='ref_fks') nk_m2m = models.ManyToManyField(NKChild, related_name='ref_m2ms') def __unicode__(self): return u'%s: Reference to %s [%s]' % ( self.text, self.nk_fk, ', '.join(str(o) for o in self.nk_m2m.all()) )<|fim▁hole|> name = models.CharField(max_length=255) def natural_key(self): return self.name natural_key.dependencies = ['fixtures_regress.circle2'] class Circle2(models.Model): name = models.CharField(max_length=255) def natural_key(self): return self.name natural_key.dependencies = ['fixtures_regress.circle1'] class Circle3(models.Model): name = models.CharField(max_length=255) def natural_key(self): return self.name natural_key.dependencies = ['fixtures_regress.circle3'] class Circle4(models.Model): name = models.CharField(max_length=255) def 
natural_key(self): return self.name natural_key.dependencies = ['fixtures_regress.circle5'] class Circle5(models.Model): name = models.CharField(max_length=255) def natural_key(self): return self.name natural_key.dependencies = ['fixtures_regress.circle6'] class Circle6(models.Model): name = models.CharField(max_length=255) def natural_key(self): return self.name natural_key.dependencies = ['fixtures_regress.circle4'] class ExternalDependency(models.Model): name = models.CharField(max_length=255) def natural_key(self): return self.name natural_key.dependencies = ['fixtures_regress.book'] # Model for regression test of #11101 class Thingy(models.Model): name = models.CharField(max_length=255)<|fim▁end|>
# ome models with pathological circular dependencies class Circle1(models.Model):
<|file_name|>hierarchy-utils.js<|end_file_name|><|fim▁begin|>/* * Waltz - Enterprise Architecture * Copyright (C) 2016, 2017, 2018, 2019 Waltz open source project * See README.md for more information * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific * */ import _ from "lodash"; /** * Given a set of nodes with id and parentId constructs a 'searchStr' property for each * node which is the concatenation of a specified property (attr) (or function) of all the nodes * parent nodes. */ export function prepareSearchNodes(nodes = [], attr = "name", parentKey = "parentId") { const nodesById = _.keyBy(nodes, "id"); const attrFn = _.isString(attr) ? n => n[attr] : attr; return _.map(nodes, n => { let ptr = n; let searchStr = ""; const nodePath = []; while (ptr) { nodePath.push(ptr); searchStr += (attrFn(ptr) || "") + " "; const parentId = ptr[parentKey]; ptr = nodesById[parentId]; } return { searchStr: searchStr.toLowerCase(), node: n, nodePath<|fim▁hole|> /** * The given `termStr` will be tokenised and * all nodes (given in `searchNodes`) which contain all tokens * will be returned (de-duped). * * Use `prepareSearchNodes` to prepare the search nodes. 
* @param termStr * @param searchNodes */ export function doSearch(termStr = "", searchNodes = []) { const terms = _.split(termStr.toLowerCase(), /\W+/); return _ .chain(searchNodes) .filter(sn => { const noTerms = termStr.trim().length === 0; const allMatch = _.every(terms, t => sn.searchStr.indexOf(t) >=0); return noTerms || allMatch; }) .flatMap("nodePath") .uniqBy(n => n.id) .value(); } /** * Given data that looks like: * * [ { id: "", parentId: ?, ... } , .. ] * * Gives back an array of top level objects which have children * nested in them, the result looks something like: * * [ id: "", parentId : ?, parent : {}?, children : [ .. ], ... }, .. ] * * @param nodes * @param parentsAsRefs - whether to include parent as references or simple ids * @returns {Array} */ export function populateParents(nodes, parentsAsRefs = true) { const byId = _.chain(_.cloneDeep(nodes)) .map(u => _.merge(u, { children: [], parent: null })) .keyBy("id") .value(); _.each(_.values(byId), u => { if (u.parentId) { const parent = byId[u.parentId]; if (parent) { parent.children.push(u); u.parent = parentsAsRefs ? parent : parent.id; } } }); return _.values(byId); } export function buildHierarchies(nodes, parentsAsRefs = true) { // only give back root element/s return _.reject(populateParents(nodes, parentsAsRefs), n => n.parent); } export const reduceToSelectedNodesOnly = (nodes, selectedNodeIds = []) => { const byId = _.keyBy(nodes, d => d.id); const selectedNodesOnly = _ .chain(selectedNodeIds) .map(nId => byId[nId]) .compact() .value(); const selectedWithParents = _ .chain(selectedNodesOnly) .flatMap(n => _.concat([n], getParents(n, d => byId[d.parentId]))) .uniq() .value(); return selectedWithParents; } /** * Given a forest like structure (typically generated by buildHierarchies) * returns a flattened map object representing the hierarchical structure, * the map is indexed by the value returned by the keyFn. 
* * The second argument is a function which returns the key value for a given node * * End users should call this function without passing a third argument * as it is simply the accumulator used when recursing down the branches of the * trees. */ export function indexHierarchyByKey(tree = [], keyFn = n => n.id, acc = {}) { _.forEach(tree, node => { acc[keyFn(node)] = node; indexHierarchyByKey(node.children, keyFn, acc); }); return acc; } export function groupHierarchyByKey(tree = [], keyFn = n => n.id, acc = {}) { _.forEach(tree, node => { const key = keyFn(node); const bucket = acc[key] || []; bucket.push(node) acc[key] = bucket; groupHierarchyByKey(node.children, keyFn, acc); }); return acc; } export function flattenChildren(node, acc = []) { _.forEach(node.children || [], child => { acc.push(child); flattenChildren(child, acc); }); return acc; } /** The wix tree widget does deep comparisons. Having parents as refs therefore blows the callstack. This method will replace refs with id's. */ export function switchToParentIds(treeData = []) { _.each(treeData, td => { td.parent = td.parent ? td.parent.id : null; switchToParentIds(td.children); }); return treeData; } export function findNode(nodes = [], id) { const found = _.find(nodes, { id }); if (found) return found; for(let i = 0; i < nodes.length; i++) { const f = findNode(nodes[i].children, id); if (f) return f; } return null; } /** * * @param node * @param getParentFn - function to resolve parent, defaults to `n => n.parent` * @returns {Array} */ export function getParents(node, getParentFn = (n) => n.parent) { if (! node) return []; let ptr = getParentFn(node); const result = []; while (ptr) { result.push(ptr); ptr = getParentFn(ptr); } return result; }<|fim▁end|>
}; }); }
<|file_name|>OutgoingCallListFragment.java<|end_file_name|><|fim▁begin|>/** * Copyright (C) 2010-2012 Regis Montoya (aka r3gis - www.r3gis.fr) * This file is part of CSipSimple. * * CSipSimple is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * If you own a pjsip commercial license you can also redistribute it * and/or modify it under the terms of the GNU Lesser General Public License * as an android library. * * CSipSimple is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with CSipSimple. If not, see <http://www.gnu.org/licenses/>. */ package com.csipsimple.ui.outgoingcall; import android.app.PendingIntent; import android.app.PendingIntent.CanceledException; import android.database.Cursor; import android.os.Bundle; import android.os.RemoteException; import android.support.v4.content.Loader; import android.view.View; import android.widget.ListView; import com.csipsimple.api.ISipService; import com.csipsimple.api.SipProfile; import com.csipsimple.ui.account.AccountsLoader; import com.csipsimple.utils.CallHandlerPlugin; import com.csipsimple.utils.Log; import com.csipsimple.widgets.CSSListFragment; public class OutgoingCallListFragment extends CSSListFragment { private static final String THIS_FILE = "OutgoingCallListFragment"; private OutgoingAccountsAdapter mAdapter; private AccountsLoader accLoader; private long startDate; private boolean callMade = false; @Override public void onCreate(Bundle state) { super.onCreate(state); setHasOptionsMenu(true); } @Override public void onResume() { super.onResume(); callMade = false; 
attachAdapter(); getLoaderManager().initLoader(0, null, this); startDate = System.currentTimeMillis(); } @Override public void onActivityCreated(Bundle savedInstanceState) { super.onActivityCreated(savedInstanceState); } private void attachAdapter() { if(getListAdapter() == null) { if(mAdapter == null) { mAdapter = new OutgoingAccountsAdapter(this, null); } setListAdapter(mAdapter); } } @Override public Loader<Cursor> onCreateLoader(int loader, Bundle args) { OutgoingCallChooser superActivity = ((OutgoingCallChooser) getActivity()); accLoader = new AccountsLoader(getActivity(), superActivity.getPhoneNumber(), superActivity.shouldIgnoreRewritingRules()); return accLoader; } final long MOBILE_CALL_DELAY_MS = 600; /** * Place the call for a given cursor positionned at right index in list * @param c The cursor pointing the entry we'd like to call * @return true if call performed, false else */ private boolean placeCall(Cursor c) { OutgoingCallChooser superActivity = ((OutgoingCallChooser)getActivity()); ISipService service = superActivity.getConnectedService(); long accountId = c.getLong(c.getColumnIndex(SipProfile.FIELD_ID)); if(accountId > SipProfile.INVALID_ID) { // Extra check for the account id. if(service == null) { return false; } boolean canCall = c.getInt(c.getColumnIndex(AccountsLoader.FIELD_STATUS_OUTGOING)) == 1; if(!canCall) { return false; } try { String toCall = c.getString(c.getColumnIndex(AccountsLoader.FIELD_NBR_TO_CALL)); service.makeCall(toCall, (int) accountId); superActivity.finishServiceIfNeeded(true); return true; } catch (RemoteException e) { Log.e(THIS_FILE, "Unable to make the call", e); } }else if(accountId < SipProfile.INVALID_ID) { // This is a plugin row. if(accLoader != null) { CallHandlerPlugin ch = accLoader.getCallHandlerWithAccountId(accountId); if(ch == null) { Log.w(THIS_FILE, "Call handler not anymore available in loader... 
something gone wrong"); return false; } String nextExclude = ch.getNextExcludeTelNumber(); long delay = 0; if (nextExclude != null && service != null) { try { service.ignoreNextOutgoingCallFor(nextExclude); } catch (RemoteException e) { Log.e(THIS_FILE, "Ignore next outgoing number failed"); } delay = MOBILE_CALL_DELAY_MS - (System.currentTimeMillis() - startDate); } if(ch.getIntent() != null) { PluginCallRunnable pendingTask = new PluginCallRunnable(ch.getIntent(), delay); Log.d(THIS_FILE, "Deferring call task of " + delay); pendingTask.start(); } return true; } } return false; } private class PluginCallRunnable extends Thread { private PendingIntent pendingIntent; private long delay; public PluginCallRunnable(PendingIntent pi, long d) { pendingIntent = pi; delay = d; } @Override public void run() { if(delay > 0) { try { sleep(delay); } catch (InterruptedException e) { Log.e(THIS_FILE, "Thread that fires outgoing call has been interrupted"); } } OutgoingCallChooser superActivity = ((OutgoingCallChooser)getActivity()); try { pendingIntent.send(); } catch (CanceledException e) { Log.e(THIS_FILE, "Pending intent cancelled", e); } superActivity.finishServiceIfNeeded(false); } } @Override public synchronized void changeCursor(Cursor c) { if(c != null && callMade == false) { OutgoingCallChooser superActivity = ((OutgoingCallChooser)getActivity()); Long accountToCall = superActivity.getAccountToCallTo(); // Move to first to search in this cursor c.moveToFirst(); // First of all, if only one is available... 
try call with it if(c.getCount() == 1) { if(placeCall(c)) { c.close(); callMade = true; return; } }else { // Now lets search for one in for call mode if service is ready do { if(c.getInt(c.getColumnIndex(AccountsLoader.FIELD_FORCE_CALL)) == 1) { if(placeCall(c)) { c.close(); callMade = true; return; } } if(accountToCall != SipProfile.INVALID_ID) { if(accountToCall == c.getLong(c.getColumnIndex(SipProfile.FIELD_ID))) { if(placeCall(c)) { c.close(); callMade = true; return; } } } } while(c.moveToNext()); } } // Set adapter content if nothing to force was found if(mAdapter != null) { mAdapter.changeCursor(c); } } @Override public synchronized void onListItemClick(ListView l, View v, int position, long id) { if(mAdapter != null) { placeCall((Cursor) mAdapter.getItem(position)); } } public AccountsLoader getAccountLoader() { return accLoader; } <|fim▁hole|>}<|fim▁end|>
<|file_name|>naivebayes.py<|end_file_name|><|fim▁begin|>""" Simple implementation of mutinomial Naive Bayes for text classfification. TODO: Apply to 20 Newsgroups, Reuters-21578 datasets """ __author__ = 'Duong Nguyen' __version__ = '0.0' import math import sys<|fim▁hole|> def __init__(self): self.categories = set() self.vocabularies = set() self.wordcount = {} self.catcount = {} self.denom = {} def train(self, data): for d in data: cat = d[0] self.categories.add(cat) for cat in self.categories: self.wordcount[cat] = defaultdict(int) self.catcount[cat] = 0 for d in data: cat, doc = d[0], d[1:] self.catcount[cat] += 1 for word in doc: self.vocabularies.add(word) self.wordcount[cat][word] += 1 for cat in self.categories: self.denom[cat] = sum(self.wordcount[cat].values()) + len(self.vocabularies) def wordProb(self, word, cat): """ Compute P(word|cat) with Laplace smoothing. """ return float(self.wordcount[cat][word] + 1) / self.denom[cat] def docProb(self, doc, cat): """ Compute log P(cat|doc) = log P(cat) + sum_i log P(word_i|cat) """ total = sum(self.catcount.values()) # number of docs in training data score = math.log(float(self.catcount[cat])/total) # log P(cat) for word in doc: score += math.log(self.wordProb(word, cat)) # + sum_i log P(word_i|cat) return score def classify(self, doc): """ Classify doc by argmax_cat log P(cat|doc). """ best = None maxP = -sys.maxint for cat in self.categories: p = self.docProb(doc, cat) if p > maxP: maxP = p best = cat return best if __name__ == '__main__': pass<|fim▁end|>
from collections import defaultdict class NaiveBayes(object): """ Multinomial Naive Bayes"""
<|file_name|>MenuChildrenWrapper.spec.js<|end_file_name|><|fim▁begin|>import React from 'react' import {intlEnzyme} from 'tocco-test-util' import MenuChildrenWrapper from './MenuChildrenWrapper' import {StyledMenuChildrenWrapper} from './StyledComponents' describe('admin', () => { describe('components', () => { describe('Navigation', () => { describe('menuType', () => { describe('MenuChildrenWrapper', () => { test('should render children when expanded', () => { const isOpen = true const canCollapse = true const children = <div id="child">Hallo</div> const props = { isOpen, canCollapse, menuTreePath: 'address', preferencesPrefix: '' } const wrapper = intlEnzyme.mountWithIntl(<MenuChildrenWrapper {...props}>{children}</MenuChildrenWrapper>) expect(wrapper.find(StyledMenuChildrenWrapper).prop('isOpen')).to.be.true }) test('should not render children when collapsed', () => { const isOpen = false const canCollapse = true const children = <div id="child">Hallo</div> const props = { isOpen, canCollapse, menuTreePath: 'address', preferencesPrefix: '' } const wrapper = intlEnzyme.mountWithIntl(<MenuChildrenWrapper {...props}>{children}</MenuChildrenWrapper>) expect(wrapper.find(StyledMenuChildrenWrapper).prop('isOpen')).to.be.false }) test('should render children when not collapsible', () => { const isOpen = false const canCollapse = false const children = <div id="child">Hallo</div> const props = { isOpen, canCollapse, menuTreePath: 'address', preferencesPrefix: '' } const wrapper = intlEnzyme.mountWithIntl(<MenuChildrenWrapper {...props}>{children}</MenuChildrenWrapper>) expect(wrapper.find(StyledMenuChildrenWrapper).prop('isOpen')).to.be.true }) })<|fim▁hole|> }) }) }) })<|fim▁end|>
<|file_name|>traileraddict.py<|end_file_name|><|fim▁begin|>import re from .common import InfoExtractor class TrailerAddictIE(InfoExtractor): _VALID_URL = r'(?:http://)?(?:www\.)?traileraddict\.com/(?:trailer|clip)/(?P<movie>.+?)/(?P<trailer_name>.+)' _TEST = { u'url': u'http://www.traileraddict.com/trailer/prince-avalanche/trailer', u'file': u'76184.mp4', u'md5': u'57e39dbcf4142ceb8e1f242ff423fd71', u'info_dict': { u"title": u"Prince Avalanche Trailer", u"description": u"Trailer for Prince Avalanche.Two highway road workers spend the summer of 1988 away from their city lives. The isolated landscape becomes a place of misadventure as the men find themselves at odds with each other and the women they left behind." } } def _real_extract(self, url): mobj = re.match(self._VALID_URL, url)<|fim▁hole|> title = self._search_regex(r'<title>(.+?)</title>', webpage, 'video title').replace(' - Trailer Addict','') view_count = self._search_regex(r'Views: (.+?)<br />', webpage, 'Views Count') video_id = self._og_search_property('video', webpage, 'Video id').split('=')[1] # Presence of (no)watchplus function indicates HD quality is available if re.search(r'function (no)?watchplus()', webpage): fvar = "fvarhd" else: fvar = "fvar" info_url = "http://www.traileraddict.com/%s.php?tid=%s" % (fvar, str(video_id)) info_webpage = self._download_webpage(info_url, video_id , "Downloading the info webpage") final_url = self._search_regex(r'&fileurl=(.+)', info_webpage, 'Download url').replace('%3F','?') thumbnail_url = self._search_regex(r'&image=(.+?)&', info_webpage, 'thumbnail url') ext = final_url.split('.')[-1].split('?')[0] return [{ 'id' : video_id, 'url' : final_url, 'ext' : ext, 'title' : title, 'thumbnail' : thumbnail_url, 'description' : self._og_search_description(webpage), 'view_count' : view_count, }]<|fim▁end|>
name = mobj.group('movie') + '/' + mobj.group('trailer_name') webpage = self._download_webpage(url, name)
<|file_name|>plugin.py<|end_file_name|><|fim▁begin|>### # Copyright (c) 2004-2005, Kevin Murphy # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met:<|fim▁hole|># # * Redistributions of source code must retain the above copyright notice, # this list of conditions, and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions, and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of the author of this software nor the name of # contributors to this software may be used to endorse or promote products # derived from this software without specific prior written consent. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
### import SOAP import supybot.utils as utils from supybot.commands import * import supybot.callbacks as callbacks class UrbanDict(callbacks.Plugin): threaded = True server = SOAP.SOAPProxy('http://api.urbandictionary.com/soap') def _licenseCheck(self, irc): license = self.registryValue('licenseKey') if not license: irc.error('You must have a free UrbanDictionary API license key ' 'in order to use this command. You can get one at ' '<http://www.urbandictionary.com/api.php>. Once you ' 'have one, you can set it with the command ' '"config supybot.plugins.UrbanDict.licenseKey <key>".', Raise=True) return license def urbandict(self, irc, msg, args, words): """<phrase> Returns the definition and usage of <phrase> from UrbanDictionary.com. """ license = self._licenseCheck(irc) definitions = self.server.lookup(license, ' '.join(words)) if not len(definitions): irc.error('No definition found.', Raise=True) word = definitions[0].word definitions = ['%s (%s)' % (d.definition, d.example) for d in definitions] irc.reply(utils.web.htmlToText('%s: %s' % (word, '; '.join(definitions)))) urbandict = wrap(urbandict, [many('something')]) def _define(self, irc, getDefinition, license): definition = getDefinition(license) word = definition.word definitions = ['%s (%s)' % (definition.definition, definition.example)] irc.reply(utils.web.htmlToText('%s: %s' % (word, '; '.join(definitions)))) def daily(self, irc, msg, args): """takes no arguments Returns the definition and usage of the daily phrase from UrbanDictionary.com. """ license = self._licenseCheck(irc) self._define(irc, self.server.get_daily_definition, license) daily = wrap(daily) def random(self, irc, msg, args): """takes no arguments Returns the definition and usage of a random phrase from UrbanDictionary.com. """ license = self._licenseCheck(irc) self._define(irc, self.server.get_random_definition, license) random = wrap(random) Class = UrbanDict # vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:<|fim▁end|>
<|file_name|>graph_models.py<|end_file_name|><|fim▁begin|>import six import sys from optparse import make_option, NO_DEFAULT from django.core.management.base import BaseCommand, CommandError from django.conf import settings from django_extensions.management.modelviz import generate_dot try: import pygraphviz HAS_PYGRAPHVIZ = True except ImportError: HAS_PYGRAPHVIZ = False try: import pydot HAS_PYDOT = True except ImportError: HAS_PYDOT = False class Command(BaseCommand): graph_models_options = ( make_option('--pygraphviz', action='store_true', dest='pygraphviz', help='Use PyGraphViz to generate the image.'), make_option('--pydot', action='store_true', dest='pydot', help='Use PyDot to generate the image.'), make_option('--disable-fields', '-d', action='store_true', dest='disable_fields', help='Do not show the class member fields'), make_option('--group-models', '-g', action='store_true', dest='group_models', help='Group models together respective to their application'), make_option('--all-applications', '-a', action='store_true', dest='all_applications', help='Automatically include all applications from INSTALLED_APPS'), make_option('--output', '-o', action='store', dest='outputfile', help='Render output file. Type of output dependend on file extensions. Use png or jpg to render graph to image.'), make_option('--layout', '-l', action='store', dest='layout', default='dot', help='Layout to be used by GraphViz for visualization. Layouts: circo dot fdp neato nop nop1 nop2 twopi'), make_option('--verbose-names', '-n', action='store_true', dest='verbose_names', help='Use verbose_name of models and fields'), make_option('--language', '-L', action='store', dest='language', help='Specify language used for verbose_name localization'), make_option('--exclude-columns', '-x', action='store', dest='exclude_columns', help='Exclude specific column(s) from the graph. Can also load exclude list from file.'),<|fim▁hole|> help='Exclude specific model(s) from the graph. 
Can also load exclude list from file.'), make_option('--include-models', '-I', action='store', dest='include_models', help='Restrict the graph to specified models.'), make_option('--inheritance', '-e', action='store_true', dest='inheritance', default=True, help='Include inheritance arrows (default)'), make_option('--no-inheritance', '-E', action='store_false', dest='inheritance', help='Do not include inheritance arrows'), make_option('--hide-relations-from-fields', '-R', action='store_false', dest="relations_as_fields", default=True, help="Do not show relations as fields in the graph."), make_option('--disable-sort-fields', '-S', action="store_false", dest="sort_fields", default=True, help="Do not sort fields"), ) option_list = BaseCommand.option_list + graph_models_options help = "Creates a GraphViz dot file for the specified app names. You can pass multiple app names and they will all be combined into a single model. Output is usually directed to a dot file." args = "[appname]" label = 'application name' requires_model_validation = True can_import_settings = True def handle(self, *args, **options): self.options_from_settings(options) if len(args) < 1 and not options['all_applications']: raise CommandError("need one or more arguments for appname") use_pygraphviz = options.get('pygraphviz', False) use_pydot = options.get('pydot', False) cli_options = ' '.join(sys.argv[2:]) dotdata = generate_dot(args, cli_options=cli_options, **options) dotdata = dotdata.encode('utf-8') if options['outputfile']: if not use_pygraphviz and not use_pydot: if HAS_PYGRAPHVIZ: use_pygraphviz = True elif HAS_PYDOT: use_pydot = True if use_pygraphviz: self.render_output_pygraphviz(dotdata, **options) elif use_pydot: self.render_output_pydot(dotdata, **options) else: raise CommandError("Neither pygraphviz nor pydot could be found to generate the image") else: self.print_output(dotdata) def options_from_settings(self, options): defaults = getattr(settings, 'GRAPH_MODELS', None) if defaults: 
for option in self.graph_models_options: long_opt = option._long_opts[0] if long_opt: long_opt = long_opt.lstrip("-").replace("-", "_") if long_opt in defaults: default_value = None if not option.default == NO_DEFAULT: default_value = option.default if options[option.dest] == default_value: options[option.dest] = defaults[long_opt] def print_output(self, dotdata): if six.PY3 and isinstance(dotdata, six.binary_type): dotdata = dotdata.decode() print(dotdata) def render_output_pygraphviz(self, dotdata, **kwargs): """Renders the image using pygraphviz""" if not HAS_PYGRAPHVIZ: raise CommandError("You need to install pygraphviz python module") version = pygraphviz.__version__.rstrip("-svn") try: if tuple(int(v) for v in version.split('.')) < (0, 36): # HACK around old/broken AGraph before version 0.36 (ubuntu ships with this old version) import tempfile tmpfile = tempfile.NamedTemporaryFile() tmpfile.write(dotdata) tmpfile.seek(0) dotdata = tmpfile.name except ValueError: pass graph = pygraphviz.AGraph(dotdata) graph.layout(prog=kwargs['layout']) graph.draw(kwargs['outputfile']) def render_output_pydot(self, dotdata, **kwargs): """Renders the image using pydot""" if not HAS_PYDOT: raise CommandError("You need to install pydot python module") graph = pydot.graph_from_dot_data(dotdata) if not graph: raise CommandError("pydot returned an error") output_file = kwargs['outputfile'] formats = ['bmp', 'canon', 'cmap', 'cmapx', 'cmapx_np', 'dot', 'dia', 'emf', 'em', 'fplus', 'eps', 'fig', 'gd', 'gd2', 'gif', 'gv', 'imap', 'imap_np', 'ismap', 'jpe', 'jpeg', 'jpg', 'metafile', 'pdf', 'pic', 'plain', 'plain-ext', 'png', 'pov', 'ps', 'ps2', 'svg', 'svgz', 'tif', 'tiff', 'tk', 'vml', 'vmlz', 'vrml', 'wbmp', 'xdot'] ext = output_file[output_file.rfind('.') + 1:] format = ext if ext in formats else 'raw' graph.write(output_file, format=format)<|fim▁end|>
make_option('--exclude-models', '-X', action='store', dest='exclude_models',
<|file_name|>job_data.py<|end_file_name|><|fim▁begin|>"""Defines the data needed for executing a job""" from __future__ import unicode_literals import logging import os from numbers import Integral from job.configuration.data.data_file import DATA_FILE_PARSE_SAVER, DATA_FILE_STORE from job.configuration.data.exceptions import InvalidData from job.configuration.results.job_results import JobResults from job.execution.container import SCALE_JOB_EXE_INPUT_PATH from storage.brokers.broker import FileDownload from storage.models import ScaleFile from util.environment import normalize_env_var_name logger = logging.getLogger(__name__) DEFAULT_VERSION = '1.0' class ValidationWarning(object): """Tracks job data configuration warnings during validation that may not prevent the job from working.""" def __init__(self, key, details): """Constructor sets basic attributes. :param key: A unique identifier clients can use to recognize the warning. :type key: string :param details: A user-friendly description of the problem, including field names and/or associated values. :type details: string """ self.key = key self.details = details class JobData(object): """Represents the data needed for executing a job. Data includes details about the data inputs, links needed to connect shared resources to resource instances in Scale, and details needed to store all resulting output. """ def __init__(self, data=None): """Creates a job data object from the given dictionary. The general format is checked for correctness, but the actual input and output details are not checked for correctness against the job interface. If the data is invalid, a :class:`job.configuration.data.exceptions.InvalidData` will be thrown. 
:param data: The job data :type data: dict """ if not data: data = {} self.data_dict = data self.param_names = set() self.data_inputs_by_name = {} # string -> dict self.data_outputs_by_name = {} # string -> dict if 'version' not in self.data_dict: self.data_dict['version'] = DEFAULT_VERSION if not self.data_dict['version'] == '1.0': raise InvalidData('Invalid job data: %s is an unsupported version number' % self.data_dict['version']) if 'input_data' not in self.data_dict: self.data_dict['input_data'] = [] for data_input in self.data_dict['input_data']: if 'name' not in data_input: raise InvalidData('Invalid job data: Every data input must have a "name" field') name = data_input['name'] if name in self.param_names: raise InvalidData('Invalid job data: %s cannot be defined more than once' % name) else: self.param_names.add(name) self.data_inputs_by_name[name] = data_input if 'output_data' not in self.data_dict: self.data_dict['output_data'] = [] for data_output in self.data_dict['output_data']: if 'name' not in data_output: raise InvalidData('Invalid job data: Every data output must have a "name" field') name = data_output['name'] if name in self.param_names: raise InvalidData('Invalid job data: %s cannot be defined more than once' % name) else: self.param_names.add(name) self.data_outputs_by_name[name] = data_output def add_file_input(self, input_name, file_id): """Adds a new file parameter to this job data. This method does not perform validation on the job data. 
:param input_name: The file parameter name :type input_name: string :param file_id: The ID of the file :type file_id: long """ if input_name in self.param_names: raise Exception('Data already has a parameter named %s' % input_name) self.param_names.add(input_name) file_input = {'name': input_name, 'file_id': file_id} self.data_dict['input_data'].append(file_input) self.data_inputs_by_name[input_name] = file_input def add_file_list_input(self, input_name, file_ids): """Adds a new files parameter to this job data. This method does not perform validation on the job data. :param input_name: The files parameter name :type input_name: string :param file_ids: The ID of the file :type file_ids: [long] """ if input_name in self.param_names: raise Exception('Data already has a parameter named %s' % input_name) self.param_names.add(input_name) files_input = {'name': input_name, 'file_ids': file_ids} self.data_dict['input_data'].append(files_input) self.data_inputs_by_name[input_name] = files_input def add_file_output(self, data, add_to_internal=True): """Adds a new output files to this job data with a workspace ID. :param data: The output parameter dict :type data: dict :param add_to_internal: Whether we should add to private data dict. Unneeded when used from __init__ :type add_to_internal: bool """ # Call to legacy method self.add_output(data['name'], data['workspace_id']) def add_output(self, output_name, workspace_id): """Adds a new output parameter to this job data with a workspace ID. This method does not perform validation on the job data. 
:param output_name: The output parameter name :type output_name: string :param workspace_id: The ID of the workspace :type workspace_id: int """ if output_name in self.param_names: raise Exception('Data already has a parameter named %s' % output_name) self.param_names.add(output_name) output = {'name': output_name, 'workspace_id': workspace_id} self.data_dict['output_data'].append(output) self.data_outputs_by_name[output_name] = output def add_property_input(self, input_name, value): """Adds a new property parameter to this job data. This method does not perform validation on the job data. :param input_name: The property parameter name :type input_name: string :param value: The value of the property :type value: string """ if input_name in self.param_names: raise Exception('Data already has a parameter named %s' % input_name) self.param_names.add(input_name) prop_input = {'name': input_name, 'value': value} self.data_dict['input_data'].append(prop_input) self.data_inputs_by_name[input_name] = prop_input def get_all_properties(self): """Retrieves all properties from this job data and returns them in ascending order of their names :returns: List of strings containing name=value :rtype: [string] """ properties = [] names = sorted(self.data_inputs_by_name.keys()) for name in names: the_input = self.data_inputs_by_name[name] if 'value' in the_input: properties.append(name + '=' + the_input['value']) return properties def get_dict(self): """Returns the internal dictionary that represents this job data :returns: The internal dictionary :rtype: dict """ return self.data_dict def get_input_file_ids(self): """Returns a set of scale file identifiers for each file in the job input data. 
:returns: Set of scale file identifiers :rtype: {int} """ file_ids = set() for data_input in self.data_dict['input_data']: if 'file_id' in data_input: file_ids.add(data_input['file_id']) elif 'file_ids' in data_input: file_ids.update(data_input['file_ids']) return file_ids def get_input_file_ids_by_input(self): """Returns the list of file IDs for each input that holds files :returns: Dict where each file input name maps to its list of file IDs :rtype: dict """ file_ids = {} for data_input in self.data_dict['input_data']: if 'file_id' in data_input: file_ids[data_input['name']] = [data_input['file_id']] elif 'file_ids' in data_input: file_ids[data_input['name']] = data_input['file_ids'] return file_ids def get_input_file_info(self): """Returns a set of scale file identifiers and input names for each file in the job input data. :returns: Set of scale file identifiers and names :rtype: set[tuple] """ file_info = set() for data_input in self.data_dict['input_data']: if 'file_id' in data_input: file_info.add((data_input['file_id'], data_input['name'])) elif 'file_ids' in data_input: for file_id in data_input['file_ids']: file_info.add((file_id, data_input['name'])) return file_info def get_output_workspace_ids(self): """Returns a list of the IDs for every workspace used to store the output files for this data :returns: List of workspace IDs :rtype: [int] """ workspace_ids = set() for name in self.data_outputs_by_name: file_output = self.data_outputs_by_name[name] workspace_id = file_output['workspace_id'] workspace_ids.add(workspace_id) return list(workspace_ids) def get_output_workspaces(self): """Returns a dict of the output parameter names mapped to their output workspace ID :returns: A dict mapping output parameters to workspace IDs :rtype: dict """ workspaces = {} for name in self.data_outputs_by_name: file_output = self.data_outputs_by_name[name] workspace_id = file_output['workspace_id'] workspaces[name] = workspace_id return workspaces def 
get_property_values(self, property_names): """Retrieves the values contained in this job data for the given property names. If no value is available for a property name, it will not be included in the returned dict. :param property_names: List of property names :type property_names: [string] :returns: Dict with each property name mapping to its value :rtype: {string: string} """ property_values = {} for name in property_names: if name in self.data_inputs_by_name: property_input = self.data_inputs_by_name[name] if 'value' not in property_input: raise Exception('Property %s is missing required "value" field' % name) property_values[name] = property_input['value'] return property_values def get_injected_input_values(self, input_files_dict): """Apply all execution time values to job data :param input_files: Mapping of input names to InputFiles :type input_files: {str, :class:`job.execution.configuration.input_file.InputFile`} :return: Mapping of all input keys to their true file / property values :rtype: {str, str} """ input_values = {} for data_input in self.get_dict()['input_data']: input_name = data_input['name'] if 'value' in data_input: input_values[input_name] = data_input['value'] if 'file_id' in data_input: input_file = input_files_dict[input_name][0] file_name = input_file.file_name if input_file.local_file_name: file_name = input_file.local_file_name input_values[input_name] = os.path.join(SCALE_JOB_EXE_INPUT_PATH, input_name, file_name) elif 'file_ids' in data_input: input_values[input_name] = os.path.join(SCALE_JOB_EXE_INPUT_PATH, input_name) return input_values def get_injected_env_vars(self, input_files_dict): """Apply all execution time values to job data :param input_files: Mapping of input names to InputFiles :type input_files: {str, :class:`job.execution.configuration.input_file.InputFile`} :return: Mapping of all input keys to their true file / property values :rtype: {str, str} """ env_vars = {} for data_input in self.get_dict()['input_data']: 
input_name = data_input['name'] if 'value' in data_input: env_vars[normalize_env_var_name(input_name)] = data_input['value'] if 'file_id' in data_input: input_file = input_files_dict[input_name][0] file_name = os.path.basename(input_file.workspace_path) if input_file.local_file_name: file_name = input_file.local_file_name env_vars[normalize_env_var_name(input_name)] = os.path.join(SCALE_JOB_EXE_INPUT_PATH, input_name, file_name) elif 'file_ids' in data_input: env_vars[normalize_env_var_name(input_name)] = os.path.join(SCALE_JOB_EXE_INPUT_PATH, input_name) return env_vars def has_workspaces(self): """Whether this job data contains output wrkspaces :returns: Whether this job data contains output wrkspaces :rtype: bool """ return True def retrieve_input_data_files(self, data_files): """Retrieves the given data input files and writes them to the given local directories. Any given file parameters that do not appear in the data will not be returned in the results. :param data_files: Dict with each file parameter name mapping to a bool indicating if the parameter accepts multiple files (True), an absolute directory path and bool indicating if job supports partial file download (True). :type data_files: {string: tuple(bool, string, bool)} :returns: Dict with each file parameter name mapping to a list of absolute file paths of the written files :rtype: {string: [string]} """ # Organize the data files param_file_ids = {} # Parameter name -> [file IDs] files_to_retrieve = {} # File ID -> tuple(string, bool) for relative dir path and if partially accessed for name in data_files: multiple = data_files[name][0] dir_path = data_files[name][1] partial = data_files[name][2] if name not in self.data_inputs_by_name: continue file_input = self.data_inputs_by_name[name] file_ids = [] # TODO: Remove with legacy job types. 
This is a protection against multiple being specified for a single file or no file if multiple and 'file_ids' in file_input: for file_id in file_input['file_ids']: file_id = long(file_id) file_ids.append(file_id) files_to_retrieve[file_id] = (dir_path, partial) else: file_id = long(file_input['file_id']) file_ids.append(file_id) files_to_retrieve[file_id] = (dir_path, partial) param_file_ids[name] = file_ids # Retrieve all files retrieved_files = self._retrieve_files(files_to_retrieve) for file_id in retrieved_files: del files_to_retrieve[file_id] if files_to_retrieve: msg = 'Failed to retrieve file with ID %i' % files_to_retrieve.keys()[0] raise Exception(msg) # Organize the results retrieved_params = {} # Parameter name -> [file paths] for name in param_file_ids: file_path_list = [] for file_id in param_file_ids[name]: file_path_list.append(retrieved_files[file_id]) retrieved_params[name] = file_path_list return retrieved_params def save_parse_results(self, parse_results): """Saves the given parse results<|fim▁hole|> :param parse_results: Dict with each input file name mapping to a tuple of GeoJSON containing GIS meta-data (optionally None), the start time of the data contained in the file (optionally None), the end time of the data contained in the file (optionally None), the list of data types, and the new workspace path (optionally None) :type parse_results: {string: tuple(string, :class:`datetime.datetime`, :class:`datetime.datetime`, [], string, string)} """ input_file_ids = [] for name in self.data_inputs_by_name: data_input = self.data_inputs_by_name[name] if 'file_ids' in data_input: file_ids = data_input['file_ids'] for file_id in file_ids: input_file_ids.append(file_id) elif 'file_id' in data_input: file_id = data_input['file_id'] input_file_ids.append(file_id) data_file_parse_saver = DATA_FILE_PARSE_SAVER['DATA_FILE_PARSE_SAVER'] if not data_file_parse_saver: raise Exception('No data file parse saver found') 
data_file_parse_saver.save_parse_results(parse_results, input_file_ids) def setup_job_dir(self, data_files): """Sets up the directory structure for a job execution and downloads the given files :param data_files: Dict with each file parameter name mapping to a bool indicating if the parameter accepts multiple files (True) and an absolute directory path :type data_files: {string: tuple(bool, string)} :returns: Dict with each file parameter name mapping to a list of absolute file paths of the written files :rtype: {string: [string]} """ # Download the job execution input files self.retrieve_input_data_files(data_files) def store_output_data_files(self, data_files, job_exe): """Stores the given data output files :param data_files: Dict with each file parameter name mapping to a list of ProductFileMetadata classes :type data_files: {string: [`ProductFileMetadata`]} :param job_exe: The job execution model (with related job and job_type fields) that is storing the output data files :type job_exe: :class:`job.models.JobExecution` :returns: The job results :rtype: :class:`job.configuration.results.job_results.JobResults` """ # Organize the data files workspace_files = {} # Workspace ID -> [(absolute local file path, media type)] params_by_file_path = {} # Absolute local file path -> output parameter name output_workspaces = JobData.create_output_workspace_dict(data_files.keys(), self, job_exe) for name in data_files: workspace_id = output_workspaces[name] if workspace_id in workspace_files: workspace_file_list = workspace_files[workspace_id] else: workspace_file_list = [] workspace_files[workspace_id] = workspace_file_list data_file_entry = data_files[name] if isinstance(data_file_entry, list): for file_entry in data_file_entry: file_path = os.path.normpath(file_entry.local_path) if not os.path.isfile(file_path): raise Exception('%s is not a valid file' % file_path) params_by_file_path[file_path] = name workspace_file_list.append(file_entry) else: file_path = 
os.path.normpath(data_file_entry.local_path) if not os.path.isfile(file_path): raise Exception('%s is not a valid file' % file_path) params_by_file_path[file_path] = name data_file_entry.local_path = file_path workspace_file_list.append(data_file_entry) data_file_store = DATA_FILE_STORE['DATA_FILE_STORE'] if not data_file_store: raise Exception('No data file store found') stored_files = data_file_store.store_files(workspace_files, self.get_input_file_ids(), job_exe) # Organize results param_file_ids = {} # Output parameter name -> file ID or [file IDs] for file_path in stored_files: file_id = stored_files[file_path] name = params_by_file_path[file_path] if isinstance(data_files[name], list): if name in param_file_ids: file_id_list = param_file_ids[name] else: file_id_list = [] param_file_ids[name] = file_id_list file_id_list.append(file_id) else: param_file_ids[name] = file_id # Create job results results = JobResults() for name in param_file_ids: param_entry = param_file_ids[name] if isinstance(param_entry, list): results.add_file_list_parameter(name, param_entry) else: results.add_file_parameter(name, param_entry) return results def validate_input_files(self, files): """Validates the given file parameters to make sure they are valid with respect to the job interface. :param files: Dict of file parameter names mapped to a tuple with three items: whether the parameter is required (True), if the parameter is for multiple files (True), and the description of the expected file meta-data :type files: {string: tuple(bool, bool, :class:`job.configuration.interface.scale_file.ScaleFileDescription`)} :returns: A list of warnings discovered during validation. :rtype: [:class:`job.configuration.data.job_data.ValidationWarning`] :raises :class:`job.configuration.data.exceptions.InvalidData`: If there is a configuration problem. 
""" warnings = [] for name in files: required = files[name][0] multiple = files[name][1] file_desc = files[name][2] if name in self.data_inputs_by_name: # Have this input, make sure it is valid file_input = self.data_inputs_by_name[name] file_ids = [] if multiple: if 'file_ids' not in file_input: if 'file_id' in file_input: file_input['file_ids'] = [file_input['file_id']] else: msg = ('Invalid job data: Data input %s is a list of files and must have a "file_ids" or ' '"file_id" field') raise InvalidData(msg % name) if 'file_id' in file_input: del file_input['file_id'] value = file_input['file_ids'] if not isinstance(value, list): msg = 'Invalid job data: Data input %s must have a list of integers in its "file_ids" field' raise InvalidData(msg % name) for file_id in value: if not isinstance(file_id, Integral): msg = ('Invalid job data: Data input %s must have a list of integers in its "file_ids" ' 'field') raise InvalidData(msg % name) file_ids.append(long(file_id)) else: if 'file_id' not in file_input: msg = 'Invalid job data: Data input %s is a file and must have a "file_id" field' % name raise InvalidData(msg) if 'file_ids' in file_input: del file_input['file_ids'] file_id = file_input['file_id'] if not isinstance(file_id, Integral): msg = 'Invalid job data: Data input %s must have an integer in its "file_id" field' % name raise InvalidData(msg) file_ids.append(long(file_id)) warnings.extend(self._validate_file_ids(file_ids, file_desc)) else: # Don't have this input, check if it is required if required: raise InvalidData('Invalid job data: Data input %s is required and was not provided' % name) # Handle extra inputs in the data that are not defined in the interface for name in list(self.data_inputs_by_name.keys()): data_input = self.data_inputs_by_name[name] if 'file_id' in data_input or 'file_ids' in data_input: if name not in files: warn = ValidationWarning('unknown_input', 'Unknown input %s will be ignored' % name) warnings.append(warn) 
self._delete_input(name) return warnings def validate_output_files(self, files): """Validates the given file parameters to make sure they are valid with respect to the job interface. :param files: List of file parameter names :type files: [string] :returns: A list of warnings discovered during validation. :rtype: [:class:`job.configuration.data.job_data.ValidationWarning`] :raises :class:`job.configuration.data.exceptions.InvalidData`: If there is a configuration problem. """ warnings = [] workspace_ids = set() for name in files: if name not in self.data_outputs_by_name: raise InvalidData('Invalid job data: Data output %s was not provided' % name) file_output = self.data_outputs_by_name[name] if 'workspace_id' not in file_output: raise InvalidData('Invalid job data: Data output %s must have a "workspace_id" field' % name) workspace_id = file_output['workspace_id'] if not isinstance(workspace_id, Integral): msg = 'Invalid job data: Data output %s must have an integer in its "workspace_id" field' % name raise InvalidData(msg) workspace_ids.add(workspace_id) data_file_store = DATA_FILE_STORE['DATA_FILE_STORE'] if not data_file_store: raise Exception('No data file store found') workspaces = data_file_store.get_workspaces(workspace_ids) for workspace_id in workspaces: active = workspaces[workspace_id] if not active: raise InvalidData('Invalid job data: Workspace for ID %i is not active' % workspace_id) workspace_ids.remove(workspace_id) # Check if there were any workspace IDs that weren't found if workspace_ids: raise InvalidData('Invalid job data: Workspace for ID(s): %s do not exist' % str(workspace_ids)) return warnings def validate_properties(self, property_names): """Validates the given property names to ensure they are all populated correctly and exist if they are required. :param property_names: Dict of property names mapped to a bool indicating if they are required :type property_names: {string: bool} :returns: A list of warnings discovered during validation. 
:rtype: [:class:`job.configuration.data.job_data.ValidationWarning`] :raises :class:`job.configuration.data.exceptions.InvalidData`: If there is a configuration problem. """ warnings = [] for name in property_names: if name in self.data_inputs_by_name: # Have this input, make sure it is a valid property property_input = self.data_inputs_by_name[name] if 'value' not in property_input: msg = 'Invalid job data: Data input %s is a property and must have a "value" field' % name raise InvalidData(msg) value = property_input['value'] if not isinstance(value, basestring): raise InvalidData('Invalid job data: Data input %s must have a string in its "value" field' % name) else: # Don't have this input, check if it is required if property_names[name]: raise InvalidData('Invalid job data: Data input %s is required and was not provided' % name) # Handle extra inputs in the data that are not defined in the interface for name in list(self.data_inputs_by_name.keys()): data_input = self.data_inputs_by_name[name] if 'value' in data_input: if name not in property_names: warn = ValidationWarning('unknown_input', 'Unknown input %s will be ignored' % name) warnings.append(warn) self._delete_input(name) return warnings def _delete_input(self, name): """Deletes the input with the given name :param name: The name of the input to delete :type name: string """ if name in self.data_inputs_by_name: del self.data_inputs_by_name[name] self.param_names.discard(name) new_input_data = [] for data_input in self.data_dict['input_data']: if data_input['name'] != name: new_input_data.append(data_input) self.data_dict['input_data'] = new_input_data def _retrieve_files(self, data_files): """Retrieves the given data files and writes them to the given local directories. If no file with a given ID exists, it will not be retrieved and returned in the results. 
:param data_files: Dict with each file ID mapping to an absolute directory path for downloading and bool indicating if job supports partial file download (True). :type data_files: {long: type(string, bool)} :returns: Dict with each file ID mapping to its absolute local path :rtype: {long: string} :raises ArchivedWorkspace: If any of the files has an archived workspace (no longer active) :raises DeletedFile: If any of the files has been deleted """ file_ids = data_files.keys() files = ScaleFile.objects.filter(id__in=file_ids) file_downloads = [] results = {} local_paths = set() # Pay attention to file name collisions and update file name if needed counter = 0 for scale_file in files: partial = data_files[scale_file.id][1] local_path = os.path.join(data_files[scale_file.id][0], scale_file.file_name) while local_path in local_paths: # Path collision, try a different file name counter += 1 new_file_name = '%i_%s' % (counter, scale_file.file_name) local_path = os.path.join(data_files[scale_file.id][0], new_file_name) local_paths.add(local_path) file_downloads.append(FileDownload(scale_file, local_path, partial)) results[scale_file.id] = local_path ScaleFile.objects.download_files(file_downloads) return results def _validate_file_ids(self, file_ids, file_desc): """Validates the files with the given IDs against the given file description. If invalid, a :class:`job.configuration.data.exceptions.InvalidData` will be thrown. :param file_ids: List of file IDs :type file_ids: [long] :param file_desc: The description of the required file meta-data for validation :type file_desc: :class:`job.configuration.interface.scale_file.ScaleFileDescription` :returns: A list of warnings discovered during validation. :rtype: [:class:`job.configuration.data.job_data.ValidationWarning`] :raises :class:`job.configuration.data.exceptions.InvalidData`: If any of the files are missing. 
""" warnings = [] found_ids = set() for scale_file in ScaleFile.objects.filter(id__in=file_ids): found_ids.add(scale_file.id) media_type = scale_file.media_type if not file_desc.is_media_type_allowed(media_type): warn = ValidationWarning('media_type', 'Invalid media type for file: %i -> %s' % (scale_file.id, media_type)) warnings.append(warn) # Check if there were any file IDs that weren't found in the query for file_id in file_ids: if file_id not in found_ids: raise InvalidData('Invalid job data: Data file for ID %i does not exist' % file_id) return warnings @staticmethod def create_output_workspace_dict(output_params, job_data, job_exe): """Creates the mapping from output to workspace both ways: the old way from job data and the new way from job configuration :param output_params: The list of output parameter names :type output_params: :func:`list` :param job_data: The job data :type job_data: 1.0? 2.0? WHO KNOWZ? :param job_exe: The job execution model (with related job and job_type fields) :type job_exe: :class:`job.models.JobExecution` :return: Dict where output param name maps to workspace ID :rtype: dict """ workspace_dict = {} # {Output name: workspace ID} if job_data.has_workspaces(): # Do the old way of getting output workspaces from job data for name, output_dict in job_data.data_outputs_by_name.items(): workspace_id = output_dict['workspace_id'] workspace_dict[name] = workspace_id config = job_exe.job.get_job_configuration() if config and (config.default_output_workspace or config.output_workspaces): workspace_names_dict = {} # {Output name: workspace name} # Do the new way, grabbing output workspaces from job configuration for name in output_params: if name in config.output_workspaces: workspace_names_dict[name] = config.output_workspaces[name] elif config.default_output_workspace: workspace_names_dict[name] = config.default_output_workspace else: raise Exception('No output workspace configured for output \'%s\'' % name) from storage.models import 
Workspace workspace_mapping = {w.name: w.id for w in Workspace.objects.filter(name__in=workspace_names_dict.values())} for output_name, workspace_name in workspace_names_dict.items(): if workspace_name not in workspace_mapping: raise Exception('Workspace with name %s does not exist!' % workspace_name) workspace_dict[output_name] = workspace_mapping[workspace_name] return workspace_dict<|fim▁end|>
<|file_name|>t_model.py<|end_file_name|><|fim▁begin|>"""Tests for GP and SP classes""" import math import unittest import numpy as np from gpkit import (Model, Monomial, settings, VectorVariable, Variable, SignomialsEnabled, ArrayVariable) from gpkit.geometric_program import GeometricProgram from gpkit.small_classes import CootMatrix from gpkit.feasibility import feasibility_model NDIGS = {"cvxopt": 5, "mosek": 7, "mosek_cli": 5} # name: decimal places of accuracy class TestGP(unittest.TestCase): """ Test GeometricPrograms. This TestCase gets run once for each installed solver. """ name = "TestGP_" # solver and ndig get set in loop at bottom this file, a bit hacky solver = None ndig = None def test_trivial_gp(self): """ Create and solve a trivial GP: minimize x + 2y subject to xy >= 1 The global optimum is (x, y) = (sqrt(2), 1/sqrt(2)). """ x = Monomial('x') y = Monomial('y') prob = Model(cost=(x + 2*y), constraints=[x*y >= 1]) sol = prob.solve(solver=self.solver, verbosity=0) self.assertEqual(type(prob.latex()), str) self.assertEqual(type(prob._repr_latex_()), str) self.assertAlmostEqual(sol("x"), math.sqrt(2.), self.ndig) self.assertAlmostEqual(sol("y"), 1/math.sqrt(2.), self.ndig) self.assertAlmostEqual(sol("x") + 2*sol("y"), 2*math.sqrt(2), self.ndig) self.assertAlmostEqual(sol["cost"], 2*math.sqrt(2), self.ndig) def test_simple_united_gp(self): R = Variable('R', units="nautical_miles") a0 = Variable('a0', 340.29, 'm/s') theta = Variable(r'\theta', 0.7598) t = Variable('t', 10, 'hr') T_loiter = Variable('T_{loiter}', 1, 'hr') T_reserve = Variable('T_{reserve}', 45, 'min') M = VectorVariable(2, 'M') if R.units: prob = Model(1/R, [t >= sum(R/a0/M/theta**0.5) + T_loiter + T_reserve, M <= 0.76]) sol = prob.solve(verbosity=0) self.assertAlmostEqual(sol["cost"], 0.0005532, self.ndig) def test_trivial_vector_gp(self): """ Create and solve a trivial GP with VectorVariables """<|fim▁hole|> sol = prob.solve(solver=self.solver, verbosity=0) 
self.assertEqual(sol('x').shape, (2,)) self.assertEqual(sol('y').shape, (2,)) for x, y in zip(sol('x'), sol('y')): self.assertAlmostEqual(x, math.sqrt(2.), self.ndig) self.assertAlmostEqual(y, 1/math.sqrt(2.), self.ndig) self.assertAlmostEqual(sol["cost"]/(4*math.sqrt(2)), 1., self.ndig) def test_zero_lower_unbounded(self): x = Variable('x', value=4) y = Variable('y', value=0) z = Variable('z') t1 = Variable('t1') t2 = Variable('t2') prob = Model(z, [z >= x + t1, t1 >= t2, t2 >= y]) sol = prob.solve(verbosity=0) def test_mdd_example(self): Cl = Variable("Cl", 0.5, "-", "Lift Coefficient") Mdd = Variable("Mdd", "-", "Drag Divergence Mach Number") m1 = Model(1/Mdd, [1 >= 5*Mdd + 0.5, Mdd >= 0.00001]) m2 = Model(1/Mdd, [1 >= 5*Mdd + 0.5]) m3 = Model(1/Mdd, [1 >= 5*Mdd + Cl, Mdd >= 0.00001]) sol1 = m1.solve(solver=self.solver, verbosity=0) sol2 = m2.solve(solver=self.solver, verbosity=0) sol3 = m3.solve(solver=self.solver, verbosity=0) gp1, gp2, gp3 = [m.program for m in [m1, m2, m3]] self.assertEqual(gp1.A, CootMatrix(row=[0, 1, 2], col=[0, 0, 0], data=[-1, 1, -1])) self.assertEqual(gp2.A, CootMatrix(row=[0, 1], col=[0, 0], data=[-1, 1])) # order of variables within a posynomial is not stable # (though monomial order is) equiv1 = gp3.A == CootMatrix(row=[0, 2, 3, 2], col=[0, 0, 0, 0], data=[-1, 1, -1, 0]) equiv2 = gp3.A == CootMatrix(row=[0, 1, 3, 2], col=[0, 0, 0, 0], data=[-1, 1, -1, 0]) self.assertTrue(equiv1 or equiv2) self.assertAlmostEqual(sol1(Mdd), sol2(Mdd)) self.assertAlmostEqual(sol1(Mdd), sol3(Mdd)) self.assertAlmostEqual(sol2(Mdd), sol3(Mdd)) def test_additive_constants(self): x = Variable('x') m = Model(1/x, [1 >= 5*x + 0.5, 1 >= 10*x]) m.solve(verbosity=0) gp = m.program self.assertEqual(gp.cs[1], gp.cs[2]) self.assertEqual(gp.A.data[1], gp.A.data[2]) def test_zeroing(self): L = Variable("L") k = Variable("k", 0) with SignomialsEnabled(): constr = [L-5*k <= 10] sol = Model(1/L, constr).solve(verbosity=0, solver=self.solver) 
self.assertAlmostEqual(sol(L), 10, self.ndig) self.assertAlmostEqual(sol["cost"], 0.1, self.ndig) def test_singular(self): """ Create and solve GP with a singular A matrix """ if self.solver == 'cvxopt': # cvxopt can't solve this problem # (see https://github.com/cvxopt/cvxopt/issues/36) return x = Variable('x') y = Variable('y') m = Model(y*x, [y*x >= 12]) sol = m.solve(solver=self.solver, verbosity=0) self.assertAlmostEqual(sol["cost"], 12, self.ndig) def test_constants_in_objective_1(self): '''Issue 296''' x1 = Variable('x1') x2 = Variable('x2') m = Model(1.+ x1 + x2, [x1 >= 1., x2 >= 1.]) sol = m.solve(solver=self.solver, verbosity=0) self.assertAlmostEqual(sol["cost"], 3, self.ndig) def test_constants_in_objective_2(self): '''Issue 296''' x1 = Variable('x1') x2 = Variable('x2') m = Model(x1**2 + 100 + 3*x2, [x1 >= 10., x2 >= 15.]) sol = m.solve(solver=self.solver, verbosity=0) self.assertAlmostEqual(sol["cost"]/245., 1, self.ndig) def test_feasibility_gp_(self): x = Variable('x') m = Model(x, [x**2 >= 1, x <= 0.5]) self.assertRaises(RuntimeWarning, m.solve, verbosity=0) fm = feasibility_model(m, "max") sol1 = fm.solve(verbosity=0) fm = feasibility_model(m, "product") sol2 = fm.solve(verbosity=0) self.assertTrue(sol1["cost"] >= 1) self.assertTrue(sol2["cost"] >= 1) def test_terminating_constant_(self): x = Variable('x') y = Variable('y', value=0.5) prob = Model(1/x, [x + y <= 4]) sol = prob.solve(verbosity=0) self.assertAlmostEqual(sol["cost"], 1/3.5, self.ndig) def test_check_result(self): """issue 361""" N = 5 L = 5. 
dx = L/(N-1) EI = Variable("EI",10) p = VectorVariable(N, "p") p = p.sub(p, 100*np.ones(N)) V = VectorVariable(N, "V") M = VectorVariable(N, "M") th = VectorVariable(N, "th") w = VectorVariable(N, "w") eps = 1E-6 substitutions = {var: eps for var in [V[-1], M[-1], th[0], w[0]]} objective = w[-1] constraints = [EI*V.left[1:N] >= EI*V[1:N] + 0.5*dx*p.left[1:N] + 0.5*dx*p[1:N], EI*M.left[1:N] >= EI*M[1:N] + 0.5*dx*V.left[1:N] + 0.5*dx*V[1:N], EI*th.right[0:N-1] >= EI*th[0:N-1] + 0.5*dx*M.right[0:N-1] + 0.5*dx*M[0:N-1], EI*w.right[0:N-1] >= EI*w[0:N-1] + 0.5*dx*th.right[0:N-1] + 0.5*dx*th[0:N-1]] m = Model(objective, constraints, substitutions) sol = m.solve(verbosity=0) def test_exps_is_tuple(self): """issue 407""" x = Variable('x') m = Model(x, [x >= 1]) m.solve(verbosity=0) self.assertEqual(type(m.program.cost.exps), tuple) class TestSP(unittest.TestCase): """test case for SP class -- gets run for each installed solver""" name = "TestSP_" solver = None ndig = None def test_trivial_sp(self): x = Variable('x') y = Variable('y') with SignomialsEnabled(): m = Model(x, [x >= 1-y, y <= 0.1]) sol = m.localsolve(verbosity=0, solver=self.solver) self.assertAlmostEqual(sol["variables"]["x"], 0.9, self.ndig) with SignomialsEnabled(): m = Model(x, [x+y >= 1, y <= 0.1]) sol = m.localsolve(verbosity=0, solver=self.solver) self.assertAlmostEqual(sol["variables"]["x"], 0.9, self.ndig) def test_relaxation(self): x = Variable("x") y = Variable("y") with SignomialsEnabled(): constraints = [y + x >= 2, y <= x] objective = x m = Model(objective, constraints) m.localsolve(verbosity=0) # issue #257 A = VectorVariable(2, "A") B = ArrayVariable([2, 2], "B") C = VectorVariable(2, "C") with SignomialsEnabled(): constraints = [A <= B.dot(C), B <= 1, C <= 1] obj = 1/A[0] + 1/A[1] m = Model(obj, constraints) m.localsolve(verbosity=0) def test_issue180(self): L = Variable("L") Lmax = Variable("L_{max}", 10) W = Variable("W") Wmax = Variable("W_{max}", 10) A = Variable("A", 10) Obj = 
Variable("Obj") a_val = 0.01 a = Variable("a", a_val) with SignomialsEnabled(): eqns = [L <= Lmax, W <= Wmax, L*W >= A, Obj >= a*(2*L + 2*W) + (1-a)*(12 * W**-1 * L**-3)] m = Model(Obj, eqns) spsol = m.solve(verbosity=0, solver=self.solver) # now solve as GP eqns[-1] = (Obj >= a_val*(2*L + 2*W) + (1-a_val)*(12 * W**-1 * L**-3)) m = Model(Obj, eqns) gpsol = m.solve(verbosity=0, solver=self.solver) self.assertAlmostEqual(spsol['cost'], gpsol['cost']) def test_trivial_sp2(self): x = Variable("x") y = Variable("y") # converging from above with SignomialsEnabled(): constraints = [y + x >= 2, y >= x] objective = y x0 = 1 y0 = 2 m = Model(objective, constraints) sol1 = m.localsolve(x0={x: x0, y: y0}, verbosity=0, solver=self.solver) # converging from right with SignomialsEnabled(): constraints = [y + x >= 2, y <= x] objective = x x0 = 2 y0 = 1 m = Model(objective, constraints) sol2 = m.localsolve(x0={x: x0, y: y0}, verbosity=0, solver=self.solver) self.assertAlmostEqual(sol1["variables"]["x"], sol2["variables"]["x"], self.ndig) self.assertAlmostEqual(sol1["variables"]["y"], sol2["variables"]["x"], self.ndig) def test_sp_initial_guess_sub(self): x = Variable("x") y = Variable("y") x0 = 1 y0 = 2 with SignomialsEnabled(): constraints = [y + x >= 2, y <= x] objective = x m = Model(objective, constraints) try: sol = m.localsolve(x0={x: x0, y: y0}, verbosity=0, solver=self.solver) except TypeError: self.fail("Call to local solve with only variables failed") self.assertAlmostEqual(sol(x), 1, self.ndig) self.assertAlmostEqual(sol["cost"], 1, self.ndig) try: sol = m.localsolve(x0={"x": x0, "y": y0}, verbosity=0, solver=self.solver) except TypeError: self.fail("Call to local solve with only variable strings failed") self.assertAlmostEqual(sol("x"), 1, self.ndig) self.assertAlmostEqual(sol["cost"], 1, self.ndig) try: sol = m.localsolve(x0={"x": x0, y: y0}, verbosity=0, solver=self.solver) except TypeError: self.fail("Call to local solve with a mix of variable strings " "and 
variables failed") self.assertAlmostEqual(sol["cost"], 1, self.ndig) def test_small_signomial(self): x = Variable('x') z = Variable('z') local_ndig = 4 nonzero_adder = 0.1 # TODO: support reaching zero, issue #348 with SignomialsEnabled(): J = 0.01*(x - 1)**2 + nonzero_adder m = Model(z, [z >= J]) sol = m.localsolve(verbosity=0) self.assertAlmostEqual(sol['cost'], nonzero_adder, local_ndig) self.assertAlmostEqual(sol('x'), 0.987, 3) def test_signomials_not_allowed_in_objective(self): with SignomialsEnabled(): x = Variable('x') y = Variable('y') J = 0.01*((x - 1)**2 + (y - 1)**2) + (x*y - 1)**2 m = Model(J) with self.assertRaises(TypeError): sol = m.localsolve(verbosity=0) def test_partial_sub_signomial(self): """Test SP partial x0 initialization""" x = Variable('x') y = Variable('y') with SignomialsEnabled(): m = Model(x, [x + y >= 1, y <= 0.5]) m.localsolve(x0={x: 0.5}, verbosity=0) self.assertEqual(m.program.gps[0].constraints[0].exp[x], -1./3) TEST_CASES = [TestGP, TestSP] TESTS = [] for testcase in TEST_CASES: for solver in settings["installed_solvers"]: if solver: test = type(testcase.__name__+"_"+solver, (testcase,), {}) setattr(test, "solver", solver) setattr(test, "ndig", NDIGS[solver]) TESTS.append(test) if __name__ == "__main__": from gpkit.tests.helpers import run_tests run_tests(TESTS)<|fim▁end|>
x = VectorVariable(2, 'x') y = VectorVariable(2, 'y') prob = Model(cost=(sum(x) + 2*sum(y)), constraints=[x*y >= 1])
<|file_name|>connectionitem.cpp<|end_file_name|><|fim▁begin|>/* Copyright 2012 Arthur de Souza Ribeiro <[email protected]> Copyright 2012-2013 Lamarque V. Souza <[email protected]> This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License or (at your option) version 3 or any later version accepted by the membership of KDE e.V. (or its successor approved by the membership of KDE e.V.), which shall act as a proxy defined in Section 14 of version 3 of the license. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. */ #include "connectionitem.h" #include <QtCore/QDebug> #include "remoteactivatable.h" #include "uiutils.h" #include <QPainter> #include <QGraphicsSceneHoverEvent> #include <QSize> #include <QtGui/QIcon> #include <QtCore/QSettings> #include <QtGui/QPushButton> #include <NetworkManagerQt/manager.h> #include <NetworkManagerQt/wirelessdevice.h> #include <activatable.h> #include <remotewirelessobject.h> #include <remotewirelessinterfaceconnection.h> #include <remotewirelessnetwork.h> #include <remotegsminterfaceconnection.h> #include <wirelesssecurityidentifier.h> #include "../libs/service/events.h" static const int m_iconSize = 48; ConnectionItem::ConnectionItem(RemoteActivatable *activatable, ItemType type, QObject *parent) : QObject(parent), m_activatable(activatable), m_hoverEnter(false), m_hasDefaultRoute(false), m_activationState(QLatin1String("unknown")), m_type(type), m_showMoreChecked(false), m_networkCount(0) { if (m_activatable) { RemoteInterfaceConnection *remote = interfaceConnection(); if (remote) 
{ m_hasDefaultRoute = remote->hasDefaultRoute(); connect(remote, SIGNAL(hasDefaultRouteChanged(bool)), SLOT(handleHasDefaultRouteChanged(bool))); connect(remote, SIGNAL(activationStateChanged(Knm::InterfaceConnection::ActivationState, Knm::InterfaceConnection::ActivationState)), SLOT(activationStateChanged(Knm::InterfaceConnection::ActivationState, Knm::InterfaceConnection::ActivationState))); if (remote->activationState() == Knm::InterfaceConnection::Activated) { m_activationState = "activated"; } else if (remote->activationState() == Knm::InterfaceConnection::Activating) { m_activationState = "activating"; } } switch (m_activatable->activatableType()) { case Knm::Activatable::WirelessNetwork: connect(qobject_cast<RemoteWirelessNetwork *>(m_activatable), SIGNAL(strengthChanged(int)), this, SLOT(handlePropertiesChanges())); m_typeString = "wirelessNetwork"; break; case Knm::Activatable::WirelessInterfaceConnection: connect(qobject_cast<RemoteWirelessInterfaceConnection *>(m_activatable), SIGNAL(strengthChanged(int)), this, SLOT(handlePropertiesChanges())); m_typeString = "wireless"; break; case Knm::Activatable::InterfaceConnection: m_typeString = "wired"; break; case Knm::Activatable::VpnInterfaceConnection: m_typeString = "vpn"; break; case Knm::Activatable::GsmInterfaceConnection: connect(qobject_cast<RemoteGsmInterfaceConnection *>(m_activatable), SIGNAL(signalQualityChanged(int)), this, SLOT(handlePropertiesChanges())); connect(qobject_cast<RemoteGsmInterfaceConnection *>(m_activatable), SIGNAL(accessTechnologyChanged(int)), this, SLOT(handlePropertiesChanges())); m_typeString = "gsm"; break; /* TODO: add HiddenWirelessInterfaceConnection and UnconfiguredInterface, or just get rid of them. 
*/ } } else if (type == ConnectionItem::HiddenNetwork) { m_typeString = "wirelessNetwork"; } else if (type == ConnectionItem::ShowMore) { m_typeString = "showMore"; } } QString ConnectionItem::protectedIcon() { if (m_activatable) { bool isShared = false; if (m_activatable) { isShared = m_activatable->isShared(); } RemoteWirelessObject *wobj = 0; if (m_activatable->activatableType() == Knm::Activatable::WirelessNetwork) { RemoteWirelessNetwork *rwic = qobject_cast<RemoteWirelessNetwork *>(m_activatable); if (rwic) { wobj = rwic; } } else if (m_activatable->activatableType() == Knm::Activatable::WirelessInterfaceConnection) { RemoteWirelessInterfaceConnection *rwic = qobject_cast<RemoteWirelessInterfaceConnection *>(m_activatable); if (rwic) { wobj = rwic; } } if (wobj) { Knm::WirelessSecurity::Type best = Knm::WirelessSecurity::best(wobj->interfaceCapabilities(), !isShared, (wobj->operationMode() == NetworkManager::WirelessDevice::Adhoc), wobj->apCapabilities(), wobj->wpaFlags(), wobj->rsnFlags()); return Knm::WirelessSecurity::iconName(best); } } return QString(); } QString ConnectionItem::ssid() { if (m_activatable) { if (m_activatable->activatableType() == Knm::Activatable::WirelessNetwork) { RemoteWirelessNetwork *rwic = qobject_cast<RemoteWirelessNetwork *>(m_activatable); if (rwic) { return rwic->ssid(); } } else if (m_activatable->activatableType() == Knm::Activatable::WirelessInterfaceConnection) { RemoteWirelessInterfaceConnection *rwic = qobject_cast<RemoteWirelessInterfaceConnection *>(m_activatable); if (rwic) { return rwic->ssid(); } } } return ""; } QString ConnectionItem::connectionName() { RemoteInterfaceConnection *remoteconnection = interfaceConnection(); if (remoteconnection) { return remoteconnection->connectionName(); } return ""; } QString ConnectionItem::connectionType() { return m_typeString; } bool ConnectionItem::hidden() { return (m_type == ConnectionItem::HiddenNetwork); } void ConnectionItem::handleHasDefaultRouteChanged(bool has) { 
m_hasDefaultRoute = has; emit itemChanged(); } QString ConnectionItem::connectionUuid() { RemoteInterfaceConnection *remoteconnection = interfaceConnection(); if (remoteconnection) { return remoteconnection->connectionUuid(); } return QString(); } QString ConnectionItem::connectionIcon() { if (!m_activatable) { return QString(); } switch (m_activatable->activatableType()) { case Knm::Activatable::WirelessInterfaceConnection: case Knm::Activatable::WirelessNetwork: return QString("network-wireless-connected-100"); case Knm::Activatable::InterfaceConnection: { RemoteInterfaceConnection *remote = interfaceConnection(); if (remote && remote->activationState() == Knm::InterfaceConnection::Activated) { return QString("network-wired-activated"); } return QString("network-wired"); } } RemoteInterfaceConnection *remote = interfaceConnection(); if (remote) { return interfaceConnection()->iconName(); } return QString(); } void ConnectionItem::disconnect() { RemoteInterfaceConnection *remote = interfaceConnection(); if (remote && (remote->activationState() == Knm::InterfaceConnection::Activating || remote->activationState() == Knm::InterfaceConnection::Activated)) { remote->deactivate(); } } void ConnectionItem::connectNetwork() { if (m_activatable) { RemoteInterfaceConnection *remote = interfaceConnection(); if (remote && (remote->activationState() == Knm::InterfaceConnection::Activating || remote->activationState() == Knm::InterfaceConnection::Activated)) { emit showInterfaceDetails(remote->deviceUni()); } else { QTimer::singleShot(0, m_activatable, SLOT(activate())); } } QTimer::singleShot(0, this, SLOT(notifyNetworkingState())); } void ConnectionItem::hoverEnter() { m_hoverEnter = true; emit itemChanged(); } void ConnectionItem::hoverLeft() { m_hoverEnter = false; emit itemChanged(); } bool ConnectionItem::hover() { return m_hoverEnter; } void ConnectionItem::notifyNetworkingState() { if (!NetworkManager::isNetworkingEnabled()) { // Do it notification 
//KNotification::event(Event::NetworkingDisabled, i18nc("@info:status Notification when the networking subsystem (NetworkManager, etc) is disabled", "Networking system disabled"), QPixmap(), 0, KNotification::CloseOnTimeout, *s_networkManagementComponentData)->sendEvent(); } else if (!NetworkManager::isWirelessEnabled() && m_activatable && m_activatable->activatableType() == Knm::Activatable::WirelessInterfaceConnection) { // Do it notification //KNotification::event(Event::RfOff, i18nc("@info:status Notification for radio kill switch turned off", "Wireless hardware disabled"), KIcon("network-wireless").pixmap(QSize(m_iconSize, m_iconSize)), 0, KNotification::CloseOnTimeout, *s_networkManagementComponentData)->sendEvent(); } } int ConnectionItem::signalStrength() { if (m_activatable) { if (m_activatable->activatableType() == Knm::Activatable::WirelessNetwork) { RemoteWirelessNetwork *rwic = qobject_cast<RemoteWirelessNetwork *>(m_activatable); if (rwic) { return rwic->strength(); } } else if (m_activatable->activatableType() == Knm::Activatable::WirelessInterfaceConnection) { RemoteWirelessInterfaceConnection *rwic = qobject_cast<RemoteWirelessInterfaceConnection *>(m_activatable); if (rwic) { return rwic->strength(); } } } return 0; } void ConnectionItem::handlePropertiesChanges() { emit itemChanged(); } bool ConnectionItem::defaultRoute() { return m_hasDefaultRoute; } int ConnectionItem::signalQuality() { if (m_activatable && m_activatable->activatableType() == Knm::Activatable::GsmInterfaceConnection) { RemoteGsmInterfaceConnection *giface = qobject_cast<RemoteGsmInterfaceConnection *>(m_activatable); if (giface) { return giface->getSignalQuality(); } } return -1; } QString ConnectionItem::accessTechnology() { if (m_activatable && m_activatable->activatableType() == Knm::Activatable::GsmInterfaceConnection) { RemoteGsmInterfaceConnection *giface = qobject_cast<RemoteGsmInterfaceConnection *>(m_activatable); if (giface) { 
ModemManager::ModemInterface::AccessTechnology tech = static_cast<ModemManager::ModemInterface::AccessTechnology>(giface->getAccessTechnology()); if (tech != ModemManager::ModemInterface::UnknownTechnology) { return UiUtils::convertAccessTechnologyToString(tech); } } } return QString(); } bool ConnectionItem::showMoreChecked() { return m_showMoreChecked; }<|fim▁hole|> m_showMoreChecked = show; emit itemChanged(); } QString ConnectionItem::activationState() { return m_activationState; } void ConnectionItem::activationStateChanged(Knm::InterfaceConnection::ActivationState oldState, Knm::InterfaceConnection::ActivationState newState) { Q_UNUSED(oldState); switch (newState) { case Knm::InterfaceConnection::Activated: m_activationState = "activated"; break; case Knm::InterfaceConnection::Unknown: m_activationState = "unknown"; break; case Knm::InterfaceConnection::Activating: m_activationState = "activating"; break; } emit itemChanged(); } RemoteInterfaceConnection *ConnectionItem::interfaceConnection() const { return qobject_cast<RemoteInterfaceConnection *>(m_activatable); } RemoteActivatable *ConnectionItem::activatable() const { return m_activatable; } QString ConnectionItem::deviceUni() { if (m_activatable) return m_activatable->deviceUni(); return QString(); } bool ConnectionItem::isShared() { return m_activatable->isShared(); } bool ConnectionItem::equals(const ConnectionItem *item) { if (!item || !item->activatable()) { return false; } if (!m_activatable) { return false; } if (m_activatable == item->activatable()) { return true; } RemoteInterfaceConnection *a = interfaceConnection(); RemoteInterfaceConnection *b = item->interfaceConnection(); if (a && b && a->connectionUuid() == b->connectionUuid()) { return true; } return false; }<|fim▁end|>
void ConnectionItem::setShowMoreChecked(const bool show) {
<|file_name|>Action_GIST.cpp<|end_file_name|><|fim▁begin|>#include <cmath> #include <cfloat> // DBL_MAX #include "Action_GIST.h" #include "CpptrajStdio.h" #include "Constants.h" #include "DataSet_MatrixFlt.h" #include "DataSet_GridFlt.h" #include "DataSet_GridDbl.h" #include "ProgressBar.h" #include "StringRoutines.h" #include "DistRoutines.h" #ifdef _OPENMP # include <omp.h> #endif const double Action_GIST::maxD_ = DBL_MAX; Action_GIST::Action_GIST() : debug_(0), numthreads_(1), #ifdef CUDA numberAtoms_(0), numberAtomTypes_(0), headAtomType_(0), solvent_(NULL), NBindex_c_(NULL), molecule_c_(NULL), paramsLJ_c_(NULL), max_c_(NULL), min_c_(NULL), result_w_c_(NULL), result_s_c_(NULL), result_O_c_(NULL), result_N_c_(NULL), #endif gridspacing_(0), gridcntr_(0.0), griddim_(0.0), gO_(0), gH_(0), Esw_(0), Eww_(0), dTStrans_(0), dTSorient_(0), dTSsix_(0), neighbor_norm_(0), dipole_(0), order_norm_(0), dipolex_(0), dipoley_(0), dipolez_(0), PME_(0), U_PME_(0), ww_Eij_(0), G_max_(0.0), CurrentParm_(0), datafile_(0), eijfile_(0), infofile_(0), fltFmt_(TextFormat::GDOUBLE), intFmt_(TextFormat::INTEGER), BULK_DENS_(0.0), temperature_(0.0), NeighborCut2_(12.25), // 3.5^2 // system_potential_energy_(0), // solute_potential_energy_(0), MAX_GRID_PT_(0), NSOLVENT_(0), N_ON_GRID_(0), nMolAtoms_(0), NFRAME_(0), max_nwat_(0), doOrder_(false), doEij_(false), skipE_(false), includeIons_(true) {} /** GIST help */ void Action_GIST::Help() const { mprintf("\t[doorder] [doeij] [skipE] [skipS] [refdens <rdval>] [temp <tval>]\n" "\t[noimage] [gridcntr <xval> <yval> <zval>] [excludeions]\n" "\t[griddim <nx> <ny> <nz>] [gridspacn <spaceval>] [neighborcut <ncut>]\n" "\t[prefix <filename prefix>] [ext <grid extension>] [out <output suffix>]\n" "\t[floatfmt {double|scientific|general}] [floatwidth <fw>] [floatprec <fp>]\n" "\t[intwidth <iw>]\n" "\t[info <info suffix>]\n"); # ifdef LIBPME mprintf("\t[nopme|pme %s\n\t %s\n\t %s]\n", EwaldOptions::KeywordsCommon1(), EwaldOptions::KeywordsCommon2(), 
EwaldOptions::KeywordsPME()); # endif mprintf("Perform Grid Inhomogenous Solvation Theory calculation.\n" #ifdef CUDA "The option doeij is not available, when using the CUDA accelerated version,\n" "as this would need way too much memory." #endif ); } /** Init GIST action. */ Action::RetType Action_GIST::Init(ArgList& actionArgs, ActionInit& init, int debugIn) { debug_ = debugIn; # ifdef MPI if (init.TrajComm().Size() > 1) { mprinterr("Error: 'gist' action does not work with > 1 process (%i processes currently).\n", init.TrajComm().Size()); return Action::ERR; } # endif gist_init_.Start(); prefix_ = actionArgs.GetStringKey("prefix"); if (prefix_.empty()) prefix_.assign("gist"); std::string ext = actionArgs.GetStringKey("ext"); if (ext.empty()) ext.assign(".dx"); std::string gistout = actionArgs.GetStringKey("out"); if (gistout.empty()) gistout.assign(prefix_ + "-output.dat"); datafile_ = init.DFL().AddCpptrajFile( gistout, "GIST output" ); if (datafile_ == 0) return Action::ERR; // Info file: if not specified use STDOUT gistout = actionArgs.GetStringKey("info"); if (!gistout.empty()) gistout = prefix_ + "-" + gistout; infofile_ = init.DFL().AddCpptrajFile( gistout, "GIST info", DataFileList::TEXT, true ); if (infofile_ == 0) return Action::ERR; // Grid files DataFile* file_gO = init.DFL().AddDataFile( prefix_ + "-gO" + ext ); DataFile* file_gH = init.DFL().AddDataFile( prefix_ + "-gH" + ext ); DataFile* file_Esw = init.DFL().AddDataFile(prefix_ + "-Esw-dens" + ext); DataFile* file_Eww = init.DFL().AddDataFile(prefix_ + "-Eww-dens" + ext); DataFile* file_dTStrans = init.DFL().AddDataFile(prefix_ + "-dTStrans-dens" + ext); DataFile* file_dTSorient = init.DFL().AddDataFile(prefix_ + "-dTSorient-dens" + ext); DataFile* file_dTSsix = init.DFL().AddDataFile(prefix_ + "-dTSsix-dens" + ext); DataFile* file_neighbor_norm = init.DFL().AddDataFile(prefix_ + "-neighbor-norm" + ext); DataFile* file_dipole = init.DFL().AddDataFile(prefix_ + "-dipole-dens" + ext); DataFile* 
file_order_norm = init.DFL().AddDataFile(prefix_ + "-order-norm" + ext); DataFile* file_dipolex = init.DFL().AddDataFile(prefix_ + "-dipolex-dens" + ext); DataFile* file_dipoley = init.DFL().AddDataFile(prefix_ + "-dipoley-dens" + ext); DataFile* file_dipolez = init.DFL().AddDataFile(prefix_ + "-dipolez-dens" + ext); // Output format keywords std::string floatfmt = actionArgs.GetStringKey("floatfmt"); if (!floatfmt.empty()) { if (floatfmt == "double") fltFmt_.SetFormatType(TextFormat::DOUBLE); else if (floatfmt == "scientific") fltFmt_.SetFormatType(TextFormat::SCIENTIFIC); else if (floatfmt == "general") fltFmt_.SetFormatType(TextFormat::GDOUBLE); else { mprinterr("Error: Unrecognized format type for 'floatfmt': %s\n", floatfmt.c_str()); return Action::ERR; } } fltFmt_.SetFormatWidthPrecision( actionArgs.getKeyInt("floatwidth", 0), actionArgs.getKeyInt("floatprec", -1) ); intFmt_.SetFormatWidth( actionArgs.getKeyInt("intwidth", 0) ); // Other keywords double neighborCut = actionArgs.getKeyDouble("neighborcut", 3.5); NeighborCut2_ = neighborCut * neighborCut; includeIons_ = !actionArgs.hasKey("excludeions"); imageOpt_.InitImaging( !(actionArgs.hasKey("noimage")), actionArgs.hasKey("nonortho") ); doOrder_ = actionArgs.hasKey("doorder"); doEij_ = actionArgs.hasKey("doeij"); #ifdef CUDA if (this->doEij_) { mprinterr("Error: 'doeij' cannot be specified when using CUDA.\n"); return Action::ERR; } #endif skipE_ = actionArgs.hasKey("skipE"); if (skipE_) { if (doEij_) { mprinterr("Error: 'doeij' cannot be specified if 'skipE' is specified.\n"); return Action::ERR; } } // Parse PME options // TODO once PME output is stable, make pme true the default when LIBPME present. 
//# ifdef LIBPME // usePme_ = true; //# else usePme_ = false; //# endif # ifdef CUDA // Disable PME for CUDA usePme_ = false; # endif if (actionArgs.hasKey("pme")) usePme_ = true; else if (actionArgs.hasKey("nopme")) usePme_ = false; // PME and doeij are not compatible if (usePme_ && doEij_) { mprinterr("Error: 'doeij' cannot be used with PME. Specify 'nopme' to use 'doeij'\n"); return Action::ERR; } if (usePme_) { # ifdef LIBPME pmeOpts_.AllowLjPme(false); if (pmeOpts_.GetOptions(EwaldOptions::PME, actionArgs, "GIST")) { mprinterr("Error: Getting PME options for GIST failed.\n"); return Action::ERR; } # else mprinterr("Error: 'pme' with GIST requires compilation with LIBPME.\n"); return Action::ERR; # endif } DataFile* file_energy_pme = 0; DataFile* file_U_energy_pme = 0; if (usePme_) { file_energy_pme = init.DFL().AddDataFile(prefix_ + "-Water-Etot-pme-dens" + ext); file_U_energy_pme = init.DFL().AddDataFile(prefix_ + "-Solute-Etot-pme-dens"+ ext); } this->skipS_ = actionArgs.hasKey("skipS"); if (doEij_) { eijfile_ = init.DFL().AddCpptrajFile(prefix_ + "-Eww_ij.dat", "GIST Eij matrix file"); if (eijfile_ == 0) return Action::ERR; } // Set Bulk Density 55.5M BULK_DENS_ = actionArgs.getKeyDouble("refdens", 0.0334); if ( BULK_DENS_ > (0.0334*1.2) ) mprintf("Warning: water reference density is high, consider using 0.0334 for 1g/cc water density\n"); else if ( BULK_DENS_ < (0.0334*0.8) ) mprintf("Warning: water reference density is low, consider using 0.0334 for 1g/cc water density\n"); temperature_ = actionArgs.getKeyDouble("temp", 300.0); if (temperature_ < 0.0) { mprinterr("Error: Negative temperature specified.\n"); return Action::ERR; } // Grid spacing gridspacing_ = actionArgs.getKeyDouble("gridspacn", 0.50); // Grid center gridcntr_ = Vec3(0.0); if ( actionArgs.hasKey("gridcntr") ) { gridcntr_[0] = actionArgs.getNextDouble(-1); gridcntr_[1] = actionArgs.getNextDouble(-1); gridcntr_[2] = actionArgs.getNextDouble(-1); } else mprintf("Warning: No grid center 
values specified, using default (origin)\n"); // Grid dimensions int nx = 40; int ny = 40; int nz = 40; if ( actionArgs.hasKey("griddim") ) { nx = actionArgs.getNextInteger(-1); ny = actionArgs.getNextInteger(-1); nz = actionArgs.getNextInteger(-1); } else mprintf("Warning: No grid dimension values specified, using default (40,40,40)\n"); griddim_ = Vec3((double)nx, (double)ny, (double)nz); // Data set name std::string dsname = actionArgs.GetStringKey("name"); if (dsname.empty()) dsname = init.DSL().GenerateDefaultName("GIST"); // Set up DataSets. gO_ = (DataSet_3D*)init.DSL().AddSet(DataSet::GRID_FLT, MetaData(dsname, "gO")); gH_ = (DataSet_3D*)init.DSL().AddSet(DataSet::GRID_FLT, MetaData(dsname, "gH")); Esw_ = (DataSet_3D*)init.DSL().AddSet(DataSet::GRID_FLT, MetaData(dsname, "Esw")); Eww_ = (DataSet_3D*)init.DSL().AddSet(DataSet::GRID_FLT, MetaData(dsname, "Eww")); dTStrans_ = (DataSet_3D*)init.DSL().AddSet(DataSet::GRID_FLT, MetaData(dsname, "dTStrans")); dTSorient_ = (DataSet_3D*)init.DSL().AddSet(DataSet::GRID_FLT, MetaData(dsname, "dTSorient")); dTSsix_ = (DataSet_3D*)init.DSL().AddSet(DataSet::GRID_FLT, MetaData(dsname, "dTSsix")); neighbor_norm_ = (DataSet_3D*)init.DSL().AddSet(DataSet::GRID_FLT, MetaData(dsname, "neighbor")); dipole_ = (DataSet_3D*)init.DSL().AddSet(DataSet::GRID_FLT, MetaData(dsname, "dipole")); order_norm_ = (DataSet_3D*)init.DSL().AddSet(DataSet::GRID_DBL, MetaData(dsname, "order")); dipolex_ = (DataSet_3D*)init.DSL().AddSet(DataSet::GRID_DBL, MetaData(dsname, "dipolex")); dipoley_ = (DataSet_3D*)init.DSL().AddSet(DataSet::GRID_DBL, MetaData(dsname, "dipoley")); dipolez_ = (DataSet_3D*)init.DSL().AddSet(DataSet::GRID_DBL, MetaData(dsname, "dipolez")); if (gO_==0 || gH_==0 || Esw_==0 || Eww_==0 || dTStrans_==0 || dTSorient_==0 || dTSsix_==0 || neighbor_norm_==0 || dipole_==0 || order_norm_==0 || dipolex_==0 || dipoley_==0 || dipolez_==0) return Action::ERR; if (usePme_) { PME_ = (DataSet_3D*)init.DSL().AddSet(DataSet::GRID_FLT, 
MetaData(dsname,"PME")); U_PME_ = (DataSet_3D*)init.DSL().AddSet(DataSet::GRID_FLT,MetaData(dsname,"U_PME")); if (PME_ == 0 || U_PME_ == 0) return Action::ERR; } if (doEij_) { ww_Eij_ = (DataSet_MatrixFlt*)init.DSL().AddSet(DataSet::MATRIX_FLT, MetaData(dsname, "Eij")); if (ww_Eij_ == 0) return Action::ERR; } // Allocate DataSets. TODO non-orthogonal grids as well Vec3 v_spacing( gridspacing_ ); gO_->Allocate_N_C_D(nx, ny, nz, gridcntr_, v_spacing); MAX_GRID_PT_ = gO_->Size(); gH_->Allocate_N_C_D(nx, ny, nz, gridcntr_, v_spacing); Esw_->Allocate_N_C_D(nx, ny, nz, gridcntr_, v_spacing); Eww_->Allocate_N_C_D(nx, ny, nz, gridcntr_, v_spacing); dTStrans_->Allocate_N_C_D(nx, ny, nz, gridcntr_, v_spacing); dTSorient_->Allocate_N_C_D(nx, ny, nz, gridcntr_, v_spacing); dTSsix_->Allocate_N_C_D(nx, ny, nz, gridcntr_, v_spacing); neighbor_norm_->Allocate_N_C_D(nx, ny, nz, gridcntr_, v_spacing); dipole_->Allocate_N_C_D(nx, ny, nz, gridcntr_, v_spacing); order_norm_->Allocate_N_C_D(nx, ny, nz, gridcntr_, v_spacing); dipolex_->Allocate_N_C_D(nx, ny, nz, gridcntr_, v_spacing); dipoley_->Allocate_N_C_D(nx, ny, nz, gridcntr_, v_spacing); dipolez_->Allocate_N_C_D(nx, ny, nz, gridcntr_, v_spacing); if (usePme_) { PME_->Allocate_N_C_D(nx,ny,nz,gridcntr_,v_spacing); U_PME_->Allocate_N_C_D(nx,ny,nz,gridcntr_,v_spacing); } if (ww_Eij_ != 0) { if (ww_Eij_->AllocateTriangle( MAX_GRID_PT_ )) { mprinterr("Error: Could not allocate memory for water-water Eij matrix.\n"); return Action::ERR; } } // Add sets to files file_gO->AddDataSet( gO_ ); file_gH->AddDataSet( gH_ ); file_Esw->AddDataSet( Esw_ ); file_Eww->AddDataSet( Eww_ ); file_dTStrans->AddDataSet( dTStrans_ ); file_dTSorient->AddDataSet( dTSorient_ ); file_dTSsix->AddDataSet( dTSsix_ ); file_neighbor_norm->AddDataSet( neighbor_norm_ ); file_dipole->AddDataSet( dipole_ ); file_order_norm->AddDataSet( order_norm_ ); file_dipolex->AddDataSet( dipolex_ ); file_dipoley->AddDataSet( dipoley_ ); file_dipolez->AddDataSet( dipolez_ ); if 
(usePme_) { file_energy_pme->AddDataSet(PME_); file_U_energy_pme->AddDataSet(U_PME_); } // Set up grid params TODO non-orthogonal as well G_max_ = Vec3( (double)nx * gridspacing_ + 1.5, (double)ny * gridspacing_ + 1.5, (double)nz * gridspacing_ + 1.5 ); N_waters_.assign( MAX_GRID_PT_, 0 ); N_solute_atoms_.assign( MAX_GRID_PT_, 0); N_hydrogens_.assign( MAX_GRID_PT_, 0 ); voxel_xyz_.resize( MAX_GRID_PT_ ); // [] = X Y Z voxel_Q_.resize( MAX_GRID_PT_ ); // [] = W4 X4 Y4 Z4 numthreads_ = 1; # ifdef _OPENMP # pragma omp parallel { if (omp_get_thread_num() == 0) numthreads_ = omp_get_num_threads(); } # endif if (!skipE_) { E_UV_VDW_.resize( numthreads_ ); E_UV_Elec_.resize( numthreads_ ); E_VV_VDW_.resize( numthreads_ ); E_VV_Elec_.resize( numthreads_ ); neighbor_.resize( numthreads_ ); for (int thread = 0; thread != numthreads_; thread++) { E_UV_VDW_[thread].assign( MAX_GRID_PT_, 0 ); E_UV_Elec_[thread].assign( MAX_GRID_PT_, 0 ); E_VV_VDW_[thread].assign( MAX_GRID_PT_, 0 ); E_VV_Elec_[thread].assign( MAX_GRID_PT_, 0 ); neighbor_[thread].assign( MAX_GRID_PT_, 0 ); } if (usePme_) { E_pme_.assign( MAX_GRID_PT_, 0 ); U_E_pme_.assign( MAX_GRID_PT_, 0 ); //E_pme_.resize( numthreads_); //U_E_pme_.resize(numthreads_); //for (int thread = 0; thread != numthreads_; thread++) { // E_pme_[thread].assign( MAX_GRID_PT_,0); // U_E_pme_[thread].assign( MAX_GRID_PT_,0); //} } # ifdef _OPENMP if (doEij_) { // Since allocating a separate matrix for every thread will consume a lot // of memory and since the Eij matrices tend to be sparse since solute is // often present, each thread will record any interaction energies they // calculate separately and add to the Eij matrix afterwards to avoid // memory clashes. Probably not ideal if the bulk of the grid is water however. 
EIJ_V1_.resize( numthreads_ ); EIJ_V2_.resize( numthreads_ ); EIJ_EN_.resize( numthreads_ ); } # endif #ifdef CUDA if (this->skipE_ && this->doOrder_) { mprintf("When the keyword \"skipE\" is supplied, \"doorder\" cannot be" " chosen, as both calculations are done on the GPU at the same" " time.\nIgnoring \"doorder!\"\n"); } #endif } //Box gbox; //gbox.SetBetaLengths( 90.0, (double)nx * gridspacing_, // (double)ny * gridspacing_, // (double)nz * gridspacing_ ); //grid_.Setup_O_Box( nx, ny, nz, gO_->GridOrigin(), gbox ); //grid_.Setup_O_D( nx, ny, nz, gO_->GridOrigin(), v_spacing ); mprintf(" GIST:\n"); mprintf("\tOutput prefix= '%s', grid output extension= '%s'\n", prefix_.c_str(), ext.c_str()); mprintf("\tOutput float format string= '%s', output integer format string= '%s'\n", fltFmt_.fmt(), intFmt_.fmt()); mprintf("\tGIST info written to '%s'\n", infofile_->Filename().full()); mprintf("\tName for data sets: %s\n", dsname.c_str()); if (doOrder_) mprintf("\tDoing order calculation.\n"); else mprintf("\tSkipping order calculation.\n"); if (skipE_) mprintf("\tSkipping energy calculation.\n"); else { mprintf("\tPerforming energy calculation.\n"); if (numthreads_ > 1) mprintf("\tParallelizing energy calculation with %i threads.\n", numthreads_); if (usePme_) { mprintf("\tUsing PME.\n"); pmeOpts_.PrintOptions(); } } mprintf("\tCut off for determining solvent O-O neighbors is %f Ang\n", sqrt(NeighborCut2_)); if (includeIons_) mprintf("\tIons will be included in the solute region.\n"); else mprintf("\tIons will be excluded from the calculation.\n"); if (doEij_) { mprintf("\tComputing and printing water-water Eij matrix, output to '%s'\n", eijfile_->Filename().full()); mprintf("\tWater-water Eij matrix size is %s\n", ByteString(ww_Eij_->MemUsageInBytes(), BYTE_DECIMAL).c_str()); } else mprintf("\tSkipping water-water Eij matrix.\n"); mprintf("\tWater reference density: %6.4f molecules/Ang^3\n", BULK_DENS_); mprintf("\tSimulation temperature: %6.4f K\n", temperature_); if 
(imageOpt_.UseImage()) mprintf("\tDistances will be imaged.\n"); else mprintf("\tDistances will not be imaged.\n"); if (imageOpt_.ForceNonOrtho()) mprintf("\tWill use non-orthogonal imaging routines for all cell types.\n"); gO_->GridInfo(); mprintf("\tNumber of voxels: %u, voxel volume: %f Ang^3\n", MAX_GRID_PT_, gO_->Bin().VoxelVolume()); mprintf("#Please cite these papers if you use GIST results in a publication:\n" "# Steven Ramsey, Crystal Nguyen, Romelia Salomon-Ferrer, Ross C. Walker, Michael K. Gilson, and Tom Kurtzman. J. Comp. Chem. 37 (21) 2016\n" "# Crystal Nguyen, Michael K. Gilson, and Tom Young, arXiv:1108.4876v1 (2011)\n" "# Crystal N. Nguyen, Tom Kurtzman Young, and Michael K. Gilson,\n" "# J. Chem. Phys. 137, 044101 (2012)\n" "# Lazaridis, J. Phys. Chem. B 102, 3531–3541 (1998)\n" #ifdef LIBPME "#When using the PME-enhanced version of GIST, please cite:\n" "# Lieyang Chen, Anthony Cruz, Daniel R. Roe, Andy C. Simmonett, Lauren Wickstrom, Nanjie Deng, Tom Kurtzman. JCTC (2021) DOI: 10.1021/acs.jctc.0c01185\n" #endif #ifdef CUDA "#When using the GPU parallelized version of GIST, please cite:\n" "# Johannes Kraml, Anna S. Kamenik, Franz Waibl, Michael Schauperl, Klaus R. Liedl, JCTC (2019)\n" #endif ); # ifdef GIST_USE_NONORTHO_DIST2 mprintf("DEBUG: Using regular non-orthogonal distance routine.\n"); # endif gist_init_.Stop(); return Action::OK; } /// \return True if given floating point values are not equal within a tolerance static inline bool NotEqual(double v1, double v2) { return ( fabs(v1 - v2) > Constants::SMALL ); } /** Set up GIST action. 
*/ Action::RetType Action_GIST::Setup(ActionSetup& setup) { gist_setup_.Start(); CurrentParm_ = setup.TopAddress(); // We need box info if (!setup.CoordInfo().TrajBox().HasBox()) { mprinterr("Error: Must have explicit solvent with periodic boundaries!"); return Action::ERR; } imageOpt_.SetupImaging( setup.CoordInfo().TrajBox().HasBox() ); #ifdef CUDA this->numberAtoms_ = setup.Top().Natom(); this->solvent_ = new bool[this->numberAtoms_]; #endif // Initialize PME if (usePme_) { # ifdef LIBPME if (gistPme_.Init( setup.CoordInfo().TrajBox(), pmeOpts_, debug_ )) { mprinterr("Error: GIST PME init failed.\n"); return Action::ERR; } // By default all atoms are selected for GIST PME to match up with atom_voxel_ array. if (gistPme_.Setup_PME_GIST( setup.Top(), numthreads_, NeighborCut2_ )) { mprinterr("Error: GIST PME setup/array allocation failed.\n"); return Action::ERR; } # else mprinterr("Error: GIST PME requires compilation with LIBPME.\n"); return Action::ERR; # endif } // Get molecule number for each solvent molecule //mol_nums_.clear(); O_idxs_.clear(); A_idxs_.clear(); atom_voxel_.clear(); atomIsSolute_.clear(); atomIsSolventO_.clear(); U_idxs_.clear(); // NOTE: these are just guesses O_idxs_.reserve( setup.Top().Nsolvent() ); A_idxs_.reserve( setup.Top().Natom() ); // atom_voxel_ and atomIsSolute will be indexed by atom # atom_voxel_.assign( setup.Top().Natom(), OFF_GRID_ ); atomIsSolute_.assign(setup.Top().Natom(), false); atomIsSolventO_.assign(setup.Top().Natom(), false); U_idxs_.reserve(setup.Top().Natom()-setup.Top().Nsolvent()*nMolAtoms_); unsigned int midx = 0; unsigned int NsolventAtoms = 0; unsigned int NsoluteAtoms = 0; bool isFirstSolvent = true; for (Topology::mol_iterator mol = setup.Top().MolStart(); mol != setup.Top().MolEnd(); ++mol, ++midx) { if (mol->IsSolvent()) { // NOTE: We assume the oxygen is the first atom! 
int o_idx = mol->MolUnit().Front(); #ifdef CUDA this->headAtomType_ = setup.Top()[o_idx].TypeIndex(); #endif // Check that molecule has correct # of atoms unsigned int molNumAtoms = (unsigned int)mol->NumAtoms(); if (nMolAtoms_ == 0) { nMolAtoms_ = molNumAtoms; mprintf("\tEach solvent molecule has %u atoms\n", nMolAtoms_); } else if (molNumAtoms != nMolAtoms_) { mprinterr("Error: All solvent molecules must have same # atoms.\n" "Error: Molecule '%s' has %u atoms, expected %u.\n", setup.Top().TruncResNameNum( setup.Top()[o_idx].ResNum() ).c_str(), molNumAtoms, nMolAtoms_); return Action::ERR; } //mol_nums_.push_back( midx ); // TODO needed? // Check that first atom is actually Oxygen if (setup.Top()[o_idx].Element() != Atom::OXYGEN) { mprinterr("Error: Molecule '%s' is not water or does not have oxygen atom.\n", setup.Top().TruncResNameNum( setup.Top()[o_idx].ResNum() ).c_str()); return Action::ERR; } O_idxs_.push_back( o_idx ); atomIsSolventO_[o_idx] = true; // Check that the next two atoms are Hydrogens if (setup.Top()[o_idx+1].Element() != Atom::HYDROGEN || setup.Top()[o_idx+2].Element() != Atom::HYDROGEN) { mprinterr("Error: Molecule '%s' does not have hydrogen atoms.\n", setup.Top().TruncResNameNum( setup.Top()[o_idx].ResNum() ).c_str()); return Action::ERR; } // Save all atom indices for energy calc, including extra points for (unsigned int IDX = 0; IDX != nMolAtoms_; IDX++) { A_idxs_.push_back( o_idx + IDX ); atomIsSolute_[A_idxs_.back()] = false; // The identity of the atom is water atom_voxel_[A_idxs_.back()] = OFF_GRID_; #ifdef CUDA this->molecule_.push_back( setup.Top()[o_idx + IDX ].MolNum() ); this->charges_.push_back( setup.Top()[o_idx + IDX ].Charge() ); this->atomTypes_.push_back( setup.Top()[o_idx + IDX ].TypeIndex() ); this->solvent_[ o_idx + IDX ] = true; #endif } NsolventAtoms += nMolAtoms_; // If first solvent molecule, save charges. If not, check that charges match. 
if (isFirstSolvent) { double q_sum = 0.0; Q_.reserve( nMolAtoms_ ); for (unsigned int IDX = 0; IDX != nMolAtoms_; IDX++) { Q_.push_back( setup.Top()[o_idx+IDX].Charge() ); q_sum += Q_.back(); //mprintf("DEBUG: Q= %20.10E q_sum= %20.10E\n", setup.Top()[o_idx+IDX].Charge(), q_sum); } // Sanity checks. // NOTE: We know indices 1 and 2 are hydrogens (with 0 being oxygen); this is checked above. if (NotEqual(Q_[1], Q_[2])) mprintf("Warning: Charges on water hydrogens do not match (%g, %g).\n", Q_[1], Q_[2]); if (fabs( q_sum ) > 0.0) mprintf("Warning: Charges on water do not sum to 0 (%g)\n", q_sum); //mprintf("DEBUG: Water charges: O=%g H1=%g H2=%g\n", q_O_, q_H1_, q_H2_); } else { for (unsigned int IDX = 0; IDX < nMolAtoms_; IDX++) { double q_atom = setup.Top()[o_idx+IDX].Charge(); if (NotEqual(Q_[IDX], q_atom)) { mprintf("Warning: Charge on water '%s' (%g) does not match first water (%g).\n", setup.Top().TruncResAtomName( o_idx+IDX ).c_str(), q_atom, Q_[IDX]); } } } isFirstSolvent = false; } else { // This is a non-solvent molecule. Save atom indices. May want to exclude // if only 1 atom (probably ion). 
if (mol->NumAtoms() > 1 || includeIons_) { for (Unit::const_iterator seg = mol->MolUnit().segBegin(); seg != mol->MolUnit().segEnd(); ++seg) { for (int u_idx = seg->Begin(); u_idx != seg->End(); ++u_idx) { A_idxs_.push_back( u_idx ); atomIsSolute_[A_idxs_.back()] = true; // the identity of the atom is solute NsoluteAtoms++; U_idxs_.push_back( u_idx ); // store the solute atom index for locating voxel index #ifdef CUDA this->molecule_.push_back( setup.Top()[ u_idx ].MolNum() ); this->charges_.push_back( setup.Top()[ u_idx ].Charge() ); this->atomTypes_.push_back( setup.Top()[ u_idx ].TypeIndex() ); this->solvent_[ u_idx ] = false; #endif } } } } } NSOLVENT_ = O_idxs_.size(); mprintf("\t%zu solvent molecules, %u solvent atoms, %u solute atoms (%zu total).\n", O_idxs_.size(), NsolventAtoms, NsoluteAtoms, A_idxs_.size()); if (doOrder_ && NSOLVENT_ < 5) { mprintf("Warning: Less than 5 solvent molecules. Cannot perform order calculation.\n"); doOrder_ = false; } // Allocate space for saving indices of water atoms that are on the grid // Estimate how many solvent molecules can possibly fit onto the grid. // Add some extra voxels as a buffer. 
double max_voxels = (double)MAX_GRID_PT_ + (1.10 * (double)MAX_GRID_PT_); double totalVolume = max_voxels * gO_->Bin().VoxelVolume(); double max_mols = totalVolume * BULK_DENS_; //mprintf("\tEstimating grid can fit a max of %.0f solvent molecules (w/ 10%% buffer).\n", // max_mols); OnGrid_idxs_.reserve( (size_t)max_mols * (size_t)nMolAtoms_ ); N_ON_GRID_ = 0; if (!skipE_) { if (imageOpt_.ImagingEnabled()) mprintf("\tImaging enabled for energy distance calculations.\n"); else mprintf("\tNo imaging will be performed for energy distance calculations.\n"); } #ifdef CUDA NonbondParmType nb = setup.Top().Nonbond(); this->NBIndex_ = nb.NBindex(); this->numberAtomTypes_ = nb.Ntypes(); for (unsigned int i = 0; i < nb.NBarray().size(); ++i) { this->lJParamsA_.push_back( (float) nb.NBarray().at(i).A() ); this->lJParamsB_.push_back( (float) nb.NBarray().at(i).B() ); } try { allocateCuda(((void**)&this->NBindex_c_), this->NBIndex_.size() * sizeof(int)); allocateCuda((void**)&this->max_c_, 3 * sizeof(float)); allocateCuda((void**)&this->min_c_, 3 * sizeof(float)); allocateCuda((void**)&this->result_w_c_, this->numberAtoms_ * sizeof(float)); allocateCuda((void**)&this->result_s_c_, this->numberAtoms_ * sizeof(float)); allocateCuda((void**)&this->result_O_c_, this->numberAtoms_ * 4 * sizeof(int)); allocateCuda((void**)&this->result_N_c_, this->numberAtoms_ * sizeof(int)); } catch (CudaException &e) { mprinterr("Error: Could not allocate memory on GPU!\n"); this->freeGPUMemory(); return Action::ERR; } try { this->copyToGPU(); } catch (CudaException &e) { return Action::ERR; } #endif gist_setup_.Stop(); return Action::OK; } const Vec3 Action_GIST::x_lab_ = Vec3(1.0, 0.0, 0.0); const Vec3 Action_GIST::y_lab_ = Vec3(0.0, 1.0, 0.0); const Vec3 Action_GIST::z_lab_ = Vec3(0.0, 0.0, 1.0); const double Action_GIST::QFAC_ = Constants::ELECTOAMBER * Constants::ELECTOAMBER; const int Action_GIST::OFF_GRID_ = -1; /* Calculate the charge-charge, vdw interaction using pme, frame by frame * */ 
void Action_GIST::NonbondEnergy_pme(Frame const& frameIn) {
# ifdef LIBPME
  // Two energy terms for the whole system
  //double ene_pme_all = 0.0;
  //double ene_vdw_all = 0.0;
  // Pointer into E_pme_, which holds the voxel-wise PME energy for water.
  double* E_pme_grid = &E_pme_[0];
  // Pointer into U_E_pme_, which holds the voxel-wise PME energy for solute.
  double* U_E_pme_grid = &U_E_pme_[0];

  // Do the GIST-specific PME nonbond calculation. Fills per-atom energies
  // inside gistPme_ and accumulates the voxel-wise UV/VV VDW/Elec arrays and
  // neighbor counts passed by reference.
  gistPme_.CalcNonbondEnergy_GIST(frameIn, atom_voxel_, atomIsSolute_, atomIsSolventO_,
                                  E_UV_VDW_, E_UV_Elec_, E_VV_VDW_, E_VV_Elec_,
                                  neighbor_);
  // system_potential_energy_ += ene_pme_all + ene_vdw_all;

  // Water energy on the GIST grid: scatter per-atom energies into voxels.
  double pme_sum = 0.0; // running total over on-grid solvent atoms (diagnostic only)
  for (unsigned int gidx=0; gidx < N_ON_GRID_; gidx++ )
  {
    int a = OnGrid_idxs_[gidx];   // index of the atom of on-grid solvent
    int a_voxel = atom_voxel_[a]; // index of the voxel
    double nonbond_energy = gistPme_.E_of_atom(a);
    pme_sum += nonbond_energy;
    E_pme_grid[a_voxel] += nonbond_energy;
  }

  // Solute energy on the GIST grid.
  double solute_on_grid_sum = 0.0; // sum of potential energy on solute atoms that are on the grid (diagnostic only)
  for (unsigned int uidx=0; uidx < U_onGrid_idxs_.size(); uidx++ )
  {
    int u = U_onGrid_idxs_[uidx]; // index of the solute atom on the grid
    int u_voxel = atom_voxel_[u];
    double u_nonbond_energy = gistPme_.E_of_atom(u);
    solute_on_grid_sum += u_nonbond_energy;
    U_E_pme_grid[u_voxel] += u_nonbond_energy;
  }

/*
  // Total solute energy
  double solute_sum = 0.0;
  for (unsigned int uidx=0; uidx < U_idxs_.size(); uidx++)
  {
    int u = U_idxs_[uidx];
    double u_nonbond_energy = gistPme_.E_of_atom(u);
    solute_sum += u_nonbond_energy;
    solute_potential_energy_ += u_nonbond_energy; // used to calculated the ensemble energy for all solute, will print out in terminal
  }
*/
  //mprintf("The total potential energy on water atoms: %f \n", pme_sum);
# else /*LIBPME */
  // PME support was not compiled in; nothing can be computed.
  mprinterr("Error: Compiled without LIBPME\n");
  return;
# endif /*LIBPME */
}
/** Non-bonded energy calc.
*/
void Action_GIST::Ecalc(double rij2, double q1, double q2, NonbondType const& LJ,
                        double& Evdw, double& Eelec)
{
  double rij = sqrt(rij2);
  // VDW: Lennard-Jones 12-6 evaluated from the squared distance.
  double r2    = 1.0 / rij2;
  double r6    = r2 * r2 * r2;
  double r12   = r6 * r6;
  double f12   = LJ.A() * r12;  // A/r^12
  double f6    = LJ.B() * r6;   // B/r^6
  Evdw = f12 - f6;              // (A/r^12)-(B/r^6)
  // Coulomb; QFAC_ converts charge units (electron -> Amber) squared.
  double qiqj = QFAC_ * q1 * q2;
  Eelec = qiqj / rij;
}

/** Calculate the energy between all solute/solvent atoms and solvent atoms
  * on the grid. This is done after the initial GIST calculations
  * so that all waters have voxels assigned in atom_voxel_.
  * NOTE: This routine modifies the coordinates in OnGrid_XYZ_ when the cell
  *       has nonorthogonal shape in order to properly satisfy the minimum
  *       image convention, so any calculations that rely on the on grid
  *       coordinates (like Order()) must be done *BEFORE* this routine.
  */
void Action_GIST::NonbondEnergy(Frame const& frameIn, Topology const& topIn)
{
  // Set up imaging info.
  if (imageOpt_.ImagingType() == ImageOption::NONORTHO) {
    // Wrap on-grid water coords back to primary cell TODO openmp
    double* ongrid_xyz = &OnGrid_XYZ_[0];
    int maxXYZ = (int)OnGrid_XYZ_.size();
    int idx;
# ifdef _OPENMP
# pragma omp parallel private(idx)
    {
# pragma omp for
# endif
    for (idx = 0; idx < maxXYZ; idx += 3)
    {
      double* XYZ = ongrid_xyz + idx;
      // Convert to frac coords
      frameIn.BoxCrd().FracCell().TimesVec( XYZ, XYZ );
      // Wrap to primary cell
      XYZ[0] = XYZ[0] - floor(XYZ[0]);
      XYZ[1] = XYZ[1] - floor(XYZ[1]);
      XYZ[2] = XYZ[2] - floor(XYZ[2]);
      // Convert back to Cartesian
      frameIn.BoxCrd().UnitCell().TransposeMult( XYZ, XYZ );
    }
# ifdef _OPENMP
    }
# endif
  }
  // mprintf("DEBUG: NSolventAtoms= %zu NwatAtomsOnGrid= %u\n", O_idxs_.size()*nMolAtoms_, N_ON_GRID_);

  // Accumulator pointers; these alias the thread-0 arrays here and are
  // re-bound to per-thread arrays inside the OpenMP parallel region below.
  double* E_UV_VDW  = &(E_UV_VDW_[0][0]);
  double* E_UV_Elec = &(E_UV_Elec_[0][0]);
  double* E_VV_VDW  = &(E_VV_VDW_[0][0]);
  double* E_VV_Elec = &(E_VV_Elec_[0][0]);
  float* Neighbor = &(neighbor_[0][0]);
  double Evdw, Eelec;
  int aidx;
  int maxAidx = (int)A_idxs_.size();
// Loop over all solute + solvent atoms # ifdef _OPENMP int mythread; Iarray* eij_v1 = 0; Iarray* eij_v2 = 0; Farray* eij_en = 0; # pragma omp parallel private(aidx, mythread, E_UV_VDW, E_UV_Elec, E_VV_VDW, E_VV_Elec, Neighbor, Evdw, Eelec, eij_v1, eij_v2, eij_en) { mythread = omp_get_thread_num(); E_UV_VDW = &(E_UV_VDW_[mythread][0]); E_UV_Elec = &(E_UV_Elec_[mythread][0]); E_VV_VDW = &(E_VV_VDW_[mythread][0]); E_VV_Elec = &(E_VV_Elec_[mythread][0]); Neighbor = (&neighbor_[mythread][0]); if (doEij_) { eij_v1 = &(EIJ_V1_[mythread]); eij_v2 = &(EIJ_V2_[mythread]); eij_en = &(EIJ_EN_[mythread]); eij_v1->clear(); eij_v2->clear(); eij_en->clear(); } # pragma omp for # endif for (aidx = 0; aidx < maxAidx; aidx++) { int a1 = A_idxs_[aidx]; // Index of atom1 int a1_voxel = atom_voxel_[a1]; // Voxel of atom1 int a1_mol = topIn[ a1 ].MolNum(); // Molecule # of atom 1 Vec3 A1_XYZ( frameIn.XYZ( a1 ) ); // Coord of atom1 double qA1 = topIn[ a1 ].Charge(); // Charge of atom1 bool a1IsO = atomIsSolventO_[a1]; std::vector<Vec3> vImages; if (imageOpt_.ImagingType() == ImageOption::NONORTHO) { // Convert to frac coords Vec3 vFrac = frameIn.BoxCrd().FracCell() * A1_XYZ; // Wrap to primary unit cell vFrac[0] = vFrac[0] - floor(vFrac[0]); vFrac[1] = vFrac[1] - floor(vFrac[1]); vFrac[2] = vFrac[2] - floor(vFrac[2]); // Calculate all images of this atom vImages.reserve(27); for (int ix = -1; ix != 2; ix++) for (int iy = -1; iy != 2; iy++) for (int iz = -1; iz != 2; iz++) // Convert image back to Cartesian vImages.push_back( frameIn.BoxCrd().UnitCell().TransposeMult( vFrac + Vec3(ix,iy,iz) ) ); } // Loop over all solvent atoms on the grid for (unsigned int gidx = 0; gidx < N_ON_GRID_; gidx++) { int a2 = OnGrid_idxs_[gidx]; // Index of on-grid solvent int a2_mol = topIn[ a2 ].MolNum(); // Molecule # of on-grid solvent if (a1_mol != a2_mol) { int a2_voxel = atom_voxel_[a2]; // Voxel of on-grid solvent const double* A2_XYZ = (&OnGrid_XYZ_[0])+gidx*3; // Coord of on-grid solvent if 
 (atomIsSolute_[a1]) {
          // Solute to on-grid solvent energy
          // Calculate distance
          //gist_nonbond_dist_.Start();
          double rij2;
          if (imageOpt_.ImagingType() == ImageOption::NONORTHO) {
# ifdef GIST_USE_NONORTHO_DIST2
            rij2 = DIST2_ImageNonOrtho(A1_XYZ, A2_XYZ, frameIn.BoxCrd().UnitCell(), frameIn.BoxCrd().FracCell());
# else
            // Minimum-image distance: closest of the 27 precomputed images.
            rij2 = maxD_;
            for (std::vector<Vec3>::const_iterator vCart = vImages.begin(); vCart != vImages.end(); ++vCart)
            {
              double x = (*vCart)[0] - A2_XYZ[0];
              double y = (*vCart)[1] - A2_XYZ[1];
              double z = (*vCart)[2] - A2_XYZ[2];
              rij2 = std::min(rij2, x*x + y*y + z*z);
            }
# endif
          } else if (imageOpt_.ImagingType() == ImageOption::ORTHO)
            rij2 = DIST2_ImageOrtho( A1_XYZ, A2_XYZ, frameIn.BoxCrd() );
          else
            rij2 = DIST2_NoImage( A1_XYZ, A2_XYZ );
          //gist_nonbond_dist_.Stop();
          //gist_nonbond_UV_.Start();
          // Calculate energy; accumulate into the on-grid solvent's voxel.
          Ecalc( rij2, qA1, topIn[ a2 ].Charge(), topIn.GetLJparam(a1, a2), Evdw, Eelec );
          E_UV_VDW[a2_voxel]  += Evdw;
          E_UV_Elec[a2_voxel] += Eelec;
          //gist_nonbond_UV_.Stop();
        } else {
          // Off-grid/on-grid solvent to on-grid solvent energy
          // Only do the energy calculation if not previously done or atom1 not on grid
          if (a2 != a1 && (a2 > a1 || a1_voxel == OFF_GRID_))
          {
            // Calculate distance
            //gist_nonbond_dist_.Start();
            double rij2;
            if (imageOpt_.ImagingType() == ImageOption::NONORTHO) {
# ifdef GIST_USE_NONORTHO_DIST2
              rij2 = DIST2_ImageNonOrtho(A1_XYZ, A2_XYZ, frameIn.BoxCrd().UnitCell(), frameIn.BoxCrd().FracCell());
# else
              // Minimum-image distance: closest of the 27 precomputed images.
              rij2 = maxD_;
              for (std::vector<Vec3>::const_iterator vCart = vImages.begin(); vCart != vImages.end(); ++vCart)
              {
                double x = (*vCart)[0] - A2_XYZ[0];
                double y = (*vCart)[1] - A2_XYZ[1];
                double z = (*vCart)[2] - A2_XYZ[2];
                rij2 = std::min(rij2, x*x + y*y + z*z);
              }
# endif
            } else if (imageOpt_.ImagingType() == ImageOption::ORTHO)
              rij2 = DIST2_ImageOrtho( A1_XYZ, A2_XYZ, frameIn.BoxCrd() );
            else
              rij2 = DIST2_NoImage( A1_XYZ, A2_XYZ );
            //gist_nonbond_dist_.Stop();
            //gist_nonbond_VV_.Start();
            // Calculate energy
            Ecalc( rij2, qA1, topIn[ a2
 ].Charge(), topIn.GetLJparam(a1, a2), Evdw, Eelec );
            //mprintf("DEBUG1: v1= %i v2= %i EVV %i %i Vdw= %f Elec= %f\n", a2_voxel, a1_voxel, a2, a1, Evdw, Eelec);
            E_VV_VDW[a2_voxel]  += Evdw;
            E_VV_Elec[a2_voxel] += Eelec;
            // Store water neighbor using only O-O distance
            bool is_O_O = (a1IsO && atomIsSolventO_[a2]);
            if (is_O_O && rij2 < NeighborCut2_)
              Neighbor[a2_voxel] += 1.0;
            // If water atom1 was also on the grid update its energy as well.
            if ( a1_voxel != OFF_GRID_ )
            {
              E_VV_VDW[a1_voxel]  += Evdw;
              E_VV_Elec[a1_voxel] += Eelec;
              if (is_O_O && rij2 < NeighborCut2_)
                Neighbor[a1_voxel] += 1.0;
              if (doEij_) {
                if (a1_voxel != a2_voxel) {
# ifdef _OPENMP
                  // Record in per-thread lists; merged into ww_Eij_ below.
                  eij_v1->push_back( a1_voxel );
                  eij_v2->push_back( a2_voxel );
                  eij_en->push_back( Evdw + Eelec );
# else
                  ww_Eij_->UpdateElement(a1_voxel, a2_voxel, Evdw + Eelec);
# endif
                }
              }
            }
            //gist_nonbond_VV_.Stop();
          }
        }
      } // END a1 and a2 not in same molecule
    } // End loop over all solvent atoms on grid
  } // End loop over all solvent + solute atoms
# ifdef _OPENMP
  } // END pragma omp parallel
  // Merge per-thread Eij lists into the water-water interaction matrix.
  if (doEij_) {
    // Add any Eijs to matrix
    for (unsigned int thread = 0; thread != EIJ_V1_.size(); thread++)
      for (unsigned int idx = 0; idx != EIJ_V1_[thread].size(); idx++)
        ww_Eij_->UpdateElement(EIJ_V1_[thread][idx], EIJ_V2_[thread][idx], EIJ_EN_[thread][idx]);
  }
# endif
}

/** GIST order calculation. */
void Action_GIST::Order(Frame const& frameIn) {
  // Loop over all solvent molecules that are on the grid
  for (unsigned int gidx = 0; gidx < N_ON_GRID_; gidx += nMolAtoms_)
  {
    int oidx1 = OnGrid_idxs_[gidx];
    int voxel1 = atom_voxel_[oidx1];
    Vec3 XYZ1( (&OnGrid_XYZ_[0])+gidx*3 );
    // Find coordinates for 4 closest neighbors to this water (on or off grid).
    // TODO set up overall grid in DoAction.
    // TODO initialize WAT?
    Vec3 WAT[4];
    // d1..d4 hold the squared distances to the 4 nearest neighbor oxygens,
    // kept in ascending order by the insertion cascade below.
    double d1 = maxD_;
    double d2 = maxD_;
    double d3 = maxD_;
    double d4 = maxD_;
    for (unsigned int sidx2 = 0; sidx2 < NSOLVENT_; sidx2++)
    {
      int oidx2 = O_idxs_[sidx2];
      if (oidx2 != oidx1)
      {
        const double* XYZ2 = frameIn.XYZ( oidx2 );
        double dist2 = DIST2_NoImage( XYZ1.Dptr(), XYZ2 );
        // Insertion sort into the fixed-size 4-element neighbor list.
        if        (dist2 < d1) {
          d4 = d3; d3 = d2; d2 = d1; d1 = dist2;
          WAT[3] = WAT[2]; WAT[2] = WAT[1]; WAT[1] = WAT[0]; WAT[0] = XYZ2;
        } else if (dist2 < d2) {
          d4 = d3; d3 = d2; d2 = dist2;
          WAT[3] = WAT[2]; WAT[2] = WAT[1]; WAT[1] = XYZ2;
        } else if (dist2 < d3) {
          d4 = d3; d3 = dist2;
          WAT[3] = WAT[2]; WAT[2] = XYZ2;
        } else if (dist2 < d4) {
          d4 = dist2;
          WAT[3] = XYZ2;
        }
      }
    }
    // Compute the tetrahedral order parameter from the 6 neighbor-pair angles.
    double sum = 0.0;
    for (int mol1 = 0; mol1 < 3; mol1++)
    {
      for (int mol2 = mol1 + 1; mol2 < 4; mol2++)
      {
        Vec3 v1 = WAT[mol1] - XYZ1;
        Vec3 v2 = WAT[mol2] - XYZ1;
        double r1 = v1.Magnitude2();
        double r2 = v2.Magnitude2();
        double cos = (v1* v2) / sqrt(r1 * r2);
        sum += (cos + 1.0/3)*(cos + 1.0/3);
      }
    }
    order_norm_->UpdateVoxel(voxel1, (1.0 - (3.0/8)*sum));
    //mprintf("DBG: gidx= %u oidx1=%i voxel1= %i XYZ1={%g, %g, %g} sum= %g\n", gidx, oidx1, voxel1, XYZ1[0], XYZ1[1], XYZ1[2], sum);
  } // END loop over all solvent molecules
}

/** GIST action */
Action::RetType Action_GIST::DoAction(int frameNum, ActionFrame& frm) {
  gist_action_.Start();
  NFRAME_++;
  // TODO only !skipE?
  // Reset per-frame on-grid bookkeeping.
  N_ON_GRID_ = 0;
  OnGrid_idxs_.clear();
  OnGrid_XYZ_.clear();
  // Determine imaging type
# ifdef DEBUG_GIST
  //mprintf("DEBUG: Is_X_Aligned_Ortho() = %i Is_X_Aligned() = %i\n", (int)frm.Frm().BoxCrd().Is_X_Aligned_Ortho(), (int)frm.Frm().BoxCrd().Is_X_Aligned());
  frm.Frm().BoxCrd().UnitCell().Print("Ucell");
  frm.Frm().BoxCrd().FracCell().Print("Frac");
# endif
  if (imageOpt_.ImagingEnabled())
    imageOpt_.SetImageType( frm.Frm().BoxCrd().Is_X_Aligned_Ortho() );
# ifdef DEBUG_GIST
  switch (imageOpt_.ImagingType()) {
    case ImageOption::NO_IMAGE : mprintf("DEBUG: No Image.\n"); break;
    case ImageOption::ORTHO    : mprintf("DEBUG: Orthogonal image.\n"); break;
    case ImageOption::NONORTHO : mprintf("DEBUG: Nonorthogonal image.\n"); break;
  }
# endif
  // CUDA necessary information
  size_t bin_i, bin_j, bin_k;
  Vec3 const& Origin = gO_->Bin().GridOrigin();
  // Loop over each solvent molecule
  for (unsigned int sidx = 0; sidx < NSOLVENT_; sidx++)
  {
    gist_grid_.Start();
    int oidx = O_idxs_[sidx];
    // Mark all atoms of this solvent molecule off-grid until successfully binned.
    for (unsigned int IDX = 0; IDX != nMolAtoms_; IDX++)
      atom_voxel_[oidx+IDX] = OFF_GRID_;
    const double* O_XYZ = frm.Frm().XYZ( oidx );
    // Get vector of water oxygen to grid origin.
    Vec3 W_G( O_XYZ[0] - Origin[0],
              O_XYZ[1] - Origin[1],
              O_XYZ[2] - Origin[2] );
    gist_grid_.Stop();
    // Check if water oxygen is no more then 1.5 Ang from grid
    // NOTE: using <= to be consistent with original code
    if ( W_G[0] <= G_max_[0] && W_G[0] >= -1.5 &&
         W_G[1] <= G_max_[1] && W_G[1] >= -1.5 &&
         W_G[2] <= G_max_[2] && W_G[2] >= -1.5 )
    {
      const double* H1_XYZ = frm.Frm().XYZ( oidx + 1 );
      const double* H2_XYZ = frm.Frm().XYZ( oidx + 2 );
      // Try to bin the oxygen
      if ( gO_->Bin().Calc( O_XYZ[0], O_XYZ[1], O_XYZ[2], bin_i, bin_j, bin_k ) )
      {
        // Oxygen is inside the grid. Record the voxel.
        // NOTE hydrogens/EP always assigned to same voxel for energy purposes.
        int voxel = (int)gO_->CalcIndex(bin_i, bin_j, bin_k);
        const double* wXYZ = O_XYZ;
        // Assign every atom of this water to the oxygen's voxel and record
        // its current coordinates in the on-grid arrays.
        for (unsigned int IDX = 0; IDX != nMolAtoms_; IDX++) {
          atom_voxel_[oidx+IDX] = voxel;
          //OnGrid_idxs_[N_ON_GRID_+IDX] = oidx + IDX;
          OnGrid_idxs_.push_back( oidx+IDX );
          OnGrid_XYZ_.push_back( wXYZ[0] );
          OnGrid_XYZ_.push_back( wXYZ[1] );
          OnGrid_XYZ_.push_back( wXYZ[2] );
          wXYZ+=3;
        }
        N_ON_GRID_ += nMolAtoms_;
        //mprintf("DEBUG1: Water atom %i voxel %i\n", oidx, voxel);
        N_waters_[voxel]++;
        max_nwat_ = std::max( N_waters_[voxel], max_nwat_ );
        // ----- EULER ---------------------------
        gist_euler_.Start();
        // Record XYZ coords of water atoms (nonEP) in voxel TODO need EP?
        voxel_xyz_[voxel].push_back( O_XYZ[0] );
        voxel_xyz_[voxel].push_back( O_XYZ[1] );
        voxel_xyz_[voxel].push_back( O_XYZ[2] );
        // Get O-HX vectors
        Vec3 H1_wat( H1_XYZ[0]-O_XYZ[0], H1_XYZ[1]-O_XYZ[1], H1_XYZ[2]-O_XYZ[2] );
        Vec3 H2_wat( H2_XYZ[0]-O_XYZ[0], H2_XYZ[1]-O_XYZ[1], H2_XYZ[2]-O_XYZ[2] );
        H1_wat.Normalize();
        H2_wat.Normalize();
        // First rotation: half-angle quaternion taking H1 toward the lab X axis.
        Vec3 ar1 = H1_wat.Cross( x_lab_ ); // ar1 = V cross U
        Vec3 sar = ar1;                    // sar = V cross U
        ar1.Normalize();
        //mprintf("------------------------------------------\n");
        //H1_wat.Print("DEBUG: H1_wat");
        //x_lab_.Print("DEBUG: x_lab_");
        //ar1.Print("DEBUG: ar1");
        //sar.Print("DEBUG: sar");
        double dp1 = x_lab_ * H1_wat; // V dot U
        double theta = acos(dp1);
        double sign = sar * H1_wat;
        //mprintf("DEBUG0: dp1= %f theta= %f sign= %f\n", dp1, theta, sign);
        // NOTE: Use SMALL instead of 0 to avoid issues with denormalization
        if (sign > Constants::SMALL)
          theta /= 2.0;
        else
          theta /= -2.0;
        double w1 = cos(theta);
        double sin_theta = sin(theta);
        //mprintf("DEBUG0: theta= %f w1= %f sin_theta= %f\n", theta, w1, sin_theta);
        double x1 = ar1[0] * sin_theta;
        double y1 = ar1[1] * sin_theta;
        double z1 = ar1[2] * sin_theta;
        double w2 = w1;
        double x2 = x1;
        double y2 = y1;
        double z2 = z1;
        // Apply the first rotation to H1 (quaternion rotation written out
        // component by component).
        Vec3 H_temp;
        H_temp[0] = ((w2*w2+x2*x2)-(y2*y2+z2*z2))*H1_wat[0];
        H_temp[0] = (2*(x2*y2 + w2*z2)*H1_wat[1]) + H_temp[0];
        H_temp[0] =
 (2*(x2*z2-w2*y2)*H1_wat[2]) + H_temp[0];
        H_temp[1] = 2*(x2*y2 - w2*z2)* H1_wat[0];
        H_temp[1] = ((w2*w2-x2*x2+y2*y2-z2*z2)*H1_wat[1]) + H_temp[1];
        H_temp[1] = (2*(y2*z2+w2*x2)*H1_wat[2]) +H_temp[1];
        H_temp[2] = 2*(x2*z2+w2*y2) *H1_wat[0];
        H_temp[2] = (2*(y2*z2-w2*x2)*H1_wat[1]) + H_temp[2];
        H_temp[2] = ((w2*w2-x2*x2-y2*y2+z2*z2)*H1_wat[2]) + H_temp[2];
        H1_wat = H_temp;
        // Apply the same rotation to H2.
        Vec3 H_temp2;
        H_temp2[0] = ((w2*w2+x2*x2)-(y2*y2+z2*z2))*H2_wat[0];
        H_temp2[0] = (2*(x2*y2 + w2*z2)*H2_wat[1]) + H_temp2[0];
        H_temp2[0] = (2*(x2*z2-w2*y2)*H2_wat[2]) +H_temp2[0];
        H_temp2[1] = 2*(x2*y2 - w2*z2) *H2_wat[0];
        H_temp2[1] = ((w2*w2-x2*x2+y2*y2-z2*z2)*H2_wat[1]) +H_temp2[1];
        H_temp2[1] = (2*(y2*z2+w2*x2)*H2_wat[2]) +H_temp2[1];
        H_temp2[2] = 2*(x2*z2+w2*y2)*H2_wat[0];
        H_temp2[2] = (2*(y2*z2-w2*x2)*H2_wat[1]) +H_temp2[2];
        H_temp2[2] = ((w2*w2-x2*x2-y2*y2+z2*z2)*H2_wat[2]) + H_temp2[2];
        H2_wat = H_temp2;
        // Second rotation: half-angle quaternion about the normal of the
        // rotated H1/H2 plane relative to the lab Z axis.
        Vec3 ar2 = H_temp.Cross(H_temp2);
        ar2.Normalize();
        double dp2 = ar2 * z_lab_;
        theta = acos(dp2);
        sar = ar2.Cross( z_lab_ );
        sign = sar * H_temp;
        if (sign < 0)
          theta /= 2.0;
        else
          theta /= -2.0;
        double w3 = cos(theta);
        sin_theta = sin(theta);
        double x3 = x_lab_[0] * sin_theta;
        double y3 = x_lab_[1] * sin_theta;
        double z3 = x_lab_[2] * sin_theta;
        // Compose the two rotations (quaternion product); (w4,x4,y4,z4) is
        // the orientation stored for this water.
        double w4 = w1*w3 - x1*x3 - y1*y3 - z1*z3;
        double x4 = w1*x3 + x1*w3 + y1*z3 - z1*y3;
        double y4 = w1*y3 - x1*z3 + y1*w3 + z1*x3;
        double z4 = w1*z3 + x1*y3 - y1*x3 + z1*w3;
        voxel_Q_[voxel].push_back( w4 );
        voxel_Q_[voxel].push_back( x4 );
        voxel_Q_[voxel].push_back( y4 );
        voxel_Q_[voxel].push_back( z4 );
        //mprintf("DEBUG1: sidx= %u voxel= %i wxyz4= %g %g %g %g\n", sidx, voxel, w4, x4, y4, z4);
        //mprintf("DEBUG2: wxyz3= %g %g %g %g wxyz2= %g %g %g %g wxyz1= %g %g %g\n",
        //        w3, x3, y3, z3,
        //        w2, x2, y2, z2,
        //        w1, x1, y1, z1);
        // NOTE: No need for nw_angle_ here, it is same as N_waters_
        gist_euler_.Stop();
        // ----- DIPOLE --------------------------
        gist_dipole_.Start();
        //mprintf("DEBUG1: voxel %i dipole %f %f %f\n", voxel,
        //        O_XYZ[0]*q_O_ + H1_XYZ[0]*q_H1_
+ H2_XYZ[0]*q_H2_, // O_XYZ[1]*q_O_ + H1_XYZ[1]*q_H1_ + H2_XYZ[1]*q_H2_, // O_XYZ[2]*q_O_ + H1_XYZ[2]*q_H1_ + H2_XYZ[2]*q_H2_); double DPX = 0.0; double DPY = 0.0; double DPZ = 0.0; for (unsigned int IDX = 0; IDX != nMolAtoms_; IDX++) { const double* XYZ = frm.Frm().XYZ( oidx+IDX ); DPX += XYZ[0] * Q_[IDX]; DPY += XYZ[1] * Q_[IDX]; DPZ += XYZ[2] * Q_[IDX]; } dipolex_->UpdateVoxel(voxel, DPX); dipoley_->UpdateVoxel(voxel, DPY); dipolez_->UpdateVoxel(voxel, DPZ); gist_dipole_.Stop(); // --------------------------------------- } // Water is at most 1.5A away from grid, so we need to check for H // even if O is outside grid. if (gO_->Bin().Calc( H1_XYZ[0], H1_XYZ[1], H1_XYZ[2], bin_i, bin_j, bin_k ) ) N_hydrogens_[ (int)gO_->CalcIndex(bin_i, bin_j, bin_k) ]++; if (gO_->Bin().Calc( H2_XYZ[0], H2_XYZ[1], H2_XYZ[2], bin_i, bin_j, bin_k ) ) N_hydrogens_[ (int)gO_->CalcIndex(bin_i, bin_j, bin_k) ]++; } // END water is within 1.5 Ang of grid } // END loop over each solvent molecule // Do solute grid assignment for PME if (usePme_) { U_onGrid_idxs_.clear(); gist_grid_.Start(); for (unsigned int s = 0; s != U_idxs_.size(); s++) { int uidx = U_idxs_[s]; // the solute atom index atom_voxel_[uidx] = OFF_GRID_; const double* u_XYZ = frm.Frm().XYZ( uidx ); // get the vector of this solute atom to the grid origin Vec3 U_G( u_XYZ[0] - Origin[0], u_XYZ[1] - Origin[1], u_XYZ[2] - Origin[2]); //size_t bin_i, bin_j, bin_k; if ( U_G[0] <= G_max_[0] && U_G[0] >= -1.5 && U_G[1] <= G_max_[1] && U_G[1] >= -1.5 && U_G[2] <= G_max_[2] && U_G[2] >- -1.5) { if ( gO_->Bin().Calc(u_XYZ[0],u_XYZ[1],u_XYZ[2],bin_i,bin_j,bin_k)) // used the gO class function to calcaute voxel index { int voxel = (int)gO_->CalcIndex(bin_i,bin_j,bin_k); atom_voxel_[uidx] = voxel; // asign the voxel index to the solute atom //U_ON_GRID_ +=1; // add +1 to the number of atom on the GIST Grid N_solute_atoms_[voxel] +=1; // add +1 to the solute atom num in this voxel U_onGrid_idxs_.push_back(uidx); // The index of the solute 
 atom on GIST Grid
        }
      }
    } // END loop over solute atoms
    gist_grid_.Stop();
  } // END usePme_

# ifndef CUDA
  // Do order calculation if requested.
  // Do not do this for CUDA since CUDA nonbond routine handles the order calc.
  // NOTE: This has to be done before the nonbond energy calc since
  //       the nonbond calc can modify the on-grid coordinates (for minimum
  //       image convention when cell is non-orthogonal).
  gist_order_.Start();
  if (doOrder_) Order(frm.Frm());
  gist_order_.Stop();
# endif
  // Do nonbond energy calc if not skipping energy
  gist_nonbond_.Start();
  if (!skipE_) {
    if (usePme_) {
      // PME
      NonbondEnergy_pme( frm.Frm() );
    } else {
      // Non-PME
# ifdef CUDA
      NonbondCuda(frm);
# else
      NonbondEnergy(frm.Frm(), *CurrentParm_);
# endif
    }
  }
  gist_nonbond_.Stop();

  gist_action_.Stop();
  return Action::OK;
}

/** Translational entropy calc between given water and all waters in voxel 2.
  * Updates NNd/NNs in place with the nearest-neighbor squared distances
  * (translational and combined translation+orientation respectively).
  * \param VX voxel 1 water X
  * \param VY voxel 1 water Y
  * \param VZ voxel 1 water Z
  * \param W4 voxel 1 water W4
  * \param X4 voxel 1 water X4
  * \param Y4 voxel 1 water Y4
  * \param Z4 voxel 1 water Z4
  * \param voxel2 Index of second voxel
  */
void Action_GIST::TransEntropy(float VX, float VY, float VZ,
                               float W4, float X4, float Y4, float Z4,
                               int voxel2, double& NNd, double& NNs) const
{
  int nw_tot = N_waters_[voxel2];
  Farray const& V_XYZ = voxel_xyz_[voxel2];
  Farray const& V_Q   = voxel_Q_[voxel2];
  for (int n1 = 0; n1 != nw_tot; n1++)
  {
    int i1 = n1 * 3; // index into V_XYZ for n1
    double dx = (double)(VX - V_XYZ[i1  ]);
    double dy = (double)(VY - V_XYZ[i1+1]);
    double dz = (double)(VZ - V_XYZ[i1+2]);
    double dd = dx*dx+dy*dy+dz*dz;
    // Track the nearest translational neighbor (squared distance, nonzero).
    if (dd < NNd && dd > 0) { NNd = dd; }
    int q1 = n1 * 4; // index into V_Q for n1
    // Quaternion angular distance; fabs gives the shorter arc (q and -q
    // represent the same orientation).
    double rR = 2.0 * acos( fabs(W4 * V_Q[q1  ] +
                            X4 * V_Q[q1+1] +
                            Y4 * V_Q[q1+2] +
                            Z4 * V_Q[q1+3] )); //add fabs for quaternions distance calculation
    // Combined six-dimensional (translation + orientation) distance.
    double ds = rR*rR + dd;
    if (ds < NNs && ds > 0) { NNs = ds; }
  }
}

// Action_GIST::SumEVV()
void Action_GIST::SumEVV() {
  if (E_VV_VDW_.size() > 1) {
    // Reduce the per-thread (OpenMP) accumulators into the thread-0 arrays.
    for (unsigned int gr_pt = 0; gr_pt != MAX_GRID_PT_;
 gr_pt++) {
      for (unsigned int thread = 1; thread < E_VV_VDW_.size(); thread++) {
        E_UV_VDW_[0][gr_pt]  += E_UV_VDW_[thread][gr_pt];
        E_UV_Elec_[0][gr_pt] += E_UV_Elec_[thread][gr_pt];
        E_VV_VDW_[0][gr_pt]  += E_VV_VDW_[thread][gr_pt];
        E_VV_Elec_[0][gr_pt] += E_VV_Elec_[thread][gr_pt];
        neighbor_[0][gr_pt]  += neighbor_[thread][gr_pt];
      }
    }
  }
}

/** Calculate average voxel energy for PME grids. */
void Action_GIST::CalcAvgVoxelEnergy_PME(double Vvox, DataSet_GridFlt& PME_dens,
                                         DataSet_GridFlt& U_PME_dens, Farray& PME_norm)
const
{
  double PME_tot =0.0;   // total solvent PME energy density summed over voxels
  double U_PME_tot = 0.0; // total solute PME energy density summed over voxels
  mprintf("\t Calculating average voxel energies: \n");
  ProgressBar E_progress(MAX_GRID_PT_);
  for ( unsigned int gr_pt =0; gr_pt < MAX_GRID_PT_; gr_pt++)
  {
    E_progress.Update(gr_pt);
    int nw_total = N_waters_[gr_pt];
    if (nw_total >=1)
    {
      // Normalize by frames*volume (density) and by water count (per-water).
      PME_dens[gr_pt] = E_pme_[gr_pt] / (NFRAME_ * Vvox);
      PME_norm[gr_pt] = E_pme_[gr_pt] / nw_total;
      PME_tot += PME_dens[gr_pt];
    }else{
      PME_dens[gr_pt]=0;
      PME_norm[gr_pt]=0;
    }
    int ns_total = N_solute_atoms_[gr_pt];
    if (ns_total >=1)
    {
      U_PME_dens[gr_pt] = U_E_pme_[gr_pt] / (NFRAME_ * Vvox);
      U_PME_tot += U_PME_dens[gr_pt];
    }else{
      U_PME_dens[gr_pt]=0;
    }
  }
  PME_tot *=Vvox;
  U_PME_tot *=Vvox;
  infofile_->Printf("Ensemble total water energy on the grid: %9.5f Kcal/mol \n", PME_tot);
  infofile_->Printf("Ensemble total solute energy on the grid: %9.5f Kcal/mol \n",U_PME_tot);
  // infofile_->Printf("Ensemble solute's total potential energy : %9.5f Kcal/mol \n", solute_potential_energy_ / NFRAME_);
  // infofile_->Printf("Ensemble system's total potential energy: %9.5f Kcal/mol \n", system_potential_energy_/NFRAME_);
}

/** Calculate average voxel energy for GIST grids.
*/
void Action_GIST::CalcAvgVoxelEnergy(double Vvox, DataSet_GridFlt& Eww_dens,
                                     DataSet_GridFlt& Esw_dens,
                                     Farray& Eww_norm, Farray& Esw_norm,
                                     DataSet_GridDbl& qtet,
                                     DataSet_GridFlt& neighbor_norm, Farray& neighbor_dens)
{
#ifndef CUDA
  Darray const& E_UV_VDW = E_UV_VDW_[0];
  Darray const& E_UV_Elec = E_UV_Elec_[0];
  Darray const& E_VV_VDW = E_VV_VDW_[0];
  Darray const& E_VV_Elec = E_VV_Elec_[0];
#endif
  Farray const& Neighbor = neighbor_[0];
#ifndef CUDA
  // Sum values from other threads if necessary
  SumEVV();
#endif
  double Eswtot = 0.0;
  double Ewwtot = 0.0;
  mprintf("\tCalculating average voxel energies:\n");
  ProgressBar E_progress( MAX_GRID_PT_ );
  for (unsigned int gr_pt = 0; gr_pt < MAX_GRID_PT_; gr_pt++)
  {
    E_progress.Update( gr_pt );
    //mprintf("DEBUG1: VV vdw=%f elec=%f\n", E_VV_VDW_[gr_pt], E_VV_Elec_[gr_pt]);
    int nw_total = N_waters_[gr_pt]; // Total number of waters that have been in this voxel.
    if (nw_total > 0) {
#ifndef CUDA
      Esw_dens[gr_pt] = (E_UV_VDW[gr_pt] + E_UV_Elec[gr_pt]) / (NFRAME_ * Vvox);
      Esw_norm[gr_pt] = (E_UV_VDW[gr_pt] + E_UV_Elec[gr_pt]) / nw_total;
      // Water-water terms are halved to avoid double-counting pair energies.
      Eww_dens[gr_pt] = (E_VV_VDW[gr_pt] + E_VV_Elec[gr_pt]) / (2 * NFRAME_ * Vvox);
      Eww_norm[gr_pt] = (E_VV_VDW[gr_pt] + E_VV_Elec[gr_pt]) / (2 * nw_total);
#else
      double esw = this->Esw_->operator[](gr_pt);
      double eww = this->Eww_->operator[](gr_pt);
      Esw_dens[gr_pt] = esw / (this->NFRAME_ * Vvox);
      Esw_norm[gr_pt] = esw / nw_total;
      Eww_dens[gr_pt] = eww / (this->NFRAME_ * Vvox);
      Eww_norm[gr_pt] = eww / nw_total;
#endif
      Eswtot += Esw_dens[gr_pt];
      Ewwtot += Eww_dens[gr_pt];
    } else {
      Esw_dens[gr_pt]=0;
      Esw_norm[gr_pt]=0;
      Eww_norm[gr_pt]=0;
      Eww_dens[gr_pt]=0;
    }
    // Compute the average number of water neighbor and average order parameter.
    if (nw_total > 0)
    {
      qtet[gr_pt] /= nw_total;
      //mprintf("DEBUG1: neighbor= %8.1f nw_total= %8i\n", neighbor[gr_pt], nw_total);
      neighbor_norm[gr_pt] = (double)Neighbor[gr_pt] / nw_total;
    }
    neighbor_dens[gr_pt] = (double)Neighbor[gr_pt] / (NFRAME_ * Vvox);
  } // END loop over all grid points (voxels)
  Eswtot *= Vvox;
  Ewwtot *= Vvox;
  infofile_->Printf("Total water-solute energy of the grid: Esw = %9.5f kcal/mol\n", Eswtot);
  infofile_->Printf("Total unreferenced water-water energy of the grid: Eww = %9.5f kcal/mol\n",
                    Ewwtot);
}

/** Handle averaging for grids and output from GIST. */
void Action_GIST::Print() {
  gist_print_.Start();
  double Vvox = gO_->Bin().VoxelVolume();
  mprintf(" GIST OUTPUT:\n");
  // The variables are kept outside, so that they are declared for later use.
  // Calculate orientational entropy
  DataSet_GridFlt& dTSorient_dens = static_cast<DataSet_GridFlt&>( *dTSorient_ );
  Farray dTSorient_norm( MAX_GRID_PT_, 0.0 );
  double dTSorienttot = 0;
  int nwtt = 0;
  double dTSo = 0;
  if (! this->skipS_) {
    // LOOP over all voxels
    mprintf("\tCalculating orientational entropy:\n");
    ProgressBar oe_progress( MAX_GRID_PT_ );
    for (unsigned int gr_pt = 0; gr_pt < MAX_GRID_PT_; gr_pt++) {
      oe_progress.Update( gr_pt );
      dTSorient_dens[gr_pt] = 0;
      dTSorient_norm[gr_pt] = 0;
      int nw_total = N_waters_[gr_pt]; // Total number of waters that have been in this voxel.
nwtt += nw_total; //mprintf("DEBUG1: %u nw_total %i\n", gr_pt, nw_total); if (nw_total > 1) { for (int n0 = 0; n0 < nw_total; n0++) { double NNr = 10000; int q0 = n0 * 4; // Index into voxel_Q_ for n0 for (int n1 = 0; n1 < nw_total; n1++) { if (n0 != n1) { int q1 = n1 * 4; // Index into voxel_Q_ for n1 //mprintf("DEBUG1:\t\t q1= %8i {%12.4f %12.4f %12.4f %12.4f} q0= %8i {%12.4f %12.4f %12.4f %12.4f}\n", // q1, voxel_Q_[gr_pt][q1 ], voxel_Q_[gr_pt][q1+1], voxel_Q_[gr_pt][q1+2], voxel_Q_[gr_pt][q1+3], // q0, voxel_Q_[gr_pt][q0 ], voxel_Q_[gr_pt][q0+1], voxel_Q_[gr_pt][q0+2], voxel_Q_[gr_pt][q0+3]); double rR = 2.0 * acos( fabs(voxel_Q_[gr_pt][q1 ] * voxel_Q_[gr_pt][q0 ] + voxel_Q_[gr_pt][q1+1] * voxel_Q_[gr_pt][q0+1] + voxel_Q_[gr_pt][q1+2] * voxel_Q_[gr_pt][q0+2] + voxel_Q_[gr_pt][q1+3] * voxel_Q_[gr_pt][q0+3] )); // add fabs for quaternion distance calculation //mprintf("DEBUG1:\t\t %8i %8i %g\n", n0, n1, rR); if (rR > 0 && rR < NNr) NNr = rR; } } // END inner loop over all waters for this voxel if (NNr < 9999 && NNr > 0) { double dbl = log(NNr*NNr*NNr*nw_total / (3.0*Constants::TWOPI)); //mprintf("DEBUG1: %u nw_total= %i NNr= %f dbl= %f\n", gr_pt, nw_total, NNr, dbl); dTSorient_norm[gr_pt] += dbl; dTSo += dbl; } } // END outer loop over all waters for this voxel //mprintf("DEBUG1: dTSorient_norm %f\n", dTSorient_norm[gr_pt]); dTSorient_norm[gr_pt] = Constants::GASK_KCAL * temperature_ * ((dTSorient_norm[gr_pt]/nw_total) + Constants::EULER_MASC); double dtso_norm_nw = (double)dTSorient_norm[gr_pt] * (double)nw_total; dTSorient_dens[gr_pt] = (dtso_norm_nw / (NFRAME_ * Vvox)); dTSorienttot += dTSorient_dens[gr_pt]; //mprintf("DEBUG1: %f\n", dTSorienttot); } } // END loop over all grid points (voxels) dTSorienttot *= Vvox; infofile_->Printf("Maximum number of waters found in one voxel for %d frames = %d\n", NFRAME_, max_nwat_); infofile_->Printf("Total referenced orientational entropy of the grid:" " dTSorient = %9.5f kcal/mol, Nf=%d\n", dTSorienttot, NFRAME_); } // 
Compute translational entropy for each voxel double dTStranstot = 0.0; double dTSt = 0.0; double dTSs = 0.0; int nwts = 0; unsigned int nx = gO_->NX(); unsigned int ny = gO_->NY(); unsigned int nz = gO_->NZ(); unsigned int addx = ny * nz; unsigned int addy = nz; unsigned int addz = 1; DataSet_GridFlt& gO = static_cast<DataSet_GridFlt&>( *gO_ ); DataSet_GridFlt& gH = static_cast<DataSet_GridFlt&>( *gH_ ); DataSet_GridFlt& dTStrans = static_cast<DataSet_GridFlt&>( *dTStrans_ ); DataSet_GridFlt& dTSsix = static_cast<DataSet_GridFlt&>( *dTSsix_ ); Farray dTStrans_norm( MAX_GRID_PT_, 0.0 ); Farray dTSsix_norm( MAX_GRID_PT_, 0.0 ); // Loop over all grid points if (! this->skipS_) mprintf("\tCalculating translational entropy:\n"); else mprintf("Calculating Densities:\n"); ProgressBar te_progress( MAX_GRID_PT_ ); for (unsigned int gr_pt = 0; gr_pt < MAX_GRID_PT_; gr_pt++) { te_progress.Update( gr_pt ); int numplane = gr_pt / addx; double W_dens = 1.0 * N_waters_[gr_pt] / (NFRAME_*Vvox); gO[gr_pt] = W_dens / BULK_DENS_; gH[gr_pt] = 1.0 * N_hydrogens_[gr_pt] / (NFRAME_*Vvox*2*BULK_DENS_); if (! this->skipS_) { int nw_total = N_waters_[gr_pt]; // Total number of waters that have been in this voxel. 
for (int n0 = 0; n0 < nw_total; n0++) { double NNd = 10000; double NNs = 10000; int i0 = n0 * 3; // index into voxel_xyz_ for n0 float VX = voxel_xyz_[gr_pt][i0 ]; float VY = voxel_xyz_[gr_pt][i0+1]; float VZ = voxel_xyz_[gr_pt][i0+2]; int q0 = n0 * 4; // index into voxel_Q_ for n0 float W4 = voxel_Q_[gr_pt][q0 ]; float X4 = voxel_Q_[gr_pt][q0+1]; float Y4 = voxel_Q_[gr_pt][q0+2]; float Z4 = voxel_Q_[gr_pt][q0+3]; // First do own voxel for (int n1 = 0; n1 < nw_total; n1++) { if ( n1 != n0) { int i1 = n1 * 3; // index into voxel_xyz_ for n1 double dx = (double)(VX - voxel_xyz_[gr_pt][i1 ]); double dy = (double)(VY - voxel_xyz_[gr_pt][i1+1]); double dz = (double)(VZ - voxel_xyz_[gr_pt][i1+2]); double dd = dx*dx+dy*dy+dz*dz; if (dd < NNd && dd > 0) { NNd = dd; } int q1 = n1 * 4; // index into voxel_Q_ for n1 double rR = 2 * acos( fabs(W4*voxel_Q_[gr_pt][q1 ] + X4*voxel_Q_[gr_pt][q1+1] + Y4*voxel_Q_[gr_pt][q1+2] + Z4*voxel_Q_[gr_pt][q1+3] )); //add fabs for quaternion distance calculation double ds = rR*rR + dd; if (ds < NNs && ds > 0) { NNs = ds; } } } // END self loop over all waters for this voxel //mprintf("DEBUG1: self NNd=%f NNs=%f\n", NNd, NNs); // Determine which directions are possible. 
bool cannotAddZ = (nz == 0 || ( gr_pt%nz == nz-1 )); bool cannotAddY = ((nz == 0 || ny-1 == 0) || ( gr_pt%(nz*(ny-1)+(numplane*addx)) < nz)); bool cannotAddX = (gr_pt >= addx * (nx-1) && gr_pt < addx * nx ); bool cannotSubZ = (nz == 0 || gr_pt%nz == 0); bool cannotSubY = ((nz == 0 || ny == 0) || (gr_pt%addx < nz)); bool cannotSubX = ((nz == 0 || ny == 0) || (gr_pt < addx)); bool boundary = ( cannotAddZ || cannotAddY || cannotAddX || cannotSubZ || cannotSubY || cannotSubX ); if (!boundary) { TransEntropy(VX, VY, VZ, W4, X4, Y4, Z4, gr_pt + addz, NNd, NNs); TransEntropy(VX, VY, VZ, W4, X4, Y4, Z4, gr_pt + addy, NNd, NNs); TransEntropy(VX, VY, VZ, W4, X4, Y4, Z4, gr_pt + addx, NNd, NNs); TransEntropy(VX, VY, VZ, W4, X4, Y4, Z4, gr_pt - addz, NNd, NNs); TransEntropy(VX, VY, VZ, W4, X4, Y4, Z4, gr_pt - addy, NNd, NNs); TransEntropy(VX, VY, VZ, W4, X4, Y4, Z4, gr_pt - addx, NNd, NNs); TransEntropy(VX, VY, VZ, W4, X4, Y4, Z4, gr_pt + addz + addy, NNd, NNs); TransEntropy(VX, VY, VZ, W4, X4, Y4, Z4, gr_pt + addz - addy, NNd, NNs); TransEntropy(VX, VY, VZ, W4, X4, Y4, Z4, gr_pt - addz + addy, NNd, NNs); TransEntropy(VX, VY, VZ, W4, X4, Y4, Z4, gr_pt - addz - addy, NNd, NNs); TransEntropy(VX, VY, VZ, W4, X4, Y4, Z4, gr_pt + addz + addx, NNd, NNs); TransEntropy(VX, VY, VZ, W4, X4, Y4, Z4, gr_pt + addz - addx, NNd, NNs); TransEntropy(VX, VY, VZ, W4, X4, Y4, Z4, gr_pt - addz + addx, NNd, NNs); TransEntropy(VX, VY, VZ, W4, X4, Y4, Z4, gr_pt - addz - addx, NNd, NNs); TransEntropy(VX, VY, VZ, W4, X4, Y4, Z4, gr_pt + addy + addx, NNd, NNs); TransEntropy(VX, VY, VZ, W4, X4, Y4, Z4, gr_pt + addy - addx, NNd, NNs); TransEntropy(VX, VY, VZ, W4, X4, Y4, Z4, gr_pt - addy + addx, NNd, NNs); TransEntropy(VX, VY, VZ, W4, X4, Y4, Z4, gr_pt - addy - addx, NNd, NNs); // add the 8 more voxels for NNr searching TransEntropy(VX, VY, VZ, W4, X4, Y4, Z4, gr_pt + addx + addy + addz, NNd, NNs); TransEntropy(VX, VY, VZ, W4, X4, Y4, Z4, gr_pt + addx + addy - addz, NNd, NNs); TransEntropy(VX, VY, VZ, W4, 
X4, Y4, Z4, gr_pt + addx - addy + addz, NNd, NNs); TransEntropy(VX, VY, VZ, W4, X4, Y4, Z4, gr_pt + addx - addy - addz, NNd, NNs); TransEntropy(VX, VY, VZ, W4, X4, Y4, Z4, gr_pt - addx + addy + addz, NNd, NNs); TransEntropy(VX, VY, VZ, W4, X4, Y4, Z4, gr_pt - addx + addy - addz, NNd, NNs); TransEntropy(VX, VY, VZ, W4, X4, Y4, Z4, gr_pt - addx - addy + addz, NNd, NNs); TransEntropy(VX, VY, VZ, W4, X4, Y4, Z4, gr_pt - addx - addy - addz, NNd, NNs); NNd = sqrt(NNd); NNs = sqrt(NNs); if (NNd < 3 && NNd > 0/*NNd < 9999 && NNd > 0*/) { double dbl = log((NNd*NNd*NNd*NFRAME_*4*Constants::PI*BULK_DENS_)/3); dTStrans_norm[gr_pt] += dbl; dTSt += dbl; dbl = log((NNs*NNs*NNs*NNs*NNs*NNs*NFRAME_*Constants::PI*BULK_DENS_)/48); dTSsix_norm[gr_pt] += dbl; dTSs += dbl; //mprintf("DEBUG1: dbl=%f NNs=%f\n", dbl, NNs); } } } // END loop over all waters for this voxel if (dTStrans_norm[gr_pt] != 0) { nwts += nw_total; dTStrans_norm[gr_pt] = Constants::GASK_KCAL*temperature_*( (dTStrans_norm[gr_pt]/nw_total) + Constants::EULER_MASC ); dTSsix_norm[gr_pt] = Constants::GASK_KCAL*temperature_*( (dTSsix_norm[gr_pt]/nw_total) + Constants::EULER_MASC ); } double dtst_norm_nw = (double)dTStrans_norm[gr_pt] * (double)nw_total; dTStrans[gr_pt] = (dtst_norm_nw / (NFRAME_*Vvox)); double dtss_norm_nw = (double)dTSsix_norm[gr_pt] * (double)nw_total; dTSsix[gr_pt] = (dtss_norm_nw / (NFRAME_*Vvox)); dTStranstot += dTStrans[gr_pt]; } // END loop over all grid points (voxels) } if (!this->skipS_) { dTStranstot *= Vvox; double dTSst = 0.0; double dTStt = 0.0; if (nwts > 0) { dTSst = Constants::GASK_KCAL*temperature_*((dTSs/nwts) + Constants::EULER_MASC); dTStt = Constants::GASK_KCAL*temperature_*((dTSt/nwts) + Constants::EULER_MASC); } double dTSot = Constants::GASK_KCAL*temperature_*((dTSo/nwtt) + Constants::EULER_MASC); infofile_->Printf("watcount in vol = %d\n", nwtt); infofile_->Printf("watcount in subvol = %d\n", nwts); infofile_->Printf("Total referenced translational entropy of the grid:" " dTStrans 
= %9.5f kcal/mol, Nf=%d\n", dTStranstot, NFRAME_); infofile_->Printf("Total 6d if all one vox: %9.5f kcal/mol\n", dTSst); infofile_->Printf("Total t if all one vox: %9.5f kcal/mol\n", dTStt); infofile_->Printf("Total o if all one vox: %9.5f kcal/mol\n", dTSot); } // Compute average voxel energy. Allocate these sets even if skipping energy // to be consistent with previous output. DataSet_GridFlt& PME_dens = static_cast<DataSet_GridFlt&>( *PME_); DataSet_GridFlt& U_PME_dens = static_cast<DataSet_GridFlt&>( *U_PME_); DataSet_GridFlt& Esw_dens = static_cast<DataSet_GridFlt&>( *Esw_ ); DataSet_GridFlt& Eww_dens = static_cast<DataSet_GridFlt&>( *Eww_ ); DataSet_GridFlt& neighbor_norm = static_cast<DataSet_GridFlt&>( *neighbor_norm_ ); DataSet_GridDbl& qtet = static_cast<DataSet_GridDbl&>( *order_norm_ ); Farray Esw_norm( MAX_GRID_PT_, 0.0 ); Farray Eww_norm( MAX_GRID_PT_, 0.0 ); Farray PME_norm( MAX_GRID_PT_,0.0); Farray neighbor_dens( MAX_GRID_PT_, 0.0 ); if (!skipE_) { if (usePme_) { CalcAvgVoxelEnergy_PME(Vvox, PME_dens, U_PME_dens, PME_norm); }// else { CalcAvgVoxelEnergy(Vvox, Eww_dens, Esw_dens, Eww_norm, Esw_norm, qtet, neighbor_norm, neighbor_dens); //} } // Compute average dipole density. DataSet_GridFlt& pol = static_cast<DataSet_GridFlt&>( *dipole_ ); DataSet_GridDbl& dipolex = static_cast<DataSet_GridDbl&>( *dipolex_ ); DataSet_GridDbl& dipoley = static_cast<DataSet_GridDbl&>( *dipoley_ ); DataSet_GridDbl& dipolez = static_cast<DataSet_GridDbl&>( *dipolez_ ); for (unsigned int gr_pt = 0; gr_pt < MAX_GRID_PT_; gr_pt++) { dipolex[gr_pt] /= (Constants::DEBYE_EA * NFRAME_ * Vvox); dipoley[gr_pt] /= (Constants::DEBYE_EA * NFRAME_ * Vvox); dipolez[gr_pt] /= (Constants::DEBYE_EA * NFRAME_ * Vvox); pol[gr_pt] = sqrt( dipolex[gr_pt]*dipolex[gr_pt] + dipoley[gr_pt]*dipoley[gr_pt] + dipolez[gr_pt]*dipolez[gr_pt] ); } // Write the GIST output file. // TODO: Make a data file format? 
if (datafile_ != 0) { mprintf("\tWriting GIST results for each voxel:\n"); // Create the format strings. std::string fmtstr = intFmt_.Fmt() + // grid point " " + fltFmt_.Fmt() + // grid X " " + fltFmt_.Fmt() + // grid Y " " + fltFmt_.Fmt() + // grid Z " " + intFmt_.Fmt() + // # waters " " + fltFmt_.Fmt() + // gO " " + fltFmt_.Fmt() + // gH " " + fltFmt_.Fmt() + // dTStrans " " + fltFmt_.Fmt() + // dTStrans_norm " " + fltFmt_.Fmt() + // dTSorient_dens " " + fltFmt_.Fmt() + // dTSorient_norm " " + fltFmt_.Fmt() + // dTSsix " " + fltFmt_.Fmt() + // dTSsix_norm " " + fltFmt_.Fmt() + // Esw_dens " " + fltFmt_.Fmt() + // Esw_norm " " + fltFmt_.Fmt() + // Eww_dens " " + fltFmt_.Fmt(); // EWW_norm if (usePme_) { fmtstr += " " + fltFmt_.Fmt() + // PME_dens + " " + fltFmt_.Fmt(); // PME_norm } fmtstr += " " + fltFmt_.Fmt() + // dipolex " " + fltFmt_.Fmt() + // dipoley " " + fltFmt_.Fmt() + // dipolez " " + fltFmt_.Fmt() + // pol " " + fltFmt_.Fmt() + // neighbor_dens " " + fltFmt_.Fmt() + // neighbor_norm " " + fltFmt_.Fmt() + // qtet " \n"; // NEWLINE if (debug_ > 0) mprintf("DEBUG: Fmt='%s'\n", fmtstr.c_str()); const char* gistOutputVersion; if (usePme_) gistOutputVersion = "v3"; else gistOutputVersion = "v2"; // Do the header datafile_->Printf("GIST Output %s " "spacing=%.4f center=%.6f,%.6f,%.6f dims=%i,%i,%i \n" "voxel xcoord ycoord zcoord population g_O g_H" " dTStrans-dens(kcal/mol/A^3) dTStrans-norm(kcal/mol)" " dTSorient-dens(kcal/mol/A^3) dTSorient-norm(kcal/mol)" " dTSsix-dens(kcal/mol/A^3) dTSsix-norm(kcal/mol)" " Esw-dens(kcal/mol/A^3) Esw-norm(kcal/mol)" " Eww-dens(kcal/mol/A^3) Eww-norm-unref(kcal/mol)", gistOutputVersion, gridspacing_, gridcntr_[0], gridcntr_[1], gridcntr_[2], (int)griddim_[0], (int)griddim_[1], (int)griddim_[2]); if (usePme_) datafile_->Printf(" PME-dens(kcal/mol/A^3) PME-norm(kcal/mol)"); datafile_->Printf(" Dipole_x-dens(D/A^3) Dipole_y-dens(D/A^3) Dipole_z-dens(D/A^3)" " Dipole-dens(D/A^3) neighbor-dens(1/A^3) neighbor-norm 
order-norm\n"); // Loop over voxels ProgressBar O_progress( MAX_GRID_PT_ ); for (unsigned int gr_pt = 0; gr_pt < MAX_GRID_PT_; gr_pt++) { O_progress.Update( gr_pt ); size_t i, j, k; gO_->ReverseIndex( gr_pt, i, j, k ); Vec3 XYZ = gO_->Bin().Center( i, j, k ); if (usePme_) { datafile_->Printf(fmtstr.c_str(), gr_pt, XYZ[0], XYZ[1], XYZ[2], N_waters_[gr_pt], gO[gr_pt], gH[gr_pt], dTStrans[gr_pt], dTStrans_norm[gr_pt], dTSorient_dens[gr_pt], dTSorient_norm[gr_pt], dTSsix[gr_pt], dTSsix_norm[gr_pt], Esw_dens[gr_pt], Esw_norm[gr_pt], Eww_dens[gr_pt], Eww_norm[gr_pt], PME_dens[gr_pt], PME_norm[gr_pt], dipolex[gr_pt], dipoley[gr_pt], dipolez[gr_pt], pol[gr_pt], neighbor_dens[gr_pt], neighbor_norm[gr_pt], qtet[gr_pt]); } else { datafile_->Printf(fmtstr.c_str(), gr_pt, XYZ[0], XYZ[1], XYZ[2], N_waters_[gr_pt], gO[gr_pt], gH[gr_pt], dTStrans[gr_pt], dTStrans_norm[gr_pt], dTSorient_dens[gr_pt], dTSorient_norm[gr_pt], dTSsix[gr_pt], dTSsix_norm[gr_pt], Esw_dens[gr_pt], Esw_norm[gr_pt], Eww_dens[gr_pt], Eww_norm[gr_pt], dipolex[gr_pt], dipoley[gr_pt], dipolez[gr_pt], pol[gr_pt], neighbor_dens[gr_pt], neighbor_norm[gr_pt], qtet[gr_pt]); } } // END loop over voxels } // END datafile_ not null // Write water-water interaction energy matrix if (ww_Eij_ != 0) { DataSet_MatrixFlt& ww_Eij = static_cast<DataSet_MatrixFlt&>( *ww_Eij_ ); double fac = 1.0 / (double)(NFRAME_ * 2); for (unsigned int idx = 0; idx != ww_Eij.Size(); idx++) { if (fabs(ww_Eij[idx]) < Constants::SMALL) ww_Eij[idx] = 0.0; else { double val = (double)ww_Eij[idx]; ww_Eij[idx] = (float)(val * fac); } } // Eij matrix output, skip any zeros. 
for (unsigned int a = 1; a < MAX_GRID_PT_; a++) { for (unsigned int l = 0; l < a; l++) { double dbl = ww_Eij_->GetElement(a, l); if (dbl != 0) eijfile_->Printf("%10d %10d %12.5E\n", a, l, dbl); } } } gist_print_.Stop(); double total = gist_init_.Total() + gist_setup_.Total() + gist_action_.Total() + gist_print_.Total(); mprintf("\tGIST timings:\n"); gist_init_.WriteTiming(1, "Init: ", total); gist_setup_.WriteTiming(1, "Setup: ", total); gist_action_.WriteTiming(1, "Action:", total); gist_grid_.WriteTiming(2, "Grid: ", gist_action_.Total()); gist_nonbond_.WriteTiming(2, "Nonbond:", gist_action_.Total()); # ifdef LIBPME if (usePme_) gistPme_.Timing( gist_nonbond_.Total() ); # endif //gist_nonbond_dist_.WriteTiming(3, "Dist2:", gist_nonbond_.Total()); //gist_nonbond_UV_.WriteTiming(3, "UV:", gist_nonbond_.Total()); //gist_nonbond_VV_.WriteTiming(3, "VV:", gist_nonbond_.Total()); //gist_nonbond_OV_.WriteTiming(3, "OV:", gist_nonbond_.Total()); gist_euler_.WriteTiming(2, "Euler: ", gist_action_.Total()); gist_dipole_.WriteTiming(2, "Dipole: ", gist_action_.Total()); gist_order_.WriteTiming(2, "Order: ", gist_action_.Total()); gist_print_.WriteTiming(1, "Print:", total); mprintf("TIME:\tTotal: %.4f s\n", total); #ifdef CUDA this->freeGPUMemory(); #endif } #ifdef CUDA void Action_GIST::NonbondCuda(ActionFrame frm) { // Simply to get the information for the energetic calculations std::vector<float> eww_result(this->numberAtoms_); std::vector<float> esw_result(this->numberAtoms_); std::vector<std::vector<int> > order_indices; this->gist_nonbond_.Start(); float *recip = NULL; float *ucell = NULL; int boxinfo; // Check Boxinfo and write the necessary data into recip, ucell and boxinfo. 
switch(imageOpt_.ImagingType()) { case ImageOption::NONORTHO: recip = new float[9]; ucell = new float[9]; for (int i = 0; i < 9; ++i) { ucell[i] = (float) frm.Frm().BoxCrd().UnitCell()[i]; recip[i] = (float) frm.Frm().BoxCrd().FracCell()[i]; } boxinfo = 2; break; case ImageOption::ORTHO: recip = new float[9]; recip[0] = frm.Frm().BoxCrd().Param(Box::X); recip[1] = frm.Frm().BoxCrd().Param(Box::Y); recip[2] = frm.Frm().BoxCrd().Param(Box::Z); ucell = NULL; boxinfo = 1; break; case ImageOption::NO_IMAGE: recip = NULL; ucell = NULL; boxinfo = 0; break; default: mprinterr("Error: Unexpected box information found."); return; } std::vector<int> result_o = std::vector<int>(4 * this->numberAtoms_); std::vector<int> result_n = std::vector<int>(this->numberAtoms_); // Call the GPU Wrapper, which subsequently calls the kernel, after setup operations. // Must create arrays from the vectors, does that by getting the address of the first element of the vector. std::vector<std::vector<float> > e_result = doActionCudaEnergy(frm.Frm().xAddress(), this->NBindex_c_, this->numberAtomTypes_, this->paramsLJ_c_, this->molecule_c_, boxinfo, recip, ucell, this->numberAtoms_, this->min_c_, this->max_c_, this->headAtomType_,this->NeighborCut2_, &(result_o[0]), &(result_n[0]), this->result_w_c_, this->result_s_c_, this->result_O_c_, this->result_N_c_, this->doOrder_); eww_result = e_result.at(0); esw_result = e_result.at(1); if (this->doOrder_) { int counter = 0; for (unsigned int i = 0; i < (4 * this->numberAtoms_); i += 4) { ++counter; std::vector<int> temp; for (unsigned int j = 0; j < 4; ++j) { temp.push_back(result_o.at(i + j)); } order_indices.push_back(temp); } } delete[] recip; // Free memory delete[] ucell; // Free memory for (unsigned int sidx = 0; sidx < NSOLVENT_; sidx++) { int headAtomIndex = O_idxs_[sidx]; size_t bin_i, bin_j, bin_k; const double *vec = frm.Frm().XYZ(headAtomIndex); int voxel = -1; if (this->gO_->Bin().Calc(vec[0], vec[1], vec[2], bin_i, bin_j, bin_k)) { voxel = 
this->gO_->CalcIndex(bin_i, bin_j, bin_k); this->neighbor_.at(0).at(voxel) += result_n.at(headAtomIndex); // This is not nice, as it assumes that O is set before the two Hydrogens // might be the case, but is still not nice (in my opinion) for (unsigned int IDX = 0; IDX != nMolAtoms_; IDX++) { this->Esw_->UpdateVoxel(voxel, esw_result.at(headAtomIndex + IDX)); this->Eww_->UpdateVoxel(voxel, eww_result.at(headAtomIndex + IDX)); } // Order calculation if (this->doOrder_) { double sum = 0; Vec3 cent( frm.Frm().xAddress() + (headAtomIndex) * 3 ); std::vector<Vec3> vectors; switch(imageOpt_.ImagingType()) { case ImageOption::NONORTHO: case ImageOption::ORTHO: { Vec3 vec(frm.Frm().xAddress() + (order_indices.at(headAtomIndex).at(0) * 3)); vectors.push_back( MinImagedVec(vec, cent, frm.Frm().BoxCrd().UnitCell(), frm.Frm().BoxCrd().FracCell())); vec = Vec3(frm.Frm().xAddress() + (order_indices.at(headAtomIndex).at(1) * 3));<|fim▁hole|> vectors.push_back( MinImagedVec(vec, cent, frm.Frm().BoxCrd().UnitCell(), frm.Frm().BoxCrd().FracCell())); } break; default: vectors.push_back( Vec3( frm.Frm().xAddress() + (order_indices.at(headAtomIndex).at(0) * 3) ) - cent ); vectors.push_back( Vec3( frm.Frm().xAddress() + (order_indices.at(headAtomIndex).at(1) * 3) ) - cent ); vectors.push_back( Vec3( frm.Frm().xAddress() + (order_indices.at(headAtomIndex).at(2) * 3) ) - cent ); vectors.push_back( Vec3( frm.Frm().xAddress() + (order_indices.at(headAtomIndex).at(3) * 3) ) - cent ); } for (int i = 0; i < 3; ++i) { for (int j = i + 1; j < 4; ++j) { double cosThet = (vectors.at(i) * vectors.at(j)) / sqrt(vectors.at(i).Magnitude2() * vectors.at(j).Magnitude2()); sum += (cosThet + 1.0/3) * (cosThet + 1.0/3); } } this->order_norm_->UpdateVoxel(voxel, 1.0 - (3.0/8.0) * sum); } } } this->gist_nonbond_.Stop(); } /** * Frees all the Memory on the GPU. 
*/ void Action_GIST::freeGPUMemory(void) { freeCuda(this->NBindex_c_); freeCuda(this->molecule_c_); freeCuda(this->paramsLJ_c_); freeCuda(this->max_c_); freeCuda(this->min_c_); freeCuda(this->result_w_c_); freeCuda(this->result_s_c_); freeCuda(this->result_O_c_); freeCuda(this->result_N_c_); this->NBindex_c_ = NULL; this->molecule_c_ = NULL; this->paramsLJ_c_ = NULL; this->max_c_ = NULL; this->min_c_ = NULL; this->result_w_c_= NULL; this->result_s_c_= NULL; this->result_O_c_ = NULL; this->result_N_c_ = NULL; } /** * Copies data from the CPU to the GPU. * @throws: CudaException */ void Action_GIST::copyToGPU(void) { try { copyMemoryToDevice(&(this->NBIndex_[0]), this->NBindex_c_, this->NBIndex_.size() * sizeof(int)); copyMemoryToDeviceStruct(&(this->charges_[0]), &(this->atomTypes_[0]), this->solvent_, &(this->molecule_[0]), this->numberAtoms_, &(this->molecule_c_), &(this->lJParamsA_[0]), &(this->lJParamsB_[0]), this->lJParamsA_.size(), &(this->paramsLJ_c_)); } catch (CudaException &ce) { this->freeGPUMemory(); mprinterr("Error: Could not copy data to the device.\n"); throw ce; } catch (std::exception &e) { this->freeGPUMemory(); throw e; } } #endif<|fim▁end|>
vectors.push_back( MinImagedVec(vec, cent, frm.Frm().BoxCrd().UnitCell(), frm.Frm().BoxCrd().FracCell())); vec = Vec3(frm.Frm().xAddress() + (order_indices.at(headAtomIndex).at(2) * 3)); vectors.push_back( MinImagedVec(vec, cent, frm.Frm().BoxCrd().UnitCell(), frm.Frm().BoxCrd().FracCell())); vec = Vec3(frm.Frm().xAddress() + (order_indices.at(headAtomIndex).at(3) * 3));
<|file_name|>Pymolecule.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Copyright (c) 2016-2017, Zhijiang Yao, Jie Dong and Dongsheng Cao # All rights reserved. # This file is part of the PyBioMed. # The contents are covered by the terms of the BSD license # which is included in the file license.txt, found at the root # of the PyBioMed source tree. """ ############################################################################## A class used for computing different types of drug descriptors! You can freely use and distribute it. If you have any problem, you could contact with us timely. Authors: Dongsheng Cao and Yizeng Liang. Date: 2012.09.24 Email: [email protected] ############################################################################## """ # Core Library modules import string # Third party modules from rdkit import Chem # First party modules from PyBioMed.PyGetMol import Getmol as getmol from PyBioMed.PyMolecule import ( AtomTypes, basak, bcut, cats2d, charge, connectivity, constitution, estate, fingerprint, geary, ghosecrippen, kappa, moe, molproperty, moran, moreaubroto, topology, ) Version = 1.0 FingerprintName = [ "FP2", "FP3", "FP4", "topological", "Estate", "atompairs", "torsions", "morgan", "ECFP2", "ECFP4", "ECFP6", "MACCS", "FCFP2", "FCFP4", "FCFP6", "Pharm2D2point", "Pharm2D3point", "GhoseCrippen", "PubChem", ] ############################################################################## class PyMolecule: """ ################################################################# A PyDrug class used for computing drug descriptors. ################################################################# """ def __init__(self): """ ################################################################# constructor of PyMolecule. ################################################################# """ pass def ReadMolFromMOL(self, filename=""): """ ################################################################# Read a molecule by SDF or MOL file format. 
Usage: res=ReadMolFromFile(filename) Input: filename is a file name. Output: res is a molecule object. ################################################################# """ self.mol = Chem.MolFromMolMOL(filename) return self.mol def ReadMolFromSmile(self, smi=""): """ ################################################################# Read a molecule by SMILES string. Usage: res=ReadMolFromSmile(smi) Input: smi is a SMILES string. Output: res is a molecule object. ################################################################# """ self.mol = Chem.MolFromSmiles(smi.strip()) return self.mol def ReadMolFromInchi(self, inchi=""): """ ################################################################# Read a molecule by Inchi string. Usage: res=ReadMolFromInchi(inchi) Input: inchi is a InChi string. Output: res is a molecule object. ################################################################# """ from openbabel import pybel temp = pybel.readstring("inchi", inchi) smi = temp.write("smi") self.mol = Chem.MolFromSmiles(smi.strip()) return self.mol def ReadMolFromMol(self, filename=""): """ ################################################################# Read a molecule with mol file format. Usage: res=ReadMolFromMol(filename) Input: filename is a file name. Output: res is a molecule object. ################################################################# """ self.mol = Chem.MolFromMolFile(filename) return self.mol def GetMolFromNCBI(self, ID=""): """ ################################################################# Get a molecule by NCBI id (e.g., 2244). Usage: res=GetMolFromNCBI(ID) Input: ID is a compound ID (CID) in NCBI. Output: res is a SMILES string. ################################################################# """ res = getmol.GetMolFromNCBI(cid=ID) return res def GetMolFromEBI(self, ID=""): """ ################################################################# Get a molecule by EBI id. Usage: res=GetMolFromEBI(ID) Input: ID is a compound identifier in EBI. 
Output: res is a SMILES string. ################################################################# """ res = getmol.GetMolFromEBI(ID) return res def GetMolFromCAS(self, ID=""): """ ################################################################# Get a molecule by kegg id (e.g., 50-29-3). Usage: res=GetMolFromCAS(ID) Input: ID is a CAS identifier. Output: res is a SMILES string. ################################################################# """ res = getmol.GetMolFromCAS(casid=ID) return res def GetMolFromKegg(self, ID=""): """ ################################################################# Get a molecule by kegg id (e.g., D02176). Usage: res=GetMolFromKegg(ID) Input: ID is a compound identifier in KEGG. Output: res is a SMILES string. ################################################################# """ res = getmol.GetMolFromKegg(kid=ID) return res def GetMolFromDrugbank(self, ID=""): """ ################################################################# Get a molecule by drugbank id (e.g.,DB00133). Usage: res=GetMolFromDrugbank(ID) Input: ID is a compound identifier in Drugbank. Output: res is a SMILES string. ################################################################# """ res = getmol.GetMolFromDrugbank(dbid=ID) return res def GetKappa(self): """ ################################################################# Calculate all kappa descriptors (7). Usage: res=GetKappa() res is a dict form. ################################################################# """ res = kappa.GetKappa(self.mol) return res def GetCharge(self): """ ################################################################# Calculate all charge descriptors (25). Usage: res=GetCharge() res is a dict form. ################################################################# """ res = charge.GetCharge(self.mol) return res def GetConnectivity(self): """ ################################################################# Calculate all conenctivity descriptors (44). 
Usage: res=GetConnectivity() res is a dict form. ################################################################# """ res = connectivity.GetConnectivity(self.mol) return res def GetConstitution(self): """ ################################################################# Calculate all constitutional descriptors (30). Usage: res=GetConstitution() res is a dict form. ################################################################# """ res = constitution.GetConstitutional(self.mol) return res def GetBasak(self): """ ################################################################# Calculate all basak's information content descriptors (21). Usage: res=GetBasak() res is a dict form. ################################################################# """ res = basak.Getbasak(self.mol) return res def GetBurden(self): """ ################################################################# Calculate all Burden descriptors (64). Usage: res=GetBurden() res is a dict form. ################################################################# """ res = bcut.GetBurden(self.mol) return res def GetEstate(self): """ ################################################################# Calculate estate descriptors (316). Usage: res=GetEstate() res is a dict form. ################################################################# """ res = estate._GetEstate(self.mol) return res def GetGeary(self): """ ################################################################# Calculate all Geary autocorrelation descriptors (32). Usage: res=GetGeary() res is a dict form. ################################################################# """ res = geary.GetGearyAuto(self.mol)<|fim▁hole|> ################################################################# Calculate all MOE-type descriptors (60). Usage: res=GetMOE() res is a dict form. 
################################################################# """ res = moe.GetMOE(self.mol) return res def GetMolProperty(self): """ ################################################################# Calculate all molecular properties (6). Usage: res=GetMolProperty() res is a dict form. ################################################################# """ res = molproperty.GetMolecularProperty(self.mol) return res def GetMoran(self): """ ################################################################# Calculate all Moran autocorrealtion descriptors (32). Usage: res=GetMoran() res is a dict form. ################################################################# """ res = moran.GetMoranAuto(self.mol) return res def GetMoreauBroto(self): """ ################################################################# Calculate all Moreau-Broto autocorrelation descriptors(32). Usage: res=GetMoreauBroto() res is a dict form. ################################################################# """ res = moreaubroto.GetMoreauBrotoAuto(self.mol) return res def GetTopology(self): """ ################################################################# Calculate all topological descriptors (25). Usage: res=GetTopology() res is a dict form. ################################################################# """ res = topology.GetTopology(self.mol) return res def GetFingerprint(self, FPName="topological", **kwargs): """ ################################################################# Calculate all fingerprint descriptors. see the fingerprint type in FingerprintName Usage: res=GetFingerprint(FPName='topological') res is a tuple or list or dict. ################################################################# """ if FPName in FingerprintName: temp = fingerprint._FingerprintFuncs[FPName] res = temp(self.mol, **kwargs) return res else: # res=fingerprint.CalculateDaylightFingerprint(self.mol) res = "This is not a valid fingerprint name!!" 
return res def GetCATS2D(self): """ ################################################################# The main program for calculating the CATS descriptors. CATS: chemically advanced template serach ----> CATS_DA0 .... Usage: result=CATS2D(mol,PathLength = 10,scale = 1) Input: mol is a molecule object. PathLength is the max topological distance between two atoms. scale is the normalization method (descriptor scaling method) scale = 1 indicates that no normalization. That is to say: the values of the vector represent raw counts ("counts"). scale = 2 indicates that division by the number of non-hydrogen atoms (heavy atoms) in the molecule. scale = 3 indicates that division of each of 15 possible PPP pairs by the added occurrences of the two respective PPPs. Output: result is a dict format with the definitions of each descritor. ################################################################# """ res = cats2d.CATS2D(self.mol, PathLength=10, scale=3) return res # def GetGhoseCrippenFingerprint(self, FPName='GhoseCrippenFingerprint'): # """ # ################################################################# # Ghose-Crippen substructures based on the definitions of # # SMARTS from Ghose-Crippen's paper. (110 dimension) # # The result is a dict format. # ################################################################# # """ # res = ghosecrippen.GhoseCrippenFingerprint(self.mol) # # return res # # # def GetGhoseCrippen(self, FPName='GetGhoseCrippen'): # """ # ################################################################# # Ghose-Crippen counts based on the definitions of # # SMARTS from Ghose-Crippen's paper. (110 dimension) # # The result is a dict format. # ################################################################# # """ # res = ghosecrippen.GhoseCrippenFingerprint(self.mol, count = True) # # return res def GetAllDescriptor(self): """ ################################################################# Calculate all descriptors (608). 
Usage: res=GetAllDescriptor() res is a dict form. ################################################################# """ res = {} res.update(self.GetKappa()) res.update(self.GetCharge()) res.update(self.GetConnectivity()) res.update(self.GetConstitution()) res.update(self.GetEstate()) res.update(self.GetGeary()) res.update(self.GetMOE()) res.update(self.GetMoran()) res.update(self.GetMoreauBroto()) res.update(self.GetTopology()) res.update(self.GetMolProperty()) res.update(self.GetBasak()) res.update(self.GetBurden()) res.update(self.GetCATS2D()) return res ############################################################################## if __name__ == "__main__": drugclass = PyMolecule() drugclass.ReadMolFromSmile("CCC1(c2ccccc2)C(=O)N(C)C(=N1)O") print(drugclass.GetCharge()) print(drugclass.GetKappa()) print(len(drugclass.GetKappa())) print(drugclass.GetTopology()) print(len(drugclass.GetTopology())) print(drugclass.GetMoreauBroto()) res = drugclass.GetAllDescriptor() print(len(res)) # print drugclass.GetMolFromDrugbank(ID="DB00133") # res=drugclass.GetFingerprint(FPName='Estate') print(res) print(len(res)) print(drugclass.GetConnectivity()) DrugBankID = "DB01014" drugclass = PyMolecule() smi = drugclass.GetMolFromDrugbank(DrugBankID) drugclass.ReadMolFromSmile(smi) print(drugclass.GetKappa()) print(drugclass.GetCATS2D()) print(drugclass.GetFingerprint(FPName="Estate")) # print drugclass.GetGhoseCrippen() # print drugclass.GetGhoseCrippenFingerprint() print(len(drugclass.GetBasak())) print(len(drugclass.GetBurden()))<|fim▁end|>
return res def GetMOE(self): """
<|file_name|>shorten.js<|end_file_name|><|fim▁begin|>var counter = require('mongodb-counter'); var s3redirect = require('./s3redirect'); module.exports = shortener; module.exports.redis = require('./redisStore'); module.exports.mongodb = require('./mongoStore'); module.exports.s3 = s3redirect; module.exports.counter = counter; function shortener(options) { var store = options.store || s3redirect(options); var uniqueIdGenerator = options.uniqueIdGenerator || (options.counters || counter.createCounters( _({}).assign(options).assign({collectionName: options.countersCollectionName}).value() ))(options.uniqueIdCounterName || 'shortener'); <|fim▁hole|> shorten: shorten, shortenUnique: shortenUnique, unshorten: unshorten }; function shorten(longUrl, done) { getUniqueId(function (err, uniqueId) { if (err) return done(err); store.set(uniqueId, longUrl, finish); function finish(err, path) { return done(null, options.shortUrlPrefix + uniqueId); } }); } function shortenUnique(longUrl, done) { getUniqueId(function (err, uniqueId) { if (err) return done(err); store.getOrSet(uniqueId, longUrl, finish); function finish(err, path) { return done(null, options.shortUrlPrefix + uniqueId); } }); } function unshorten(shortUrl, done) { store.get(shortUrl.replace(options.shortUrlPrefix, ''), done); } function getUniqueId(done) { if (typeof(uniqueIdGenerator) == 'function') return uniqueIdGenerator(complete); return uniqueIdGenerator.getUniqueId(complete); function complete(err, value) { if (err) return done(err); var prefix = config.uniqueIdPrefix || ''; if (typeof(value) == 'number') return done(null, prefix + value.toString(36)); return done(null, prefix + value.toString()); } } }<|fim▁end|>
return {
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use std::env; use std::error::Error; use std::ffi::OsStr; use std::fmt; use std::fs; use std::io::prelude::*; use std::os; use std::path::{Path, PathBuf}; use std::process::Output; use std::str; use std::usize; use url::Url; use hamcrest as ham; use cargo::util::ProcessBuilder; use cargo::util::ProcessError; use cargo::util::process; use support::paths::CargoPathExt; pub mod paths; pub mod git; pub mod registry; /* * * ===== Builders ===== * */ #[derive(PartialEq,Clone)] struct FileBuilder { path: PathBuf, body: String } impl FileBuilder { pub fn new(path: PathBuf, body: &str) -> FileBuilder { FileBuilder { path: path, body: body.to_string() } } fn mk(&self) -> Result<(), String> { try!(mkdir_recursive(&self.dirname())); let mut file = try!( fs::File::create(&self.path) .with_err_msg(format!("Could not create file; path={}", self.path.display()))); file.write_all(self.body.as_bytes()) .with_err_msg(format!("Could not write to file; path={}", self.path.display())) } fn dirname(&self) -> &Path { self.path.parent().unwrap() } } #[derive(PartialEq,Clone)] struct SymlinkBuilder { dst: PathBuf, src: PathBuf, } impl SymlinkBuilder { pub fn new(dst: PathBuf, src: PathBuf) -> SymlinkBuilder {<|fim▁hole|> fn mk(&self) -> Result<(), String> { try!(mkdir_recursive(&self.dirname())); os::unix::fs::symlink(&self.dst, &self.src) .with_err_msg(format!("Could not create symlink; dst={} src={}", self.dst.display(), self.src.display())) } #[cfg(windows)] fn mk(&self) -> Result<(), String> { try!(mkdir_recursive(&self.dirname())); os::windows::fs::symlink_file(&self.dst, &self.src) .with_err_msg(format!("Could not create symlink; dst={} src={}", self.dst.display(), self.src.display())) } fn dirname(&self) -> &Path { self.src.parent().unwrap() } } #[derive(PartialEq,Clone)] pub struct ProjectBuilder { name: String, root: PathBuf, files: Vec<FileBuilder>, symlinks: Vec<SymlinkBuilder> } impl ProjectBuilder { pub fn new(name: &str, root: 
PathBuf) -> ProjectBuilder { ProjectBuilder { name: name.to_string(), root: root, files: vec![], symlinks: vec![] } } pub fn root(&self) -> PathBuf { self.root.clone() } pub fn url(&self) -> Url { path2url(self.root()) } pub fn bin(&self, b: &str) -> PathBuf { self.build_dir().join("debug").join(&format!("{}{}", b, env::consts::EXE_SUFFIX)) } pub fn release_bin(&self, b: &str) -> PathBuf { self.build_dir().join("release").join(&format!("{}{}", b, env::consts::EXE_SUFFIX)) } pub fn target_bin(&self, target: &str, b: &str) -> PathBuf { self.build_dir().join(target).join("debug") .join(&format!("{}{}", b, env::consts::EXE_SUFFIX)) } pub fn build_dir(&self) -> PathBuf { self.root.join("target") } pub fn process<T: AsRef<OsStr>>(&self, program: T) -> ProcessBuilder { let mut p = process(program); p.cwd(&self.root()) .env("HOME", &paths::home()) .env_remove("CARGO_HOME") // make sure we don't pick up an outer one .env_remove("CARGO_TARGET_DIR") // we assume 'target' .env_remove("MSYSTEM"); // assume cmd.exe everywhere on windows return p; } pub fn cargo(&self, cmd: &str) -> ProcessBuilder { let mut p = self.process(&cargo_dir().join("cargo")); p.arg(cmd); return p; } pub fn cargo_process(&self, cmd: &str) -> ProcessBuilder { self.build(); self.cargo(cmd) } pub fn file<B: AsRef<Path>>(mut self, path: B, body: &str) -> ProjectBuilder { self.files.push(FileBuilder::new(self.root.join(path), body)); self } pub fn symlink<T: AsRef<Path>>(mut self, dst: T, src: T) -> ProjectBuilder { self.symlinks.push(SymlinkBuilder::new(self.root.join(dst), self.root.join(src))); self } // TODO: return something different than a ProjectBuilder pub fn build(&self) -> &ProjectBuilder { match self.build_with_result() { Err(e) => panic!(e), _ => return self } } pub fn build_with_result(&self) -> Result<(), String> { // First, clean the directory if it already exists try!(self.rm_root()); // Create the empty directory try!(mkdir_recursive(&self.root)); for file in self.files.iter() { 
try!(file.mk()); } for symlink in self.symlinks.iter() { try!(symlink.mk()); } Ok(()) } fn rm_root(&self) -> Result<(), String> { if self.root.c_exists() { rmdir_recursive(&self.root) } else { Ok(()) } } } // Generates a project layout pub fn project(name: &str) -> ProjectBuilder { ProjectBuilder::new(name, paths::root().join(name)) } // === Helpers === pub fn mkdir_recursive(path: &Path) -> Result<(), String> { fs::create_dir_all(path) .with_err_msg(format!("could not create directory; path={}", path.display())) } pub fn rmdir_recursive(path: &Path) -> Result<(), String> { path.rm_rf() .with_err_msg(format!("could not rm directory; path={}", path.display())) } pub fn main_file(println: &str, deps: &[&str]) -> String { let mut buf = String::new(); for dep in deps.iter() { buf.push_str(&format!("extern crate {};\n", dep)); } buf.push_str("fn main() { println!("); buf.push_str(&println); buf.push_str("); }\n"); buf.to_string() } trait ErrMsg<T> { fn with_err_msg(self, val: String) -> Result<T, String>; } impl<T, E: fmt::Display> ErrMsg<T> for Result<T, E> { fn with_err_msg(self, val: String) -> Result<T, String> { match self { Ok(val) => Ok(val), Err(err) => Err(format!("{}; original={}", val, err)) } } } // Path to cargo executables pub fn cargo_dir() -> PathBuf { env::var_os("CARGO_BIN_PATH").map(PathBuf::from).or_else(|| { env::current_exe().ok().as_ref().and_then(|s| s.parent()) .map(|s| s.to_path_buf()) }).unwrap_or_else(|| { panic!("CARGO_BIN_PATH wasn't set. Cannot continue running test") }) } /// Returns an absolute path in the filesystem that `path` points to. The /// returned path does not contain any symlinks in its hierarchy. 
/* * * ===== Matchers ===== * */ #[derive(Clone)] pub struct Execs { expect_stdout: Option<String>, expect_stdin: Option<String>, expect_stderr: Option<String>, expect_exit_code: Option<i32>, expect_stdout_contains: Vec<String> } impl Execs { pub fn with_stdout<S: ToString>(mut self, expected: S) -> Execs { self.expect_stdout = Some(expected.to_string()); self } pub fn with_stderr<S: ToString>(mut self, expected: S) -> Execs { self.expect_stderr = Some(expected.to_string()); self } pub fn with_status(mut self, expected: i32) -> Execs { self.expect_exit_code = Some(expected); self } pub fn with_stdout_contains<S: ToString>(mut self, expected: S) -> Execs { self.expect_stdout_contains.push(expected.to_string()); self } fn match_output(&self, actual: &Output) -> ham::MatchResult { self.match_status(actual) .and(self.match_stdout(actual)) .and(self.match_stderr(actual)) } fn match_status(&self, actual: &Output) -> ham::MatchResult { match self.expect_exit_code { None => ham::success(), Some(code) => { ham::expect( actual.status.code() == Some(code), format!("exited with {}\n--- stdout\n{}\n--- stderr\n{}", actual.status, String::from_utf8_lossy(&actual.stdout), String::from_utf8_lossy(&actual.stderr))) } } } fn match_stdout(&self, actual: &Output) -> ham::MatchResult { try!(self.match_std(self.expect_stdout.as_ref(), &actual.stdout, "stdout", &actual.stderr, false)); for expect in self.expect_stdout_contains.iter() { try!(self.match_std(Some(expect), &actual.stdout, "stdout", &actual.stderr, true)); } Ok(()) } fn match_stderr(&self, actual: &Output) -> ham::MatchResult { self.match_std(self.expect_stderr.as_ref(), &actual.stderr, "stderr", &actual.stdout, false) } #[allow(deprecated)] // connect => join in 1.3 fn match_std(&self, expected: Option<&String>, actual: &[u8], description: &str, extra: &[u8], partial: bool) -> ham::MatchResult { let out = match expected { Some(out) => out, None => return ham::success(), }; let actual = match str::from_utf8(actual) { Err(..) 
=> return Err(format!("{} was not utf8 encoded", description)), Ok(actual) => actual, }; // Let's not deal with \r\n vs \n on windows... let actual = actual.replace("\r", ""); let actual = actual.replace("\t", "<tab>"); let mut a = actual.lines(); let e = out.lines(); let diffs = if partial { let mut min = self.diff_lines(a.clone(), e.clone(), partial); while let Some(..) = a.next() { let a = self.diff_lines(a.clone(), e.clone(), partial); if a.len() < min.len() { min = a; } } min } else { self.diff_lines(a, e, partial) }; ham::expect(diffs.len() == 0, format!("differences:\n\ {}\n\n\ other output:\n\ `{}`", diffs.connect("\n"), String::from_utf8_lossy(extra))) } fn diff_lines<'a>(&self, actual: str::Lines<'a>, expected: str::Lines<'a>, partial: bool) -> Vec<String> { let actual = actual.take(if partial { expected.clone().count() } else { usize::MAX }); zip_all(actual, expected).enumerate().filter_map(|(i, (a,e))| { match (a, e) { (Some(a), Some(e)) => { if lines_match(&e, &a) { None } else { Some(format!("{:3} - |{}|\n + |{}|\n", i, e, a)) } }, (Some(a), None) => { Some(format!("{:3} -\n + |{}|\n", i, a)) }, (None, Some(e)) => { Some(format!("{:3} - |{}|\n +\n", i, e)) }, (None, None) => panic!("Cannot get here") } }).collect() } } fn lines_match(expected: &str, mut actual: &str) -> bool { for part in expected.split("[..]") { match actual.find(part) { Some(i) => actual = &actual[i + part.len()..], None => { return false } } } actual.len() == 0 || expected.ends_with("[..]") } struct ZipAll<I1: Iterator, I2: Iterator> { first: I1, second: I2, } impl<T, I1: Iterator<Item=T>, I2: Iterator<Item=T>> Iterator for ZipAll<I1, I2> { type Item = (Option<T>, Option<T>); fn next(&mut self) -> Option<(Option<T>, Option<T>)> { let first = self.first.next(); let second = self.second.next(); match (first, second) { (None, None) => None, (a, b) => Some((a, b)) } } } fn zip_all<T, I1: Iterator<Item=T>, I2: Iterator<Item=T>>(a: I1, b: I2) -> ZipAll<I1, I2> { ZipAll { first: a, 
second: b, } } impl fmt::Display for Execs { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "execs") } } impl ham::Matcher<ProcessBuilder> for Execs { fn matches(&self, mut process: ProcessBuilder) -> ham::MatchResult { self.matches(&mut process) } } impl<'a> ham::Matcher<&'a mut ProcessBuilder> for Execs { fn matches(&self, process: &'a mut ProcessBuilder) -> ham::MatchResult { let res = process.exec_with_output(); match res { Ok(out) => self.match_output(&out), Err(ProcessError { output: Some(ref out), .. }) => { self.match_output(out) } Err(e) => { let mut s = format!("could not exec process {}: {}", process, e); match e.cause() { Some(cause) => s.push_str(&format!("\ncaused by: {}", cause.description())), None => {} } Err(s) } } } } pub fn execs() -> Execs { Execs { expect_stdout: None, expect_stderr: None, expect_stdin: None, expect_exit_code: None, expect_stdout_contains: vec![] } } #[derive(Clone)] pub struct ShellWrites { expected: String } impl fmt::Display for ShellWrites { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "`{}` written to the shell", self.expected) } } impl<'a> ham::Matcher<&'a [u8]> for ShellWrites { fn matches(&self, actual: &[u8]) -> ham::MatchResult { let actual = String::from_utf8_lossy(actual); let actual = actual.to_string(); ham::expect(actual == self.expected, actual) } } pub fn shell_writes<T: fmt::Display>(string: T) -> ShellWrites { ShellWrites { expected: string.to_string() } } pub trait Tap { fn tap<F: FnOnce(&mut Self)>(mut self, callback: F) -> Self; } impl<T> Tap for T { fn tap<F: FnOnce(&mut Self)>(mut self, callback: F) -> T { callback(&mut self); self } } pub fn basic_bin_manifest(name: &str) -> String { format!(r#" [package] name = "{}" version = "0.5.0" authors = ["[email protected]"] [[bin]] name = "{}" "#, name, name) } pub fn basic_lib_manifest(name: &str) -> String { format!(r#" [package] name = "{}" version = "0.5.0" authors = ["[email protected]"] [lib] name = "{}" "#, name, 
name) } pub fn path2url(p: PathBuf) -> Url { Url::from_file_path(&*p).ok().unwrap() } pub static RUNNING: &'static str = " Running"; pub static COMPILING: &'static str = " Compiling"; pub static DOCUMENTING: &'static str = " Documenting"; pub static FRESH: &'static str = " Fresh"; pub static UPDATING: &'static str = " Updating"; pub static ADDING: &'static str = " Adding"; pub static REMOVING: &'static str = " Removing"; pub static DOCTEST: &'static str = " Doc-tests"; pub static PACKAGING: &'static str = " Packaging"; pub static DOWNLOADING: &'static str = " Downloading"; pub static UPLOADING: &'static str = " Uploading"; pub static VERIFYING: &'static str = " Verifying"; pub static ARCHIVING: &'static str = " Archiving"; pub static INSTALLING: &'static str = " Installing";<|fim▁end|>
SymlinkBuilder { dst: dst, src: src } } #[cfg(unix)]
<|file_name|>api.py<|end_file_name|><|fim▁begin|>from avatar.templatetags.avatar_tags import avatar_url from django.conf import settings from django.contrib.auth.models import User from django.core.urlresolvers import reverse from tastypie import fields from tastypie.resources import ModelResource from accounts.models import UserProfile from main.api.authentication import UberAuthentication from main.api.serializers import UberSerializer class UserResource(ModelResource): #profile = fields.ForeignKey('accounts.api.UserProfileResource', 'profile', full=True) class Meta: queryset = User.objects.all() authentication = UberAuthentication() #authorization = CourseAuthorization() resource_name = 'users'<|fim▁hole|> allowed_methods = ['get'] include_absolute_url = True serializer = UberSerializer() def dehydrate(self, bundle): bundle.data['absolute_url'] = reverse('account_user_profile_with_username', kwargs={'username': bundle.obj.username}) bundle.data['best_name'] = bundle.obj.profile.get_best_name() bundle.data['tiny_thumbnail'] = avatar_url(bundle.obj, size=settings.AVATAR_SIZE_IN_ENROLLMENTS_GRID) return bundle class UserProfileResource(ModelResource): class Meta: queryset = UserProfile.objects.all() authentication = UberAuthentication() resource_name = 'profiles'<|fim▁end|>
fields = ['username', 'first_name', 'last_name', 'last_login', 'profile']
<|file_name|>management.js<|end_file_name|><|fim▁begin|>$('#new-orders-button').click(function(){ $('#orders-pane').find('div').remove(); $.ajax({ type: 'POST', url: "http://localhost/3k/orders/app/getNewOrders", //data: {activitiesArray : pass_order}, dataType: 'json' }).done(function(response) { $.each(response, function(){ $.each(this, function(){ $('#orders-title').text(this.status+" Orders"); $('#orders-pane').append('<div class="row"> <div class="col-sm-2">'+this.datecreated+'</div>'+'<div class="col-sm-8">'+this.customer_order+'</div>'+'<div class="col-sm-2"><i class="fa fa-check" data-id="'+this.id+'"></i><i class="fa fa-remove" data-id="'+this.id+'"></i></div></div>'); //console.log(this.id); }); }); }); }); $('#old-orders-button').click(function(){ $('#orders-pane').find('div').remove(); $.ajax({ type: 'POST', url: "http://localhost/3k/orders/app/getOldOrders", //data: {activitiesArray : pass_order}, dataType: 'json' }).done(function(response) { $.each(response, function(){ $.each(this, function(){ $('#orders-title').text("Fulfilled Orders"); $('#orders-pane').append('<div class="row"> <div class="col-sm-2">'+this.datecreated+'</div>'+'<div class="col-sm-8">'+this.customer_order+'</div>'+'<div class="col-sm-2"><i class="fa fa-check" data-id="'+this.id+'"></i><i class="fa fa-remove" data-id="'+this.id+'"></i><i class="fa fa-undo" data-id="'+this.id+'"></i></div></div>'); //console.log(this.id); }); }); }); }); $('#orders-pane').on("click", ".fa-check", function(){ //$(this).closest('.row').remove(); var order_line = $(this); <|fim▁hole|> //$(this).closest('.row').remove(); $.ajax({ type: 'POST', url: "http://localhost/3k/orders/app/fulfill/"+id, //data: {activitiesArray : pass_order}, dataType: 'json' }).done(function(response) { console.log(response); if (response == 'success'){ order_line.closest('.row').remove(); console.log('Order Fulfilled'); }else{ console.log('Error Processing Order'); }; }); }) $('#orders-pane').on("click", ".fa-remove", 
function(){ $(this).closest('.row').remove(); }) $('#orders-pane').on("click", ".fa-undo", function(){ $(this).closest('.row').remove(); var order_line = $(this); var id = $(this).attr('data-id'); //console.log(id); //$(this).closest('.row').remove(); $.ajax({ type: 'POST', url: "http://localhost/3k/orders/app/undo/"+id, //data: {activitiesArray : pass_order}, dataType: 'json' }).done(function(response) { console.log(response); if (response == 'success'){ order_line.closest('.row').remove(); console.log('Order Reversed'); }else{ console.log('Error Reversing Order'); }; }); })<|fim▁end|>
var id = $(this).attr('data-id'); //console.log(id);
<|file_name|>mcs_player.rs<|end_file_name|><|fim▁begin|>extern crate time; extern crate rand; use {Player, MoveResult}; use game_manager::{Game, State}; use gdl::{Move, Score, Role};<|fim▁hole|>/// A Monte Carlo search player. This player should only be used for 2 player, constant sum, /// turn based games. pub struct McsPlayer { depth_limit: u32, best_move: Option<Move>, charge_count: u32, } impl McsPlayer { /// Returns an McsPlayer that begins the random terminal state searches at depth `depth` pub fn new(depth: u32, charge_count: u32) -> McsPlayer { McsPlayer { depth_limit: depth, best_move: None, charge_count: charge_count } } fn best_move(&mut self, game: &Game) -> MoveResult<Move> { let role = game.role(); let cur_state = game.current_state(); let mut moves = game.legal_moves(cur_state, role); assert!(!moves.is_empty(), "No legal moves"); if moves.len() == 1 { return Ok(moves.swap_remove(0)); } let mut res = moves[0].clone(); self.best_move = Some(res.clone()); let mut max = 0; self.best_move = Some(res.clone()); let opponent = opponent(game, role); for m in moves { let score = match self.min_score(game, cur_state, opponent, m.clone(), 0, 100, 0) { Ok(score) => score, Err(m) => return Err(m) }; if score == 100 { return Ok(m); } else if score > max { max = score; self.best_move = Some(m.clone()); res = m } check_time_result!(self, game); } Ok(res) } fn max_score(&mut self, game: &Game, state: &State, role: &Role, alpha: u8, beta: u8, depth: u32) -> MoveResult<Score> { if depth >= self.depth_limit { return self.monte_carlo(role, game, state); } if game.is_terminal(state) { return Ok(game.goal(state, game.role())); } let moves = game.legal_moves(state, role); assert!(!moves.is_empty(), "No legal moves"); let opponent = opponent(game, role); let mut alpha = alpha; for m in moves { let res = match self.min_score(game, state, &opponent, m, alpha, beta, depth + 1) { Ok(score) => score, e @ Err(_) => return e }; alpha = max(res, alpha); if alpha >= beta { return 
Ok(beta); } check_time_result!(self, game); } Ok(alpha) } fn min_score(&mut self, game: &Game, state: &State, role: &Role, last_move: Move, alpha: u8, beta: u8, depth: u32) -> MoveResult<Score> { let moves = game.legal_moves(state, role); assert!(moves.len() >= 1, "No legal moves"); let mut beta = beta; for m in moves { let move_vec = if game.roles()[0] == *role { vec![m, last_move.clone()] } else { vec![last_move.clone(), m] }; let s = game.next_state(state, &*move_vec); let opponent = opponent(game, role); let res = match self.max_score(game, &s, &opponent, alpha, beta, depth) { Ok(score) => score, e @ Err(_) => return e }; beta = min(res, beta); if beta <= alpha { return Ok(alpha); } check_time_result!(self, game); } Ok(beta) } fn monte_carlo(&mut self, role: &Role, game: &Game, state: &State) -> MoveResult<Score> { let mut total: u32 = 0; for _ in 0..self.charge_count { match self.depth_charge(role, game, state) { Ok(res) => total += res as u32, Err(e) => return Err(e) } } Ok((total / self.charge_count) as u8) } fn depth_charge(&mut self, role: &Role, game: &Game, state: &State) -> MoveResult<Score> { let mut new_state = state.clone(); let mut moves = Vec::with_capacity(game.roles().len()); while !game.is_terminal(&new_state) { moves.clear(); for r in game.roles().into_iter() { let mut legals = game.legal_moves(&new_state, r); let r = rand::random::<usize>() % legals.len(); moves.push(legals.swap_remove(r)); } new_state = game.next_state(&new_state, &moves); check_time_result!(self, game); } return Ok(game.goal(state, role)); } } fn opponent<'a>(game: &'a Game, role: &'a Role) -> &'a Role { let roles = game.roles(); assert!(roles.len() == 2, "Must be a two player game"); let res: Vec<_> = roles.into_iter().filter(|r| *r != role).collect(); assert_eq!(res.len(), 1); res[0] } impl Player for McsPlayer { fn name(&self) -> String { "McsPlayer".to_string() } fn select_move(&mut self, game: &Game) -> Move { let m = match self.best_move(&game) { Ok(m) => m, Err(m) => 
{ warn!("Out of time"); m } }; info!("Selecting move {}", m.to_string()); m } fn out_of_time(&mut self, _: &Game) -> Move { self.best_move.take().unwrap() } }<|fim▁end|>
use std::cmp::{max, min};
<|file_name|>sky.rs<|end_file_name|><|fim▁begin|>use std::f32::consts::PI; use cgmath; use cgmath::prelude::*; use rand::Rng; use three::{self, Object}; use COLOR_WHITE; pub struct Sky { pub group: three::Group, } impl Sky { fn make_cloud<R: Rng>( rng: &mut R, factory: &mut three::Factory, ) -> three::Group { let group = factory.group(); let geo = three::Geometry::cuboid(20.0, 20.0, 20.0); let material = three::material::Lambert { color: COLOR_WHITE, flat: true, }; let template = factory.mesh(geo, material.clone()); for i in 0i32 .. rng.gen_range(3, 6) { let m = factory.mesh_instance(&template); let rot = cgmath::Quaternion::<f32>::new(rng.gen(), rng.gen(), rng.gen(), rng.gen()); let q = rot.normalize(); m.set_transform(<|fim▁hole|> rng.gen::<f32>() * 10.0, ], q, rng.gen_range(0.1, 1.0), ); group.add(&m); } group } pub fn new<R: Rng>( rng: &mut R, factory: &mut three::Factory, ) -> Self { let group = factory.group(); let num = 20i32; let step_angle = PI * 2.0 / num as f32; for i in 0 .. num { let cloud = Self::make_cloud(rng, factory); let angle = cgmath::Rad(i as f32 * step_angle); let dist = rng.gen_range(750.0, 950.0); let pos = [ angle.cos() * dist, angle.sin() * dist, rng.gen_range(-800.0, -400.0), ]; let q = cgmath::Quaternion::from_angle_z(angle + cgmath::Rad::turn_div_4()); cloud.set_transform(pos, q, rng.gen_range(1.0, 3.0)); group.add(&cloud); } Sky { group } } }<|fim▁end|>
[ i as f32 * 15.0, rng.gen::<f32>() * 10.0,
<|file_name|>mid-path-type-params.rs<|end_file_name|><|fim▁begin|>struct S<T> { contents: T, } impl<T> S<T> { fn new<U>(x: T, _: U) -> S<T> { S { contents: x, } } } trait Trait<T> { fn new<U>(x: T, y: U) -> Self; } struct S2 { contents: int, } impl Trait<int> for S2 { fn new<U>(x: int, _: U) -> S2 { S2 { contents: x, } }<|fim▁hole|> let _: S2 = Trait::<int>::new::<float>(1, 1.0); }<|fim▁end|>
} fn main() { let _ = S::<int>::new::<float>(1, 1.0);
<|file_name|>AbsAnalyst.py<|end_file_name|><|fim▁begin|>import re<|fim▁hole|> LOGTIME_REGEXP = re.compile("(?P<log_time>\w{4}-\w{2}-\w{2} \w{2}:\w{2}:\w{2})") def __init__(self): raise NotImplemented def isMatch(self, line): raise NotImplemented def doStatistic(self): raise NotImplemented def doAnalyse(self): raise NotImplemented<|fim▁end|>
import Queue class AbsAnalyst(object): """docstring for AbsAnalyst"""
<|file_name|>smartnet2decode.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python """ This program decodes the Motorola SmartNet II trunking protocol from the control channel Tune it to the control channel center freq, and it'll spit out the decoded packets. In what format? Who knows. Based on your AIS decoding software, which is in turn based on the gr-pager code and the gr-air code. """ from gnuradio import gr, gru, blks2, optfir, digital from gnuradio import audio from gnuradio import eng_notation from gnuradio import uhd from fsk_demod import fsk_demod from optparse import OptionParser from gnuradio.eng_option import eng_option from gnuradio import smartnet import time import gnuradio.gr.gr_threading as _threading import csv class top_block_runner(_threading.Thread): def __init__(self, tb): _threading.Thread.__init__(self) self.setDaemon(1) self.tb = tb self.done = False self.start() def run(self): self.tb.run() self.done = True class my_top_block(gr.top_block): def __init__(self, options, queue): gr.top_block.__init__(self) if options.filename is not None: self.fs = gr.file_source(gr.sizeof_gr_complex, options.filename) self.rate = options.rate else: self.u = uhd.usrp_source(options.addr, io_type=uhd.io_type.COMPLEX_FLOAT32, num_channels=1) if options.subdev is not None: self.u.set_subdev_spec(options.subdev, 0) self.u.set_samp_rate(options.rate) self.rate = self.u.get_samp_rate() # Set the antenna if(options.antenna): self.u.set_antenna(options.antenna, 0) self.centerfreq = options.centerfreq print "Tuning to: %fMHz" % (self.centerfreq - options.error) if not(self.tune(options.centerfreq - options.error)): print "Failed to set initial frequency" if options.gain is None: #set to halfway g = self.u.get_gain_range() options.gain = (g.start()+g.stop()) / 2.0 print "Setting gain to %i" % options.gain self.u.set_gain(options.gain) self.u.set_bandwidth(options.bandwidth) print "Samples per second is %i" % self.rate self._syms_per_sec = 3600; 
options.samples_per_second = self.rate options.syms_per_sec = self._syms_per_sec options.gain_mu = 0.01 options.mu=0.5 options.omega_relative_limit = 0.3 options.syms_per_sec = self._syms_per_sec options.offset = options.centerfreq - options.freq print "Control channel offset: %f" % options.offset self.demod = fsk_demod(options) self.start_correlator = gr.correlate_access_code_tag_bb("10101100", 0, "smartnet_preamble") #should mark start of packet self.smartnet_deinterleave = smartnet.deinterleave() self.smartnet_crc = smartnet.crc(queue) if options.filename is None: self.connect(self.u, self.demod) else: self.connect(self.fs, self.demod) self.connect(self.demod, self.start_correlator, self.smartnet_deinterleave, self.smartnet_crc) #hook up the audio patch if options.audio: self.audiorate = 48000 self.audiotaps = gr.firdes.low_pass(1, self.rate, 8000, 2000, gr.firdes.WIN_HANN) self.prefilter_decim = int(self.rate / self.audiorate) #might have to use a rational resampler for audio print "Prefilter decimation: %i" % self.prefilter_decim self.audio_prefilter = gr.freq_xlating_fir_filter_ccf(self.prefilter_decim, #decimation self.audiotaps, #taps 0, #freq offset self.rate) #sampling rate #on a trunked network where you know you will have good signal, a carrier power squelch works well. real FM receviers use a noise squelch, where #the received audio is high-passed above the cutoff and then fed to a reverse squelch. If the power is then BELOW a threshold, open the squelch. 
self.squelch = gr.pwr_squelch_cc(options.squelch, #squelch point alpha = 0.1, #wat ramp = 10, #wat gate = False) self.audiodemod = blks2.fm_demod_cf(self.rate/self.prefilter_decim, #rate 1, #audio decimation 4000, #deviation 3000, #audio passband 4000, #audio stopband 1, #gain 75e-6) #deemphasis constant #the filtering removes FSK data woobling from the subaudible channel (might be able to combine w/lpf above) self.audiofilttaps = gr.firdes.high_pass(1, self.audiorate, 300, 50, gr.firdes.WIN_HANN) self.audiofilt = gr.fir_filter_fff(1, self.audiofilttaps) self.audiogain = gr.multiply_const_ff(options.volume) self.audiosink = audio.sink (self.audiorate, "") # self.audiosink = gr.wavfile_sink("test.wav", 1, self.audiorate, 8) self.mute() if options.filename is None: self.connect(self.u, self.audio_prefilter) else: self.connect(self.fs, self.audio_prefilter) # self.connect(self.audio_prefilter, self.squelch, self.audiodemod, self.audiofilt, self.audiogain, self.audioresamp, self.audiosink) self.connect(self.audio_prefilter, self.squelch, self.audiodemod, self.audiofilt, self.audiogain, self.audiosink) ###########SUBCHANNEL DECODING EXPERIMENT########### #here we set up the low-pass filter for audio subchannel data decoding. gain of 10, decimation of 10. 
# self.subchannel_decimation = 50 # self.subchannel_gain = 10 # self.subchannelfilttaps = gr.firdes.low_pass(self.subchannel_gain, self.audiorate, 200, 40, firdes.WIN_HANN) # self.subchannelfilt = gr.fir_filter_fff(self.subchannel_decimation, self.subchannelfilttaps) # self.subchannel_syms_per_sec = 150 # self.subchannel_samples_per_symbol = (self.audiorate / self.subchannel_decimation) / self.subchannel_syms_per_sec # print "Subchannel samples per symbol: %f" % self.subchannel_samples_per_symbol # self.subchannel_clockrec = gr.clock_recovery_mm_ff(self.subchannel_samples_per_symbol, # 0.25*0.01*0.01, # 0.5, # 0.01, # 0.3) # self.subchannel_slicer = gr.binary_slicer_fb() # self.subchannel_correlator = gr.correlate_access_code_bb("01000",0) # self.subchannel_framer = smartnet.subchannel_framer() # self.subchannel_sink = gr.null_sink(1); #just so it doesn't bitch until we do something with it # self.connect(self.audiodemod, self.subchannelfilt, self.subchannel_clockrec, self.subchannel_slicer, self.subchannel_correlator, self.subchannel_framer, self.subchannel_sink) def tune(self, freq): result = self.u.set_center_freq(freq) return True def tuneoffset(self, target_freq, rffreq): #print "Setting offset; target freq is %f, Center freq is %f" % (target_freq, rffreq) self.audio_prefilter.set_center_freq(rffreq-target_freq*1e6) def setvolume(self, vol): self.audiogain.set_k(vol) def mute(self): self.setvolume(0) def unmute(self, volume): self.setvolume(volume) def getfreq(chanlist, cmd): if chanlist is None: if cmd < 0x2d0: freq = float(cmd * 0.025 + 851.0125) else: freq = None else: if chanlist.get(str(cmd), None) is not None: freq = float(chanlist[str(cmd)]) else: freq = None return freq def parsefreq(s, chanlist): retfreq = None [address, groupflag, command] = s.split(",") command = int(command) address = int(address) & 0xFFF0 groupflag = bool(groupflag) if chanlist is None: if command < 0x2d0: retfreq = getfreq(chanlist, command) else: if chanlist.get(str(command), 
None) is not None: #if it falls into the channel somewhere retfreq = getfreq(chanlist, command) return [retfreq, address] # mask so the squelch opens up on the entire group def parse(s, shorttglist, longtglist, chanlist, elimdupes): #this is the main parser. it takes in commands in the form "address,command" (no quotes of course) and outputs text via print #it is also responsible for using the talkgroup list, if any [address, groupflag, command] = s.split(",") command = int(command) address = int(address) lookupaddr = address & 0xFFF0 groupflag = bool(groupflag) # print "Command is",command if longtglist is not None and longtglist.get(str(lookupaddr), None) is not None: longname = longtglist[str(lookupaddr)] #the mask is to screen out extra status bits, which we can add in later (see the RadioReference.com wiki on SmartNet Type II) else: longname = None if shorttglist is not None and shorttglist.get(str(lookupaddr), None) is not None: shortname = shorttglist[str(lookupaddr)] else: shortname = None retval = None if command == 0x30B and groupflag is True and lastmsg.get("command", None) == 0x308 and address & 0x2000 and address & 0x0800: retval = "SysID: Sys #" + hex(lastmsg["address"]) + " on " + str(getfreq(chanlist, address & 0x3FF)) else: if getfreq(chanlist, command) is not None and dupes.get(command, None) != address: retval = "Freq assignment: " + str(shortname) + " (" + str(address) + ")" + " @ " + str(getfreq(chanlist, command)) + " (" + str(longname) + ")" if elimdupes is True: dupes[command] = address lastlastmsg = lastmsg lastmsg["command"]=command lastmsg["address"]=address return retval def main(): # Create Options Parser: parser = OptionParser (option_class=eng_option, conflict_handler="resolve") expert_grp = parser.add_option_group("Expert") parser.add_option("-f", "--freq", type="eng_float", default=866.9625e6, help="set control channel frequency to MHz [default=%default]", metavar="FREQ") parser.add_option("-c", "--centerfreq", type="eng_float", 
default=867.5e6, help="set center receive frequency to MHz [default=%default]. Set to center of 800MHz band for best results") parser.add_option("-g", "--gain", type="int", default=None, help="set RF gain", metavar="dB") parser.add_option("-b", "--bandwidth", type="eng_float", default=3e6, help="set bandwidth of DBS RX frond end [default=%default]") parser.add_option("-F", "--filename", type="string", default=None, help="read data from filename rather than USRP") parser.add_option("-t", "--tgfile", type="string", default="sf_talkgroups.csv", help="read in CSV-formatted talkgroup list for pretty printing of talkgroup names") parser.add_option("-C", "--chanlistfile", type="string", default="motochan14.csv", help="read in list of Motorola channel frequencies (improves accuracy of frequency decoding) [default=%default]") parser.add_option("-e", "--allowdupes", action="store_false", default=True, help="do not eliminate duplicate records (produces lots of noise)") parser.add_option("-E", "--error", type="eng_float", default=0, help="enter an offset error to compensate for USRP clock inaccuracy") parser.add_option("-u", "--audio", action="store_true", default=False, help="output audio on speaker") parser.add_option("-m", "--monitor", type="int", default=None, help="monitor a specific talkgroup") parser.add_option("-v", "--volume", type="eng_float", default=0.2, help="set volume gain for audio output [default=%default]") parser.add_option("-s", "--squelch", type="eng_float", default=28, help="set audio squelch level (default=%default, play with it)") parser.add_option("-s", "--subdev", type="string", help="UHD subdev spec", default=None) parser.add_option("-A", "--antenna", type="string", default=None, help="select Rx Antenna where appropriate") parser.add_option("-r", "--rate", type="eng_float", default=64e6/18, help="set sample rate [default=%default]") parser.add_option("-a", "--addr", type="string", default="", help="address options to pass to UHD") 
#receive_path.add_options(parser, expert_grp) (options, args) = parser.parse_args () if len(args) != 0: parser.print_help(sys.stderr) sys.exit(1) if options.tgfile is not None: tgreader=csv.DictReader(open(options.tgfile), quotechar='"') shorttglist = {"0": 0} longtglist = {"0": 0} for record in tgreader: # print record['tgnum'] shorttglist[record['tgnum']] = record['shortname'] longtglist[record['tgnum']] = record['longname'] else: shorttglist = None longtglist = None if options.chanlistfile is not None: clreader=csv.DictReader(open(options.chanlistfile), quotechar='"') chanlist={"0": 0} for record in clreader: chanlist[record['channel']] = record['frequency'] else: chanlist = None # build the graph queue = gr.msg_queue(10) tb = my_top_block(options, queue) runner = top_block_runner(tb) global dupes dupes = {0: 0} global lastmsg lastmsg = {"command": 0x0000, "address": 0x0000} global lastlastmsg lastlastmsg = lastmsg currentoffset = 0 updaterate = 10 #tb.setvolume(options.volume) #tb.mute() try: while 1: if not queue.empty_p(): msg = queue.delete_head() # Blocking read sentence = msg.to_string() s = parse(sentence, shorttglist, longtglist, chanlist, options.allowdupes) if s is not None: print s if options.audio: [newfreq, newaddr] = parsefreq(sentence, chanlist) if newfreq == currentoffset and newaddr != (options.monitor & 0xFFF0): tb.mute() if newaddr == (options.monitor & 0xFFF0): #the mask is to allow listening to all "flags" within a talkgroup: emergency, broadcast, etc. tb.unmute(options.volume) if newfreq is not None and newfreq != currentoffset: print "Changing freq to %f" % newfreq currentoffset = newfreq tb.tuneoffset(newfreq, options.centerfreq) elif runner.done: break else: time.sleep(1.0/updaterate) # tb.run() except KeyboardInterrupt: tb.stop() runner = None<|fim▁hole|><|fim▁end|>
if __name__ == '__main__': main()